Example No. 1
  /**
   * Performs a checkpoint: optionally defragments the cache file first, then closes the log,
   * marks the database properties as modified, reopens the cache and any text caches, and
   * reopens the script.
   *
   * @param defrag whether to defragment the cache file before checkpointing
   * @throws SQLException if a database access error occurs
   */
  void checkpoint(boolean defrag) throws SQLException {

    if (defrag) {
      ArrayList rootsArray = cCache.defrag();

      for (int i = 0; i < rootsArray.size(); i++) {
        int[] roots = (int[]) rootsArray.get(i);

        if (roots != null) {
          Trace.printSystemOut(org.hsqldb.lib.StringUtil.getList(roots, " ", ""));
        }
      }

      DataFileDefrag2.updateTableIndexRoots(dDatabase.getTables(), rootsArray);
    }

    close(false);
    pProperties.setProperty("modified", "yes");
    pProperties.save();

    if (cCache != null) {
      cCache.open(false);
    }

    reopenAllTextCaches();
    openScript();
  }
  public static void getTimestampString(StringBuffer sb, long seconds, int nanos, int scale) {

    synchronized (sdfts) {
      tempDate.setTime(seconds * 1000);
      sb.append(sdfts.format(tempDate));

      if (scale > 0) {
        sb.append('.');
        sb.append(StringUtil.toZeroPaddedString(nanos, 9, scale));
      }
    }
  }
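For context, the snippet above reuses a shared SimpleDateFormat (sdfts) and a scratch Date (tempDate), both class fields defined elsewhere, and relies on StringUtil.toZeroPaddedString to keep only the first `scale` digits of the 9-digit nanosecond value. Below is a minimal standalone sketch of the same idea using only JDK classes; the format pattern and class name are assumptions, not part of the original source.

import java.text.SimpleDateFormat;
import java.util.Date;

public class TimestampStringDemo {

  // assumed pattern; the original sdfts field is defined elsewhere in the class
  private static final SimpleDateFormat sdfts = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
  private static final Date tempDate = new Date();

  public static void getTimestampString(StringBuffer sb, long seconds, int nanos, int scale) {

    synchronized (sdfts) {
      tempDate.setTime(seconds * 1000);                  // epoch seconds -> milliseconds
      sb.append(sdfts.format(tempDate));

      if (scale > 0) {
        // keep only the first 'scale' digits of the 9-digit nanosecond field
        String padded = String.format("%09d", nanos).substring(0, scale);
        sb.append('.').append(padded);
      }
    }
  }

  public static void main(String[] args) {
    StringBuffer sb = new StringBuffer();
    getTimestampString(sb, 0L, 123000000, 3);            // appends "....123"
    System.out.println(sb);
  }
}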
  /**
   * Translates null or zero length value for address key to the special value
   * ServerConstants.SC_DEFAULT_ADDRESS which causes ServerSockets to be constructed without
   * specifying an InetAddress.
   *
   * @param p The properties object upon which to perform the translation
   */
  public static void translateAddressProperty(HsqlProperties p) {

    if (p == null) {
      return;
    }

    String address = p.getProperty(SC_KEY_ADDRESS);

    if (StringUtil.isEmpty(address)) {
      p.setProperty(SC_KEY_ADDRESS, SC_DEFAULT_ADDRESS);
    }
  }
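The behaviour above ("null or empty address means use the default") can be illustrated with plain java.util.Properties; the key name and default value below are stand-in assumptions for SC_KEY_ADDRESS and ServerConstants.SC_DEFAULT_ADDRESS.

import java.util.Properties;

public class AddressDefaultDemo {

  static final String KEY_ADDRESS = "server.address";   // stand-in for SC_KEY_ADDRESS
  static final String DEFAULT_ADDRESS = "0.0.0.0";      // stand-in for SC_DEFAULT_ADDRESS

  static void translateAddressProperty(Properties p) {

    if (p == null) {
      return;
    }

    String address = p.getProperty(KEY_ADDRESS);

    if (address == null || address.trim().length() == 0) {
      p.setProperty(KEY_ADDRESS, DEFAULT_ADDRESS);
    }
  }

  public static void main(String[] args) {
    Properties p = new Properties();
    translateAddressProperty(p);                         // no address set -> default applied
    System.out.println(p.getProperty(KEY_ADDRESS));      // prints 0.0.0.0
  }
}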
  HsqlArrayList defrag(Database db, ScaledRAFile sourcenotused, String filename)
      throws IOException, HsqlException {

    Trace.printSystemOut("Defrag Transfer begins");

    HsqlArrayList rootsList = new HsqlArrayList();
    HsqlArrayList tTable = db.getTables();

    // erik        to specify scale;
    ScaledRAFile dest =
        ScaledRAFile.newScaledRAFile(filename + ".new", false, 1, ScaledRAFile.DATA_FILE_RAF);

    // erik        desl.seek(Cache.INITIAL_FREE_POS / cacheFileScale);
    dest.seek(Cache.INITIAL_FREE_POS);

    for (int i = 0, tSize = tTable.size(); i < tSize; i++) {
      Table t = (Table) tTable.get(i);

      if (t.tableType == Table.CACHED_TABLE) {
        int[] rootsArray = writeTableToDataFile(t, dest);

        rootsList.add(rootsArray);
      } else {
        rootsList.add(null);
      }

      Trace.printSystemOut(t.getName().name, " complete");
    }

    // erik        no change
    int pos = (int) dest.getFilePointer();

    // erik        desl.seek(Cache.FREE_POS_POS / cacheFileScale);
    dest.seek(Cache.FREE_POS_POS);
    dest.writeInt(pos);
    dest.close();

    for (int i = 0, size = rootsList.size(); i < size; i++) {
      int[] roots = (int[]) rootsList.get(i);

      if (roots != null) {
        Trace.printSystemOut(org.hsqldb.lib.StringUtil.getList(roots, ",", ""));
      }
    }

    Trace.printSystemOut("Transfer complete: ", stopw.elapsedTime());

    return rootsList;
  }
  /**
   * Retrieves a new HsqlProperties object, if possible, loaded from the specified file.
   *
   * @param path the file's path, without the .properties extension (which is added automatically)
   * @return a new properties object loaded from the specified file
   */
  public static HsqlProperties getPropertiesFromFile(String path) {

    if (StringUtil.isEmpty(path)) {
      return null;
    }

    HsqlProperties p = new HsqlProperties(path);

    try {
      p.load();
    } catch (Exception e) {
      // ignore load failures; the caller receives a (possibly empty) properties object
    }

    return p;
  }
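A standalone analog of the load-or-return-empty behaviour, written against java.util.Properties; the file name, key handling, and try-with-resources plumbing here are illustrative, not the HsqlProperties implementation.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class PropertiesFromFileDemo {

  public static Properties getPropertiesFromFile(String path) {

    if (path == null || path.trim().length() == 0) {
      return null;
    }

    Properties p = new Properties();

    try (FileInputStream in = new FileInputStream(path + ".properties")) {
      p.load(in);
    } catch (IOException e) {
      // ignore load failures; return the (possibly empty) Properties, mirroring the original
    }

    return p;
  }

  public static void main(String[] args) {
    Properties p = getPropertiesFromFile("server");      // reads server.properties if present
    System.out.println(p);
  }
}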
Example No. 6
  /**
   * constructs a new instance of ResultSetParsedSection, interpreting the supplied results as one
   * or more lines of delimited field values
   *
   * @param lines String[]
   */
  protected ResultSetParsedSection(String[] lines) {

    super(lines);

    type = 'r';

    // now we'll populate the expectedResults array
    expectedRows = new String[(resEndRow + 1)];

    for (int i = 0; i <= resEndRow; i++) {
      int skip = StringUtil.skipSpaces(lines[i], 0);

      expectedRows[i] = lines[i].substring(skip);
    }
  }
Example No. 7
  private static String getResourceString(int code) {

    String key = StringUtil.toZeroPaddedString(code, SQL_CODE_DIGITS, SQL_CODE_DIGITS);

    return BundleHandler.getString(bundleHandle, key);
  }
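The lookup above builds a zero-padded bundle key from a numeric error code and resolves it through BundleHandler. A JDK-only illustration of the key construction follows; the digit count and bundle name are assumptions.

public class ResourceKeyDemo {

  static final int SQL_CODE_DIGITS = 5;                   // assumed digit count

  public static void main(String[] args) {
    int code = 1251;                                      // example error code
    String key = String.format("%0" + SQL_CODE_DIGITS + "d", code);

    System.out.println(key);                              // prints 01251

    // In the original, the padded key is then resolved against a resource bundle, roughly:
    // String message = ResourceBundle.getBundle("sql-error-messages").getString(key);
  }
}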
  void process() throws IOException {

    boolean complete = false;

    Error.printSystemOut("Defrag Transfer begins");

    transactionRowLookup = database.txManager.getTransactionIDList();

    HsqlArrayList allTables = database.schemaManager.getAllTables();

    rootsList = new int[allTables.size()][];

    Storage dest = null;

    try {
      OutputStream fos =
          database.logger.getFileAccess().openOutputStreamElement(dataFileName + ".new");

      fileStreamOut = new BufferedOutputStream(fos, 1 << 12);

      for (int i = 0; i < DataFileCache.INITIAL_FREE_POS; i++) {
        fileStreamOut.write(0);
      }

      fileOffset = DataFileCache.INITIAL_FREE_POS;

      for (int i = 0, tSize = allTables.size(); i < tSize; i++) {
        Table t = (Table) allTables.get(i);

        if (t.getTableType() == TableBase.CACHED_TABLE) {
          int[] rootsArray = writeTableToDataFile(t);

          rootsList[i] = rootsArray;
        } else {
          rootsList[i] = null;
        }

        Error.printSystemOut(t.getName().name + " complete");
      }

      writeTransactionRows();
      fileStreamOut.flush();
      fileStreamOut.close();

      fileStreamOut = null;

      // write out the end of file position
      dest =
          ScaledRAFile.newScaledRAFile(
              database,
              dataFileName + ".new",
              false,
              ScaledRAFile.DATA_FILE_RAF,
              database
                  .getURLProperties()
                  .getProperty(HsqlDatabaseProperties.url_storage_class_name),
              database.getURLProperties().getProperty(HsqlDatabaseProperties.url_storage_key));

      dest.seek(DataFileCache.LONG_FREE_POS_POS);
      dest.writeLong(fileOffset);

      // set shadowed flag;
      int flags = 0;

      if (database.logger.propIncrementBackup) {
        flags = BitMap.set(flags, DataFileCache.FLAG_ISSHADOWED);
      }

      flags = BitMap.set(flags, DataFileCache.FLAG_190);
      flags = BitMap.set(flags, DataFileCache.FLAG_ISSAVED);

      dest.seek(DataFileCache.FLAGS_POS);
      dest.writeInt(flags);
      dest.close();

      dest = null;

      for (int i = 0, size = rootsList.length; i < size; i++) {
        int[] roots = rootsList[i];

        if (roots != null) {
          Error.printSystemOut(org.hsqldb.lib.StringUtil.getList(roots, ",", ""));
        }
      }

      complete = true;
    } catch (IOException e) {
      throw Error.error(ErrorCode.FILE_IO_ERROR, dataFileName + ".new");
    } catch (OutOfMemoryError e) {
      throw Error.error(ErrorCode.OUT_OF_MEMORY);
    } finally {
      if (fileStreamOut != null) {
        fileStreamOut.close();
      }

      if (dest != null) {
        dest.close();
      }

      if (!complete) {
        database.logger.getFileAccess().removeElement(dataFileName + ".new");
      }
    }

    // Error.printSystemOut("Transfer complete: ", stopw.elapsedTime());
  }
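The method above streams the new .data file through a BufferedOutputStream and then reopens it with a random-access Storage to patch header fields (end-of-data position and flags) in place. A compact sketch of that write-then-patch pattern using plain JDK classes is given below; the header offsets, header size, and file name are assumptions, not the DataFileCache constants.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;

public class PatchHeaderDemo {

  static final long FREE_POS_POS = 16;   // assumed header offset for the end-of-data position
  static final long FLAGS_POS = 28;      // assumed header offset for the flags field

  public static void main(String[] args) throws IOException {
    String name = "data.new";
    long fileOffset;

    // phase 1: stream the body sequentially, leaving space for the header
    try (BufferedOutputStream out =
             new BufferedOutputStream(new FileOutputStream(name), 1 << 12)) {
      byte[] header = new byte[32];                       // reserve space for the header
      out.write(header);

      byte[] body = "row data".getBytes();
      out.write(body);

      fileOffset = header.length + body.length;
    }

    // phase 2: reopen with random access and patch the header fields in place
    try (RandomAccessFile raf = new RandomAccessFile(name, "rw")) {
      raf.seek(FREE_POS_POS);
      raf.writeLong(fileOffset);                          // record the end-of-data position
      raf.seek(FLAGS_POS);
      raf.writeInt(1);                                    // e.g. a "saved" flag bit
    }
  }
}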
Example No. 9
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // if there is no configuration with pluginid=2, do not continue processing
    if (filterSet.size() <= 0) {
      return;
    }
    // read in one line of the record
    // fields: consume date, channel, game server, device ID, account ID, mac, imei, idfa, type, gain/spend, reason, amount
    String[] paraArr = value.toString().split(MRConstants.SEPERATOR_IN);
    EventLog eventLog = null;
    try {
      // some fields in the log contain newline characters, which breaks the line and leaves it incomplete; use try-catch to filter out this kind of bad log entry
      eventLog = new EventLog(paraArr);
    } catch (Exception e) {
      // TODO do something to mark the error here
      return;
    }
    if (!FieldValidationUtil.validateAppIdLength(eventLog.getAppID())) {
      return;
    }
    // get appID and version; skip this record if the appID is not in the filter set
    String[] appInfo = eventLog.getAppID().split("\\|");

    String appId = appInfo[0];
    String appVersion = appInfo[1];
    if (!filterSet.contains(appId)) {
      return;
    }

    String platform = eventLog.getPlatform();
    String uid = eventLog.getUID();
    String mac = eventLog.getMac();
    mac = StringUtil.isEmpty(mac) ? "-" : mac;
    String imei = eventLog.getImei();
    imei = StringUtil.isEmpty(imei) ? "-" : imei;
    String idfa = eventLog.getIdfa();
    idfa = StringUtil.isEmpty(idfa) ? "-" : idfa;
    String accountId = eventLog.getAccountID();
    String channel = eventLog.getChannel();
    String gameServer = eventLog.getGameServer();

    String coinType = eventLog.getArrtMap().get("coinType");
    coinType = null == coinType ? Constants.DEFAULT_COIN_TYPE : coinType;

    String reason = eventLog.getArrtMap().get("id");
    reason = StringUtil.isEmpty(reason) ? "-" : reason;
    String num = eventLog.getArrtMap().get("num");
    if (StringUtil.isEmpty(num)) {
      return;
    }
    // consume date,, mac, imei, idfa, type, gain/spend, reason, amount
    keyObj.setOutFields(
        new String[] {
          appId,
          platform,
          appVersion,
          channel,
          gameServer,
          uid,
          accountId,
          mac,
          imei,
          idfa,
          Event_Coin_Lost.equals(eventLog.getEventId()) ? "0" : "1",
          coinType
        });
    valueObj.setOutFields(new String[] {reason, num});
    keyObj.setSuffix(Constants.SUFFIX_PLUGIN_CoinReport);
    context.write(keyObj, valueObj);
  }
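The mapper above decides whether to process a record by splitting the appID field on "|" and checking the first part against filterSet. A tiny self-contained illustration of that split-and-filter step (separator, set contents, and sample values are assumptions) is:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class AppIdFilterDemo {

  public static void main(String[] args) {
    Set<String> filterSet = new HashSet<String>(Arrays.asList("1001", "1002"));

    String rawAppId = "1001|2.3.0";                      // appID field: "<appId>|<version>"
    String[] appInfo = rawAppId.split("\\|");

    String appId = appInfo[0];
    String appVersion = appInfo[1];

    if (filterSet.contains(appId)) {
      System.out.println("process app " + appId + ", version " + appVersion);
    }
  }
}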
Example No. 10
  protected boolean test(Statement aStatement) {

    try {
      try {

        // execute the SQL
        aStatement.execute(getSql());
      } catch (SQLException s) {
        throw new Exception("Expected a ResultSet, but got the error: " + s.getMessage());
      }

      // check that update count != -1
      if (aStatement.getUpdateCount() != -1) {
        throw new Exception(
            "Expected a ResultSet, but got an update count of " + aStatement.getUpdateCount());
      }

      // iterate over the ResultSet
      ResultSet results = aStatement.getResultSet();
      int count = 0;

      while (results.next()) {
        if (count < getExpectedRows().length) {

          //                    String[] expectedFields = getExpectedRows()[count].split(delim);
          String[] expectedFields = StringUtil.split(getExpectedRows()[count], delim);

          // check that we have the number of columns expected...
          if (results.getMetaData().getColumnCount() == expectedFields.length) {

            // ...and if so, check that the column values are as expected...
            int j = 0;

            for (int i = 0; i < expectedFields.length; i++) {
              j = i + 1;

              String actual = results.getString(j);

              // ...including null values...
              if (actual == null) { // ...then we have a null

                // ...check to see if we were expecting it...
                if (!expectedFields[i].equalsIgnoreCase("NULL")) {
                  throw new Exception(
                      "Expected row "
                          + count
                          + " of the ResultSet to contain:"
                          + LS
                          + getExpectedRows()[count]
                          + LS
                          + "but field "
                          + j
                          + " contained NULL");
                }
              } else if (!actual.equals(expectedFields[i])) {

                // then the results are different
                throw new Exception(
                    "Expected row "
                        + (count + 1)
                        + " of the ResultSet to contain:"
                        + LS
                        + getExpectedRows()[count]
                        + LS
                        + "but field "
                        + j
                        + " contained "
                        + results.getString(j));
              }
            }
          } else {

            // we have the wrong number of columns
            throw new Exception(
                "Expected the ResultSet to contain "
                    + expectedFields.length
                    + " fields, but it contained "
                    + results.getMetaData().getColumnCount()
                    + " fields.");
          }
        }

        count++;
      }

      // check that we got as many rows as expected
      if (count != getExpectedRows().length) {

        // we don't have the expected number of rows
        throw new Exception(
            "Expected the ResultSet to contain "
                + getExpectedRows().length
                + " rows, but it contained "
                + count
                + " rows.");
      }
    } catch (Exception x) {
      message = x.getMessage();

      return false;
    }

    return true;
  }