Example #1
  /**
   * Loads configuration. It verifies the constraints defined in {@link WireByMethod}.
   *
   * @param prefix the configuration prefix for this class
   */
  public WireByMethod(String prefix) {
    super(prefix);

    // get the method
    try {
      final Class wire =
          Configuration.getClass(
              prefix + "." + PAR_CLASS, Class.forName("peersim.graph.GraphFactory"));
      method =
          WireByMethod.getMethod(wire, Configuration.getString(prefix + "." + PAR_METHOD, "wire"));
    } catch (Exception e) {
      throw new RuntimeException(e);
    }

    // set the constant args (those other than 0th)
    Class[] argt = method.getParameterTypes();
    args = new Object[argt.length];
    for (int i = 1; i < args.length; ++i) {

      if (argt[i] == int.class) args[i] = Configuration.getInt(prefix + "." + PAR_ARG + i);
      else if (argt[i] == long.class) args[i] = Configuration.getLong(prefix + "." + PAR_ARG + i);
      else if (argt[i] == double.class)
        args[i] = Configuration.getDouble(prefix + "." + PAR_ARG + i);
      else if (i == args.length - 1 && argt[i].isInstance(CommonState.r)) args[i] = CommonState.r;
      else {
        // we should never get here
        throw new RuntimeException(
            "Unexpected error, please report this problem to the peersim team");
      }
    }
  }
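
  // Hedged sketch, not part of the original example: the same reflective pattern in plain JDK
  // code, so it can run without peersim. It resolves a class by name, looks up a static method,
  // fills an Object[] with boxed primitives, and invokes it. The class and method used here
  // (java.lang.Math#max) are stand-ins chosen only for illustration.
  public static void invokeStaticByName() throws Exception {
    Class<?> target = Class.forName("java.lang.Math"); // plays the role of the wire class
    java.lang.reflect.Method m = target.getMethod("max", int.class, int.class);
    Object[] callArgs = new Object[m.getParameterTypes().length];
    callArgs[0] = 3; // int arguments are auto-boxed into the Object[]
    callArgs[1] = 7;
    Object result = m.invoke(null, callArgs); // null receiver because the method is static
    System.out.println("max = " + result); // prints "max = 7"
  }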
  public int run(String[] args) throws Exception {
    Configuration argConf = getConf();

    // JobConf conf = new JobConf(diffdb.class);
    Configuration config = HBaseConfiguration.create();
    HBaseAdmin hbAdmin = new HBaseAdmin(config);
    dbutil db_util = new dbutil(config);

    HTable runTable = new HTable(config, "gestore_runs");
    Get runGet = new Get(argConf.get("id").getBytes());
    Result pipeline = runTable.get(runGet);

    NavigableMap<byte[], byte[]> pipeMap = pipeline.getFamilyMap("d".getBytes());

    Map.Entry<byte[], byte[]> results = pipeMap.pollFirstEntry();

    HashMap<String, HashMap<String, String>> resultMap =
        new HashMap<String, HashMap<String, String>>();

    while (results != null) {
      String resultKey = new String(results.getKey());
      String resultValue = new String(results.getValue());
      String field = "type";
      HashMap<String, String> tempMap = new HashMap<String, String>();
      String entry = resultKey;

      if (resultKey.endsWith("_db_timestamp")) {
        field = "db_timestamp";
        entry = resultKey.substring(0, resultKey.lastIndexOf("_db_timestamp"));
      } else if (resultKey.endsWith("_filename")) {
        field = "filename";
        entry = resultKey.substring(0, resultKey.lastIndexOf("_filename"));
      } else if (resultKey.endsWith("_regex")) {
        field = "regex";
        entry = resultKey.substring(0, resultKey.lastIndexOf("_regex"));
      }

      if (resultMap.containsKey(entry)) {
        tempMap = resultMap.get(entry);
      }

      tempMap.put(field, resultValue);
      resultMap.put(entry, tempMap);

      // System.out.println("Key: " + resultKey + " Value: " + resultValue);
      results = pipeMap.pollFirstEntry();
    }

    for (String key : resultMap.keySet()) {
      System.out.println("File ID: " + key);
      for (String subKey : resultMap.get(key).keySet()) {
        // System.out.println("\t " + subKey + "\t\t" + resultMap.get(key).get(subKey));
        System.out.format("  %1$-20s  %2$s\n", subKey, resultMap.get(key).get(subKey));
      }
    }

    return 0;
  }
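
  // Hedged sketch, not part of the original example: the suffix-based grouping performed in the
  // loop above, reproduced over a plain java.util.Map so it can run without an HBase cluster.
  // Keys such as "<fileId>_filename" or "<fileId>_regex" are split into a per-file map keyed by
  // the suffix; anything without a known suffix is stored under "type". Sample keys are made up.
  public static Map<String, Map<String, String>> groupBySuffix(Map<String, String> raw) {
    Map<String, Map<String, String>> grouped = new HashMap<String, Map<String, String>>();
    for (Map.Entry<String, String> e : raw.entrySet()) {
      String key = e.getKey();
      String field = "type";
      String entry = key;
      for (String suffix : new String[] {"_db_timestamp", "_filename", "_regex"}) {
        if (key.endsWith(suffix)) {
          field = suffix.substring(1); // drop the leading underscore
          entry = key.substring(0, key.lastIndexOf(suffix));
          break;
        }
      }
      Map<String, String> perFile = grouped.get(entry);
      if (perFile == null) {
        perFile = new HashMap<String, String>();
        grouped.put(entry, perFile);
      }
      perFile.put(field, e.getValue());
    }
    return grouped;
  }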
Example #3
 /**
  * Construct a new EnumConstantBuilder.
  *
  * @param configuration the current configuration of the doclet.
  * @param classDoc the class whose members are being documented.
  * @param writer the doclet specific writer.
  */
 public static EnumConstantBuilder getInstance(
     Configuration configuration, ClassDoc classDoc, EnumConstantWriter writer) {
   EnumConstantBuilder builder = new EnumConstantBuilder(configuration);
   builder.classDoc = classDoc;
   builder.writer = writer;
   builder.visibleMemberMap =
       new VisibleMemberMap(classDoc, VisibleMemberMap.ENUM_CONSTANTS, configuration.nodeprecated);
   builder.enumConstants =
       new ArrayList<ProgramElementDoc>(builder.visibleMemberMap.getMembersFor(classDoc));
   if (configuration.getMemberComparator() != null) {
     Collections.sort(builder.enumConstants, configuration.getMemberComparator());
   }
   return builder;
 }
Example #4
  public static void toJSON(ConfigurationAdmin admin, Writer osw, String filter) throws Exception {

    Configuration[] list = admin.listConfigurations(filter);
    Encoder encoder = codec.enc().to(osw);

    Protocol p = new Protocol();
    p.version = 1;
    p.date = new Date();
    p.size = list == null ? 0 : list.length;
    encoder.put(p).append('\n');

    if (list != null)
      for (Configuration c : list) {
        Dictionary<String, Object> d = c.getProperties();
        Export export = new Export();
        export.values = new HashMap<String, Object>();
        export.factoryPid = c.getFactoryPid();
        export.pid = c.getPid();

        for (Enumeration<String> e = d.keys(); e.hasMoreElements(); ) {
          String k = e.nextElement();
          Object v = d.get(k);

          if (!(v instanceof String)) {

            if (export.types == null) export.types = new HashMap<String, Type>();

            Type type = new Type();

            Class<?> clazz = v.getClass();
            if (v instanceof Collection) {
              Collection<?> coll = (Collection<?>) v;
              clazz = String.class;
              if (coll.size() > 0) type.vectorOf = shortName(coll.iterator().next().getClass());
              else type.vectorOf = shortName(String.class);
            } else if (v.getClass().isArray()) {
              type.arrayOf = shortName(clazz.getComponentType());
            } else type.scalar = shortName(v.getClass());

            export.types.put(k, type);
          }
          export.values.put(k, v);
        }

        encoder.mark().put(export);
        // encoder.put(encoder.digest());
        encoder.append('\n');
      }
    osw.flush();
  }
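
  // Hedged sketch, not part of the original example: the scalar/array/collection classification
  // used above, reduced to plain JDK calls so it runs without the bnd codec or the Type class.
  // It only reports how a value would be typed; the real code records this in Export.types.
  static String describeValueType(Object v) {
    if (v instanceof String) return "plain string (no type record needed)";
    if (v instanceof Collection) {
      Collection<?> coll = (Collection<?>) v;
      String element =
          coll.isEmpty() ? "String" : coll.iterator().next().getClass().getSimpleName();
      return "vectorOf=" + element;
    }
    if (v.getClass().isArray()) return "arrayOf=" + v.getClass().getComponentType().getSimpleName();
    return "scalar=" + v.getClass().getSimpleName();
  }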
Example #5
 /**
  * Construct a new FieldBuilder.
  *
  * @param configuration the current configuration of the doclet.
  * @param classDoc the class whose members are being documented.
  * @param writer the doclet specific writer.
  */
 public static FieldBuilder getInstance(
     Configuration configuration, ClassDoc classDoc, FieldWriter writer) {
   FieldBuilder builder = new FieldBuilder(configuration);
   builder.classDoc = classDoc;
   builder.writer = writer;
   builder.visibleMemberMap =
       new VisibleMemberMap(classDoc, VisibleMemberMap.FIELDS, configuration.nodeprecated);
   builder.fields =
       new ArrayList<ProgramElementDoc>(
           builder.visibleMemberMap.getLeafClassMembers(configuration));
   if (configuration.getMemberComparator() != null) {
     Collections.sort(builder.fields, configuration.getMemberComparator());
   }
   return builder;
 }
  public String[] getCleanSql() {
    if (cleanSql == null) {
      // loop over all foreign key constraints
      List dropForeignKeysSql = new ArrayList();
      List createForeignKeysSql = new ArrayList();
      Iterator iter = configuration.getTableMappings();
      while (iter.hasNext()) {
        Table table = (Table) iter.next();
        if (table.isPhysicalTable()) {
          Iterator subIter = table.getForeignKeyIterator();
          while (subIter.hasNext()) {
            ForeignKey fk = (ForeignKey) subIter.next();
            if (fk.isPhysicalConstraint()) {
              // collect the drop key constraint
              dropForeignKeysSql.add(
                  fk.sqlDropString(
                      dialect,
                      properties.getProperty(Environment.DEFAULT_CATALOG),
                      properties.getProperty(Environment.DEFAULT_SCHEMA)));
              createForeignKeysSql.add(
                  fk.sqlCreateString(
                      dialect,
                      mapping,
                      properties.getProperty(Environment.DEFAULT_CATALOG),
                      properties.getProperty(Environment.DEFAULT_SCHEMA)));
            }
          }
        }
      }

      List deleteSql = new ArrayList();
      iter = configuration.getTableMappings();
      while (iter.hasNext()) {
        Table table = (Table) iter.next();
        deleteSql.add("delete from " + table.getName());
      }

      List cleanSqlList = new ArrayList();
      cleanSqlList.addAll(dropForeignKeysSql);
      cleanSqlList.addAll(deleteSql);
      cleanSqlList.addAll(createForeignKeysSql);

      cleanSql = (String[]) cleanSqlList.toArray(new String[cleanSqlList.size()]);
    }
    return cleanSql;
  }
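
  // Hedged sketch, not part of the original class: one way the statements returned by
  // getCleanSql() could be executed over plain JDBC. The DataSource parameter is a hypothetical
  // dependency; the surrounding project may run these statements through its own machinery.
  public void executeCleanSql(javax.sql.DataSource dataSource) throws java.sql.SQLException {
    java.sql.Connection con = dataSource.getConnection();
    try {
      java.sql.Statement stmt = con.createStatement();
      try {
        for (String sql : getCleanSql()) {
          stmt.executeUpdate(sql); // drop FKs, delete all rows, then re-create FKs, in that order
        }
      } finally {
        stmt.close();
      }
    } finally {
      con.close();
    }
  }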
Example #7
  public static void fromJSON(Reader r, ConfigurationAdmin admin) throws Exception {

    Decoder decoder = codec.dec().from(r);
    Protocol p = decoder.get(Protocol.class);
    for (int i = 0; i < p.size; i++) {
      Export e = decoder.get(Export.class);
      Configuration c;
      if (e.factoryPid != null) {
        c = admin.createFactoryConfiguration(e.factoryPid, "?*");
      } else {
        c = admin.getConfiguration(e.pid, "?*");
      }

      Dictionary<String, Object> d = getDictionary(e.types, e.values);
      c.update(d);
    }
  }
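
  // Hedged sketch, not part of the original class: a possible round trip through the two helpers
  // above, assuming they live in the same class and a ConfigurationAdmin instance is available
  // (for example injected by the OSGi framework). The LDAP filter below is only an illustration.
  public static void roundTrip(ConfigurationAdmin admin) throws Exception {
    java.io.StringWriter out = new java.io.StringWriter();
    toJSON(admin, out, "(service.pid=*)"); // export every configuration that has a PID
    fromJSON(new java.io.StringReader(out.toString()), admin); // re-import the same records
  }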
Example #8
 private static String getConfigString(Configuration config) {
   StringBuilder output = new StringBuilder();
   for (Map.Entry<String, String> curEntry : config) {
     output.append("Key: \t").append(curEntry.getKey());
     output.append("\nValue: \t").append(curEntry.getValue()).append("\n");
   }
   return output.toString();
 }
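
 // Hedged sketch, not part of the original class: feeding getConfigString a small Hadoop
 // Configuration built in memory. The key/value pairs below are invented for illustration.
 private static void printSampleConfig() {
   Configuration conf = new Configuration(false); // false: do not load the default resources
   conf.set("fs.defaultFS", "hdfs://localhost:9000");
   conf.set("gestore.example.key", "example-value");
   System.out.print(getConfigString(conf));
 }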
 public IdentitySchema(Configuration configuration) {
   this.configuration = configuration;
   this.properties = configuration.getProperties();
   this.dialect = Dialect.getDialect(properties);
   try {
     // get the mapping field via reflection :-(
     Field mappingField = Configuration.class.getDeclaredField("mapping");
     mappingField.setAccessible(true);
     this.mapping = (Mapping) mappingField.get(configuration);
   } catch (Exception e) {
     throw new RuntimeException("couldn't get the hibernate mapping", e);
   }
 }
 public String[] getDropSql() {
   if (dropSql == null) {
     dropSql = configuration.generateDropSchemaScript(dialect);
   }
   return dropSql;
 }
 public String[] getCreateSql() {
   if (createSql == null) {
     createSql = configuration.generateSchemaCreationScript(dialect);
   }
   return createSql;
 }
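
 // Hedged sketch, not part of the original class: driving the schema helper above from a
 // Hibernate 3.x org.hibernate.cfg.Configuration. It assumes a hibernate.cfg.xml with the
 // relevant mappings is on the classpath; the generated statements are simply printed.
 public static void printSchemaScripts() {
   org.hibernate.cfg.Configuration hibernateCfg = new org.hibernate.cfg.Configuration().configure();
   IdentitySchema schema = new IdentitySchema(hibernateCfg);
   for (String sql : schema.getCreateSql()) System.out.println(sql);
   for (String sql : schema.getCleanSql()) System.out.println(sql);
   for (String sql : schema.getDropSql()) System.out.println(sql);
 }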
  /**
   * Test how IPC cache map works.
   *
   * @throws Exception If failed.
   */
  @SuppressWarnings("unchecked")
  public void testIpcCache() throws Exception {
    Field cacheField = GridGgfsHadoopIpcIo.class.getDeclaredField("ipcCache");

    cacheField.setAccessible(true);

    Field activeCntField = GridGgfsHadoopIpcIo.class.getDeclaredField("activeCnt");

    activeCntField.setAccessible(true);

    Map<String, GridGgfsHadoopIpcIo> cache =
        (Map<String, GridGgfsHadoopIpcIo>) cacheField.get(null);

    String name = "ggfs:" + getTestGridName(0) + "@";

    Configuration cfg = new Configuration();

    cfg.addResource(U.resolveGridGainUrl(HADOOP_FS_CFG));
    cfg.setBoolean("fs.ggfs.impl.disable.cache", true);
    cfg.setBoolean(String.format(GridGgfsHadoopUtils.PARAM_GGFS_ENDPOINT_NO_EMBED, name), true);

    // Ensure that existing IO is reused.
    FileSystem fs1 = FileSystem.get(new URI("ggfs://" + name + "/"), cfg);

    assertEquals(1, cache.size());

    GridGgfsHadoopIpcIo io = null;

    System.out.println("CACHE: " + cache);

    for (String key : cache.keySet()) {
      if (key.contains("10500")) {
        io = cache.get(key);

        break;
      }
    }

    assert io != null;

    assertEquals(1, ((AtomicInteger) activeCntField.get(io)).get());

    // Ensure that when IO is used by multiple file systems and one of them is closed, IO is not
    // stopped.
    FileSystem fs2 = FileSystem.get(new URI("ggfs://" + name + "/abc"), cfg);

    assertEquals(1, cache.size());
    assertEquals(2, ((AtomicInteger) activeCntField.get(io)).get());

    fs2.close();

    assertEquals(1, cache.size());
    assertEquals(1, ((AtomicInteger) activeCntField.get(io)).get());

    Field stopField = GridGgfsHadoopIpcIo.class.getDeclaredField("stopping");

    stopField.setAccessible(true);

    assert !(Boolean) stopField.get(io);

    // Ensure that IO is stopped when nobody else needs it.
    fs1.close();

    assert cache.isEmpty();

    assert (Boolean) stopField.get(io);
  }
Example #13
  public int run(String[] args) throws Exception {
    // printUsage();
    /*
     * SETUP
     */
    Configuration argConf = getConf();
    Hashtable<String, String> confArg = new Hashtable<String, String>();
    setup(confArg, argConf);
    Date currentTime = new Date();
    Date endDate = new Date(new Long(confArg.get("timestamp_stop")));
    Boolean full_run = confArg.get("intermediate").matches("(?i).*true.*");
    Boolean quick_add = confArg.get("quick_add").matches("(?i).*true.*");
    logger.info("Running GeStore");

    // ZooKeeper setup
    Configuration config = HBaseConfiguration.create();
    zkWatcher = new ZooKeeperWatcher(config, "Testing", new HBaseAdmin(config));
    zkInstance =
        new ZooKeeper(
            ZKConfig.getZKQuorumServersString(config),
            config.getInt("zookeeper.session.timeout", -1),
            zkWatcher);

    if (!confArg.get("task_id").isEmpty()) {
      confArg.put("temp_path", confArg.get("temp_path") + confArg.get("task_id"));
    }

    String lockRequest = confArg.get("file_id");
    if (!confArg.get("run_id").isEmpty())
      lockRequest = lockRequest + "_" + confArg.get("run_id") + "_";
    if (!confArg.get("task_id").isEmpty())
      lockRequest = lockRequest + "_" + confArg.get("task_id") + "_";

    // Get type of movement
    toFrom type_move = checkArgs(confArg);
    if (type_move == toFrom.LOCAL2REMOTE && !confArg.get("format").equals("unknown")) {
      List<String> arguments = new ArrayList<String>();
      arguments.add("-Dinput=" + confArg.get("local_path"));
      arguments.add("-Dtable=" + confArg.get("file_id"));
      arguments.add("-Dtimestamp=" + confArg.get("timestamp_stop"));
      arguments.add("-Dtype=" + confArg.get("format"));
      arguments.add("-Dtarget_dir=" + confArg.get("base_path") + "_" + confArg.get("file_id"));
      arguments.add("-Dtemp_hdfs_path=" + confArg.get("temp_path"));
      arguments.add("-Drun_id=" + confArg.get("run_id"));
      if (!confArg.get("run_id").isEmpty()) arguments.add("-Drun_id=" + confArg.get("run_id"));
      if (!confArg.get("task_id").isEmpty()) arguments.add("-Dtask_id=" + confArg.get("task_id"));
      if (quick_add) arguments.add("-Dquick_add=" + confArg.get("quick_add"));
      String lockName = lock(lockRequest);
      String[] argumentString = arguments.toArray(new String[arguments.size()]);
      adddb.main(argumentString);
      unlock(lockName);
      System.exit(0);
    }

    // Database registration

    dbutil db_util = new dbutil(config);
    db_util.register_database(confArg.get("db_name_files"), true);
    db_util.register_database(confArg.get("db_name_runs"), true);
    db_util.register_database(confArg.get("db_name_updates"), true);
    FileSystem hdfs = FileSystem.get(config);
    FileSystem localFS = FileSystem.getLocal(config);

    // Get source type
    confArg.put("source", getSource(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
    confArg.put(
        "database", isDatabase(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
    if (!confArg.get("source").equals("local")
        && type_move == toFrom.REMOTE2LOCAL
        && !confArg.get("timestamp_stop").equals(Integer.toString(Integer.MAX_VALUE))) {
      confArg.put("timestamp_stop", Long.toString(latestVersion(confArg, db_util)));
    }

    /*
     * Get previous timestamp
     */
    Get run_id_get = new Get(confArg.get("run_id").getBytes());
    Result run_get = db_util.doGet(confArg.get("db_name_runs"), run_id_get);
    KeyValue run_file_prev =
        run_get.getColumnLatest(
            "d".getBytes(), (confArg.get("file_id") + "_db_timestamp").getBytes());
    String last_timestamp = "0";
    if (null != run_file_prev && !confArg.get("source").equals("local")) {
      long last_timestamp_real = run_file_prev.getTimestamp();
      Long current_timestamp = new Long(confArg.get("timestamp_real"));
      if ((current_timestamp - last_timestamp_real) > 36000) {
        last_timestamp = new String(run_file_prev.getValue());
        Integer lastTimestamp = new Integer(last_timestamp);
        lastTimestamp += 1;
        last_timestamp = lastTimestamp.toString();
        logger.info("Last timestamp: " + last_timestamp + " End data: " + endDate);
        Date last_run = new Date(run_file_prev.getTimestamp());
        if (last_run.before(endDate) && !full_run) {
          confArg.put("timestamp_start", last_timestamp);
        }
      }
    }

    Integer tse = new Integer(confArg.get("timestamp_stop"));
    Integer tss = new Integer(confArg.get("timestamp_start"));
    if (tss > tse) {
      logger.info("No new version of requested file.");
      return 0;
    }

    /*
     * Generate file
     */

    String lockName = lock(lockRequest);

    Get file_id_get = new Get(confArg.get("file_id").getBytes());
    Result file_get = db_util.doGet(confArg.get("db_name_files"), file_id_get);
    if (!file_get.isEmpty()) {
      boolean found =
          hasFile(
              db_util,
              hdfs,
              confArg.get("db_name_files"),
              confArg.get("file_id"),
              getFullPath(confArg));
      if (confArg.get("source").equals("fullfile")) {
        found = false;
      }
      String filenames_put =
          getFileNames(
              db_util, confArg.get("db_name_files"), confArg.get("file_id"), getFullPath(confArg));
      // Filename not found in file database
      if (!found && type_move == toFrom.REMOTE2LOCAL) {
        if (!confArg.get("source").equals("local")) {
          // Generate intermediate file
          if (getFile(hdfs, confArg, db_util) == null) {
            unlock(lockName);
            return 1;
          }
          // Put generated file into file database
          if (!confArg.get("format").equals("fullfile")) {
            putFileEntry(
                db_util,
                hdfs,
                confArg.get("db_name_files"),
                confArg.get("file_id"),
                confArg.get("full_file_name"),
                confArg.get("source"));
          }
        } else {
          logger.warn("Remote file not found, and cannot be generated! File: " + confArg);
          unlock(lockName);
          return 1;
        }
      }
    } else {
      if (type_move == toFrom.REMOTE2LOCAL) {
        logger.warn("Remote file not found, and cannot be generated.");
        unlock(lockName);
        return 1;
      }
    }

    /*
     * Copy file
     * Update tables
     */

    if (type_move == toFrom.LOCAL2REMOTE) {
      if (!confArg.get("format").equals("fullfile")) {
        putFileEntry(
            db_util,
            hdfs,
            confArg.get("db_name_files"),
            confArg.get("file_id"),
            getFullPath(confArg),
            confArg.get("source"));
      }
      putRunEntry(
          db_util,
          confArg.get("db_name_runs"),
          confArg.get("run_id"),
          confArg.get("file_id"),
          confArg.get("type"),
          confArg.get("timestamp_real"),
          confArg.get("timestamp_stop"),
          getFullPath(confArg),
          confArg.get("delimiter"));
      hdfs.copyFromLocalFile(new Path(confArg.get("local_path")), new Path(getFullPath(confArg)));
    } else if (type_move == toFrom.REMOTE2LOCAL) {
      FileStatus[] files = hdfs.globStatus(new Path(getFullPath(confArg) + "*"));
      putRunEntry(
          db_util,
          confArg.get("db_name_runs"),
          confArg.get("run_id"),
          confArg.get("file_id"),
          confArg.get("type"),
          confArg.get("timestamp_real"),
          confArg.get("timestamp_stop"),
          getFullPath(confArg),
          confArg.get("delimiter"));
      unlock(lockName);
      for (FileStatus file : files) {
        Path cur_file = file.getPath();
        Path cur_local_path =
            new Path(new String(confArg.get("local_path") + confArg.get("file_id")));
        String suffix = getSuffix(getFileName(confArg), cur_file.getName());
        if (suffix.length() > 0) {
          cur_local_path = cur_local_path.suffix(new String("." + suffix));
        }
        if (confArg.get("copy").equals("true")) {
          String crc = hdfs.getFileChecksum(cur_file).toString();
          if (checksumLocalTest(cur_local_path, crc)) {
            continue;
          } else {
            hdfs.copyToLocalFile(cur_file, cur_local_path);
            writeChecksum(cur_local_path, crc);
          }
        } else {
          System.out.println(cur_local_path + "\t" + cur_file);
        }
      }
    }
    unlock(lockName);
    return 0;
  }
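
  // Hedged sketch, not part of the original class: how "-Dkey=value" arguments like the ones
  // assembled above are typically consumed on the receiving side. If the invoked class (adddb
  // here) is started through ToolRunner, GenericOptionsParser copies each -D pair into the
  // Configuration before run() is called. The class and property names below are illustrative.
  public static class SketchTool extends Configured implements Tool {
    public int run(String[] remainingArgs) throws Exception {
      Configuration conf = getConf();
      System.out.println("input = " + conf.get("input")); // e.g. supplied as -Dinput=/some/path
      System.out.println("table = " + conf.get("table")); // e.g. supplied as -Dtable=some_file_id
      return 0;
    }

    public static void main(String[] cmdArgs) throws Exception {
      System.exit(ToolRunner.run(new Configuration(), new SketchTool(), cmdArgs));
    }
  }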
Example #14
  /** Sets up the run configuration from the supplied parameters. */
  private static boolean setup(Hashtable<String, String> curConf, Configuration argConf) {

    if (argConf.get("file") == null) {
      logger.fatal("Missing file parameter");
      System.exit(1);
    }

    if (argConf.get("hdfs_base_path") == null) {
      logger.fatal("Missing HDFS base path, check gestore-conf.xml");
      System.exit(1);
    }

    if (argConf.get("hdfs_temp_path") == null) {
      logger.fatal("Missing HDFS temp path, check gestore-conf.xml");
      System.exit(1);
    }

    if (argConf.get("local_temp_path") == null) {
      logger.fatal("Missing local temp path, check gestore-conf.xml");
      System.exit(1);
    }

    // Input parameters
    curConf.put("run_id", argConf.get("run", ""));
    curConf.put("task_id", argConf.get("task", ""));
    curConf.put("file_id", argConf.get("file"));
    curConf.put("local_path", argConf.get("path", ""));
    curConf.put("type", argConf.get("type", "l2r"));
    curConf.put("timestamp_start", argConf.get("timestamp_start", "1"));
    curConf.put(
        "timestamp_stop", argConf.get("timestamp_stop", Integer.toString(Integer.MAX_VALUE)));
    curConf.put("delimiter", argConf.get("regex", "ID=.*"));
    curConf.put("taxon", argConf.get("taxon", "all"));
    curConf.put("intermediate", argConf.get("full_run", "false"));
    curConf.put("quick_add", argConf.get("quick_add", "false"));
    Boolean full_run = curConf.get("intermediate").matches("(?i).*true.*");
    curConf.put("format", argConf.get("format", "unknown"));
    curConf.put("split", argConf.get("split", "1"));
    curConf.put("copy", argConf.get("copy", "true"));

    // Constants
    curConf.put("base_path", argConf.get("hdfs_base_path"));
    curConf.put("temp_path", argConf.get("hdfs_temp_path"));
    curConf.put("local_temp_path", argConf.get("local_temp_path"));
    curConf.put("db_name_files", argConf.get("hbase_file_table"));
    curConf.put("db_name_runs", argConf.get("hbase_run_table"));
    curConf.put("db_name_updates", argConf.get("hbase_db_update_table"));

    // Timestamps
    Date currentTime = new Date();
    Date endDate = new Date(new Long(curConf.get("timestamp_stop")));
    curConf.put("timestamp_real", Long.toString(currentTime.getTime()));

    return true;
  }
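
  // Hedged sketch, not part of the original class: driving setup(...) with an in-memory Hadoop
  // Configuration. Every property value below is invented for illustration; in the real tool
  // they come from the command line and from gestore-conf.xml. The HBase table names must be
  // present because setup(...) copies them into a Hashtable, which rejects null values.
  private static Hashtable<String, String> sampleSetup() {
    Configuration argConf = new Configuration(false);
    argConf.set("file", "example_file_id");
    argConf.set("hdfs_base_path", "/gestore/files/");
    argConf.set("hdfs_temp_path", "/gestore/tmp/");
    argConf.set("local_temp_path", "/tmp/gestore/");
    argConf.set("hbase_file_table", "gestore_files");
    argConf.set("hbase_run_table", "gestore_runs");
    argConf.set("hbase_db_update_table", "gestore_updates");
    Hashtable<String, String> curConf = new Hashtable<String, String>();
    setup(curConf, argConf);
    return curConf;
  }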