Example #1
 /* (non-Javadoc)
  * @see java.lang.Object#hashCode()
  */
 @Override
 public int hashCode() {
   final int prime = 31;
   int result = 1;
   result = prime * result + ((indexFieldMap == null) ? 0 : indexFieldMap.hashCode());
   result =
       prime * result + ((sensorParams == null) ? 0 : Arrays.deepHashCode(sensorParams.keys()));
   return result;
 }
Example #2
 public Keywords findKeywords(User user, Request request) throws ParseException, IOException {
   Path dumpFile = dumpSearchResults(user, request);
   Scanner scanner = new Scanner(dumpFile);
   TObjectIntHashMap<String> words = new TObjectIntHashMap<>();
   while (scanner.hasNextLine()) {
     String line = scanner.nextLine();
     for (String word : StringUtils.tokenize(line, /* Remove stop words */ true)) {
       if (request.query.contains(word)) continue;
        // TObjectIntHashMap returns 0 (not null) for absent keys, so the former null check
        // could never fire; bump the count in a single call instead.
        words.adjustOrPutValue(word, 1, 1);
     }
   }
   PriorityQueue<WordAndCount> pq = createPriorityQueue();
   words.forEachEntry((a, b) -> pq.add(new WordAndCount(a, b)));
   scanner.close();
   Keywords kw = new Keywords();
   WordAndCount[] wc = new WordAndCount[Math.min(request.pageSize, pq.size())];
   for (int i = 0; i < wc.length; i++) wc[i] = pq.poll();
   kw.keywords = wc;
   return kw;
 }
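A note on the counting idiom above: Trove's TObjectIntHashMap returns its no-entry value (0 by default) rather than null for absent keys, so presence must be tested with containsKey, or the count bumped with adjustOrPutValue in one call. A minimal self-contained sketch, assuming Trove 3's package layout; names are illustrative only:

 import gnu.trove.map.hash.TObjectIntHashMap;

 public class CountingSketch {
   public static void main(String[] args) {
     TObjectIntHashMap<String> words = new TObjectIntHashMap<>();

     // explicit presence check: get() alone cannot distinguish "absent" from "count 0"
     String word = "example";
     int cnt = words.containsKey(word) ? words.get(word) + 1 : 1;
     words.put(word, cnt);

     // equivalent one-call idiom: add 1 if present, insert 1 if absent
     words.adjustOrPutValue(word, 1, 1);

     System.out.println(words.get(word)); // 2
   }
 }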
Example #3
 public Map<String, List<WordAndCount>> getHistogram(User user, Request request)
     throws ParseException, IOException {
   Map<String, TObjectIntHashMap<String>> results = new HashMap<>();
   IndexSearcher searcher = getSearcher(request, user);
   TopDocs topDocs = searcher.search(searchService.getQuery(request), request.scanSize);
   for (ScoreDoc sd : topDocs.scoreDocs) {
     Document doc = searcher.doc(sd.doc);
     for (SchemaField r : request.histogramFields) {
       String value = doc.get(r.name());
       TObjectIntHashMap<String> counts = results.get(r.name());
       if (counts == null) {
         counts = new TObjectIntHashMap<>();
         results.put(r.name(), counts);
       }
       counts.adjustOrPutValue(value, 1, 1);
     }
   }
   Map<String, PriorityQueue<WordAndCount>> pqMap = new HashMap<>();
   for (Entry<String, TObjectIntHashMap<String>> e : results.entrySet()) {
     PriorityQueue<WordAndCount> pq = createPriorityQueue();
     pqMap.put(e.getKey(), pq);
     e.getValue().forEachEntry((a, b) -> pq.add(new WordAndCount(a, b)));
   }
   Map<String, List<WordAndCount>> wcMap = new HashMap<>();
   for (Entry<String, PriorityQueue<WordAndCount>> e : pqMap.entrySet()) {
     List<WordAndCount> wc = new ArrayList<>(request.pageSize);
     for (int i = 0; i < request.pageSize; i++) {
       WordAndCount w = e.getValue().poll();
       if (w == null) break;
       wc.add(w);
     }
     wcMap.put(e.getKey(), wc);
   }
   return wcMap;
 }
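Both findKeywords and getHistogram call a createPriorityQueue() helper that is not shown here. Judging by how the queues are drained (poll() is expected to yield the most frequent words first), it presumably builds a PriorityQueue ordered by descending count; a hypothetical sketch, with WordAndCount reduced to a plain word/count pair:

 import java.util.Comparator;
 import java.util.PriorityQueue;

 class WordAndCount {
   final String word;
   final int count;

   WordAndCount(String word, int count) {
     this.word = word;
     this.count = count;
   }
 }

 class PriorityQueueSketch {
   // highest count first, so poll() returns the top keywords in order
   static PriorityQueue<WordAndCount> createPriorityQueue() {
     return new PriorityQueue<>(Comparator.comparingInt((WordAndCount w) -> w.count).reversed());
   }
 }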
Example #4
 @EventHandler
 public void onInventoryClose(InventoryCloseEvent e) {
   if (recipeWindows.containsKey(e.getPlayer().getName())) {
     int id = recipeWindows.remove(e.getPlayer().getName());
     RPGItem item = ItemManager.getItemById(id);
     if (item.recipe == null) {
       item.recipe = new ArrayList<ItemStack>();
     }
     item.recipe.clear();
     for (int y = 0; y < 3; y++) {
       for (int x = 0; x < 3; x++) {
         int i = x + y * 9;
         ItemStack it = e.getInventory().getItem(i);
         item.recipe.add(it);
       }
     }
     item.hasRecipe = true;
     item.resetRecipe(true);
     ItemManager.save(Plugin.plugin);
     ((Player) e.getPlayer()).sendMessage(ChatColor.AQUA + "Recipe set for " + item.getName());
   } else if (useLocaleInv && e.getView() instanceof LocaleInventory) {
     localeInventories.remove(e.getView());
     ((LocaleInventory) e.getView()).getView().close();
   }
 }
Example #5
 public static TIntObjectHashMap<String> getAllWordIds() {
   TObjectIntHashMap<String> wordIds = DataAccess.getAllWordIds();
   TIntObjectHashMap<String> idWords = new TIntObjectHashMap<String>(wordIds.size());
   for (TObjectIntIterator<String> itr = wordIds.iterator(); itr.hasNext(); ) {
     itr.advance();
     idWords.put(itr.value(), itr.key());
   }
   return idWords;
 }
Example #6
 public int get(Text text) throws IOException {
   int res = numbers.get(text);
   if (res == 0) {
     res = current++;
     numbers.put(new Text(text), res);
     value.set(res);
     writer.append(text, value);
   }
   return res;
 }
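The res == 0 test above relies on ids starting at 1, since Trove's get() also returns 0 for keys it has never seen. When 0 must be a legal id, the no-entry value can be chosen explicitly at construction time; a minimal sketch using String keys instead of Hadoop's Text:

 import gnu.trove.map.hash.TObjectIntHashMap;

 class NoEntrySketch {
   public static void main(String[] args) {
     // Trove 3 constructor arguments: initial capacity, load factor, no-entry value
     TObjectIntHashMap<String> numbers = new TObjectIntHashMap<>(1000, 0.5f, -1);
     numbers.put("first", 0); // 0 can now be used as a real id
     System.out.println(numbers.get("first"));   // 0
     System.out.println(numbers.get("missing")); // -1 instead of 0
   }
 }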
Example #7
  /**
   * Looks up the entry in the hash table.
   *
   * @param entry the entry to look up
   * @param isUpdate whether to insert the entry if it is not yet contained
   * @return the id of the entry, or -1 if it is absent and isUpdate is false
   */
  public int lookup(Object entry, boolean isUpdate) {
    if (entry == null)
      throw new IllegalArgumentException("Can't find \"null\" entry in Dictionary");

    int ret = -1;
    if (map.containsKey(entry)) ret = map.get(entry);
    else if (isUpdate) {
      ret = list.size();
      map.put(entry, ret);
      list.add(entry);
    }

    return ret;
  }
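The lookup() contract (return the existing id, append with the next free id only when isUpdate is true, otherwise report -1) is compact enough to exercise in isolation. A self-contained sketch with illustrative field types, not the project's actual classes:

 import java.util.ArrayList;
 import java.util.List;
 import gnu.trove.map.hash.TObjectIntHashMap;

 class LookupSketch {
   private final TObjectIntHashMap<Object> map = new TObjectIntHashMap<>();
   private final List<Object> list = new ArrayList<>();

   int lookup(Object entry, boolean isUpdate) {
     int ret = map.containsKey(entry) ? map.get(entry) : -1;
     if (ret == -1 && isUpdate) {
       ret = list.size();   // next free id
       map.put(entry, ret);
       list.add(entry);
     }
     return ret;
   }

   public static void main(String[] args) {
     LookupSketch dict = new LookupSketch();
     System.out.println(dict.lookup("alpha", true));  // 0: inserted
     System.out.println(dict.lookup("alpha", true));  // 0: already present, not duplicated
     System.out.println(dict.lookup("beta", false));  // -1: query only, nothing inserted
   }
 }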
Example #8
  /** Load input file */
  private void loadInputFile() {

    URI_ID = new TObjectIntHashMap<String>();
    // load uri--id from input file
    TextFileUtils.loadInputURIs(inputFile, URI_ID, false);
    logger.debug("Input URIs loading. " + URI_ID.size() + " URIs loaded.");
  }
Example #9
 public ValidityChecker(int ari, IntVar[] vars) {
   arity = ari;
   sortedvs = new IntVar[arity];
   mapinit = new TObjectIntHashMap<IntVar>(arity);
   position = new int[arity];
   for (int i = 0; i < vars.length; i++) {
     sortedvs[i] = vars[i];
     mapinit.put(vars[i], i);
     position[i] = i;
   }
 }
Example #10
 /**
  * Deserializes the alphabet.
  *
  * @param in the stream to read the alphabet from
  * @throws IOException if reading from the stream fails
  * @throws ClassNotFoundException if a serialized class cannot be resolved
  */
 private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
   int version = in.readInt();
   int size = in.readInt();
   list = new ArrayList(size);
   map = new TObjectIntHashMap(size);
   for (int i = 0; i < size; i++) {
     Object o = in.readObject();
     map.put(o, i);
     list.add(o);
   }
   if (version > 0) instanceId = (VMID) in.readObject();
 }
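readObject above implies a symmetric serializer: a version marker, the size, the entries in index order, and, for versions above 0, the VMID. A hypothetical counterpart meant to sit in the same class (the SERIAL_VERSION constant name is invented here):

  // hypothetical counterpart to the readObject shown above
  private static final int SERIAL_VERSION = 1; // any value > 0 makes readObject expect a VMID

  private void writeObject(ObjectOutputStream out) throws IOException {
    out.writeInt(SERIAL_VERSION);
    out.writeInt(list.size());
    for (Object o : list) {
      out.writeObject(o); // readObject re-adds each entry at the same index
    }
    out.writeObject(instanceId); // the VMID restored when version > 0
  }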
Example #11
 /* (non-Javadoc)
  * @see java.lang.Object#equals(java.lang.Object)
  */
 @Override
 public boolean equals(Object obj) {
   if (this == obj) return true;
   if (obj == null) return false;
   if (getClass() != obj.getClass()) return false;
   HTMSensor<?> other = (HTMSensor<?>) obj;
   if (indexFieldMap == null) {
     if (other.indexFieldMap != null) return false;
   } else if (!indexFieldMap.equals(other.indexFieldMap)) return false;
   if (sensorParams == null) {
     if (other.sensorParams != null) return false;
   } else if (!Arrays.equals(sensorParams.keys(), other.sensorParams.keys())) return false;
   return true;
 }
Example #12
  private static double[] coordinates(
      String tokens[], TObjectIntHashMap<String> colNames, String prefix) {
    StringBuilder builder = new StringBuilder();

    builder.append(tokens[colNames.get(prefix + "_STRA")]);
    builder.append(" ");
    builder.append(tokens[colNames.get(prefix + "_HNR")]);
    builder.append(" ");

    String plz = tokens[colNames.get(prefix + "_PLZ")];
    if (!plz.equalsIgnoreCase("-97")) {
      builder.append(plz);
      builder.append(" ");
    }

    builder.append(tokens[colNames.get(prefix + "_ORT")]);
    builder.append(" ");
    builder.append(tokens[colNames.get(prefix + "_LAND")]);
    builder.append(" ");

    String query = builder.toString().trim();
    Coord c = null;

    if (query.isEmpty()) c = null;
    else {
      LatLng coord = googleLookup.requestCoordinate(builder.toString());
      if (coord != null) c = new Coord(coord.getLng().doubleValue(), coord.getLat().doubleValue());
    }

    if (c == null) {
      logger.warn(String.format("No results for query \"%1$s\".", query));
      return null;
    } else {
      return new double[] {c.getX(), c.getY()};
    }
  }
Example #13
  public TreeMap<Integer, Integer> addStringFeatureVector(
      String[] strFeatures, String label, boolean flagTest) {
    HashSet<String> setFeatures = new HashSet<String>();
    TreeMap<Integer, Integer> vector = new TreeMap<Integer, Integer>();

    for (String feature : strFeatures) {
      setFeatures.add(feature);
    }

    if (setFeatures.size() == 0) return null;

    if (!label.equals(""))
      if (labels.contains(label)) {
        vector.put(labelKey, labels.indexOf(label));
      } else {
        if (!flagTest) {
          labels.add(label);
          vector.put(labelKey, labels.indexOf(label));
        } else {
          // throw new IllegalArgumentException("Label of Testing Data is error!!!");
          return null;
        }
      }

    for (String feature : setFeatures) {
      if (wordlist.contains(feature)) {
        vector.put(wordlist.get(feature), 1);
      } else {
        if (!flagTest) {
          wordlist.put(feature, wordlist.size());
          vector.put(wordlist.get(feature), 1);
        }
      }
    }
    return vector;
  }
Example #14
  public Dictionary(SequenceFile.Reader reader, boolean reverse) throws IOException {
    this.writer = null;
    this.numbers = new TObjectIntHashMap<Text>();
    if (!reverse) {
      this.reverse = null;
    } else {
      this.reverse = new TIntObjectHashMap<Text>();
    }
    Text text;

    while (reader.next(text = new Text(), value)) {
      numbers.put(text, value.get());
      if (reverse) {
        this.reverse.put(value.get(), text);
      }
    }
  }
Example #15
  public DataByteArray exec(Tuple input) throws IOException {
    try {
      Properties prop = UDFContext.getUDFContext().getUDFProperties(this.getClass());
      byte inputType = Byte.parseByte(prop.getProperty(INPUT_TYPE_SIGNATURE));
      byte arrayType = Byte.parseByte(prop.getProperty(COLLECTION_TYPE_SIGNATURE));

      if (arrayType == PigCollection.INT_ARRAY) {
        Tuple t = getTupleToEncode(input, inputType);
        int arr[] = new int[t.size()];
        for (int i = 0; i < t.size(); i++) {
          arr[i] = (Integer) t.get(i);
        }
        return PigCollection.serialize(arr);
      } else if (arrayType == PigCollection.FLOAT_ARRAY) {
        Tuple t = getTupleToEncode(input, inputType);
        float arr[] = new float[t.size()];
        for (int i = 0; i < t.size(); i++) {
          arr[i] = (Float) t.get(i);
        }
        return PigCollection.serialize(arr);
      } else if (arrayType == PigCollection.INT_INT_MAP) {
        DataBag bag = (DataBag) input.get(0);
        TIntIntHashMap map = new TIntIntHashMap((int) bag.size());
        for (Tuple t : bag) {
          map.put((Integer) t.get(0), (Integer) t.get(1));
        }
        return PigCollection.serialize(map);
      } else if (arrayType == PigCollection.INT_FLOAT_MAP) {
        DataBag bag = (DataBag) input.get(0);
        TIntFloatHashMap map = new TIntFloatHashMap((int) bag.size());
        for (Tuple t : bag) {
          map.put((Integer) t.get(0), (Float) t.get(1));
        }
        return PigCollection.serialize(map);
      } else if (arrayType == PigCollection.STRING_INT_MAP) {
        DataBag bag = (DataBag) input.get(0);
        TObjectIntHashMap map = new TObjectIntHashMap((int) bag.size());
        for (Tuple t : bag) {
          map.put((String) t.get(0), (Integer) t.get(1));
        }
        return PigCollection.serialize(map);
      } else if (arrayType == PigCollection.STRING_FLOAT_MAP) {
        DataBag bag = (DataBag) input.get(0);
        TObjectFloatHashMap map = new TObjectFloatHashMap((int) bag.size());
        for (Tuple t : bag) {
          map.put((String) t.get(0), (Float) t.get(1));
        }
        return PigCollection.serialize(map);
      } else if (arrayType == PigCollection.INT_SET) {
        DataBag bag = (DataBag) input.get(0);
        TIntHashSet set = new TIntHashSet((int) bag.size());
        for (Tuple t : bag) {
          set.add((Integer) t.get(0));
        }
        return PigCollection.serialize(set);
      } else if (arrayType == PigCollection.STRING_SET) {
        DataBag bag = (DataBag) input.get(0);
        Set<String> set = new HashSet<String>();
        for (Tuple t : bag) {
          set.add((String) t.get(0));
        }
        return PigCollection.serialize(set);
      } else {
        throw new RuntimeException("Invalid PigCollection type requested");
      }
    } catch (ExecException e) {
      throw new RuntimeException(e);
    }
  }
Example #16
 /** Sorts the variables to speed up the check. */
 public void sortvars() {
   Arrays.sort(sortedvs, this);
   for (int i = 0; i < arity; i++) {
     position[i] = mapinit.get(sortedvs[i]);
   }
 }
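Arrays.sort(sortedvs, this) only compiles because the class itself is the Comparator<IntVar>; sortvars() then records, for each variable's new rank, where that variable originally sat via mapinit. A minimal self-contained sketch of those moving parts under that assumption (the IntVar stand-in and the ordering criterion are illustrative only):

 import java.util.Arrays;
 import java.util.Comparator;
 import gnu.trove.map.hash.TObjectIntHashMap;

 // illustrative stand-in for the solver's variable class
 class IntVar {
   final int value;
   IntVar(int value) { this.value = value; }
 }

 class ValidityCheckerSketch implements Comparator<IntVar> {
   private final IntVar[] sortedvs;
   private final TObjectIntHashMap<IntVar> mapinit;
   private final int[] position;

   ValidityCheckerSketch(IntVar[] vars) {
     sortedvs = Arrays.copyOf(vars, vars.length);
     mapinit = new TObjectIntHashMap<>(vars.length);
     position = new int[vars.length];
     for (int i = 0; i < vars.length; i++) mapinit.put(vars[i], i);
   }

   @Override
   public int compare(IntVar a, IntVar b) {
     return Integer.compare(a.value, b.value); // illustrative ordering only
   }

   void sortvars() {
     Arrays.sort(sortedvs, this);
     // position[rank] = original index of the variable now at that rank
     for (int i = 0; i < sortedvs.length; i++) position[i] = mapinit.get(sortedvs[i]);
   }
 }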
Example #17
  /**
   * @param args args[0] is the tab-separated input file, args[1] the output file to write
   * @throws IOException if reading or writing the files fails
   */
  public static void main(String[] args) throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    /*
     * read header
     */
    String header = reader.readLine();
    TObjectIntHashMap<String> colIndices = new TObjectIntHashMap<String>();
    int idx = 0;
    for (String token : header.split("\t")) {
      colIndices.put(token, idx);
      idx++;
    }
    /*
     * create new header
     */
    BufferedWriter writer = new BufferedWriter(new FileWriter(args[1]));

    StringBuilder builder = new StringBuilder(header.length() + 50);
    builder.append(header);
    builder.append("\t");
    builder.append("x_start");
    builder.append("\t");
    builder.append("y_start");
    builder.append("\t");
    builder.append("x_dest");
    builder.append("\t");
    builder.append("y_dest");

    writer.write(builder.toString());
    writer.newLine();
    /*
     * parse file
     */
    logger.info("Starting geo coding...");
    int lineCount = 0;
    int invalid = 0;
    String line;
    while ((line = reader.readLine()) != null) {
      String[] tokens = line.split("\t");
      /*
       * get coordinates
       */
      try {
        double[] start = coordinates(tokens, colIndices, "S");
        double[] dest = coordinates(tokens, colIndices, "Z");

        /*
         * write new line
         */
        builder = new StringBuilder(line.length() + 50);
        builder.append(line);
        builder.append("\t");

        if (start != null) {
          builder.append(String.valueOf(start[0]));
          builder.append("\t");
          builder.append(String.valueOf(start[1]));
          builder.append("\t");

        } else {
          builder.append("\t");
          builder.append("\t");
        }

        if (dest != null) {
          builder.append(String.valueOf(dest[0]));
          builder.append("\t");
          builder.append(String.valueOf(dest[1]));
        } else {
          builder.append("\t");
        }

        writer.write(builder.toString());
        writer.newLine();
        writer.flush();

        lineCount++;
        if (start == null || dest == null) invalid++;

        if (lineCount % 20 == 0)
          logger.info(
              String.format("Parsed %1$s lines. %2$s addresses not found.", lineCount, invalid));
      } catch (RequestLimitException e) {
        e.printStackTrace();

        writer.close();

        BufferedWriter remainingWriter = new BufferedWriter(new FileWriter(args[1] + ".remaining"));
        remainingWriter.write(header);
        remainingWriter.newLine();

        remainingWriter.write(line);
        remainingWriter.newLine();
        while ((line = reader.readLine()) != null) {
          remainingWriter.write(line);
          remainingWriter.newLine();
        }
        logger.info("Writing remaining file done.");
        System.exit(0);
      }
    }
    writer.close();

    logger.info("Done.");
  }
Example #18
  public void importBukmak(
      List<Bookmark2> xbukmak,
      boolean tumpuk,
      TObjectIntHashMap<Bookmark2> bukmakToRelIdMap,
      TIntLongHashMap labelRelIdToAbsIdMap,
      TIntObjectHashMap<TIntList> bukmak2RelIdToLabelRelIdsMap) {
    SQLiteDatabase db = helper.getWritableDatabase();
    db.beginTransaction();
    try {
      TIntLongHashMap bukmakRelIdToAbsIdMap = new TIntLongHashMap();

      { // write the new bukmak2 entries
        String[] params1 = new String[1];
        String[] params2 = new String[2];
        for (Bookmark2 bukmak : xbukmak) {
          int bukmak2_relId = bukmakToRelIdMap.get(bukmak);

          params2[0] = String.valueOf(bukmak.ari);
          params2[1] = String.valueOf(bukmak.jenis);

          long _id = -1;

          boolean ada = false;
          Cursor cursor =
              db.query(
                  Db.TABEL_Bukmak2,
                  null,
                  Db.Bukmak2.ari + "=? and " + Db.Bukmak2.jenis + "=?",
                  params2,
                  null,
                  null,
                  null); //$NON-NLS-1$ //$NON-NLS-2$
          if (cursor.moveToNext()) {
            ada = true;
            _id = cursor.getLong(cursor.getColumnIndexOrThrow(BaseColumns._ID)); /* [1] */
          }
          cursor.close();

          // --------------------------------- how _id was obtained:
          //  ada  tumpuk:     delete insert     [2]
          //  ada !tumpuk: (nop)                 [1]
          // !ada  tumpuk:            insert     [2]
          // !ada !tumpuk:            insert     [2]

          if (ada && tumpuk) {
            params1[0] = String.valueOf(_id);
            db.delete(Db.TABEL_Bukmak2, "_id=?", params1); // $NON-NLS-1$
            db.delete(
                Db.TABEL_Bukmak2_Label, Db.Bukmak2_Label.bukmak2_id + "=?", params1); // $NON-NLS-1$
          }
          if ((ada && tumpuk) || (!ada)) {
            _id = db.insert(Db.TABEL_Bukmak2, null, bukmak.toContentValues()); /* [2] */
          }

          // map it
          bukmakRelIdToAbsIdMap.put(bukmak2_relId, _id);
        }
      }

      { // now attach the labels
        String where = Db.Bukmak2_Label.bukmak2_id + "=?"; // $NON-NLS-1$
        String[] params = {null};
        ContentValues cv = new ContentValues();

        // nlabel>0  tumpuk:  delete insert
        // nlabel>0 !tumpuk: (nop)
        // nlabel=0  tumpuk:         insert
        // nlabel=0 !tumpuk:         insert

        for (int bukmak2_relId : bukmak2RelIdToLabelRelIdsMap.keys()) {
          TIntList label_relIds = bukmak2RelIdToLabelRelIdsMap.get(bukmak2_relId);

          long bukmak2_id = bukmakRelIdToAbsIdMap.get(bukmak2_relId);

          if (bukmak2_id > 0) {
            params[0] = String.valueOf(bukmak2_id);

            // check how many labels this bukmak2_id already has
            int nlabel = 0;
            Cursor c =
                db.rawQuery(
                    "select count(*) from " + Db.TABEL_Bukmak2_Label + " where " + where,
                    params); //$NON-NLS-1$ //$NON-NLS-2$
            try {
              c.moveToNext();
              nlabel = c.getInt(0);
            } finally {
              c.close();
            }

            if (nlabel > 0 && tumpuk) {
              db.delete(Db.TABEL_Bukmak2_Label, where, params);
            }
            if ((nlabel > 0 && tumpuk) || (!(nlabel > 0))) {
              for (int label_relId : label_relIds.toArray()) {
                long label_id = labelRelIdToAbsIdMap.get(label_relId);
                if (label_id > 0) {
                  cv.put(Db.Bukmak2_Label.bukmak2_id, bukmak2_id);
                  cv.put(Db.Bukmak2_Label.label_id, label_id);
                  db.insert(Db.TABEL_Bukmak2_Label, null, cv);
                } else {
                  Log.w(TAG, "label_id ngaco!: " + label_id); // $NON-NLS-1$
                }
              }
            }
          } else {
            Log.w(TAG, "bukmak2_id ngaco!: " + bukmak2_id); // $NON-NLS-1$
          }
        }
      }

      db.setTransactionSuccessful();
    } finally {
      db.endTransaction();
    }
  }
Example #19
 /**
  * Checks whether the map contains the given item.
  *
  * @param entry the entry to test for
  * @return true if the entry is present
  */
 public boolean contains(Object entry) {
   return map.containsKey(entry);
 }
Example #20
 public FeatureSet() {
   wordlist = new TObjectIntHashMap<String>();
   wordlist.put("NO_USE", 0);
   labels = new ArrayList<String>();
 }
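Reserving index 0 for a "NO_USE" placeholder is presumably deliberate: with Trove's default no-entry value of 0, wordlist.get(unknownWord) also returns 0, so keeping index 0 away from real features prevents absent lookups from colliding with a genuine feature id. A quick illustration of the collision the placeholder avoids:

 import gnu.trove.map.hash.TObjectIntHashMap;

 class NoUseSketch {
   public static void main(String[] args) {
     TObjectIntHashMap<String> wordlist = new TObjectIntHashMap<>();
     wordlist.put("first_feature", 0); // without a placeholder, a real feature takes index 0

     // both calls return 0: an absent word is indistinguishable from "first_feature"
     System.out.println(wordlist.get("first_feature")); // 0
     System.out.println(wordlist.get("never_added"));   // 0 (the no-entry value)
   }
 }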
Example #21
 /** Clears the map and the list. */
 public void clear() {
   map.clear();
   list.clear();
 }
Example #22
  /**
   * Returns the encoded output stream of the underlying {@link Stream}'s encoder.
   *
   * @return the encoded output stream.
   */
  public Stream<int[]> getOutputStream() {
    if (isTerminal()) {
      throw new IllegalStateException("Stream is already \"terminal\" (operated upon or empty)");
    }

    final MultiEncoder encoder = (MultiEncoder) getEncoder();
    if (encoder == null) {
      throw new IllegalStateException(
          "setLocalParameters(Parameters) must be called before calling this method.");
    }

    // Protect outputStream formation and the creation of the "fan out" lists; also make sure
    // that no other thread is trying to update the fan out lists
    Stream<int[]> retVal = null;
    try {
      criticalAccessLock.lock();

      final String[] fieldNames = getFieldNames();
      final FieldMetaType[] fieldTypes = getFieldTypes();

      if (outputStream == null) {
        if (indexFieldMap.isEmpty()) {
          for (int i = 0; i < fieldNames.length; i++) {
            indexFieldMap.put(fieldNames[i], i);
          }
        }

        // NOTE: The "inputMap" here is a special local implementation
        //       of the "Map" interface, overridden so that we can access
        //       the keys directly (without hashing). This map is only used
        //       for this use case so it is ok to use this optimization as
        //       a convenience.
        if (inputMap == null) {
          inputMap = new InputMap();
          inputMap.fTypes = fieldTypes;
        }

        final boolean isParallel = delegate.getInputStream().isParallel();

        output = new ArrayList<>();

        outputStream =
            delegate
                .getInputStream()
                .map(
                    l -> {
                      String[] arr = (String[]) l;
                      inputMap.arr = arr;
                      return input(arr, fieldNames, fieldTypes, output, isParallel);
                    });

        mainIterator = outputStream.iterator();
      }

      LinkedList<int[]> l = new LinkedList<int[]>();
      fanOuts.add(l);
      Copy copy = new Copy(l);

      retVal =
          StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(
                  copy, Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE),
              false);

    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      criticalAccessLock.unlock();
    }

    return retVal;
  }
Example #23
 public int getOnly(Text text) {
   return numbers.get(text);
 }
Example #24
  /** Start RDF triple extraction */
  private void run() {

    // get the processor count for multi-threading
    int n_threads = Runtime.getRuntime().availableProcessors();
    n_threads = 4; // overridden with a fixed pool size
    logger.debug("Threads number: " + n_threads);

    ExecutorService executor;
    executor = Executors.newFixedThreadPool(n_threads);

    metadata_counter = new SynchronizedCounter();
    properties_counter = new SynchronizedCounter();
    metadata_index = new TObjectIntHashMap<String>();
    props_index = new TObjectIntHashMap<String>();

    logger.info("Risorse da interrogare: " + num_items);

    try {

      TextFileManager textWriter = null;
      if (outputTextFormat) textWriter = new TextFileManager(textFile);

      ItemFileManager fileManager = new ItemFileManager(metadataFile, ItemFileManager.WRITE);

      for (int i = 0; i < num_items; i++) {

        String uri = (String) URI_ID.keys()[i];

        Runnable worker;

        if (model == null) {
          // create worker thread - extraction from endpoint
          worker =
              new QueryExecutor(
                  uri,
                  URI_ID.get(uri),
                  props_index,
                  graphURI,
                  endpoint,
                  metadata_counter,
                  properties_counter,
                  metadata_index,
                  textWriter,
                  fileManager,
                  depth);
        } else {
          // create worker thread - extraction from tdb local dataset
          worker =
              new QueryExecutor(
                  uri,
                  URI_ID.get(uri),
                  props_index,
                  graphURI,
                  endpoint,
                  metadata_counter,
                  properties_counter,
                  metadata_index,
                  textWriter,
                  fileManager,
                  depth,
                  model);
        }

        executor.execute(worker);
      }

      // This will make the executor accept no new threads
      // and finish all existing threads in the queue
      executor.shutdown();
      // Wait until all threads are finished
      executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

      if (textWriter != null) textWriter.close();

      fileManager.close();

    } catch (Exception e) {
      e.printStackTrace();
    }

    // write metadata index file
    TextFileUtils.writeData(metadataFile + "_index", metadata_index);
    // write properties index file
    TextFileUtils.writeData("props_index", props_index);
  }
Example #25
  public static void main(String[] args) {
    // String file = args[0];

    IntInt2IntHashMap lEr = new IntInt2IntHashMap();

    HashMap<String, TObjectIntHashMap<String>> stat =
        new HashMap<String, TObjectIntHashMap<String>>();

    try {
      HashSet<String> headPat = PatternFileParser.parse(new File("ipa_head_pat.txt"));
      HashSet<String> funcPat = PatternFileParser.parse(new File("ipa_func_pat.txt"));

      CaboCha2Dep pipe = new CaboCha2Dep(System.in);
      JapaneseDependencyTree2CaboCha caboChaOutPipe = new JapaneseDependencyTree2CaboCha();
      caboChaOutPipe.setFormat(CaboChaFormat.OLD);

      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out, "utf-8"));
      while (!pipe.eof()) {
        DependencyTree tree = pipe.pipePerSentence();
        if (tree == null) {
          continue;
        }
        JapaneseDependencyTreeLib.setBunsetsuHead(tree, funcPat, headPat);
        PredicateArgumentStructure[] pasList = tree.getPASList();

        for (int j = 0; j < pasList.length; j++) {
          int predId = pasList[j].getPredicateId();
          String predType = pasList[j].predicateType;
          int[] aIds = pasList[j].getIds();
          String[] aLabels = pasList[j].getLabels();
          for (int k = 0; k < aIds.length; k++) {
            DepType dt = getDepType(tree, predId, aIds[k]);
            ArgPositionType apt = getArgPositionType(tree, aIds[k]);
            if (!stat.containsKey(aLabels[k])) {
              stat.put(aLabels[k], new TObjectIntHashMap<String>());
            }
            TObjectIntHashMap<String> inner = stat.get(aLabels[k]);
            if (!inner.containsKey(dt.toString() + ":" + apt.toString())) {
              inner.put(dt.toString() + ":" + apt.toString(), 0);
            }
            inner.increment(dt.toString() + ":" + apt.toString());
            aLabels[k] += ":" + dt + ":" + apt;
          }
        }
        StringBuilder resultStr = new StringBuilder();
        resultStr.append(caboChaOutPipe.pipePerSentence(tree));
        writer.write(resultStr.toString());
      }
      // print statistics
      for (Iterator it = stat.keySet().iterator(); it.hasNext(); ) {
        String key = (String) it.next();
        TObjectIntHashMap inner = stat.get(key);
        for (TObjectIntIterator iit = inner.iterator(); iit.hasNext(); ) {
          iit.advance();
          System.err.print(key + "\t" + iit.key() + "\t" + iit.value() + "\n");
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
Example #26
  @Test
  public void testSimpleAwareness() throws Exception {
    Settings commonSettings =
        ImmutableSettings.settingsBuilder()
            .put("cluster.routing.schedule", "10ms")
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .build();

    logger.info("--> starting 2 nodes on the same rack");
    startNode(
        "node1",
        ImmutableSettings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_1"));
    startNode(
        "node2",
        ImmutableSettings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_1"));

    client("node1").admin().indices().prepareCreate("test1").execute().actionGet();
    client("node1").admin().indices().prepareCreate("test2").execute().actionGet();

    ClusterHealthResponse health =
        client("node1")
            .admin()
            .cluster()
            .prepareHealth()
            .setWaitForEvents(Priority.LANGUID)
            .setWaitForGreenStatus()
            .execute()
            .actionGet();
    assertThat(health.isTimedOut(), equalTo(false));

    logger.info("--> starting 1 node on a different rack");
    startNode(
        "node3",
        ImmutableSettings.settingsBuilder().put(commonSettings).put("node.rack_id", "rack_2"));

    long start = System.currentTimeMillis();
    TObjectIntHashMap<String> counts;
    // On slow machines the initial relocation might be delayed
    do {
      Thread.sleep(100);
      logger.info("--> waiting for no relocation");
      health =
          client("node1")
              .admin()
              .cluster()
              .prepareHealth()
              .setWaitForEvents(Priority.LANGUID)
              .setWaitForGreenStatus()
              .setWaitForNodes("3")
              .setWaitForRelocatingShards(0)
              .execute()
              .actionGet();
      assertThat(health.isTimedOut(), equalTo(false));

      logger.info("--> checking current state");
      ClusterState clusterState =
          client("node1").admin().cluster().prepareState().execute().actionGet().getState();
      // System.out.println(clusterState.routingTable().prettyPrint());
      // verify that we have 10 shards on node3
      counts = new TObjectIntHashMap<String>();
      for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
        for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
          for (ShardRouting shardRouting : indexShardRoutingTable) {
            counts.adjustOrPutValue(
                clusterState.nodes().get(shardRouting.currentNodeId()).name(), 1, 1);
          }
        }
      }
    } while (counts.get("node3") != 10 && (System.currentTimeMillis() - start) < 10000);
    assertThat(counts.get("node3"), equalTo(10));
  }