@Override
  public Document vectorize(int gID, Graph g, Map<Integer, String> gHash) throws ParseException {
    Document gDoc = new Document();

    if (g.getEdgeCount() == 0) return gDoc;
    // Store the serialized graph so it can be retrieved from the index; the raw string
    // itself is not indexed.
    String graphString = gParser.serialize(g);
    Field stringField = new Field("gString", graphString, Field.Store.YES, Field.Index.NO);
    gDoc.add(stringField);
    // Store the graph ID as a retrievable, unindexed field.
    Field idField = new Field("gID", Integer.toString(gID), Field.Store.YES, Field.Index.NO);
    gDoc.add(idField);

    // Enumerate the indexed subgraph features (feature IDs) contained in g.
    List<Integer> allIDs = searcher.subgraphs(g, new SearchStatus());

    // 0. Always add a sentinel "-1" term to the subGraphs field so that pure MUST_NOT
    //    queries still have a term to match against.
    gDoc.add(new Field("subGraphs", "-1", Field.Store.NO, Field.Index.NOT_ANALYZED));

    // If no subgraph features were found, the document carries only the sentinel term.
    if (allIDs == null || allIDs.isEmpty()) return gDoc;

    // 1. Index each contained subgraph feature as its own exact term: the raw feature ID,
    //    or its hashed label when a hash map is provided.
    Collections.sort(allIDs);
    for (int id : allIDs) {
      String term = (gHash == null) ? Integer.toString(id) : gHash.get(id);
      gDoc.add(new Field("subGraphs", term, Field.Store.NO, Field.Index.NOT_ANALYZED));
    }
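    // Note (assumption): since the subGraphs terms are indexed NOT_ANALYZED, a later
    // containment search is expected to match them with exact term queries on the
    // "subGraphs" field; the "-1" sentinel above allows purely negated (MUST_NOT) queries.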
    return gDoc;
  }
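  /**
   * Writes each graph in {@code graphs} to {@code fileName}, one serialized record per line,
   * and then writes a companion {@code fileName + "_Meta"} file containing the processing
   * date, the number of graphs, and the average edge and node counts.
   */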
  public static void saveGDB(Graph[] graphs, GraphParser gParser, String fileName)
      throws IOException {
    // Write each graph to the database file, one record per line, while accumulating
    // edge and node counts for the metadata file.
    BufferedWriter dbWriter = new BufferedWriter(new FileWriter(fileName));
    int count = 0;
    float edgeNum = 0, nodeNum = 0;
    for (Graph theGraph : graphs) {
      dbWriter.write(count++ + " => " + gParser.serialize(theGraph) + "\n");
      edgeNum += theGraph.getEdgeCount();
      nodeNum += theGraph.getNodeCount();
    }
    dbWriter.close();
    // Suggest a garbage collection to reclaim memory used while serializing the graphs.
    Runtime r = Runtime.getRuntime();
    r.gc();
    // Write the metadata companion file for the new database file.
    BufferedWriter metaWriter = new BufferedWriter(new FileWriter(fileName + "_Meta"));
    // 1. Processing date
    SimpleDateFormat bartDateFormat = new SimpleDateFormat("EEEE-MMMM-dd-yyyy");
    Date date = new Date();
    metaWriter.write(bartDateFormat.format(date));
    metaWriter.newLine();
    // 2. Number of graphs in this file
    metaWriter.write("Number of Graphs:" + count);
    metaWriter.newLine();
    // 3. Average edge and node counts
    metaWriter.write(
        "Average EdgeNum: " + edgeNum / count + ", Average NodeNum: " + nodeNum / count);
    // Close the metadata file
    try {
      metaWriter.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
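  // Usage sketch (assumption, not part of this file): the Document built by vectorize(...) is
  // meant to be added to a Lucene index, while saveGDB(...) persists the raw graphs next to it:
  //
  //   IndexWriter writer = ...;  // opened on the index directory with a suitable Analyzer
  //   for (int i = 0; i < graphs.length; i++) {
  //     writer.addDocument(vectorizer.vectorize(i, graphs[i], gHash));
  //   }
  //   writer.close();
  //   saveGDB(graphs, gParser, dbFileName);  // dbFileName is a hypothetical path
  //
  // "writer", "vectorizer", "gHash", and "dbFileName" are placeholders; only vectorize(...)
  // and saveGDB(...) are defined above.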