Example #1
 // Flattens an AssociationEntry (a network address allocation) into a field-name -> string-value map.
 private static Map<String, String> nodeAllocationToMap(AssociationEntry entry) {
   final HashMap<String, String> map = new HashMap<String, String>(8);
   map.put(FIELD_HOSTNAME, entry.getHostname());
   map.put(FIELD_IP, entry.getIpAddress());
   map.put(FIELD_MAC, entry.getMac());
   map.put(FIELD_BROADCAST, entry.getBroadcast());
   map.put(FIELD_SUBNET_MASK, entry.getSubnetMask());
   map.put(FIELD_GATEWAY, entry.getGateway());
   map.put(FIELD_IN_USE, Boolean.toString(entry.isInUse()));
   map.put(FIELD_EXPLICIT_MAC, Boolean.toString(entry.isExplicitMac()));
   return map;
 }
Example #2
 // Flattens a VmmNode into a field-name -> string-value map.
 private static Map<String, String> nodeToMap(VmmNode node) {
   final HashMap<String, String> map = new HashMap<String, String>(7);
   map.put(FIELD_HOSTNAME, node.getHostname());
   map.put(FIELD_POOL, node.getPoolName());
   map.put(FIELD_MEMORY, String.valueOf(node.getMemory()));
   map.put(FIELD_MEM_REMAIN, String.valueOf(node.getMemRemain()));
   map.put(FIELD_NETWORKS, node.getNetworkAssociations());
   map.put(FIELD_IN_USE, String.valueOf(!node.isVacant()));
   map.put(FIELD_ACTIVE, String.valueOf(node.isActive()));
   return map;
 }
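
A minimal usage sketch, not from the source: one way the flat maps returned by nodeAllocationToMap and nodeToMap might be consumed. printRecord is a hypothetical helper and assumes java.util.Map and java.io.IOException are imported.

 // Hypothetical consumer (illustration only): writes one field map as key=value lines.
 private static void printRecord(Map<String, String> record, Appendable out) throws IOException {
   for (Map.Entry<String, String> field : record.entrySet()) {
     out.append(field.getKey())
         .append('=')
         .append(field.getValue() == null ? "" : field.getValue())
         .append('\n');
   }
 }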
Example #3
  public static void main(String[] args) throws Exception {
    String inputFileName = "";
    String inputFileName2 = "";
    String outputFileName = "";
    String solrServerHost = "";
    String keepListFileName = "";
    String filterListFileName = "";
    String searchTerm = "";
    HashMap<String, String> keepGrayList = new HashMap<String, String>();
    HashMap<String, String> filterGrayList = new HashMap<String, String>();
    boolean useAlias = false;

    CommandLineParser parser = new GnuParser();
    Options options = createCLIOptions();
    try {
      CommandLine line = parser.parse(options, args);

      if (line.hasOption("f1")) {
        // get the input file
        inputFileName = line.getOptionValue("f1");
      }
      if (line.hasOption("f2")) {
        inputFileName2 = line.getOptionValue("f2");
      }
      if (line.hasOption("o")) {
        // get the output file
        outputFileName = line.getOptionValue("o");
      }
      if (line.hasOption("s")) {
        // get the server host name
        solrServerHost = line.getOptionValue("s");
      }
      if (line.hasOption("term")) {
        searchTerm = line.getOptionValue("term");
      }
      if (line.hasOption("a")) {
        useAlias = true;
      }
      if (line.hasOption("k")) {
        keepListFileName = line.getOptionValue("k");
      }
      if (line.hasOption("r")) {
        filterListFileName = line.getOptionValue("r");
      }
    } catch (ParseException exp) {
      log.warning("Command line parsing failed.  Reason:" + exp.getMessage());
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("pubcrawl", options);
      System.exit(1);
    }

    if (isEmpty(outputFileName) || (isEmpty(inputFileName) && isEmpty(searchTerm))) {
      // missing required elements, print usage and exit
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("pubcrawl", options);
      System.exit(1);
    }

    if (!isEmpty(keepListFileName)) {
      // need to load the keepList hashmap
      FileReader inputReader = new FileReader(keepListFileName);
      BufferedReader bufReader = new BufferedReader(inputReader);
      String keepTerm = bufReader.readLine();
      while (keepTerm != null) {
        String[] keepInfoArr = keepTerm.trim().split("\t");
        keepGrayList.put(keepInfoArr[0].toLowerCase(), keepInfoArr[1]);
        keepTerm = bufReader.readLine();
      }
      bufReader.close();
    }

    log.info("loading filterlist filename");
    if (!isEmpty(filterListFileName)) {
      // need to load the filterlist hashmap
      FileReader inputReader = new FileReader(filterListFileName);
      BufferedReader bufReader = new BufferedReader(inputReader);
      String filterTerm = bufReader.readLine();
      while (filterTerm != null) {
        String[] filterInfoArr = filterTerm.trim().split("\t");
        filterGrayList.put(filterInfoArr[0].toLowerCase(), filterInfoArr[1]);
        filterTerm = bufReader.readLine();
      }
      bufReader.close();
    }

    SolrServer server = getSolrServer(solrServerHost);

    String logname = outputFileName + "_log.out";
    // create output files
    FileWriter logFileStream = new FileWriter(logname);
    BufferedWriter logFileOut = new BufferedWriter(logFileStream);
    FileWriter dataResultsStream = new FileWriter(outputFileName);
    BufferedWriter dataResultsOut = new BufferedWriter(dataResultsStream);

    final Map<String, Integer> singleCountMap = new HashMap<String, Integer>();
    final List<String> term2List = new ArrayList<String>();

    // load the appropriate list of gene terms: query the database if a second input file wasn't given
    if (isEmpty(inputFileName2)) {
      String sql = "Select term1,count from singletermcount";
      if (useAlias) {
        sql = "Select alias,count from singletermcount_alias";
      }

      JdbcTemplate jdbcTemplate = getJdbcTemplate();
      jdbcTemplate.query(
          sql,
          new ResultSetExtractor() {
            public Object extractData(ResultSet rs) throws SQLException, DataAccessException {
              while (rs.next()) {
                String geneName = rs.getString(1).trim();
                int count = rs.getInt(2);
                singleCountMap.put(geneName.toLowerCase(), count);
                if (count > 0) {
                  term2List.add(geneName.toLowerCase());
                }
              }
              return null;
            }
          });
    } else {
      // a second input file was given: read its lines into term2List and leave singleCountMap empty
      FileReader inputReader2 = new FileReader(inputFileName2);
      BufferedReader bufReader2 = new BufferedReader(inputReader2);
      String searchTerm2 = bufReader2.readLine();
      while (searchTerm2 != null) {
        term2List.add(searchTerm2.trim().toLowerCase());
        searchTerm2 = bufReader2.readLine();
      }
      bufReader2.close();
    }

    Long totalDocCount = getTotalDocCount(server);
    logFileOut.write("Total doc count: " + totalDocCount);
    Pubcrawl p = new Pubcrawl();
    if (isEmpty(inputFileName)) { // entered term option, just have one to calculate
      SearchTermAndList searchTermArray = getTermAndTermList(searchTerm.trim(), useAlias, false);
      Long searchTermCount =
          getTermCount(server, singleCountMap, searchTermArray, filterGrayList, keepGrayList);

      ExecutorService pool = Executors.newFixedThreadPool(32);
      Set<Future<NGDItem>> set = new HashSet<Future<NGDItem>>();
      Date firstTime = new Date();

      for (String secondTerm : term2List) {
        SearchTermAndList secondTermArray = getTermAndTermList(secondTerm, useAlias, false);
        long secondTermCount =
            getTermCount(server, singleCountMap, secondTermArray, filterGrayList, keepGrayList);
        Callable<NGDItem> callable =
            p.new SolrCallable(
                searchTermArray,
                secondTermArray,
                searchTermCount,
                secondTermCount,
                server,
                useAlias,
                filterGrayList,
                keepGrayList,
                totalDocCount);
        Future<NGDItem> future = pool.submit(callable);
        set.add(future);
      }

      for (Future<NGDItem> future : set) {
        dataResultsOut.write(future.get().printItem());
      }

      Date secondTime = new Date();
      logFileOut.write(
          "First set of queries took "
              + (secondTime.getTime() - firstTime.getTime()) / 1000
              + " seconds.\n");
      logFileOut.flush();
      logFileOut.close();
      dataResultsOut.flush();
      dataResultsOut.close();
      pool.shutdown();

    } else {

      FileReader inputReader = new FileReader(inputFileName);
      BufferedReader bufReader = new BufferedReader(inputReader);
      String fileSearchTerm = bufReader.readLine();
      SearchTermAndList searchTermArray = getTermAndTermList(fileSearchTerm, useAlias, false);
      Long searchTermCount =
          getTermCount(server, singleCountMap, searchTermArray, filterGrayList, keepGrayList);

      // process the first search term on its own, in case the server is still building its caches
      ExecutorService pool = Executors.newFixedThreadPool(32);
      List<Future<NGDItem>> set = new ArrayList<Future<NGDItem>>();
      long firstTime = currentTimeMillis();
      int count = 0;

      for (String secondTerm : term2List) {
        count++;
        SearchTermAndList secondTermArray = getTermAndTermList(secondTerm, useAlias, false);
        long secondTermCount =
            getTermCount(server, singleCountMap, secondTermArray, filterGrayList, keepGrayList);
        Callable<NGDItem> callable =
            p.new SolrCallable(
                searchTermArray,
                secondTermArray,
                searchTermCount,
                secondTermCount,
                server,
                useAlias,
                filterGrayList,
                keepGrayList,
                totalDocCount);
        Future<NGDItem> future = pool.submit(callable);
        set.add(future);

        if (count > 5000) {
          for (Future<NGDItem> futureItem : set) {
            dataResultsOut.write(futureItem.get().printItem());
          }
          count = 0;
          set.clear();
        }
      }

      for (Future<NGDItem> future : set) {
        dataResultsOut.write(future.get().printItem());
      }

      long secondTime = currentTimeMillis();
      logFileOut.write(
          "First set of queries took " + (secondTime - firstTime) / 1000 + " seconds.\n");
      logFileOut.flush();
      set.clear();

      // shut down the warm-up pool before creating the pool for the main loop
      pool.shutdown();
      pool = Executors.newFixedThreadPool(32);
      fileSearchTerm = bufReader.readLine();
      count = 0;
      while (fileSearchTerm != null) {
        searchTermArray = getTermAndTermList(fileSearchTerm, useAlias, false);
        searchTermCount =
            getTermCount(server, singleCountMap, searchTermArray, filterGrayList, keepGrayList);
        secondTime = currentTimeMillis();
        for (String secondTerm : term2List) {
          SearchTermAndList secondTermArray = getTermAndTermList(secondTerm, useAlias, false);
          long secondTermCount =
              getTermCount(server, singleCountMap, secondTermArray, filterGrayList, keepGrayList);
          Callable<NGDItem> callable =
              p.new SolrCallable(
                  searchTermArray,
                  secondTermArray,
                  searchTermCount,
                  secondTermCount,
                  server,
                  useAlias,
                  filterGrayList,
                  keepGrayList,
                  totalDocCount);
          Future<NGDItem> future = pool.submit(callable);
          set.add(future);
          count++;
          if (count > 5000) {
            for (Future<NGDItem> futureItem : set) {
              dataResultsOut.write(futureItem.get().printItem());
            }
            count = 0;
            set.clear();
          }
        }

        for (Future<NGDItem> future : set) {
          dataResultsOut.write(future.get().printItem());
        }

        logFileOut.write("Query took " + (currentTimeMillis() - secondTime) / 1000 + " seconds.\n");
        logFileOut.flush();
        set.clear();
        fileSearchTerm = bufReader.readLine();
      }

      long fourthTime = currentTimeMillis();
      logFileOut.write("Final time: " + (fourthTime - firstTime) / 1000 + " seconds.\n");
      bufReader.close();
      logFileOut.flush();
      logFileOut.close();
      dataResultsOut.flush();
      dataResultsOut.close();
      pool.shutdown();
    }
    System.exit(0);
  }
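
createCLIOptions() is called above but never shown. Below is a hypothetical sketch of what it might return, inferred only from the flags that main() checks (f1, f2, o, s, term, a, k, r); the descriptions are placeholders, not taken from the source, and org.apache.commons.cli.Options is assumed to be imported.

  private static Options createCLIOptions() {
    // Hypothetical reconstruction: option names mirror the hasOption()/getOptionValue() calls above.
    Options options = new Options();
    options.addOption("f1", true, "primary input file of search terms");
    options.addOption("f2", true, "optional second input file of terms");
    options.addOption("o", true, "output file name");
    options.addOption("s", true, "Solr server host");
    options.addOption("term", true, "single search term (alternative to -f1)");
    options.addOption("a", false, "use gene aliases");
    options.addOption("k", true, "keep (gray) list file, tab-separated");
    options.addOption("r", true, "filter (gray) list file, tab-separated");
    return options;
  }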
Example #4
 // Flattens a NodeReport into a field-name -> string-value map; node-specific fields are null when the report has no node.
 private static Map<String, String> nodeReportToMap(NodeReport nodeReport) {
   final HashMap<String, String> map = new HashMap<String, String>(2);
   map.put(FIELD_HOSTNAME, nodeReport.getHostname());
   map.put(FIELD_RESULT, nodeReport.getState());
   final VmmNode node = nodeReport.getNode();
   if (node == null) {
     map.put(FIELD_POOL, null);
     map.put(FIELD_MEMORY, null);
     map.put(FIELD_NETWORKS, null);
     map.put(FIELD_IN_USE, null);
     map.put(FIELD_ACTIVE, null);
   } else {
     map.put(FIELD_POOL, node.getPoolName());
     map.put(FIELD_MEMORY, String.valueOf(node.getMemory()));
     map.put(FIELD_NETWORKS, node.getNetworkAssociations());
     map.put(FIELD_IN_USE, String.valueOf(!node.isVacant()));
     map.put(FIELD_ACTIVE, String.valueOf(node.isActive()));
   }
   return map;
 }
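
Examples #1, #2, and #4 reference FIELD_* constants that are defined elsewhere in their class. A hypothetical sketch of those declarations follows; the string values are placeholders chosen to match the names and are not taken from the source.

 // Hypothetical field-name constants (values are placeholders, not from the source).
 private static final String FIELD_HOSTNAME = "hostname";
 private static final String FIELD_IP = "ip";
 private static final String FIELD_MAC = "mac";
 private static final String FIELD_BROADCAST = "broadcast";
 private static final String FIELD_SUBNET_MASK = "subnetmask";
 private static final String FIELD_GATEWAY = "gateway";
 private static final String FIELD_IN_USE = "inuse";
 private static final String FIELD_EXPLICIT_MAC = "explicitmac";
 private static final String FIELD_POOL = "pool";
 private static final String FIELD_MEMORY = "memory";
 private static final String FIELD_MEM_REMAIN = "memremain";
 private static final String FIELD_NETWORKS = "networks";
 private static final String FIELD_ACTIVE = "active";
 private static final String FIELD_RESULT = "result";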