Example #1
  // ---------------------------------------------------------------------------
  private void printDependencies() throws TablesawException {
    m_printedDependencies = new HashSet<String>();

    try {
      PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt"));

      pw.println("Targets marked with a * have already been printed");
      // Reduce to the set of root targets to print (targets that no other rule depends on)
      Set<String> ruleNames = new HashSet<String>();

      for (String name : m_nameRuleMap.keySet()) ruleNames.add(name);

      for (String name : m_nameRuleMap.keySet()) {
        Rule rule = m_nameRuleMap.get(name);
        for (String dep : rule.getDependNames()) ruleNames.remove(dep);

        for (Rule dep : rule.getDependRules()) {
          if (dep.getName() != null) ruleNames.remove(dep.getName());
        }
      }

      for (String name : ruleNames) {
        if (!name.startsWith(NAMED_RULE_PREFIX)) printDependencies(name, pw, 0);
      }

      pw.close();
    } catch (IOException ioe) {
      throw new TablesawException("Cannot write to file dependency.txt", -1);
    }
  }
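One detail worth noting in the snippet above: the PrintWriter is closed only on the success path, so an exception thrown while printing would leak the file handle. Below is a minimal, self-contained sketch (not the project's code) of the same write using try-with-resources.

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class DependencyFileSketch {
  public static void main(String[] args) {
    // try-with-resources (Java 7+) closes the writer even if a later println throws,
    // unlike the success-path-only pw.close() above.
    try (PrintWriter pw = new PrintWriter(new FileWriter("dependency.txt"))) {
      pw.println("Targets marked with a * have already been printed");
    } catch (IOException ioe) {
      System.err.println("Cannot write to file dependency.txt");
    }
  }
}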
Example #2
  // FIXME: fails!  needs MarcCombiningReader for mhld or at least a diff version of RawRecordReader
  @Test
  public void testMultMHLDsWithSameID() throws IOException {
    // bib134, multMhlds1
    String bibFilePath = testDataParentPath + File.separator + "mhldMergeBibs134.mrc";
    String mhldFilePath = testDataParentPath + File.separator + "mhldMergeMhlds1Mult.mrc";
    Map<String, Record> mergedRecs =
        MergeSummaryHoldings.mergeMhldsIntoBibRecordsAsMap(bibFilePath, mhldFilePath);

    Record mergedRec = mergedRecs.get("a1");
    assertEquals("Expected three 852", 3, mergedRec.getVariableFields("852").size());
    Set<String> expectedVals = new HashSet<String>();
    expectedVals.add("Location1");
    expectedVals.add("Location2");
    RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "852", 'b', expectedVals);

    expectedVals.clear();
    expectedVals.add("(month)");
    expectedVals.add("(season)");
    RecordTestingUtils.assertSubfieldHasExpectedValues(mergedRec, "853", 'b', expectedVals);

    assertEquals("Expected one 863", 2, mergedRec.getVariableFields("863").size());
    assertEquals("Expected one 866", 1, mergedRec.getVariableFields("866").size());
    // fail("Implement me");
    System.out.println("Test testMultMHLDsWithSameID() successful");
  }
Example #3
  public GeneOrthologyEntry(String line) {
    String[] a = line.split("\\s");
    geneName = a[0];
    s288cCoords = parseCoordsString(a[1]);
    sigmaCoords = parseCoordsString(a[2]);
    try {
      identity = Double.parseDouble(a[3]);
    } catch (NumberFormatException e) {
      identity = null;
    }
    flags = new HashSet<Flag>();
    orfCoords = null;

    if (a.length > 4) {
      String[] fa = a[4].split(",");
      for (int i = 0; i < fa.length; i++) {
        try {
          if (fa[i].length() > 0) {
            flags.add(Flag.valueOf(fa[i]));
          } else {
            flags.add(Flag.UNKNOWN);
          }
        } catch (IllegalArgumentException e) {
          System.err.println(String.format("Unknown FLAG: \"%s\"", fa[i]));
          flags.add(Flag.UNKNOWN);
        }
      }

      if (a.length > 5) {
        orfCoords = parseCoordsString(a[5]);
      }
    }
  }
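The flag-parsing loop above combines Enum.valueOf with a catch-and-default fallback. Here is a self-contained sketch of that idiom with hypothetical flag names (only UNKNOWN appears in the original).

import java.util.HashSet;
import java.util.Set;

public class FlagParseSketch {
  // Hypothetical flag names; only UNKNOWN is taken from the constructor above.
  enum Flag { DELETED, MOVED, UNKNOWN }

  public static void main(String[] args) {
    Set<Flag> flags = new HashSet<Flag>();
    for (String token : "DELETED,,BOGUS".split(",")) {
      try {
        // Empty tokens and names not declared in the enum both fall back to UNKNOWN.
        flags.add(token.length() > 0 ? Flag.valueOf(token) : Flag.UNKNOWN);
      } catch (IllegalArgumentException e) {
        flags.add(Flag.UNKNOWN);
      }
    }
    System.out.println(flags); // e.g. [DELETED, UNKNOWN] (HashSet order is unspecified)
  }
}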
Example #4
 public static void viewTagInfo(String inurl, String tag) throws Exception {
   // pr = new PrintStream(new FileOutputStream("/semplest/lluis/keywordExp/urldata.txt"));
   pr = System.out;
   long start = System.currentTimeMillis();
   pr.println(inurl + "****************************************************************");
   printList(cleanUrlText(TextUtils.HTMLText(inurl, tag)));
   String urls = TextUtils.HTMLLinkString(inurl, inurl);
   String[] url = urls.split("\\s+");
   Set<String> urlMap = new HashSet<String>(url.length);
   urlMap.add(inurl);
   for (String ur : url) {
     if (!urlMap.contains(ur)) {
       pr.println(ur + "***************************************************************");
       try {
         printList(cleanUrlText(TextUtils.HTMLText(ur, tag)));
       } catch (Exception e) {
         System.out.println("Error with url :" + ur);
         e.printStackTrace();
         logger.error("Problem", e);
       }
       urlMap.add(ur);
     }
   }
   pr.println("Time elapsed" + (start - System.currentTimeMillis()));
 }
Example #5
File: Main.java Project: karianna/jdk8_tl
 /**
  * Expands list of files to process into full list of all files that can be found by recursively
  * descending directories.
  */
 void expand(File dir, String[] files, boolean isUpdate) {
   if (files == null) {
     return;
   }
   for (int i = 0; i < files.length; i++) {
     File f;
     if (dir == null) {
       f = new File(files[i]);
     } else {
       f = new File(dir, files[i]);
     }
     if (f.isFile()) {
       if (entries.add(f)) {
         if (isUpdate) entryMap.put(entryName(f.getPath()), f);
       }
     } else if (f.isDirectory()) {
       if (entries.add(f)) {
         if (isUpdate) {
           String dirPath = f.getPath();
           dirPath = (dirPath.endsWith(File.separator)) ? dirPath : (dirPath + File.separator);
           entryMap.put(entryName(dirPath), f);
         }
         expand(f, f.list(), isUpdate);
       }
     } else {
       error(formatMsg("error.nosuch.fileordir", String.valueOf(f)));
       ok = false;
     }
   }
 }
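For reference, the same recursion without the jar-specific entryMap bookkeeping and error reporting, as a self-contained sketch (names and structure here are simplified, not the JDK code).

import java.io.File;
import java.util.HashSet;
import java.util.Set;

public class ExpandSketch {
  // Collects every file and directory reachable from the given names;
  // Set.add doubles as the "already seen" check before recursing.
  static void expand(File dir, String[] names, Set<File> entries) {
    if (names == null) return;
    for (String name : names) {
      File f = (dir == null) ? new File(name) : new File(dir, name);
      if (f.isFile()) {
        entries.add(f);
      } else if (f.isDirectory() && entries.add(f)) {
        expand(f, f.list(), entries);
      }
    }
  }

  public static void main(String[] args) {
    Set<File> entries = new HashSet<File>();
    expand(null, new String[] {"."}, entries);
    System.out.println(entries.size() + " entries found under the current directory");
  }
}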
  /**
   * Compute the set of all IA32 opcodes that have emit methods in the Assembler. This method uses
   * the stylized form of all emit method names in the Assembler to extract the opcode of each one.
   * It returns a set of all such distinct names, as a set of Strings.
   *
   * @param emitters the set of all emit methods in the Assembler
   * @return the set of all opcodes handled by the Assembler
   */
  private static Set<String> getOpcodes(Method[] emitters) {
    Set<String> s = new HashSet<String>();
    for (int i = 0; i < emitters.length; i++) {
      String name = emitters[i].getName();
      if (DEBUG) System.err.println(name);
      if (name.startsWith("emit")) {
        int posOf_ = name.indexOf('_');
        if (posOf_ != -1) {
          String opcode = name.substring(4, posOf_);
          if (!excludedOpcodes.contains(opcode)) {
            s.add(opcode);
          }
        } else {
          String opcode = name.substring(4);
          // make sure it is an opcode
          if (opcode.equals(opcode.toUpperCase(Locale.getDefault()))) {
            if (!excludedOpcodes.contains(opcode)) {
              s.add(opcode);
            }
          }
        }
      }
    }

    return s;
  }
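The naming convention the javadoc describes (emitOPCODE or emitOPCODE_operands) is easy to exercise in isolation. The sketch below uses a hypothetical stand-in class and shows only the name-parsing step, without the excludedOpcodes and upper-case checks.

import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;

public class OpcodeScanSketch {
  // Hypothetical stand-in for the Assembler's stylized emit methods.
  static class FakeAssembler {
    public void emitADD_Reg_Imm(int reg, int imm) {}
    public void emitADD_Reg_Reg(int r1, int r2) {}
    public void emitNOP() {}
  }

  public static void main(String[] args) {
    Set<String> opcodes = new HashSet<String>();
    for (Method m : FakeAssembler.class.getDeclaredMethods()) {
      String name = m.getName();
      if (!name.startsWith("emit")) continue;
      int underscore = name.indexOf('_');
      // Drop the "emit" prefix and everything after the first underscore.
      opcodes.add(underscore == -1 ? name.substring(4) : name.substring(4, underscore));
    }
    System.out.println(opcodes); // e.g. [ADD, NOP]
  }
}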
  private void onFinishWebUITests(ITestContext testContext) {
    // List of test results which we will delete later because of duplication or because the test
    // eventually passed
    List<ITestResult> testsToBeRemoved = new ArrayList<ITestResult>();

    // collect all id's from passed test
    Set<Integer> passedTestIds = new HashSet<Integer>();
    for (ITestResult passedTest : testContext.getPassedTests().getAllResults()) {
      passedTestIds.add(getTestId(passedTest));
    }

    Set<Integer> failedTestIds = new HashSet<Integer>();
    for (ITestResult failedTest : testContext.getFailedTests().getAllResults()) {

      int failedTestId = getTestId(failedTest);
      // if this test failed before, or there is at least one passed version of it,
      // mark it for removal
      if (failedTestIds.contains(failedTestId) || passedTestIds.contains(failedTestId)) {
        testsToBeRemoved.add(failedTest);
      } else {
        failedTestIds.add(failedTestId);
      }
    }
    // finally delete all tests that are marked
    for (Iterator<ITestResult> iterator = testContext.getFailedTests().getAllResults().iterator();
        iterator.hasNext(); ) {
      ITestResult testResult = iterator.next();
      if (testsToBeRemoved.contains(testResult)) {
        iterator.remove();
      }
    }
  }
Example #8
 static {
   Set<EncodingRule> rules = new HashSet<EncodingRule>();
   rules.add(new EncodingRule("*", "%2A"));
   rules.add(new EncodingRule("+", "%20"));
   rules.add(new EncodingRule("%7E", "~"));
   ENCODING_RULES = Collections.unmodifiableSet(rules);
 }
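The EncodingRule class itself is not shown here, so how these rules are applied is an assumption; one common pattern (for example in OAuth-style request signing) is to run URLEncoder first and then apply exactly these three substitutions to get RFC 3986 style output. A sketch under that assumption, with plain String.replace standing in for EncodingRule:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class EncodingRuleSketch {
  // Assumption: rules like the ones above post-process URLEncoder output.
  static String encode(String s) throws UnsupportedEncodingException {
    return URLEncoder.encode(s, "UTF-8")
        .replace("*", "%2A")
        .replace("+", "%20")
        .replace("%7E", "~");
  }

  public static void main(String[] args) throws UnsupportedEncodingException {
    System.out.println(encode("a b*~")); // a%20b%2A~
  }
}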
  private static void check(String what, MBeanNotificationInfo[] mbnis) {
    System.out.print(what + ": checking notification info: ");

    if (mbnis.length == 0) {
      System.out.println("NONE (suspicious)");
      suspicious.add(what);
      return;
    }

    // Each MBeanNotificationInfo.getName() should be an existent
    // Java class that is Notification or a subclass of it
    for (int j = 0; j < mbnis.length; j++) {
      String notifClassName = mbnis[j].getName();
      Class notifClass;
      try {
        notifClass = Class.forName(notifClassName);
      } catch (Exception e) {
        System.out.print("FAILED(" + notifClassName + ": " + e + ") ");
        failed.add(what);
        continue;
      }
      if (!Notification.class.isAssignableFrom(notifClass)) {
        System.out.print("FAILED(" + notifClassName + ": not a Notification) ");
        failed.add(what);
        continue;
      }
      System.out.print("OK(" + notifClassName + ") ");
    }
    System.out.println();
  }
Example #10
  private static List<String> calcVCFGenotypeKeys(VariantContext vc) {
    Set<String> keys = new HashSet<String>();

    boolean sawGoodGT = false;
    boolean sawGoodQual = false;
    boolean sawGenotypeFilter = false;
    for (Genotype g : vc.getGenotypes().values()) {
      keys.addAll(g.getAttributes().keySet());
      if (g.isAvailable()) sawGoodGT = true;
      if (g.hasNegLog10PError()) sawGoodQual = true;
      if (g.isFiltered() && g.isCalled()) sawGenotypeFilter = true;
    }

    if (sawGoodQual) keys.add(VCFConstants.GENOTYPE_QUALITY_KEY);

    if (sawGenotypeFilter) keys.add(VCFConstants.GENOTYPE_FILTER_KEY);

    List<String> sortedList = ParsingUtils.sortList(new ArrayList<String>(keys));

    // make sure the GT is first
    if (sawGoodGT) {
      List<String> newList = new ArrayList<String>(sortedList.size() + 1);
      newList.add(VCFConstants.GENOTYPE_KEY);
      newList.addAll(sortedList);
      sortedList = newList;
    }

    return sortedList;
  }
Example #11
  /**
   * Return information about the objects in this Tree.
   *
   * @param t The tree to examine.
   * @return A human-readable String
   */
  public static String toDebugStructureString(Tree t) {
    StringBuilder sb = new StringBuilder();
    String tCl = StringUtils.getShortClassName(t);
    String tfCl = StringUtils.getShortClassName(t.treeFactory());
    String lCl = StringUtils.getShortClassName(t.label());
    String lfCl = StringUtils.getShortClassName(t.label().labelFactory());
    Set<String> otherClasses = new HashSet<String>();
    for (Tree st : t) {
      String stCl = StringUtils.getShortClassName(st);
      String stfCl = StringUtils.getShortClassName(st.treeFactory());
      String slCl = StringUtils.getShortClassName(st.label());
      String slfCl = StringUtils.getShortClassName(st.label().labelFactory());

      if (!tCl.equals(stCl)) {
        otherClasses.add(stCl);
      }
      if (!tfCl.equals(stfCl)) {
        otherClasses.add(stfCl);
      }
      if (!lCl.equals(slCl)) {
        otherClasses.add(slCl);
      }
      if (!lfCl.equals(slfCl)) {
        otherClasses.add(slfCl);
      }
    }
    sb.append("Tree with root of class ").append(tCl).append(" and factory ").append(tfCl);
    sb.append(" with label class ").append(lCl).append(" and factory ").append(lfCl);
    if (!otherClasses.isEmpty()) {
      sb.append(" with the following classes also found within the tree: ").append(otherClasses);
    }
    return sb.toString();
  }
Example #12
File: Main.java Project: karianna/jdk8_tl
  /** Extracts specified entries from JAR file. */
  void extract(InputStream in, String files[]) throws IOException {
    ZipInputStream zis = new ZipInputStream(in);
    ZipEntry e;
    // Set of all directory entries specified in archive.  Disallows
    // null entries.  Disallows all entries if using pre-6.0 behavior.
    Set<ZipEntry> dirs = newDirSet();
    while ((e = zis.getNextEntry()) != null) {
      if (files == null) {
        dirs.add(extractFile(zis, e));
      } else {
        String name = e.getName();
        for (String file : files) {
          if (name.startsWith(file)) {
            dirs.add(extractFile(zis, e));
            break;
          }
        }
      }
    }

    // Update timestamps of directories specified in archive with their
    // timestamps as given in the archive.  We do this after extraction,
    // instead of during, because creating a file in a directory changes
    // that directory's timestamp.
    updateLastModifiedTime(dirs);
  }
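The core loop (getNextEntry until null, with startsWith name matching) can be tried on its own. Below is a minimal sketch that only lists matching entry names instead of extracting them; the archive path and prefix are made up.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class ListZipSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical archive path; prints entries whose names start with a prefix,
    // the same name-matching rule used by extract() above.
    try (InputStream in = new FileInputStream("example.jar")) {
      ZipInputStream zis = new ZipInputStream(in);
      ZipEntry e;
      while ((e = zis.getNextEntry()) != null) {
        if (e.getName().startsWith("META-INF/")) {
          System.out.println(e.getName());
        }
      }
    }
  }
}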
Example #13
  private void readOldState(DataInputStream dis) throws IOException, TeamException {
    int repoSize = dis.readInt();
    boolean version1 = false;
    if (repoSize == STATE_FILE_VERSION_1) {
      version1 = true;
      repoSize = dis.readInt();
    }
    for (int i = 0; i < repoSize; i++) {
      ICVSRepositoryLocation root = KnownRepositories.getInstance().getRepository(dis.readUTF());
      RepositoryRoot repoRoot = getRepositoryRootFor(root);

      // read branch tags associated with this root
      int tagsSize = dis.readInt();
      CVSTag[] branchTags = new CVSTag[tagsSize];
      for (int j = 0; j < tagsSize; j++) {
        String tagName = dis.readUTF();
        int tagType = dis.readInt();
        branchTags[j] = new CVSTag(tagName, tagType);
      }
      // Ignore the branch tags since they are handled differently now
      // addBranchTags(root, branchTags);

      // read the number of projects for this root that have version tags
      int projSize = dis.readInt();
      if (projSize > 0) {
        for (int j = 0; j < projSize; j++) {
          String name = dis.readUTF();
          Set tagSet = new HashSet();
          int numTags = dis.readInt();
          for (int k = 0; k < numTags; k++) {
            tagSet.add(new CVSTag(dis.readUTF(), CVSTag.VERSION));
          }
          CVSTag[] tags = (CVSTag[]) tagSet.toArray(new CVSTag[tagSet.size()]);
          repoRoot.addTags(name, tags);
        }
      }
      // read the auto refresh filenames for this project
      if (version1) {
        try {
          projSize = dis.readInt();
          if (projSize > 0) {
            for (int j = 0; j < projSize; j++) {
              String name = dis.readUTF();
              Set filenames = new HashSet();
              int numFilenames = dis.readInt();
              for (int k = 0; k < numFilenames; k++) {
                filenames.add(name + "/" + dis.readUTF()); // $NON-NLS-1$
              }
              repoRoot.setAutoRefreshFiles(
                  name, (String[]) filenames.toArray(new String[filenames.size()]));
            }
          }
        } catch (EOFException e) {
          // auto refresh files are not persisted, continue and save them next time.
        }
      }
      broadcastRepositoryChange(repoRoot);
    }
  }
Example #14
 static {
   _reachableStoppers.add(HttpServer.class);
   _reachableStoppers.add(AppServer.class);
   _reachableStoppers.add(DBTCP.class);
   _reachableStoppers.add(Mongo.class);
   _reachableStoppers.add(WeakBag.class);
   _reachableStoppers.add(WeakValueMap.class);
 }
  /**
   * Reads query file and parses the queries.
   *
   * @requires queryStream and outStream to be successfully initialized
   * @requires g to be already generated
   * @param queryStream file input stream of the query file
   * @param outStream file output stream of the output file
   * @param g the graph to run queries against
   * @effects writes results to output file
   */
  private static void parseQuery(FileInputStream queryStream, FileOutputStream outStream, Graph g) {

    try {
      BufferedWriter output = new BufferedWriter(new OutputStreamWriter(outStream));
      BufferedReader queryReader = new BufferedReader(new InputStreamReader(queryStream));

      final int COMMAND_INDEX = 0;
      final int U1_INDEX = 1;
      final int U2_INDEX = 2;

      final String QUERY_ENDING = " ?";

      Set<Set<String>> queries = new LinkedHashSet<Set<String>>();

      String line;

      while ((line = queryReader.readLine()) != null) {

        // each query set contains two user id strings and a command
        // this also handles duplicate user ids with different commands
        Set<String> query = new LinkedHashSet<String>();

        // a bit overkill, but this trims the line and collapses any run of whitespace into a
        // single space
        String[] columns = line.trim().replaceAll("\\s+", " ").split(" ");

        // first column is query
        // second column is user 1
        // third column is user 2

        String command = columns[COMMAND_INDEX];
        String id1 = columns[U1_INDEX];
        String id2 = columns[U2_INDEX];
        query.add(id1);
        query.add(id2);
        query.add(command);

        // check if query ends with question mark (with leading space) and query is unique
        if (line.endsWith(QUERY_ENDING) && !queries.contains(query)) {

          queries.add(query);

          Vertex u1 = new Vertex(id1);
          Vertex u2 = new Vertex(id2);

          printResults(output, g, u1, u2, command);
        }
      }
      queryReader.close();
      output.close();
      // return new LinkedHashMap<List<Vertex>, String>(queries);

    } catch (Exception e) { // if something goes wrong
      throw new RuntimeException(e);
    }
  }
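The reason each query is stored as a Set of Strings (rather than, say, a List) appears to be duplicate detection that ignores argument order: two lines naming the same pair of users with the same command produce equal sets. A small sketch of that behavior (the command name here is made up):

import java.util.LinkedHashSet;
import java.util.Set;

public class QueryDedupSketch {
  public static void main(String[] args) {
    Set<Set<String>> queries = new LinkedHashSet<Set<String>>();

    Set<String> q1 = new LinkedHashSet<String>();
    q1.add("u1"); q1.add("u2"); q1.add("commonAncestor");

    Set<String> q2 = new LinkedHashSet<String>();
    q2.add("u2"); q2.add("u1"); q2.add("commonAncestor"); // same elements, different order

    queries.add(q1);
    System.out.println(queries.contains(q2)); // true: Set equality ignores insertion order
  }
}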
 private void computeMatchSet() {
   for (Vertex v : LV) {
     for (Vertex neighb : v.getNeighbors()) {
       if (neighb.getId() != SOURCE_ID && getFlow(v, neighb) > 0) {
         matches.add(v);
         matches.add(neighb);
       }
     }
   }
 }
 private static void processLibrariesAndJpsPlugins(
     final File jarFile,
     final File zipFile,
     final String pluginName,
     final Set<Library> libs,
     Map<Module, String> jpsModules,
     final ProgressIndicator progressIndicator)
     throws IOException {
   if (FileUtil.ensureCanCreateFile(zipFile)) {
     ZipOutputStream zos = null;
     try {
       zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
       addStructure(pluginName, zos);
       addStructure(pluginName + "/" + MIDDLE_LIB_DIR, zos);
       final String entryName = pluginName + JAR_EXTENSION;
       ZipUtil.addFileToZip(
           zos,
           jarFile,
           getZipPath(pluginName, entryName),
           new HashSet<String>(),
           createFilter(progressIndicator, FileTypeManager.getInstance()));
       for (Map.Entry<Module, String> entry : jpsModules.entrySet()) {
         File jpsPluginJar = jarModulesOutput(Collections.singleton(entry.getKey()), null, null);
         ZipUtil.addFileToZip(
             zos, jpsPluginJar, getZipPath(pluginName, entry.getValue()), null, null);
       }
       Set<String> usedJarNames = new HashSet<String>();
       usedJarNames.add(entryName);
       Set<VirtualFile> jarredVirtualFiles = new HashSet<VirtualFile>();
       for (Library library : libs) {
         final VirtualFile[] files = library.getFiles(OrderRootType.CLASSES);
         for (VirtualFile virtualFile : files) {
           if (jarredVirtualFiles.add(virtualFile)) {
             if (virtualFile.getFileSystem() instanceof JarFileSystem) {
               addLibraryJar(
                   virtualFile, zipFile, pluginName, zos, usedJarNames, progressIndicator);
             } else {
               makeAndAddLibraryJar(
                   virtualFile,
                   zipFile,
                   pluginName,
                   zos,
                   usedJarNames,
                   progressIndicator,
                   library.getName());
             }
           }
         }
       }
     } finally {
       if (zos != null) zos.close();
     }
   }
 }
Example #18
  @Ignore
  @Test

  /**
   * Tests ConflictTreeSimulatedSession with unsolvable ontologies; target diagnoses are calculated.
   */
  public void doUnsolvableOAEIConferenceTest()
      throws SolverException, InconsistentTheoryException, OWLOntologyCreationException {
    logger.info("doUnsolveableTest_with_2015_ontologies");

    String matchingsDir = "oaei11conference/matchings/";
    String ontologyDir = "oaei11conference/ontology";

    File[] f = getMappingFiles(matchingsDir, "incoherent", "incoherent_2015.txt");
    File[] f2 = getMappingFiles(matchingsDir, "inconsistent", "inconsistent_2015.txt");

    Set<File> files = new LinkedHashSet<File>();
    Map<File, String> map = new HashMap<File, String>();
    for (File file : f) {
      files.add(file);
      map.put(file, "incoherent");
    }
    for (File file : f2) {
      files.add(file);
      map.put(file, "inconsistent");
    }

    runOaeiConferenceTests(matchingsDir, ontologyDir, files, map);
    /*
    result: 2015-02-26-T-10-58-50
    junit.framework.AssertionFailedError
        at at.ainf.owlapi3.performance.ConflictTreeTest.computeHSShortLog(ConflictTreeTest.java:562)
        at at.ainf.owlapi3.performance.ConflictTreeSession.search(ConflictTreeSession.java:109)
        at at.ainf.owlapi3.performance.ConflictTreeSession.search(ConflictTreeSession.java:69)
        at at.ainf.owlapi3.performance.ConflictTreeTest.runOaeiConferenceTests(ConflictTreeTest.java:315)
        at at.ainf.owlapi3.performance.ConflictTreeTest.doUnsolvableOAEIConferenceTest(ConflictTreeTest.java:174)

        and

    result: 2015-02-26-T-12-13-55
    at.ainf.diagnosis.model.InconsistentTheoryException: Background theory or test cases are inconsistent! Finding conflicts is impossible!
        at at.ainf.diagnosis.quickxplain.BaseQuickXplain.verifyKnowledgeBase(BaseQuickXplain.java:173)
        at at.ainf.diagnosis.quickxplain.MultiQuickXplain.search(MultiQuickXplain.java:107)
        at at.ainf.diagnosis.quickxplain.BaseQuickXplain.search(BaseQuickXplain.java:116)
        at at.ainf.owlapi3.performance.ConflictTreeSession.computeNConflictsAtTime(ConflictTreeSession.java:193)
        at at.ainf.owlapi3.performance.ConflictTreeSession.search(ConflictTreeSession.java:83)
        at at.ainf.owlapi3.performance.ConflictTreeSession.search(ConflictTreeSession.java:69)
        at at.ainf.owlapi3.performance.ConflictTreeTest.runOaeiConferenceTests(ConflictTreeTest.java:353)
        at at.ainf.owlapi3.performance.ConflictTreeTest.doUnsolvableOAEIConferenceTest(ConflictTreeTest.java:219)

    */
  }
Example #19
  @Ignore
  @Test

  /**
   * Tests ConflictTreeSimulatedSession with solvable ontologies from OAEI11 Conference; target
   * diagnoses are calculated.
   */
  public void doOAEIConferenceTest()
      throws SolverException, InconsistentTheoryException, OWLOntologyCreationException {
    logger.info("doOAEIConferenceTest");

    String matchingsDir = "oaei11conference/matchings/";
    String ontologyDir = "oaei11conference/ontology";

    File[] f = getMappingFiles(matchingsDir, "incoherent", "includedIncoher.txt");
    File[] f2 = getMappingFiles(matchingsDir, "inconsistent", "included.txt");

    Set<File> files = new LinkedHashSet<File>();
    Map<File, String> map = new HashMap<File, String>();
    for (File file : f) {
      files.add(file);
      map.put(file, "incoherent");
    }
    for (File file : f2) {
      files.add(file);
      map.put(file, "inconsistent");
    }

    runOaeiConferenceTests(matchingsDir, ontologyDir, files, map);
    /*
    result: 2015-02-26-T-09-29-38
    java.lang.OutOfMemoryError: GC overhead limit exceeded
        at java.util.TreeMap.keyIterator(TreeMap.java:1106)
        at java.util.TreeMap$KeySet.iterator(TreeMap.java:1119)
        at java.util.TreeSet.iterator(TreeSet.java:181)
        at java.util.Collections$UnmodifiableCollection$1.<init>(Collections.java:1039)
        at java.util.Collections$UnmodifiableCollection.iterator(Collections.java:1038)
        at at.ainf.diagnosis.storage.FormulaSetImpl.iterator(FormulaSetImpl.java:123)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.intersectsWith(AbstractTreeSearch.java:712)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.canReuseConflict(AbstractTreeSearch.java:697)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.processNode(AbstractTreeSearch.java:363)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.processOpenNodes(AbstractTreeSearch.java:311)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.searchDiagnoses(AbstractTreeSearch.java:268)
        at at.ainf.diagnosis.tree.AbstractTreeSearch.start(AbstractTreeSearch.java:202)
        at at.ainf.owlapi3.base.OAEI11ConferenceSession.getRandomDiagSet(OAEI11ConferenceSession.java:151)
        at at.ainf.owlapi3.performance.OAEI11ConferenceTests.runOaeiConferenceTests(OAEI11ConferenceTests.java:151)
        at at.ainf.owlapi3.performance.OAEI11ConferenceTests.doTestsOAEIConference(OAEI11ConferenceTests.java:131)
    */

  }
  /** Scans the directory for new files. */
  protected void scanDirectory() {
    if (System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis) {
      Set<Path> newPaths = scanner.scan(fs, filePath, processedFiles);

      for (Path newPath : newPaths) {
        String newPathString = newPath.toString();
        pendingFiles.add(newPathString);
        processedFiles.add(newPathString);
        localProcessedFileCount.increment();
      }

      lastScanMillis = System.currentTimeMillis();
    }
  }
Example #21
  /**
   * Enumerates the resources in a given package name. This works even if the resources are loaded
   * from a jar file!
   *
   * <p>Adapted from code by mikewse on the java.sun.com message boards.
   * http://forum.java.sun.com/thread.jsp?forum=22&thread=30984
   *
   * @param packageName The package to enumerate
   * @return A Set of Strings for each resource in the package.
   */
  public static Set getResoucesInPackage(String packageName) throws IOException {
    String localPackageName;
    if (packageName.endsWith("/")) {
      localPackageName = packageName;
    } else {
      localPackageName = packageName + '/';
    }

    Enumeration dirEnum = ClassLoader.getSystemResources(localPackageName);

    Set names = new HashSet();

    // Loop CLASSPATH directories
    while (dirEnum.hasMoreElements()) {
      URL resUrl = (URL) dirEnum.nextElement();

      // Pointing to filesystem directory
      if (resUrl.getProtocol().equals("file")) {
        File dir = new File(resUrl.getFile());
        File[] files = dir.listFiles();
        if (files != null) {
          for (int i = 0; i < files.length; i++) {
            File file = files[i];
            if (file.isDirectory()) continue;
            names.add(localPackageName + file.getName());
          }
        }

        // Pointing to Jar file
      } else if (resUrl.getProtocol().equals("jar")) {
        JarURLConnection jconn = (JarURLConnection) resUrl.openConnection();
        JarFile jfile = jconn.getJarFile();
        Enumeration entryEnum = jfile.entries();
        while (entryEnum.hasMoreElements()) {
          JarEntry entry = (JarEntry) entryEnum.nextElement();
          String entryName = entry.getName();
          // Exclude our own directory
          if (entryName.equals(localPackageName)) continue;
          String parentDirName = entryName.substring(0, entryName.lastIndexOf('/') + 1);
          if (!parentDirName.equals(localPackageName)) continue;
          names.add(entryName);
        }
      } else {
        // Invalid classpath entry
      }
    }

    return names;
  }
    public void run() {
      super.run();

      // save the last set of active JVMs
      Set lastActiveVms = activeVms;

      try {
        // get the current set of active JVMs
        activeVms = (HashSet) vmManager.activeVms();

      } catch (MonitorException e) {
        // XXX: use logging api
        System.err.println("MonitoredHostProvider: polling task " + "caught MonitorException:");
        e.printStackTrace();

        // mark the HostManager as errored and notify listeners
        setLastException(e);
        fireDisconnectedEvents();
      }

      if (activeVms.isEmpty()) {
        return;
      }

      Set startedVms = new HashSet();
      Set terminatedVms = new HashSet();

      for (Iterator i = activeVms.iterator(); i.hasNext(); /* empty */ ) {
        Integer vmid = (Integer) i.next();
        if (!lastActiveVms.contains(vmid)) {
          // a new file has been detected, add to set
          startedVms.add(vmid);
        }
      }

      for (Iterator i = lastActiveVms.iterator(); i.hasNext();
      /* empty */ ) {
        Object o = i.next();
        if (!activeVms.contains(o)) {
          // JVM has terminated, remove it from the active list
          terminatedVms.add(o);
        }
      }

      if (!startedVms.isEmpty() || !terminatedVms.isEmpty()) {
        fireVmStatusChangedEvents(activeVms, startedVms, terminatedVms);
      }
    }
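The two iterator loops above compute set differences by hand (started = active minus last, terminated = last minus active). An equivalent, shorter formulation uses removeAll; this is a standalone sketch with hypothetical vm ids, not a change to the original polling task.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class VmDiffSketch {
  public static void main(String[] args) {
    // Hypothetical vm ids for the previous and current polling rounds.
    Set<Integer> lastActiveVms = new HashSet<Integer>(Arrays.asList(1, 2, 3));
    Set<Integer> activeVms = new HashSet<Integer>(Arrays.asList(2, 3, 4));

    Set<Integer> startedVms = new HashSet<Integer>(activeVms);
    startedVms.removeAll(lastActiveVms); // present now, absent before

    Set<Integer> terminatedVms = new HashSet<Integer>(lastActiveVms);
    terminatedVms.removeAll(activeVms); // present before, absent now

    System.out.println(startedVms);    // [4]
    System.out.println(terminatedVms); // [1]
  }
}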
  @RequestMapping(value = CLIENT_CONNECT_PATH, method = RequestMethod.POST)
  public @ResponseBody int connectClient(@RequestBody String allVid) {
    try {
      int reply = FAILED;
      String[] videos = allVid.split(",");
      String uName = videos[0].trim();
      videos = java.util.Arrays.copyOfRange(videos, 1, videos.length);
      // System.out.println("Client connect"+hostAdder+" "+uName+" "+ Arrays.asList(videos));
      int ans = masterService.psConnectClient(hostAdder, uName, videos);
      // System.out.println("ans =" +ans +" "+FAILED);
      while (ans == PS_NOT_CONNECTED) {
        reconnectToMS();
        ans = masterService.psConnectClient(hostAdder, uName, videos);
      }
      if (ans == FAILED) return FAILED;
      // System.out.println("Clinet "+ uName + " connected");

      if (user_vidNameMap.containsKey(uName)) {
        reply = CLIENT_ALREADY_CONNECTED;
      } else {
        reply = CLIENT_CONNECTED;
        user_vidNameMap.put(uName, new HashSet<String>());
      }
      // System.out.println("Clinet "+ uName + " connected");

      Set<String> vidSet = user_vidNameMap.get(uName);
      for (int i = 0; i < videos.length; i++) {
        String temp = videos[i].trim();
        // System.out.println("add video");

        if (!temp.equals("")) {
          vidSet.add(temp);
          addTovidName_UserMap(uName, temp);
        }
      }
      // System.out.println("Clinet "+ uName + " connected");

      userAliveMap.put(uName, new Long(System.currentTimeMillis() + TTL));
      // System.out.println("Clinet "+ uName + " connected");

      activeUsers.add(uName);
      System.out.println("Clinet " + uName + " connected");
      return reply;
    } catch (Exception e) {
      System.out.println("Error: " + e.getMessage());
      return FAILED;
    }
  }
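For the map-of-sets population in the handler above (user_vidNameMap plus the per-user HashSet), Java 8's computeIfAbsent is a common alternative to the containsKey/put/get sequence. A sketch with made-up user and video names; note it does not by itself distinguish the CLIENT_CONNECTED and CLIENT_ALREADY_CONNECTED replies.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class VideoIndexSketch {
  public static void main(String[] args) {
    Map<String, Set<String>> userVideos = new HashMap<String, Set<String>>();

    // computeIfAbsent creates the per-user set on first use, then adds to it.
    for (String video : new String[] {"v1", "v2", "v1"}) {
      userVideos.computeIfAbsent("alice", k -> new HashSet<String>()).add(video);
    }
    System.out.println(userVideos); // {alice=[v1, v2]}
  }
}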
Example #24
  private static PencilPosition findShortestRoute(int[][] maze) {
    // all found solutions to the maze
    PriorityQueue<PencilPosition> solutions =
        new PriorityQueue<PencilPosition>(5, new PencilPositionComparator());
    // breadth-first search queue
    Queue<PencilPosition> routes = new LinkedList<PencilPosition>();
    // set of already visited positions
    Set<PencilPosition> visitedPositions = new HashSet<PencilPosition>();

    // add the starting position, which is always (0,0)
    routes.add(new PencilPosition(0, 0, false, null));

    while (!routes.isEmpty()) {
      PencilPosition position = routes.poll();

      // if this is the destination position then we've found a solution
      if (0 == maze[position.row][position.column]) {
        solutions.add(position);
        continue;
      }

      // if we haven't already visited this position
      if (!visitedPositions.contains(position)) {
        routes.addAll(findPossibleRoutes(position, maze));
        visitedPositions.add(position);
      }
    }

    return solutions.poll();
  }
Example #25
 /**
  * Get the set of ids of the sequences in this sequence database.
  *
  * @return the set of sequence ids.
  */
 public Set<Integer> getSequenceIDs() {
   Set<Integer> set = new HashSet<Integer>();
   for (Sequence sequence : getSequences()) {
     set.add(sequence.getId());
   }
   return set;
 }
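On Java 8+ the same collect-ids-into-a-set loop can be written as a stream pipeline; here is a self-contained sketch with a minimal stand-in Sequence type (the real class is not shown in this example, only getId() is assumed).

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class SequenceIdsSketch {
  // Minimal stand-in for the Sequence type; only getId() is assumed from the method above.
  static class Sequence {
    private final int id;
    Sequence(int id) { this.id = id; }
    int getId() { return id; }
  }

  public static void main(String[] args) {
    List<Sequence> sequences = Arrays.asList(new Sequence(1), new Sequence(2), new Sequence(2));
    Set<Integer> ids = sequences.stream().map(Sequence::getId).collect(Collectors.toSet());
    System.out.println(ids); // [1, 2] (duplicates collapse, order unspecified)
  }
}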
Example #26
 private void findParents(JavaClass jclass, Set<JavaClass> changedParents) {
   for (JavaClass parent : getParents(jclass)) {
     if (changedParents.add(parent)) {
       findParents(parent, changedParents);
     }
   }
 }
  private static boolean checkDependants(
      final IdeaPluginDescriptor pluginDescriptor,
      final Function<PluginId, IdeaPluginDescriptor> pluginId2Descriptor,
      final Condition<PluginId> check,
      final Set<PluginId> processed) {
    processed.add(pluginDescriptor.getPluginId());
    final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
    final Set<PluginId> optionalDependencies =
        new HashSet<PluginId>(Arrays.asList(pluginDescriptor.getOptionalDependentPluginIds()));
    for (final PluginId dependentPluginId : dependentPluginIds) {
      if (processed.contains(dependentPluginId)) continue;

      // TODO[yole] should this condition be a parameter?
      if (isModuleDependency(dependentPluginId)
          && (ourAvailableModules.isEmpty()
              || ourAvailableModules.contains(dependentPluginId.getIdString()))) {
        continue;
      }
      if (!optionalDependencies.contains(dependentPluginId)) {
        if (!check.value(dependentPluginId)) {
          return false;
        }
        final IdeaPluginDescriptor dependantPluginDescriptor =
            pluginId2Descriptor.fun(dependentPluginId);
        if (dependantPluginDescriptor != null
            && !checkDependants(dependantPluginDescriptor, pluginId2Descriptor, check, processed)) {
          return false;
        }
      }
    }
    return true;
  }
Example #28
  // ---------------------------------------------------------------------------
  public Rule findTargetRule(String target) throws TablesawException {
    Rule rule = null;

    if (m_resolved) {
      ArrayList<String> posTargets;
      String posTarget = target;

      if (m_noRulesList.contains(target)) return (null);

      if ((rule = m_locatedRules.get(target)) != null) return (rule);

      // First look in name map
      rule = m_nameRuleMap.get(target);

      if (rule == null) { // Now look for targets
        rule = m_targetRuleMap.get(posTarget);
        if (rule == null) {
          posTargets = m_fileManager.getPossibleFiles(posTarget);
          for (String t : posTargets) {
            rule = m_targetRuleMap.get(t);
            if (rule != null) break;
          }
        }
      }

      Debug.print("Rule for " + target + " is " + rule);
      if (rule != null) {
        m_locatedRules.put(target, rule);
      } else m_noRulesList.add(target);
    }

    return (rule);
  }
  /**
   * Creates an NL fragment project along with the locale specific properties files.
   *
   * @throws CoreException
   * @throws IOException
   * @throws InvocationTargetException
   * @throws InterruptedException
   */
  private void internationalizePlugins(List plugins, List locales, Map overwrites)
      throws CoreException, IOException, InvocationTargetException, InterruptedException {

    Set created = new HashSet();

    for (Iterator it = plugins.iterator(); it.hasNext(); ) {
      IPluginModelBase plugin = (IPluginModelBase) it.next();

      for (Iterator iter = locales.iterator(); iter.hasNext(); ) {
        Locale locale = (Locale) iter.next();

        IProject project = getNLProject(plugin, locale);
        if (created.contains(project)
            || overwriteWithoutAsking
            || !project.exists()
            || OVERWRITE == overwrites.get(project.getName())) {
          if (!created.contains(project) && project.exists()) {
            project.delete(true, getProgressMonitor());
          }

          if (!created.contains(project)) {
            createNLFragment(plugin, project, locale);
            created.add(project);
            project.getFolder(RESOURCE_FOLDER_PARENT).create(false, true, getProgressMonitor());
          }

          project
              .getFolder(RESOURCE_FOLDER_PARENT)
              .getFolder(locale.toString())
              .create(true, true, getProgressMonitor());
          createLocaleSpecificPropertiesFile(project, plugin, locale);
        }
      }
    }
  }
Example #30
  // ---------------------------------------------------------------------------
  private void printDependencies(String target, PrintWriter pw, int spacing)
      throws TablesawException {
    Rule tr = findTargetRule(target);
    String[] pre;

    for (int I = 0; I < spacing; I++) pw.print("\t");

    List<String> targetList = new ArrayList<String>();
    if (tr != null) {
      for (String name : tr.getDependNames()) {
        targetList.add(name);
      }

      for (Rule r : tr.getDependRules()) {
        if ((r.getName() != null) && (!r.getName().startsWith(NAMED_RULE_PREFIX))) {
          targetList.add(r.getName());
        } else {
          for (String t : r.getTargets()) {
            targetList.add(t);
          }
        }
      }
    }

    if (!m_printedDependencies.add(target) && (targetList.size() != 0)) {
      pw.println(target + "*");
      return;
    }

    pw.println(target);

    for (String t : targetList) printDependencies(t, pw, spacing + 1);
  }
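The !m_printedDependencies.add(target) test is the usual HashSet idiom for do-this-only-once traversal (the extra targetList.size() check above additionally skips the marker for leaf targets). A minimal sketch of the idiom on its own, with made-up names:

import java.util.HashSet;
import java.util.Set;

public class PrintOnceSketch {
  private static final Set<String> printed = new HashSet<String>();

  // Set.add returns false when the element was already present, so one call both
  // records the target and answers "was it printed before?", as in the method above.
  static void visit(String target) {
    if (!printed.add(target)) {
      System.out.println(target + "*"); // already printed, mark with *
      return;
    }
    System.out.println(target);
  }

  public static void main(String[] args) {
    visit("app.jar");
    visit("app.jar"); // second visit prints "app.jar*"
  }
}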