private Set<Vertex> computeMinVertexCover(Set<Vertex> side1, Set<Vertex> side2) {
    // König's theorem construction: with Z = vertices reached by alternating
    // DFS from the unmatched vertices of side1, a minimum vertex cover is
    // (side1 \ Z) ∪ (side2 ∩ Z).
    Set<Vertex> reached = new HashSet<Vertex>();

    // Unmatched vertices of side1 are the DFS roots; the TreeSet gives a
    // deterministic visiting order.
    Set<Vertex> freeRoots = new TreeSet<Vertex>(side1);
    freeRoots.removeAll(matches);

    for (Vertex root : freeRoots) {
      konigDFS(reached, root, false);
    }

    // side2 ∩ Z
    Set<Vertex> cover = new HashSet<Vertex>(side2);
    cover.retainAll(reached);

    // ∪ (side1 \ Z)
    Set<Vertex> side1Unreached = new HashSet<Vertex>(side1);
    side1Unreached.removeAll(reached);
    cover.addAll(side1Unreached);

    return cover;
  }
Esempio n. 2
0
 /**
  * Computes the false-positive repairs: the cells reported as repaired that are
  * not in the ground truth, i.e. the set difference {@code found - truth}.
  *
  * @param truth ground-truth set of correctly repaired cells
  * @param found set of cells the repair process reported as repaired
  * @return a new set with every cell in {@code found} that is absent from {@code truth};
  *     empty when {@code found} is empty
  */
 public static Set<RepairedCell> getRepairFalse(Set<RepairedCell> truth, Set<RepairedCell> found) {
   // Copy-then-remove leaves both arguments untouched; the copy constructor
   // also makes the old explicit "found.size() != 0" guard unnecessary,
   // since an empty input simply yields an empty copy.
   Set<RepairedCell> falsePositives = new HashSet<RepairedCell>(found);
   falsePositives.removeAll(truth);
   return falsePositives;
 }
Esempio n. 3
0
  /** Asserts that this file contains exactly the given set of descendants. */
  public TestFile assertHasDescendants(String... descendants) {
    assertIsDir();

    // Collect the actual descendant paths of this directory.
    Set<String> actual = new TreeSet<String>();
    visit(actual, "", this);

    Set<String> expected = new TreeSet<String>(Arrays.asList(descendants));

    // Compute both directions of the difference so the failure message can
    // name the unexpected and the absent entries explicitly.
    Set<String> extras = new TreeSet<String>(actual);
    extras.removeAll(expected);
    Set<String> missing = new TreeSet<String>(expected);
    missing.removeAll(actual);

    String message =
        String.format(
            "For dir: %s, extra files: %s, missing files: %s, expected: %s",
            this, extras, missing, expected);
    assertEquals(message, expected, actual);

    return this;
  }
  /**
   * Synchronizes the IN_SELECTION_KEY user data on the search-result
   * highlighters with the editor's current block selection: highlighters that
   * left the selection lose their marker (secondary annotation highlighters
   * are removed entirely), and highlighters that newly intersect it get a
   * white rounded-box annotation highlighter layered on top.
   */
  private void updateInSelectionHighlighters() {
    if (mySearchResults.getEditor() == null) return;
    final SelectionModel selectionModel = mySearchResults.getEditor().getSelectionModel();
    int[] starts = selectionModel.getBlockSelectionStarts();
    int[] ends = selectionModel.getBlockSelectionEnds();

    final HashSet<RangeHighlighter> toRemove = new HashSet<RangeHighlighter>();
    Set<RangeHighlighter> toAdd = new HashSet<RangeHighlighter>();
    for (RangeHighlighter highlighter : myHighlighters) {
      // A highlighter counts as "in selection" only when it overlaps some
      // selection block by more than a shared endpoint — the two != checks
      // exclude ranges that merely touch at either edge.
      boolean intersectsWithSelection = false;
      for (int i = 0; i < starts.length; ++i) {
        TextRange selectionRange = new TextRange(starts[i], ends[i]);
        intersectsWithSelection =
            selectionRange.intersects(highlighter.getStartOffset(), highlighter.getEndOffset())
                && selectionRange.getEndOffset() != highlighter.getStartOffset()
                && highlighter.getEndOffset() != selectionRange.getStartOffset();
        if (intersectsWithSelection) break;
      }

      final Object userData = highlighter.getUserData(IN_SELECTION_KEY);
      if (userData != null) {
        // Previously marked as in-selection but no longer intersecting: a
        // secondary annotation highlighter (IN_SELECTION2) is removed from the
        // editor and from myHighlighters; a primary one just loses its marker.
        if (!intersectsWithSelection) {
          if (userData == IN_SELECTION2) {
            HighlightManager.getInstance(mySearchResults.getProject())
                .removeSegmentHighlighter(mySearchResults.getEditor(), highlighter);
            toRemove.add(highlighter);
          } else {
            highlighter.putUserData(IN_SELECTION_KEY, null);
          }
        }
      } else if (intersectsWithSelection) {
        // Newly selected, unmarked highlighter. The one coinciding exactly
        // with the cursor range is skipped — presumably it is highlighted by
        // its own dedicated mechanism (TODO confirm against cursor handling).
        TextRange cursor = mySearchResults.getCursor();
        if (cursor != null
            && highlighter.getStartOffset() == cursor.getStartOffset()
            && highlighter.getEndOffset() == cursor.getEndOffset()) continue;
        // Layer a white rounded-box annotation over the same range; the new
        // highlighter is collected into toAdd by highlightRange.
        final RangeHighlighter toAnnotate =
            highlightRange(
                new TextRange(highlighter.getStartOffset(), highlighter.getEndOffset()),
                new TextAttributes(null, null, Color.WHITE, EffectType.ROUNDED_BOX, 0),
                toAdd);
        // Mark both: the original as primary, the annotation as secondary.
        highlighter.putUserData(IN_SELECTION_KEY, IN_SELECTION1);
        toAnnotate.putUserData(IN_SELECTION_KEY, IN_SELECTION2);
      }
    }
    myHighlighters.removeAll(toRemove);
    myHighlighters.addAll(toAdd);
  }
 /**
  * Drops every highlighter that was not flagged with MARKER_USED since the
  * last pass, and clears the flag on the survivors so the next pass can
  * re-mark them.
  */
 private void clearUnusedHightlighters() {
   Set<RangeHighlighter> stale = new com.intellij.util.containers.HashSet<RangeHighlighter>();
   for (RangeHighlighter candidate : myHighlighters) {
     if (candidate.getUserData(MARKER_USED) != null) {
       // Still in use: reset the flag for the next marking round.
       candidate.putUserData(MARKER_USED, null);
     } else {
       stale.add(candidate);
     }
   }
   myHighlighters.removeAll(stale);

   // Physically remove the stale highlighters from the editor, unless the
   // project is already being torn down.
   Project project = mySearchResults.getProject();
   if (!project.isDisposed()) {
     for (RangeHighlighter candidate : stale) {
       HighlightManager.getInstance(project)
           .removeSegmentHighlighter(mySearchResults.getEditor(), candidate);
     }
   }
 }
Esempio n. 6
0
 /**
  * Returns the documents from {@code docs} for which no emotion was detected,
  * preserving the iteration order of the input collection.
  */
 private Set<Document> getDocsWithNoEmotions(
     Indexer indexer, Collection<Document> docs, boolean originalContentOnly) {
   Set<Document> withEmotions = getDocsWithAnyEmotions(indexer, docs, originalContentOnly);
   Set<Document> withoutEmotions = new LinkedHashSet<Document>(docs);
   withoutEmotions.removeAll(withEmotions);
   return withoutEmotions;
 }
Esempio n. 7
0
 /**
  * Return the difference of two sets, a - b.
  *
  * <p>Always returns a new set, so callers may freely mutate the result
  * without affecting either argument. (The previous implementation returned
  * {@code a} itself when {@code b} was empty, leaking a mutable alias to the
  * first argument only on that path.)
  *
  * @param a the set to subtract from
  * @param b the elements to remove
  * @return a new set containing the elements of {@code a} not present in {@code b}
  */
 <T> Set<T> diff(Set<T> a, Set<T> b) {
   Set<T> result = new LinkedHashSet<T>(a);
   if (!b.isEmpty()) {
     result.removeAll(b);
   }
   return result;
 }
Esempio n. 8
0
  /**
   * Runs ConflictTreeSimulatedSession with calculated diagnoses for every
   * matching file of the OAEI'11 conference track: loads the inconsistent
   * aligned ontology, picks a seeded-random target diagnosis from at most 30
   * precomputed ones, minimizes it, then runs a conflict-tree search per QSS
   * strategy and asserts that every found diagnosis is part of the target.
   *
   * @param matchingsDir base directory of the matching files / RDF mappings
   * @param ontologyDir directory containing the ontologies
   * @param files matching files to process; names are expected to follow the
   *     "matcher-ontology1-ontology2" pattern with a 4-character extension
   * @param map matching file -> relative path appended to {@code matchingsDir}
   *     when reading the file's RDF mapping
   * @param numberOfConflicts upper bound on conflicts per search; only applied when > 0
   * @throws SolverException if the underlying solver fails
   * @throws InconsistentTheoryException if a theory cannot be handled consistently
   * @throws OWLOntologyCreationException if an ontology cannot be loaded
   */
  private void runOaeiConferenceTests(
      String matchingsDir,
      String ontologyDir,
      Set<File> files,
      Map<File, String> map,
      int numberOfConflicts)
      throws SolverException, InconsistentTheoryException, OWLOntologyCreationException {
    OAEI11ConferenceSession conferenceSession = new OAEI11ConferenceSession();

    // Only MINSCORE is exercised here; the other strategies are kept for reference.
    QSSType[] qssTypes = new QSSType[] {QSSType.MINSCORE};
    // QSSType[] qssTypes = new QSSType[]{QSSType.MINSCORE, QSSType.SPLITINHALF,
    // QSSType.DYNAMICRISK};

    TargetSource targetSource = TargetSource.FROM_30_DIAGS;
    for (File file : files) {
      logger.info("processing " + file.getName());
      String out = "STAT, " + file;

      // file parameter
      // Split "matcher-onto1-onto2.xxx" into its parts; the substring calls
      // strip the 4-character extension (".xxx") from o2 and the full name.
      String fileName = file.getName();
      StringTokenizer t = new StringTokenizer(fileName, "-");
      String matcher = t.nextToken();
      String o1 = t.nextToken();
      String o2 = t.nextToken();
      o2 = o2.substring(0, o2.length() - 4);
      String n = file.getName().substring(0, file.getName().length() - 4);

      // Pool of candidate target diagnoses (at most 30) for this matching.
      Set<FormulaSet<OWLLogicalAxiom>> targetDgSet =
          getRandomDiagSet(matchingsDir, ontologyDir, map, file, o1, o2, n, conferenceSession);
      logger.info("number of found diagnoses (max. 30): " + targetDgSet.size());
      /*
      TODO: for each ontology always the same "randomly" chosen diagnosis is calculated, dependent
          from the size of the targetDgSet -> change to a real random selection
       */
      // Fixed seed => reproducible choice of the target diagnosis.
      Random random = new Random(12311);
      int randomNr = conferenceSession.chooseRandomNum(targetDgSet, random);
      Set<OWLLogicalAxiom> targetDg =
          conferenceSession.chooseRandomDiag(targetDgSet, file, randomNr);

      OWLOntology inconsOntology =
          loadOntology(matchingsDir, ontologyDir, map, conferenceSession, file, o1, o2, n);
      //                OWLTheory faultyTheory = getExtendTheory(inconsOntology, false);
      OWLTheory faultyTheory = loadTheory(inconsOntology, ontologyDir, o1, o2);
      // Shrink the target diagnosis to a minimal one: drop the axioms that
      // validateMinimality reports as removable, then re-check.
      Set<OWLLogicalAxiom> rem = validateMinimality(targetDg, faultyTheory);
      if (!rem.isEmpty()) {
        targetDg.removeAll(rem);
        assertTrue(validateMinimality(targetDg, faultyTheory).isEmpty());
      }

      // Per-strategy statistics: search durations and query counts.
      Map<QSSType, DurationStat> ctTimes = new HashMap<QSSType, DurationStat>();
      Map<QSSType, List<Double>> ctQueries = new HashMap<QSSType, List<Double>>();

      for (QSSType qssType : qssTypes) {
        // CSV-ish log record for this run's parameters.
        String message =
            "act,"
                + file.getName()
                + ","
                + map.get(file)
                + ","
                + targetSource
                + ","
                + qssType
                + ","
                + randomNr;
        // Measure the ontology-load / theory-setup time separately.
        long preprocessModulExtract = System.currentTimeMillis();
        OWLOntology ontology =
            loadOntology(matchingsDir, ontologyDir, map, conferenceSession, file, o1, o2, n);
        ctTheory = loadTheory(ontology, ontologyDir, o1, o2);
        preprocessModulExtract = System.currentTimeMillis() - preprocessModulExtract;
        message += "," + preprocessModulExtract;

        // Define Treesearch here
        TreeSearch<FormulaSet<OWLLogicalAxiom>, OWLLogicalAxiom> search =
            getUniformCostSearch(ctTheory, false);

        // Axiom costs come from the matcher's RDF mapping for this file.
        Map<OWLLogicalAxiom, BigDecimal> map1 =
            conferenceSession.readRdfMapping(matchingsDir + map.get(file), n + ".rdf");
        OWLAxiomCostsEstimator es = new OWLAxiomCostsEstimator(ctTheory, map1);
        search.setCostsEstimator(es);
        search.reset();

        // set for all found diagnoses during search
        foundDiagnoses = new LinkedHashSet<OWLLogicalAxiom>();

        ctTimes.put(qssType, new DurationStat());
        ctQueries.put(qssType, new LinkedList<Double>());

        // calculation part
        ConflictTreeSession conflictTreeSearch = new ConflictTreeSession(this, ctTheory, search);
        if (numberOfConflicts > 0) {
          conflictTreeSearch.setMaximumNumberOfConflicts(numberOfConflicts);
        }
        conflictTreeSearch.setOutputString(out);
        conflictTreeSearch.setMessageString(message);
        // Wrap the target diagnosis as a FormulaSet with probability 1.
        FormulaSet<OWLLogicalAxiom> targetD =
            new FormulaSetImpl<OWLLogicalAxiom>(new BigDecimal(1), targetDg, null);
        long completeTime = conflictTreeSearch.search(targetD, ctQueries, qssType, true);
        ctTimes.get(qssType).add(completeTime);
        out += conflictTreeSearch.getOutputString();

        foundDiagnoses = conflictTreeSearch.getDiagnosis();
        logger.info("targetD: " + targetD.size() + ", " + CalculateDiagnoses.renderAxioms(targetD));
        logger.info(
            "foundD: "
                + foundDiagnoses.size()
                + ", "
                + CalculateDiagnoses.renderAxioms(foundDiagnoses));

        // Sanity checks against the original faulty theory: it must still be
        // inconsistent, and the target diagnosis must still be minimal.
        faultyTheory
            .getReasoner()
            .addFormulasToCache(faultyTheory.getKnowledgeBase().getFaultyFormulas());
        assertFalse(faultyTheory.verifyConsistency());
        Set<OWLLogicalAxiom> removedFound = validateMinimality(foundDiagnoses, faultyTheory);
        // assertTrue(removed.isEmpty());
        Set<OWLLogicalAxiom> removedTarget = validateMinimality(targetD, faultyTheory);
        assertTrue(removedTarget.isEmpty());

        // Every found diagnosis must be part of the target diagnosis.
        assertTrue(targetD.containsAll(foundDiagnoses));
        // TODO uncomment later
        // assertTrue(foundDiagnoses.containsAll(targetD));

        resetTheoryTests(ctTheory);
        search.reset();
      }
      logger.info(out);
    }
  }
 /**
  * Marks every zone label except the given ones as ignored: after this call
  * {@code ignoredLabels} holds exactly the complement of {@code labels}.
  */
 public void setLabels(Collection<BxZoneLabel> labels) {
   for (BxZoneLabel label : EnumSet.allOf(BxZoneLabel.class)) {
     ignoredLabels.add(label);
   }
   ignoredLabels.removeAll(labels);
 }
Esempio n. 10
0
  /**
   * Re-indexes the locally installed R packages.
   *
   * <p>A package is re-indexed when it is missing from the index or its
   * version differs from the indexed one. When no package names are provided,
   * packages that are no longer installed are also dropped from the index.
   * After any change, the spell-checker dictionaries of all open projects are
   * fed with package and function names and code analysis is restarted.
   *
   * @param packageNames packages to be reindexed if version has changed or package has been
   *     installed since last indexing run. If none are provided all packages will be refreshed.
   * @return {@code true} if at least one package was (re-)indexed
   */
  public boolean refreshIndex(String... packageNames) {
    RHelperUtil.runHelperWithArgs(RHelperUtil.R_HELPER_INSTALL_TIDYVERSE);

    Set<RPackage> installedPackages = LocalRUtil.getInstalledPackages();

    // remove packages from index that are no longer present
    if (packageNames.length == 0) {
      Sets.SetView<RPackage> noLongerInstalled = Sets.difference(allPackages, installedPackages);
      allPackages.removeAll(noLongerInstalled);
    }

    ExecutorService executorService = Executors.newFixedThreadPool(8);

    // Written by worker threads, read after awaitTermination; the one-element
    // array works around the effectively-final requirement of anonymous classes.
    final boolean[] hasChanged = {false};

    for (final RPackage rPackage : installedPackages) {
      final RPackage indexPackage = getByName(rPackage.getName());

      // Skip packages whose indexed version is already up to date.
      if (indexPackage != null
          && ObjectUtils.equals(indexPackage.getVersion(), rPackage.getVersion())) {
        continue;
      }

      executorService.submit(
          new Runnable() {
            @Override
            public void run() {
              reindexPackage(rPackage, indexPackage);
              hasChanged[0] = true;
            }
          });
    }

    // Wait (effectively unbounded) for all reindexing tasks to finish.
    executorService.shutdown();
    try {
      executorService.awaitTermination(1, TimeUnit.DAYS);
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers up the stack can observe the
      // interruption (it was previously swallowed entirely).
      Thread.currentThread().interrupt();
      e.printStackTrace();
    }

    if (hasChanged[0]) {
      // Feed the spell-checker of every initialized open project with package
      // and function names, then restart code analysis to pick them up.
      if (ApplicationManager.getApplication() != null) {
        Project[] projects = ProjectManager.getInstance().getOpenProjects();
        for (Project project : projects) {
          if (project.isInitialized() && project.isOpen() && !project.isDefault()) {
            SpellCheckerManager spellCheckerManager = SpellCheckerManager.getInstance(project);
            EditableDictionary dictionary = spellCheckerManager.getUserDictionary();

            for (RPackage rPackage : RPackageService.getInstance().allPackages) {
              dictionary.addToDictionary(rPackage.getName());
              dictionary.addToDictionary(rPackage.getFunctionNames());
            }

            DaemonCodeAnalyzer.getInstance(project).restart();
          }
        }
      }
    }

    return hasChanged[0];
  }