/**
 * The basic method for splitting off a clause of a tree. This modifies the tree in place.
 * This method additionally follows ref edges.
 *
 * @param tree The tree to split a clause from.
 * @param toKeep The edge representing the clause to keep.
 */
@SuppressWarnings("unchecked")
private void simpleClause(SemanticGraph tree, SemanticGraphEdge toKeep) {
  splitToChildOfEdge(tree, toKeep);

  // Follow 'ref' edges
  Map<IndexedWord, IndexedWord> refReplaceMap = new HashMap<>();
  // (find replacements)
  for (IndexedWord vertex : tree.vertexSet()) {
    for (SemanticGraphEdge edge : extraEdgesByDependent.get(vertex)) {
      if ("ref".equals(edge.getRelation().toString())  // it's a ref edge...
          && !tree.containsVertex(edge.getGovernor())) {  // ...whose governor isn't already in the tree
        refReplaceMap.put(vertex, edge.getGovernor());
      }
    }
  }
  // (do replacements)
  for (Map.Entry<IndexedWord, IndexedWord> entry : refReplaceMap.entrySet()) {
    Iterator<SemanticGraphEdge> iter = tree.incomingEdgeIterator(entry.getKey());
    if (!iter.hasNext()) {
      continue;
    }
    SemanticGraphEdge incomingEdge = iter.next();
    IndexedWord governor = incomingEdge.getGovernor();
    tree.removeVertex(entry.getKey());
    addSubtree(tree, governor, incomingEdge.getRelation().toString(),
        this.tree, entry.getValue(), this.tree.incomingEdgeList(tree.getFirstRoot()));
  }
}
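// Illustrative note on the ref-edge handling in simpleClause() above (example sentence is not
// from the original source): a ref edge links a relative pronoun to its antecedent. For
// "Obama, who was president, gave a speech", the word "who" carries a ref edge back to
// "Obama". When the clause "who was president" is split off on its own, the replacement loop
// swaps the dangling "who" vertex for the subtree rooted at its referent, so the extracted
// clause reads "Obama was president" rather than "who was president".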
private Counter<String> uniformRandom() {
  Counter<String> uniformRandom =
      new ClassicCounter<>(MapFactory.<String, MutableDouble>linkedHashMapFactory());
  for (Map<SentenceKey, EnsembleStatistics> impl : this.impl) {
    for (Map.Entry<SentenceKey, EnsembleStatistics> entry : impl.entrySet()) {
      uniformRandom.setCount(entry.getKey().sentenceHash, 1.0);
    }
  }
  return uniformRandom;
}
private Counter<String> highKLFromMean() {
  // Get the average KL divergence from the mean prediction for each sentence
  Counter<String> kl =
      new ClassicCounter<>(MapFactory.<String, MutableDouble>linkedHashMapFactory());
  for (Map<SentenceKey, EnsembleStatistics> impl : this.impl) {
    for (Map.Entry<SentenceKey, EnsembleStatistics> entry : impl.entrySet()) {
      kl.setCount(entry.getKey().sentenceHash, entry.getValue().averageKLFromMean());
    }
  }
  return kl;
}
private Counter<String> lowAverageConfidence() {
  // Get confidences
  Counter<String> lowConfidence =
      new ClassicCounter<>(MapFactory.<String, MutableDouble>linkedHashMapFactory());
  for (Map<SentenceKey, EnsembleStatistics> impl : this.impl) {
    for (Map.Entry<SentenceKey, EnsembleStatistics> entry : impl.entrySet()) {
      SentenceStatistics average = entry.getValue().mean();
      for (double confidence : average.confidence) {
        lowConfidence.setCount(entry.getKey().sentenceHash, 1 - confidence);
      }
    }
  }
  return lowConfidence;
}
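/**
 * Usage sketch (illustrative only; not part of the original class): each selector above
 * returns a {@link Counter} mapping a sentence hash to a priority score, where a higher score
 * marks a better candidate for annotation. A caller could take the top-k candidates with the
 * standard {@link Counters} utilities, roughly as below. The method name and the {@code k}
 * parameter are hypothetical, and {@code java.util.List} is assumed to be imported.
 */
private static List<String> topCandidates(Counter<String> priorities, int k) {
  List<String> ranked = Counters.toSortedList(priorities);  // keys sorted by descending score
  return ranked.subList(0, Math.min(k, ranked.size()));
}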
public SentenceStatistics mean() {
  double sumConfidence = 0;
  int countWithConfidence = 0;
  Counter<String> avePredictions =
      new ClassicCounter<>(MapFactory.<String, MutableDouble>linkedHashMapFactory());
  // Sum confidences and relation distributions over all classifiers
  for (SentenceStatistics stat : this.statisticsForClassifiers) {
    for (Double confidence : stat.confidence) {
      sumConfidence += confidence;
      countWithConfidence += 1;
    }
    assert Math.abs(stat.relationDistribution.totalCount() - 1.0) < 1e-5;
    for (Map.Entry<String, Double> entry : stat.relationDistribution.entrySet()) {
      assert entry.getValue() >= 0.0;
      assert entry.getValue() == stat.relationDistribution.getCount(entry.getKey());
      avePredictions.incrementCount(entry.getKey(), entry.getValue());
    }
  }
  // Normalize
  double aveConfidence = sumConfidence / ((double) countWithConfidence);
  // Return
  if (this.statisticsForClassifiers.size() > 1) {
    Counters.divideInPlace(avePredictions, (double) this.statisticsForClassifiers.size());
  }
  if (Math.abs(avePredictions.totalCount() - 1.0) > 1e-5) {
    throw new IllegalStateException("Mean relation distribution is not a distribution!");
  }
  assert this.statisticsForClassifiers.size() > 1
      || this.statisticsForClassifiers.size() == 0
      || Counters.equals(avePredictions,
          statisticsForClassifiers.iterator().next().relationDistribution, 1e-5);
  return countWithConfidence > 0
      ? new SentenceStatistics(avePredictions, aveConfidence)
      : new SentenceStatistics(avePredictions);
}
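/**
 * Illustrative sketch (not part of the original class): the KL divergence of one classifier's
 * relation distribution {@code p} from the ensemble mean {@code q}, i.e.
 * KL(p || q) = sum_r p(r) * log(p(r) / q(r)). Something along these lines is presumably what
 * {@code averageKLFromMean()} averages over the classifiers in an ensemble; the original
 * implementation may differ (e.g. in how zero probabilities are handled). The method name
 * here is hypothetical.
 */
private static double klFromMean(Counter<String> p, Counter<String> mean) {
  double kl = 0.0;
  for (Map.Entry<String, Double> entry : p.entrySet()) {
    double pr = entry.getValue();
    double qr = mean.getCount(entry.getKey());
    if (pr > 0.0 && qr > 0.0) {  // skip zero entries to avoid log(0) and division by zero
      kl += pr * Math.log(pr / qr);
    }
  }
  return kl;
}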