/**
 * Walks the buckets from {@code ix} onward, adding each bucket's score to every
 * candidate whose id the bucket contains. Stops early as soon as a bucket has grown
 * past the cutoff relative to the candidate-set size. No new candidates are added.
 */
private void bumpScores(Map<Long, Score> candidates, List<Bucket> buckets, int ix) {
    for (int pos = ix; pos < buckets.size(); pos++) {
        Bucket bucket = buckets.get(pos);
        // Buckets are assumed to grow as we advance; once one exceeds the cutoff,
        // the rest would too, so stop scanning entirely.
        if (bucket.nextfree > CUTOFF_FACTOR_2 * candidates.size()) {
            return;
        }
        double bucketScore = bucket.getScore();
        for (Score candidate : candidates.values()) {
            if (bucket.contains(candidate.id)) {
                candidate.score += bucketScore;
            }
        }
    }
}
/**
 * Resolves every ball against the bucket for this frame: a caught good ball scores
 * a point, a caught bad ball costs a life, and a good ball that falls past the
 * bottom edge uncaught costs a life. Every resolved ball is removed from play.
 *
 * <p>Replaces the fragile {@code balls.remove(i); i--;} index-compensation pattern
 * (and the repeated {@code balls.get(i)} calls) with iterator-based removal, which
 * is the safe idiom for removing elements while traversing a collection.
 */
public void testBallCatch() {
    // NOTE(review): assumes balls is a List<Ball> — inferred from Ball.RADIUS
    // and the isGood()/getLocation() calls; confirm against the field declaration.
    java.util.Iterator<Ball> it = balls.iterator();
    while (it.hasNext()) {
        Ball ball = it.next();
        if (bucket.contains(ball)) {
            if (ball.isGood()) {
                points++;
            } else {
                lives--;
            }
            it.remove();
        } else if (ball.getLocation().getY() >= HEIGHT + Ball.RADIUS) {
            // The ball is fully below the visible area; missing a good one is penalized.
            if (ball.isGood()) {
                lives--;
            }
            it.remove();
        }
    }
}
@Override public List<Covering> decode(String[] tokens, int indexOfInstance) { if (tokens.length >= 10) { this.stackSize = 10000; } List<Bucket> stacks = new ArrayList<Bucket>(); List<Covering> coverings = new ArrayList<Covering>(); // for (int i = 0; i < tokens.length; i++) { // Bucket s = new Bucket(stackSize); // stacks.add(s); // } for (int t = 0; t < tokens.length; t++) { try { Bucket stack = new Bucket(stackSize); for (int n = 1; n <= nGramSize; n++) { int endIndex = t + (n - 1); if (endIndex < tokens.length) { String ngram = ""; for (int k = t; k <= endIndex; k++) { ngram += tokens[k] + " "; } ngram = ngram.trim(); Interval v = new Interval(t, endIndex); List<Feature> matchingFeatures = getMatchingEntries(ngram, model, indexOfInstance, useInstancesBasedFeatures); if (ngram.equals("tree frogs")) { int z = 1; } for (Feature m : matchingFeatures) { CCGFeature feature = (CCGFeature) m; CCGLexEntry c1 = new CCGLexEntry(feature.getWord(), feature.getCategory(), feature.getLambda()); c1.setInstanceId(indexOfInstance); int nGramEffect = n; if (n > 1) { nGramEffect += 0.1; } if (stacks.isEmpty()) { Covering covering = new Covering(); covering.addToken(c1, v, feature.getScore() * nGramEffect); if (!stack.contains(covering)) { stack.add(covering); } } else { List<Covering> previous = stacks.get(t - 1).getCoverings(); for (Covering p : previous) { Covering covering = p.clone(); if (!covering.covers(t)) { covering.addToken(c1, v, feature.getScore() * nGramEffect); } if (!stack.contains(covering)) { stack.add(covering); } } } } } } stack.prune(); if (stack.getCoverings().isEmpty()) { if (t > 0) { stacks.add(stacks.get(t - 1)); } } else { stacks.add(stack); } } catch (Exception e) { System.err.println("Problem with token: " + tokens[t]); } } if (!stacks.isEmpty()) { if (stacks.get(stacks.size() - 1).getCoverings() != null) { coverings = stacks.get(stacks.size() - 1).getCoverings(); } } return coverings; }