public DoubleMatrix getScoreMatrix(File file) {
    // Count raw token occurrences in this document.
    Counter<String> docWords = new Counter<String>();
    try {
        LineIterator iter = FileUtils.lineIterator(file);
        while (iter.hasNext()) {
            Tokenizer t = tokenizerFactory.create(new InputHomogenization(iter.nextLine()).transform());
            while (t.hasMoreTokens()) {
                docWords.incrementCount(t.nextToken(), 1.0);
            }
        }
        iter.close();
    } catch (IOException e) {
        throw new IllegalStateException("Unable to read file", e);
    }
    // One row with a column per vocabulary word; each cell holds the
    // precomputed score for a vocab word that appears in this document,
    // and 0.0 for words that do not.
    DoubleMatrix ret = new DoubleMatrix(1, currVocab.size());
    for (int i = 0; i < currVocab.size(); i++) {
        String word = currVocab.get(i).toString();
        if (docWords.getCount(word) > 0) {
            ret.put(i, wordScores.getCount(word));
        }
    }
    return ret;
}
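// A minimal usage sketch, not from the source: score one document against the
// current vocabulary and read back the non-zero cells. `vectorizer` and
// "doc.txt" are hypothetical names; DoubleMatrix is org.jblas.DoubleMatrix,
// so get(i) reads by linear index and `length` is the total cell count.
DoubleMatrix scores = vectorizer.getScoreMatrix(new File("doc.txt"));
for (int i = 0; i < scores.length; i++) {
    if (scores.get(i) > 0)
        System.out.println(currVocab.get(i) + " -> " + scores.get(i));
}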
protected void addForDoc(File doc) {
    // Words already seen in this document; used so idf counts each word once per doc.
    Set<String> encountered = new HashSet<String>();
    SentenceIterator iter = new LineSentenceIterator(doc);
    while (iter.hasNext()) {
        String line = iter.nextSentence();
        if (line == null)
            continue;
        Tokenizer tokenizer = tokenizerFactory.create(new InputHomogenization(line).transform());
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();
            if (validWord(token)) {
                // Raw term frequency, per document and across the corpus.
                documentWordFrequencies.incrementCount(token, doc.getAbsolutePath(), 1.0);
                tf.incrementCount(token, 1.0);
                // Document frequency: increment at most once per document.
                if (!encountered.contains(token)) {
                    idf.incrementCount(token, 1.0);
                    encountered.add(token);
                }
            }
        }
    }
    // Release the iterator only after the whole document has been consumed.
    iter.finish();
}
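// A minimal sketch, assuming standard tf-idf weighting (the source does not show
// this step): one way the `tf` and `idf` counters accumulated above could be
// combined into the `wordScores` counter that getScoreMatrix() reads.
// `computeWordScores` and `numDocs` are hypothetical names, not from the source.
protected void computeWordScores(double numDocs) {
    for (String word : tf.keySet()) {
        // Classic tf-idf: term frequency damped by log inverse document frequency.
        double score = tf.getCount(word) * Math.log(numDocs / idf.getCount(word));
        wordScores.incrementCount(word, score);
    }
}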