/**
 * Generic method for adding a blackboard artifact to the blackboard.
 *
 * @param type         the BlackboardArtifact.ARTIFACT_TYPE of artifact to create
 * @param content      the FsContent object that the artifact is added for
 * @param bbattributes the collection of blackboard attributes to add to the artifact after it has been created
 */
public void addArtifact(
        BlackboardArtifact.ARTIFACT_TYPE type,
        FsContent content,
        Collection<BlackboardAttribute> bbattributes) {
    try {
        BlackboardArtifact bbart = content.newArtifact(type);
        bbart.addAttributes(bbattributes);
    } catch (TskException ex) {
        logger.log(Level.WARNING, "Error while trying to add an artifact: " + ex);
        this.addErrorMessage(
                this.getName() + ": Error while trying to add artifact to case for file: " + content.getName());
    }
}
/**
 * Generic method for adding a blackboard artifact to the blackboard.
 *
 * @param type         the BlackboardArtifact.ARTIFACT_TYPE of artifact to create
 * @param content      the FsContent object that the artifact is added for
 * @param bbattributes the collection of blackboard attributes to add to the artifact after it has been created
 */
public void addArtifact(
        BlackboardArtifact.ARTIFACT_TYPE type,
        FsContent content,
        Collection<BlackboardAttribute> bbattributes) {
    try {
        BlackboardArtifact bbart = content.newArtifact(type);
        bbart.addAttributes(bbattributes);
    } catch (TskException ex) {
        logger.log(Level.SEVERE, "Error while trying to add an artifact: " + ex);
    }
}
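// A minimal usage sketch (not part of the original source): how a caller might assemble
// attributes and hand them to addArtifact() above. The module name and keyword value are
// hypothetical placeholders; the string-valued BlackboardAttribute(int, String, String)
// constructor is assumed to be available in the Sleuth Kit datamodel. The attribute and
// artifact types used are the ones that appear elsewhere in this listing.
private void exampleAddKeywordHit(FsContent file) {
    Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
    bbattributes.add(new BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID(),
            "ExampleModule",  // hypothetical module name
            "search term"));  // hypothetical keyword that was hit
    addArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT, file, bbattributes);
}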
private void indexFile(AbstractFile aFile) {
    // logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName());
    boolean ingestibleFile = Ingester.isIngestible(aFile);

    final long size = aFile.getSize();
    // limit the size of the entire file to index; do not limit string extraction
    if (size == 0 || (ingestibleFile && size > MAX_INDEX_SIZE)) {
        ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED);
        return;
    }

    if (ingestibleFile) {
        // we know it's an allocated file or dir (FsContent)
        FsContent fileDir = (FsContent) aFile;
        try {
            // logger.log(Level.INFO, "indexing: " + fsContent.getName());
            ingester.ingest(fileDir);
            ingestStatus.put(fileDir.getId(), IngestStatus.INGESTED);
        } catch (IngesterException e) {
            ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED);
            // try to extract strings if not a dir
            if (fileDir.isFile()) {
                processNonIngestible(fileDir);
            }
        } catch (Exception e) {
            ingestStatus.put(fileDir.getId(), IngestStatus.SKIPPED);
            // try to extract strings if not a dir
            if (fileDir.isFile()) {
                processNonIngestible(fileDir);
            }
        }
    } else {
        // unallocated file, or a type not supported by Solr
        processNonIngestible(aFile);
    }
}
@Override
protected Object doInBackground() throws Exception {
    logger.log(Level.INFO, "Pending start of new searcher");

    final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : "");
    progress = ProgressHandleFactory.createHandle(
            displayName + " (Pending)",
            new Cancellable() {
                @Override
                public boolean cancel() {
                    logger.log(Level.INFO, "Cancelling the searcher by user.");
                    if (progress != null) {
                        progress.setDisplayName(displayName + " (Cancelling...)");
                    }
                    return Searcher.this.cancel(true);
                }
            });

    progress.start();
    progress.switchToIndeterminate();

    // block to ensure the previous searcher is completely done with doInBackground();
    // even after a previous searcher cancellation, we need to check this
    searcherLock.lock();
    try {
        logger.log(Level.INFO, "Started a new searcher");
        progress.setDisplayName(displayName);

        // make sure other searchers are not spawned
        searcherDone = false;
        runSearcher = false;
        if (searchTimer.isRunning()) {
            searchTimer.stop();
        }

        int numSearched = 0;

        updateKeywords();
        progress.switchToDeterminate(keywords.size());

        for (Keyword keywordQuery : keywords) {
            if (this.isCancelled()) {
                logger.log(Level.INFO,
                        "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery());
                return null;
            }

            final String queryStr = keywordQuery.getQuery();
            final KeywordSearchList list = keywordToList.get(queryStr);
            final String listName = list.getName();

            // DEBUG
            // logger.log(Level.INFO, "Searching: " + queryStr);
            progress.progress(queryStr, numSearched);

            KeywordSearchQuery del = null;

            boolean isRegex = !keywordQuery.isLiteral();
            if (!isRegex) {
                del = new LuceneQuery(keywordQuery);
                del.escape();
            } else {
                del = new TermComponentQuery(keywordQuery);
            }

            Map<String, List<ContentHit>> queryResult = null;

            try {
                queryResult = del.performQuery();
            } catch (NoOpenCoreException ex) {
                logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
                // no reason to continue with the next query if recovery failed,
                // or to wait for recovery to kick in and run again later;
                // the case has likely closed and threads are being interrupted
                return null;
            } catch (CancellationException e) {
                logger.log(Level.INFO,
                        "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
                return null;
            } catch (Exception e) {
                logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
                continue;
            }

            // calculate new results by subtracting results already obtained in this run
            Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();

            for (String termResult : queryResult.keySet()) {
                List<ContentHit> queryTermResults = queryResult.get(termResult);
                Keyword termResultK = new Keyword(termResult, !isRegex);
                List<ContentHit> curTermResults = currentResults.get(termResultK);
                if (curTermResults == null) {
                    currentResults.put(termResultK, queryTermResults);
                    newResults.put(termResultK, queryTermResults);
                } else {
                    // some AbstractFile hits already exist for this keyword
                    for (ContentHit res : queryTermResults) {
                        if (!previouslyHit(curTermResults, res)) {
                            // add to new results
                            List<ContentHit> newResultsFs = newResults.get(termResultK);
                            if (newResultsFs == null) {
                                newResultsFs = new ArrayList<ContentHit>();
                                newResults.put(termResultK, newResultsFs);
                            }
                            newResultsFs.add(res);
                            curTermResults.add(res);
                        }
                    }
                }
            }

            if (!newResults.isEmpty()) {

                // write results to the blackboard;
                // collect the new artifacts created, to report to listeners
                Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();

                for (final Keyword hitTerm : newResults.keySet()) {
                    List<ContentHit> contentHitsAll = newResults.get(hitTerm);
                    Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(contentHitsAll);
                    for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
                        String snippet = null;
                        final String snippetQuery =
                                KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery(), true, false);
                        int chunkId = contentHitsFlattened.get(hitFile);
                        try {
                            snippet = LuceneQuery.querySnippet(
                                    snippetQuery, hitFile.getId(), chunkId, isRegex, true);
                        } catch (NoOpenCoreException e) {
                            logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                            // no reason to continue
                            return null;
                        } catch (Exception e) {
                            logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                            continue;
                        }

                        KeywordWriteResult written =
                                del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
                        if (written == null) {
                            logger.log(Level.WARNING,
                                    "BB artifact for keyword hit not written, file: " + hitFile
                                            + ", hit: " + hitTerm.toString());
                            continue;
                        }

                        newArtifacts.add(written.getArtifact());

                        // generate a data message for each artifact
                        StringBuilder subjectSb = new StringBuilder();
                        StringBuilder detailsSb = new StringBuilder();
                        // final int hitFiles = newResults.size();

                        if (!keywordQuery.isLiteral()) {
                            subjectSb.append("RegExp hit: ");
                        } else {
                            subjectSb.append("Keyword hit: ");
                        }
                        // subjectSb.append("<");
                        String uniqueKey = null;
                        BlackboardAttribute attr = written.getAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
                        if (attr != null) {
                            final String keyword = attr.getValueString();
                            subjectSb.append(keyword);
                            uniqueKey = keyword.toLowerCase();
                        }
                        // subjectSb.append(">");
                        // String uniqueKey = queryStr;

                        // details
                        detailsSb.append("<table border='0' cellpadding='4' width='280'>");
                        // hit
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>Keyword hit</th>");
                        detailsSb.append("<td>")
                                .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                                .append("</td>");
                        detailsSb.append("</tr>");

                        // preview
                        attr = written.getAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
                        if (attr != null) {
                            detailsSb.append("<tr>");
                            detailsSb.append("<th>Preview</th>");
                            detailsSb.append("<td>")
                                    .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                                    .append("</td>");
                            detailsSb.append("</tr>");
                        }

                        // file
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>File</th>");
                        if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) {
                            detailsSb.append("<td>")
                                    .append(((FsContent) hitFile).getParentPath())
                                    .append(hitFile.getName())
                                    .append("</td>");
                        } else {
                            detailsSb.append("<td>").append(hitFile.getName()).append("</td>");
                        }
                        detailsSb.append("</tr>");

                        // list
                        attr = written.getAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>List</th>");
                        detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                        detailsSb.append("</tr>");

                        // regex
                        if (!keywordQuery.isLiteral()) {
                            attr = written.getAttribute(
                                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
                            if (attr != null) {
                                detailsSb.append("<tr>");
                                detailsSb.append("<th>RegEx</th>");
                                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                                detailsSb.append("</tr>");
                            }
                        }
                        detailsSb.append("</table>");

                        // check whether messages should be sent for hits on this list
                        if (list.getIngestMessages()) {
                            // post ingest inbox msg
                            managerProxy.postMessage(
                                    IngestMessage.createDataMessage(
                                            ++messageID,
                                            instance,
                                            subjectSb.toString(),
                                            detailsSb.toString(),
                                            uniqueKey,
                                            written.getArtifact()));
                        }

                    } // for each file hit
                } // for each term hit

                // update artifact browser
                if (!newArtifacts.isEmpty()) {
                    IngestManager.fireServiceDataEvent(
                            new ServiceDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
                }
            }
            progress.progress(queryStr, ++numSearched);
        }

    } // end try block
    catch (Exception ex) {
        logger.log(Level.WARNING, "searcher exception occurred", ex);
    } finally {
        finalizeSearcher();
        searcherLock.unlock();
    }

    return null;
}
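// The de-duplication loop in doInBackground() relies on previouslyHit() to decide whether a
// ContentHit was already reported during this ingest run. The helper's body is not shown in
// this listing; the sketch below is only an assumption of what such a check could look like,
// comparing hits by the id of the content they belong to. The getId() accessor on ContentHit
// is assumed here and may differ from the module's actual implementation.
private static boolean previouslyHit(List<ContentHit> existingHits, ContentHit candidate) {
    for (ContentHit hit : existingHits) {
        // treat a hit on the same content object as already seen
        if (hit.getId() == candidate.getId()) {
            return true;
        }
    }
    return false;
}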