/**
 * Initializes the service for a new ingest run. Sets up threads and timers, and retrieves
 * the settings and keyword lists to run on.
 *
 * @param managerProxy
 */
@Override
public void init(IngestManagerProxy managerProxy) {
    logger.log(Level.INFO, "init()");
    initialized = false;

    caseHandle = Case.getCurrentCase().getSleuthkitCase();
    this.managerProxy = managerProxy;

    Server solrServer = KeywordSearch.getServer();
    ingester = solrServer.getIngester();

    ingestStatus = new HashMap<Long, IngestStatus>();

    keywords = new ArrayList<Keyword>();
    keywordLists = new ArrayList<String>();
    keywordToList = new HashMap<String, KeywordSearchList>();

    initKeywords();

    if (keywords.isEmpty() || keywordLists.isEmpty()) {
        managerProxy.postMessage(IngestMessage.createWarningMessage(++messageID, instance,
                "No keywords in keyword list.",
                "Only indexing will be done and keyword search will be skipped (it can be executed again later as ingest or using the toolbar search feature)."));
    }

    processedFiles = false;
    finalSearcherDone = false;
    searcherDone = true; // make sure to start the initial currentSearcher

    // keeps track of all results in this run, so the same hits are not reported twice
    currentResults = new HashMap<Keyword, List<ContentHit>>();

    indexer = new Indexer();

    final int updateIntervalMs = managerProxy.getUpdateFrequency() * 60 * 1000;
    logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs);
    logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs);

    commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
    searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());

    initialized = true;

    commitTimer.start();
    searchTimer.start();

    managerProxy.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Started"));
}
/**
 * Performs the cleanup that needs to be done right AFTER doInBackground() returns, without
 * relying on the done() method, which is not guaranteed to run after the background thread
 * completes. REQUIRED: always call this method right before doInBackground() returns.
 */
private void finalizeSearcher() {
    logger.log(Level.INFO, "Searcher finalizing");
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            progress.finish();
        }
    });
    searcherDone = true; // next currentSearcher can start

    if (finalRun) {
        // this is the final searcher
        logger.log(Level.INFO, "The final searcher in this ingest is done.");
        finalSearcherDone = true;
        keywords.clear();
        keywordLists.clear();
        keywordToList.clear();
        // reset current results early to potentially garbage collect sooner
        currentResults = new HashMap<Keyword, List<ContentHit>>();

        managerProxy.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO,
                KeywordSearchIngestService.instance, "Completed"));
    } else {
        // start counting time for a new searcher to start,
        // unless the final searcher is pending
        if (finalSearcher == null) {
            searchTimer.start();
        }
    }
}
/** Posts an inbox message with a summary of indexed files. */
private void postIndexSummary() {
    int indexed = 0;
    int indexed_extr = 0;
    int skipped = 0;
    for (IngestStatus s : ingestStatus.values()) {
        switch (s) {
            case INGESTED:
                ++indexed;
                break;
            case EXTRACTED_INGESTED:
                ++indexed_extr;
                break;
            case SKIPPED:
                ++skipped;
                break;
            default:
                break;
        }
    }

    StringBuilder msg = new StringBuilder();
    msg.append("Indexed files: ").append(indexed).append("<br />Indexed strings: ").append(indexed_extr);
    msg.append("<br />Skipped files: ").append(skipped).append("<br />");
    String indexStats = msg.toString();
    logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats);
    managerProxy.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this,
            "Keyword Indexing Completed", indexStats));
}
/**
 * Posts a summary message to the ingest inbox once the last module instance for the given
 * ingest job reports in, then removes the job's artifact count from the shared map.
 *
 * @param ingestJobId the id of the ingest job for which to report the blackboard post count
 */
static synchronized void reportBlackboardPostCount(long ingestJobId) {
    Long refCount = refCounter.decrementAndGet(ingestJobId);
    if (refCount == 0) {
        Long filesCount = artifactCountsForIngestJobs.remove(ingestJobId);
        String msgText = String.format("Posted %d times to the blackboard", filesCount);
        IngestMessage message = IngestMessage.createMessage(
                IngestMessage.MessageType.INFO,
                SampleIngestModuleFactory.getModuleName(),
                msgText);
        IngestServices.getInstance().postMessage(message);
    }
}
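// For context, a minimal sketch of the increment side that reportBlackboardPostCount()
// relies on: each module instance registers with the shared reference counter when its
// ingest job starts, and bumps the shared artifact count as it posts. The helper name
// addToBlackboardPostCount and the use of context.getJobId() are assumptions for
// illustration; they are not shown in this excerpt.
//
// @Override
// public void startUp(IngestJobContext context) throws IngestModuleException {
//     refCounter.incrementAndGet(context.getJobId()); // pairs with decrementAndGet() above
// }
//
// static synchronized void addToBlackboardPostCount(long ingestJobId, long countToAdd) {
//     Long count = artifactCountsForIngestJobs.get(ingestJobId); // null on first post for this job
//     if (count == null) {
//         count = 0L;
//     }
//     artifactCountsForIngestJobs.put(ingestJobId, count + countToAdd);
// }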
/**
 * After all files are ingested, executes the final index commit and the final search, and
 * cleans up resources, threads, and timers.
 */
@Override
public void complete() {
    if (!initialized) {
        return;
    }

    // logger.log(Level.INFO, "complete()");
    commitTimer.stop();

    // handle the case where a previous search is running:
    // cancel it; it will re-run after the final commit
    // note: cancellation of the Searcher worker is graceful (between keywords)
    if (currentSearcher != null) {
        currentSearcher.cancel(false);
    }

    // cancel the searcher timer, ensuring an unwanted searcher does not start
    // before we start the final one
    if (searchTimer.isRunning()) {
        searchTimer.stop();
    }
    runSearcher = false;

    logger.log(Level.INFO, "Running final index commit and search");
    // final commit
    commit();

    postIndexSummary();

    // run one last search, as there are probably some new files committed
    if (keywords != null && !keywords.isEmpty() && processedFiles) {
        finalSearcher = new Searcher(keywords, true); // final searcher run
        finalSearcher.execute();
    } else {
        finalSearcherDone = true;
        managerProxy.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Completed"));
    }

    // postSummary();
}
SevenZipExtractor(IngestJobContext context, FileTypeDetector fileTypeDetector,
        String moduleDirRelative, String moduleDirAbsolute) throws IngestModuleException {
    if (!SevenZip.isInitializedSuccessfully() && (SevenZip.getLastInitializationException() == null)) {
        try {
            SevenZip.initSevenZipFromPlatformJAR();
            String platform = SevenZip.getUsedPlatform();
            logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: {0}", platform); // NON-NLS
        } catch (SevenZipNativeInitializationException e) {
            logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); // NON-NLS
            String msg = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.msg",
                    EmbeddedFileExtractorModuleFactory.getModuleName());
            String details = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errCantInitLib",
                    e.getMessage());
            services.postMessage(IngestMessage.createErrorMessage(
                    EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
            throw new IngestModuleException(e.getMessage());
        }
    }
    this.context = context;
    this.fileTypeDetector = fileTypeDetector;
    this.moduleDirRelative = moduleDirRelative;
    this.moduleDirAbsolute = moduleDirAbsolute;
    this.archiveDepthCountTree = new ArchiveDepthCountTree();
}
@Override
protected Object doInBackground() throws Exception {
    logger.log(Level.INFO, "Pending start of new searcher");

    final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : "");
    progress = ProgressHandleFactory.createHandle(displayName + " (Pending)",
            new Cancellable() {
                @Override
                public boolean cancel() {
                    logger.log(Level.INFO, "Cancelling the searcher at user request.");
                    if (progress != null) {
                        progress.setDisplayName(displayName + " (Cancelling...)");
                    }
                    return Searcher.this.cancel(true);
                }
            });

    progress.start();
    progress.switchToIndeterminate();

    // block to ensure the previous searcher is completely done with doInBackground();
    // even after previous searcher cancellation, we need to check this
    searcherLock.lock();
    try {
        logger.log(Level.INFO, "Started a new searcher");
        progress.setDisplayName(displayName);
        // make sure other searchers are not spawned
        searcherDone = false;
        runSearcher = false;
        if (searchTimer.isRunning()) {
            searchTimer.stop();
        }

        int numSearched = 0;

        updateKeywords();
        progress.switchToDeterminate(keywords.size());

        for (Keyword keywordQuery : keywords) {
            if (this.isCancelled()) {
                logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery());
                return null;
            }

            final String queryStr = keywordQuery.getQuery();
            final KeywordSearchList list = keywordToList.get(queryStr);
            final String listName = list.getName();

            // DEBUG
            // logger.log(Level.INFO, "Searching: " + queryStr);

            progress.progress(queryStr, numSearched);

            KeywordSearchQuery del = null;

            boolean isRegex = !keywordQuery.isLiteral();
            if (!isRegex) {
                del = new LuceneQuery(keywordQuery);
                del.escape();
            } else {
                del = new TermComponentQuery(keywordQuery);
            }

            Map<String, List<ContentHit>> queryResult = null;

            try {
                queryResult = del.performQuery();
            } catch (NoOpenCoreException ex) {
                logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
                // No reason to continue with the next query if recovery failed,
                // or to wait for recovery to kick in and run again later;
                // most likely the case has closed and threads are being interrupted.
                return null;
            } catch (CancellationException e) {
                logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
                return null;
            } catch (Exception e) {
                logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
                continue;
            }

            // calculate new results by subtracting results already obtained in this run
            Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();

            for (String termResult : queryResult.keySet()) {
                List<ContentHit> queryTermResults = queryResult.get(termResult);
                Keyword termResultK = new Keyword(termResult, !isRegex);
                List<ContentHit> curTermResults = currentResults.get(termResultK);
                if (curTermResults == null) {
                    currentResults.put(termResultK, queryTermResults);
                    newResults.put(termResultK, queryTermResults);
                } else {
                    // some AbstractFile hits already exist for this keyword
                    for (ContentHit res : queryTermResults) {
                        if (!previouslyHit(curTermResults, res)) {
                            // add to new results
                            List<ContentHit> newResultsFs = newResults.get(termResultK);
                            if (newResultsFs == null) {
                                newResultsFs = new ArrayList<ContentHit>();
                                newResults.put(termResultK, newResultsFs);
                            }
                            newResultsFs.add(res);
                            curTermResults.add(res);
                        }
                    }
                }
            }

            if (!newResults.isEmpty()) {

                // write results to the blackboard;
                // new artifacts created, to report to listeners
                Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();

                for (final Keyword hitTerm : newResults.keySet()) {
                    List<ContentHit> contentHitsAll = newResults.get(hitTerm);
                    Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(contentHitsAll);
                    for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
                        String snippet = null;
                        final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery(), true, false);
                        int chunkId = contentHitsFlattened.get(hitFile);
                        try {
                            snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true);
                        } catch (NoOpenCoreException e) {
                            logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                            // no reason to continue
                            return null;
                        } catch (Exception e) {
                            logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                            continue;
                        }

                        KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
                        if (written == null) {
                            logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
                            continue;
                        }

                        newArtifacts.add(written.getArtifact());

                        // generate a data message for each artifact
                        StringBuilder subjectSb = new StringBuilder();
                        StringBuilder detailsSb = new StringBuilder();
                        // final int hitFiles = newResults.size();

                        if (!keywordQuery.isLiteral()) {
                            subjectSb.append("RegExp hit: ");
                        } else {
                            subjectSb.append("Keyword hit: ");
                        }
                        // subjectSb.append("<");
                        String uniqueKey = null;
                        BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
                        if (attr != null) {
                            final String keyword = attr.getValueString();
                            subjectSb.append(keyword);
                            uniqueKey = keyword.toLowerCase();
                        }
                        // subjectSb.append(">");
                        // String uniqueKey = queryStr;

                        // details
                        detailsSb.append("<table border='0' cellpadding='4' width='280'>");
                        // hit
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>Keyword hit</th>");
                        detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>");
                        detailsSb.append("</tr>");

                        // preview
                        attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
                        if (attr != null) {
                            detailsSb.append("<tr>");
                            detailsSb.append("<th>Preview</th>");
                            detailsSb.append("<td>").append(StringEscapeUtils.escapeHtml(attr.getValueString())).append("</td>");
                            detailsSb.append("</tr>");
                        }

                        // file
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>File</th>");
                        if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) {
                            detailsSb.append("<td>").append(((FsContent) hitFile).getParentPath()).append(hitFile.getName()).append("</td>");
                        } else {
                            detailsSb.append("<td>").append(hitFile.getName()).append("</td>");
                        }
                        detailsSb.append("</tr>");

                        // list
                        attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
                        detailsSb.append("<tr>");
                        detailsSb.append("<th>List</th>");
                        detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                        detailsSb.append("</tr>");

                        // regex
                        if (!keywordQuery.isLiteral()) {
                            attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
                            if (attr != null) {
                                detailsSb.append("<tr>");
                                detailsSb.append("<th>RegEx</th>");
                                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                                detailsSb.append("</tr>");
                            }
                        }
                        detailsSb.append("</table>");

                        // check whether we should send messages for hits on this list
                        if (list.getIngestMessages()) {
                            // post ingest inbox msg
                            managerProxy.postMessage(IngestMessage.createDataMessage(++messageID, instance,
                                    subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
                        }
                    } // for each file hit
                } // for each term hit

                // update artifact browser
                if (!newArtifacts.isEmpty()) {
                    IngestManager.fireServiceDataEvent(new ServiceDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
                }
            }

            progress.progress(queryStr, ++numSearched);
        } // for each keyword
    } // end try block
    catch (Exception ex) {
        logger.log(Level.WARNING, "searcher exception occurred", ex);
    } finally {
        finalizeSearcher();
        searcherLock.unlock();
    }

    return null;
}
/**
 * Unpack the file to a local folder and add the derived files to the ingest job.
 *
 * @param archiveFile the archive file to unpack
 */
void unpack(AbstractFile archiveFile) {
    String archiveFilePath;
    try {
        archiveFilePath = archiveFile.getUniquePath();
    } catch (TskCoreException ex) {
        archiveFilePath = archiveFile.getParentPath() + archiveFile.getName();
    }

    // check if it already has derived files, and skip if so
    try {
        if (archiveFile.hasChildren()) {
            // check if the local unpacked dir exists
            if (new File(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)).exists()) {
                logger.log(Level.INFO, "File has already been processed, as it has children and a local unpacked file, skipping: {0}", archiveFilePath); // NON-NLS
                return;
            }
        }
    } catch (TskCoreException e) {
        logger.log(Level.INFO, "Error checking if file has already been processed, skipping: {0}", archiveFilePath); // NON-NLS
        return;
    }

    List<AbstractFile> unpackedFiles = Collections.<AbstractFile>emptyList();

    // recursion depth check for zip bomb
    final long archiveId = archiveFile.getId();
    SevenZipExtractor.ArchiveDepthCountTree.Archive parentAr = archiveDepthCountTree.findArchive(archiveId);
    if (parentAr == null) {
        parentAr = archiveDepthCountTree.addArchive(null, archiveId);
    } else if (parentAr.getDepth() == MAX_DEPTH) {
        String msg = NbBundle.getMessage(this.getClass(),
                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb",
                archiveFile.getName());
        String details = NbBundle.getMessage(this.getClass(),
                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb",
                parentAr.getDepth(), archiveFilePath);
        // MessageNotifyUtil.Notify.error(msg, details);
        services.postMessage(IngestMessage.createWarningMessage(
                EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
        return;
    }

    boolean hasEncrypted = false;
    boolean fullEncryption = true;

    ISevenZipInArchive inArchive = null;
    SevenZipContentReadStream stream = null;

    final ProgressHandle progress = ProgressHandleFactory.createHandle(
            NbBundle.getMessage(this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName"));
    int processedItems = 0;

    boolean progressStarted = false;
    try {
        stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile));

        // For RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP
        // archive inside a RAR archive, it will be opened incorrectly when using 7zip's
        // built-in auto-detect functionality. All other archive formats are still opened
        // using 7zip's built-in auto-detect functionality.
        ArchiveFormat options = get7ZipOptions(archiveFile);
        inArchive = SevenZip.openInArchive(options, stream);

        int numItems = inArchive.getNumberOfItems();
        logger.log(Level.INFO, "Count of items in archive: {0}: {1}",
                new Object[]{archiveFilePath, numItems}); // NON-NLS
        progress.start(numItems);
        progressStarted = true;

        final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface();

        // set up the archive local root folder
        final String uniqueArchiveFileName = EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile);
        final String localRootAbsPath = getLocalRootAbsPath(uniqueArchiveFileName);
        final File localRoot = new File(localRootAbsPath);
        if (!localRoot.exists()) {
            try {
                localRoot.mkdirs();
            } catch (SecurityException e) {
                logger.log(Level.SEVERE, "Error setting up output path for archive root: {0}", localRootAbsPath); // NON-NLS
                // bail
                return;
            }
        }

        // initialize the tree hierarchy to keep track of the unpacked file structure
        SevenZipExtractor.UnpackedTree unpackedTree = new SevenZipExtractor.UnpackedTree(
                moduleDirRelative + "/" + uniqueArchiveFileName, archiveFile);

        long freeDiskSpace = services.getFreeDiskSpace();

        // unpack and process every item in the archive
        int itemNumber = 0;
        for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) {
            String pathInArchive = item.getPath();

            if (pathInArchive == null || pathInArchive.isEmpty()) {
                // Some formats (.tar.gz) may not be handled correctly -- the file in the
                // archive has no name/path. Handle this for .tar.gz and .tgz, assuming the
                // child is tar; otherwise, unpack using itemNumber as the name. For example,
                // the unnamed item inside "logs.tar.gz" becomes "/logs.tar", and inside
                // "data.tgz" it becomes "/data.tar".
                // TODO this should really be signature based, not extension based
                String archName = archiveFile.getName();
                int dotI = archName.lastIndexOf(".");
                String useName = null;
                if (dotI != -1) {
                    String base = archName.substring(0, dotI);
                    String ext = archName.substring(dotI);
                    switch (ext) {
                        case ".gz": // NON-NLS
                            useName = base;
                            break;
                        case ".tgz": // NON-NLS
                            useName = base + ".tar"; // NON-NLS
                            break;
                    }
                }

                if (useName == null) {
                    pathInArchive = "/" + archName + "/" + Integer.toString(itemNumber);
                } else {
                    pathInArchive = "/" + useName;
                }

                String msg = NbBundle.getMessage(this.getClass(),
                        "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg",
                        archiveFilePath, pathInArchive);
                logger.log(Level.WARNING, msg);
            }
            ++itemNumber;
            logger.log(Level.INFO, "Extracted item path: {0}", pathInArchive); // NON-NLS

            // check if possible zip bomb
            if (isZipBombArchiveItemCheck(archiveFile, item)) {
                continue; // skip the item
            }

            // find this node in the hierarchy, create if needed
            SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode = unpackedTree.addNode(pathInArchive);

            String fileName = unpackedNode.getFileName();

            // update progress bar
            progress.progress(archiveFile.getName() + ": " + fileName, processedItems);

            final boolean isEncrypted = item.isEncrypted();
            final boolean isDir = item.isFolder();

            if (isEncrypted) {
                logger.log(Level.WARNING, "Skipping encrypted file in archive: {0}", pathInArchive); // NON-NLS
                hasEncrypted = true;
                continue;
            } else {
                fullEncryption = false;
            }

            final Long size = item.getSize();
            if (size == null) {
                // If the size property cannot be determined, out-of-disk-space
                // situations cannot be ascertained, so skip this file.
                logger.log(Level.WARNING, "Size cannot be determined. Skipping file in archive: {0}", pathInArchive); // NON-NLS
                continue;
            }

            // Check if unpacking this file will result in running out of disk space.
            // This is in addition to the zip bomb prevention mechanism.
            if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) {
                // if free space is known and the file is not empty
                long newDiskSpace = freeDiskSpace - size;
                if (newDiskSpace < MIN_FREE_DISK_SPACE) {
                    String msg = NbBundle.getMessage(this.getClass(),
                            "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg",
                            archiveFilePath, fileName);
                    String details = NbBundle.getMessage(this.getClass(),
                            "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
                    // MessageNotifyUtil.Notify.error(msg, details);
                    services.postMessage(IngestMessage.createErrorMessage(
                            EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
                    logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}",
                            new Object[]{archiveFilePath, fileName}); // NON-NLS
                    logger.log(Level.INFO, "Available disk space: {0}",
                            new Object[]{freeDiskSpace}); // NON-NLS
                    continue; // skip this file
                } else {
                    // Update the estimated free disk space for this archive,
                    // so we don't need to poll for every file extracted.
                    freeDiskSpace = newDiskSpace;
                }
            }

            final String uniqueExtractedName = uniqueArchiveFileName + File.separator
                    + (item.getItemIndex() / 1000) + File.separator
                    + item.getItemIndex() + new File(pathInArchive).getName();

            // final String localRelPath = unpackDir + File.separator + localFileRelPath;
            final String localRelPath = moduleDirRelative + File.separator + uniqueExtractedName;
            final String localAbsPath = moduleDirAbsolute + File.separator + uniqueExtractedName;

            // Create local dirs and empty files before extracting;
            // we cannot rely on files being in top-to-bottom order.
            File localFile = new java.io.File(localAbsPath);
            if (!localFile.exists()) {
                try {
                    if (isDir) {
                        localFile.mkdirs();
                    } else {
                        localFile.getParentFile().mkdirs();
                        try {
                            localFile.createNewFile();
                        } catch (IOException ex) {
                            logger.log(Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), ex); // NON-NLS
                        }
                    }
                } catch (SecurityException e) {
                    logger.log(Level.SEVERE, "Error setting up output path for unpacked file: {0}", pathInArchive); // NON-NLS
                    // TODO consider bailing out / sending a msg to the user
                }
            }

            // skip the rest of this loop if we couldn't create the file
            if (!localFile.exists()) {
                continue;
            }

            final Date createTime = item.getCreationTime();
            final Date accessTime = item.getLastAccessTime();
            final Date writeTime = item.getLastWriteTime();
            final long createtime = createTime == null ? 0L : createTime.getTime() / 1000;
            final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000;
            final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000;

            // record derived data in the node, to be traversed later after unpacking the archive
            unpackedNode.addDerivedInfo(size, !isDir, 0L, createtime, accesstime, modtime, localRelPath);

            // unpack locally if a file
            if (!isDir) {
                SevenZipExtractor.UnpackStream unpackStream = null;
                try {
                    unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath);
                    item.extractSlow(unpackStream);
                } catch (Exception e) {
                    // could be something unexpected with this file, move on
                    logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); // NON-NLS
                } finally {
                    if (unpackStream != null) {
                        unpackStream.close();
                    }
                }
            }

            // update units for progress bar
            ++processedItems;
        } // for each item in archive

        // Add the unpacked files to the DB.
        // We wait until the end so that we have the metadata on all of the
        // intermediate nodes, since the order is not guaranteed.
        try {
            unpackedTree.addDerivedFilesToCase();
            unpackedFiles = unpackedTree.getAllFileObjects();

            // check if children are archives, update archive depth tracking
            for (AbstractFile unpackedFile : unpackedFiles) {
                if (isSevenZipExtractionSupported(unpackedFile)) {
                    archiveDepthCountTree.addArchive(parentAr, unpackedFile.getId());
                }
            }
        } catch (TskCoreException e) {
            logger.log(Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure"); // NON-NLS
            // TODO decide if anything needs cleanup; for now bailing
        }

    } catch (SevenZipException ex) {
        logger.log(Level.SEVERE, "Error unpacking file: " + archiveFile, ex); // NON-NLS
        // inbox message

        // post a message if the file is allocated
        if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) {
            String msg = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg",
                    archiveFile.getName());
            String details = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
                    archiveFilePath, ex.getMessage());
            services.postMessage(IngestMessage.createErrorMessage(
                    EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
        }
    } finally {
        if (inArchive != null) {
            try {
                inArchive.close();
            } catch (SevenZipException e) {
                logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); // NON-NLS
            }
        }

        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ex) {
                logger.log(Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); // NON-NLS
            }
        }

        // close the progress bar
        if (progressStarted) {
            progress.finish();
        }
    }

    // create artifact and send user message
    if (hasEncrypted) {
        String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
        try {
            BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
            artifact.addAttribute(new BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(),
                    EmbeddedFileExtractorModuleFactory.getModuleName(),
                    encryptionType));
            services.fireModuleDataEvent(new ModuleDataEvent(
                    EmbeddedFileExtractorModuleFactory.getModuleName(),
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFilePath, ex); // NON-NLS
        }

        String msg = NbBundle.getMessage(this.getClass(),
                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
        String details = NbBundle.getMessage(this.getClass(),
                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
                archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName());
        services.postMessage(IngestMessage.createWarningMessage(
                EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
    }

    // Add the unpacked, extracted derived files to the job after closing the relevant resources.
    if (!unpackedFiles.isEmpty()) {
        // currently sending a single event for all new files
        services.fireModuleContentEvent(new ModuleContentEvent(archiveFile));
        context.addFilesToJob(unpackedFiles);
    }
}
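// A minimal usage sketch (the caller is assumed, not shown in this excerpt): the file-level
// ingest module would typically detect an archive and delegate to unpack(), using the same
// isSevenZipExtractionSupported() check that unpack() applies to the children it produces.
//
// if (isSevenZipExtractionSupported(abstractFile)) {
//     unpack(abstractFile);
// }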
/**
 * Check if an item inside an archive is a potential zip bomb.
 *
 * <p>Currently checks the compression ratio.
 *
 * <p>More heuristics may be added here.
 *
 * @param archiveFile the parent archive
 * @param archiveFileItem the archive item
 * @return true if a potential zip bomb, false otherwise
 */
private boolean isZipBombArchiveItemCheck(AbstractFile archiveFile, ISimpleInArchiveItem archiveFileItem) {
    try {
        final Long archiveItemSize = archiveFileItem.getSize();

        // skip the check for small files
        if (archiveItemSize == null || archiveItemSize < MIN_COMPRESSION_RATIO_SIZE) {
            return false;
        }

        final Long archiveItemPackedSize = archiveFileItem.getPackedSize();
        if (archiveItemPackedSize == null || archiveItemPackedSize <= 0) {
            logger.log(Level.WARNING, "Cannot get the compression ratio, cannot detect if zip bomb: {0}, item: {1}",
                    new Object[]{archiveFile.getName(), archiveFileItem.getPath()}); // NON-NLS
            return false;
        }

        int cRatio = (int) (archiveItemSize / archiveItemPackedSize);

        if (cRatio >= MAX_COMPRESSION_RATIO) {
            String itemName = archiveFileItem.getPath();
            logger.log(Level.INFO, "Possible zip bomb detected, compression ratio: {0} for archive item: {1}",
                    new Object[]{cRatio, itemName}); // NON-NLS
            String msg = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg",
                    archiveFile.getName(), itemName);
            String path;
            try {
                path = archiveFile.getUniquePath();
            } catch (TskCoreException ex) {
                path = archiveFile.getParentPath() + archiveFile.getName();
            }
            String details = NbBundle.getMessage(this.getClass(),
                    "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails",
                    cRatio, path);
            // MessageNotifyUtil.Notify.error(msg, details);
            services.postMessage(IngestMessage.createWarningMessage(
                    EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
            return true;
        } else {
            return false;
        }

    } catch (SevenZipException ex) {
        logger.log(Level.SEVERE, "Error getting archive item size; cannot detect if zip bomb.", ex); // NON-NLS
        return false;
    }
}
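// Worked example of the ratio heuristic above (illustrative numbers only; the actual
// MIN_COMPRESSION_RATIO_SIZE and MAX_COMPRESSION_RATIO thresholds are defined elsewhere
// in this class): an item reporting an unpacked size of 600,000,000 bytes with a packed
// size of 1,000,000 bytes yields cRatio = 600_000_000 / 1_000_000 = 600. If
// MAX_COMPRESSION_RATIO were, say, 600, that item would be flagged as a potential zip
// bomb, while a typical document compressing around 10:1 (cRatio = 10) would pass.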