@Override public Set instantiate(ProgressHandle handle) throws IOException { handle.start(2); handle.progress( NbBundle.getMessage( JavaEESamplesWizardIterator.class, "LBL_NewSampleProjectWizardIterator_WizardProgress_CreatingProject"), 1); Set resultSet = new LinkedHashSet(); File dirF = FileUtil.normalizeFile((File) wiz.getProperty(WizardProperties.PROJ_DIR)); String name = (String) wiz.getProperty(WizardProperties.NAME); FileObject template = Templates.getTemplate(wiz); FileObject dir = null; if ("web".equals(template.getAttribute("prjType"))) { // Use generator from web.examples to create project with specified name dir = WebSampleProjectGenerator.createProjectFromTemplate(template, dirF, name); } else { // Unzip prepared project only (no way to change name of the project) // FIXME: should be modified to create projects with specified name (project.xml files in // sub-projects should be modified too) // FIXME: web.examples and j2ee.samples modules may be merged into one module createFolder(dirF); dir = FileUtil.toFileObject(dirF); unZipFile(template.getInputStream(), dir); WebSampleProjectGenerator.configureServer(dir); for (FileObject child : dir.getChildren()) { WebSampleProjectGenerator.configureServer(child); } } ProjectManager.getDefault().clearNonProjectCache(); handle.progress( NbBundle.getMessage( JavaEESamplesWizardIterator.class, "LBL_NewSampleProjectWizardIterator_WizardProgress_PreparingToOpen"), 2); // Always open top dir as a project: resultSet.add(dir); // Look for nested projects to open as well: Enumeration e = dir.getFolders(true); while (e.hasMoreElements()) { FileObject subfolder = (FileObject) e.nextElement(); if (ProjectManager.getDefault().isProject(subfolder)) { resultSet.add(subfolder); } } File parent = dirF.getParentFile(); if (parent != null && parent.exists()) { ProjectChooser.setProjectsFolder(parent); } handle.finish(); return resultSet; }
public Void visitDir(AbstractFile dir) { // don't extract . and .. directories if (isDotDirectory(dir)) { return null; } dest.mkdir(); try { int numProcessed = 0; // recurse on children for (Content child : dir.getChildren()) { java.io.File childFile = getFsContentDest(child); ExtractFscContentVisitor<T, V> childVisitor = new ExtractFscContentVisitor<>(childFile, progress, worker, false); // If this is the source directory of an extract it // will have a progress and worker, and will keep track // of the progress bar's progress if (worker != null && worker.isCancelled()) { break; } if (progress != null && source) { progress.progress(child.getName(), numProcessed); } child.accept(childVisitor); numProcessed++; } } catch (TskException ex) { logger.log(Level.SEVERE, "Trouble fetching children to extract.", ex); // NON-NLS } return null; }
/**
 * Advances to the next file in the combined build+run queue and processes it
 * with the handler registered for its execution level; calls {@code done()}
 * once both lists are exhausted.
 */
private void next() {
  index++;
  // Past the end of both file lists -> the whole sequence is complete.
  if (index >= (buildFiles.length + runFiles.length)) {
    done();
    return;
  }
  FileObject file;
  // ExecutionLevel level;
  if (index < buildFiles.length) {
    // Still inside the build phase.
    file = buildFiles[index];
    executedBuildFiles.add(file);
    level = ExecutionLevel.BUILD;
  } else {
    // Run phase: offset the index by the number of build files.
    file = runFiles[index - buildFiles.length];
    level = ExecutionLevel.RUN;
  }
  FileHandler handler = findHandler(level, file);
  String msg = FileUtil.getRelativePath(getProjectDirectory(), file) + " [" + level + "]";
  progress.progress(msg, index);
  try {
    // The handler continues the chain asynchronously via CallbackImpl.
    handler.process(new CallbackImpl(handler, file));
  } catch (Exception ex) {
    Exceptions.printStackTrace(ex);
    // On failure either skip to the next file or abort, per handler policy.
    if (continueOnError(handler, file, null)) {
      next();
    } else {
      done();
    }
  }
}
@Override public Spatial loadAsset() { ProgressHandle handle = ProgressHandleFactory.createHandle("Converting OgreBinary"); handle.start(); // mesh OgreXMLConvertOptions options = new OgreXMLConvertOptions(getPrimaryFile().getPath()); options.setBinaryFile(true); OgreXMLConvert conv = new OgreXMLConvert(); conv.doConvert(options, handle); // try skeleton if (getPrimaryFile().existsExt("skeleton")) { OgreXMLConvertOptions options2 = new OgreXMLConvertOptions( getPrimaryFile() .getParent() .getFileObject(getPrimaryFile().getName(), "skeleton") .getPath()); options2.setBinaryFile(true); OgreXMLConvert conv2 = new OgreXMLConvert(); conv2.doConvert(options2, handle); } handle.progress("Convert Model"); ProjectAssetManager mgr = getLookup().lookup(ProjectAssetManager.class); if (mgr == null) { DialogDisplayer.getDefault() .notifyLater( new NotifyDescriptor.Message( "File is not part of a project!\nCannot load without ProjectAssetManager.")); return null; } String assetKey = mgr.getRelativeAssetPath(options.getDestFile()); FileLock lock = null; try { lock = getPrimaryFile().lock(); listListener.start(); Spatial spatial = mgr.loadModel(assetKey); // replace transient xml files in list of assets for this model replaceXmlFiles(); listListener.stop(); savable = spatial; lock.releaseLock(); File deleteFile = new File(options.getDestFile()); deleteFile.delete(); handle.finish(); return spatial; } catch (IOException ex) { Exceptions.printStackTrace(ex); if (lock != null) { lock.releaseLock(); } } File deleteFile = new File(options.getDestFile()); deleteFile.delete(); handle.finish(); return null; }
/** * Reads all the data from any content object and writes (extracts) it to a file. * * @param content Any content object. * @param outputFile Will be created if it doesn't exist, and overwritten if it does * @param progress progress bar handle to update, if available. null otherwise * @param worker the swing worker background thread the process runs within, or null, if in the * main thread, used to handle task cancellation * @param source true if source file * @return number of bytes extracted * @throws IOException if file could not be written */ public static <T> long writeToFile( Content content, java.io.File outputFile, ProgressHandle progress, Future<T> worker, boolean source) throws IOException { InputStream in = new ReadContentInputStream(content); // Get the unit size for a progress bar int unit = (int) (content.getSize() / 100); long totalRead = 0; try (FileOutputStream out = new FileOutputStream(outputFile, false)) { byte[] buffer = new byte[TO_FILE_BUFFER_SIZE]; int len = in.read(buffer); while (len != -1) { // If there is a worker, check for a cancelation if (worker != null && worker.isCancelled()) { break; } out.write(buffer, 0, len); len = in.read(buffer); totalRead += len; // If there is a progress bar and this is the source file, // report any progress if (progress != null && source && totalRead >= TO_FILE_BUFFER_SIZE) { int totalProgress = (int) (totalRead / unit); progress.progress(content.getName(), totalProgress); // If it's not the source, just update the file being processed } else if (progress != null && !source) { progress.progress(content.getName()); } } } finally { in.close(); } return totalRead; }
/**
 * Fetches the Gradle models for the given project and parses them into a
 * {@code Result}. A build-script evaluation failure or unexpected fetch error
 * is surfaced as a {@link GradleModelLoadError}.
 */
@Override
public Result loadModels(
    NbGradleProject project, ProjectConnection connection, ProgressHandle progress)
    throws IOException, GradleModelLoadError {
  ProjectModelFetcher fetcher = new ProjectModelFetcher(project, gradleTarget);
  FetchedModelsOrError modelsOrError = fetcher.getModels(connection, setup);
  FetchedModels models = modelsOrError.getModels();
  if (models == null) {
    // No models at all: report whichever error the fetch recorded.
    throw new GradleModelLoadError(
        project,
        modelsOrError.getBuildScriptEvaluationError(),
        modelsOrError.getUnexpectedError());
  }
  progress.progress(NbStrings.getParsingModel());
  return new ProjectModelParser(project, fetcher).parseModel(models);
}
/**
 * Sets a message to be displayed in the progress GUI.
 *
 * <p>Synchronized so concurrent callers cannot interleave updates on the
 * shared progress handle.
 */
synchronized void setMessage(final String msg) {
  progressHandle.progress(msg);
}
/**
 * Generates the client-side Javon proxy class for the given service target by
 * evaluating a FreeMarker template with bindings computed from the mapping.
 *
 * @param ph progress handle updated with a localized status message
 * @param target name of the service mapping to generate a client for
 * @return {@code true} on success (or when no service mapping exists for the
 *     target, which is treated as nothing-to-do); {@code false} when any step
 *     of the generation fails
 */
public boolean generateTarget(ProgressHandle ph, String target) {
  if (mapping.getServiceMapping(target) != null) {
    String msg = NbBundle.getMessage(ClientJavonTemplate.class, "MSG_Client"); // NOI18N
    ph.progress(msg);
    OutputLogger.getInstance().log(msg);
    mapping.setProperty("target", "client");
    JavonMapping.Service service = mapping.getServiceMapping(target);
    // Resolve the output package directory and the target source file.
    FileObject outputDir =
        FileUtil.toFileObject(
            FileUtil.normalizeFile(new File(mapping.getClientMapping().getOutputDirectory())));
    outputDir =
        outputDir.getFileObject(mapping.getClientMapping().getPackageName().replace('.', '/'));
    FileObject outputFile =
        outputDir.getFileObject(mapping.getClientMapping().getClassName(), "java");
    if (outputFile == null) {
      // The target file does not exist yet - create it.
      OutputLogger.getInstance()
          .log(
              MessageFormat.format(
                  NbBundle.getMessage(ClientJavonTemplate.class, "MSG_ClientJavonCreation"),
                  mapping.getClientMapping().getClassName())); // NOI18N
      try {
        outputFile = outputDir.createData(mapping.getClientMapping().getClassName(), "java");
      } catch (IOException e) {
        // NOTE(review): outputFile stays null here; the OutputFileFormatter
        // construction below then likely fails - confirm this is intended.
        OutputLogger.getInstance()
            .log(
                LogLevel.ERROR,
                MessageFormat.format(
                    NbBundle.getMessage(ClientJavonTemplate.class, "MSG_FailClientJavonCreation"),
                    mapping.getClientMapping().getClassName())); // NOI18N
      }
    }
    OutputFileFormatter off = null;
    try {
      off = new OutputFileFormatter(outputFile);
    } catch (DataObjectNotFoundException e) {
      generationFailed(e, outputFile);
      return false;
    } catch (IOException e) {
      generationFailed(e, outputFile);
      return false;
    }
    // Set up the FreeMarker engine and expose the mapping data to the template.
    ScriptEngineManager mgr = new ScriptEngineManager();
    ScriptEngine eng = mgr.getEngineByName("freemarker");
    Bindings bind = eng.getContext().getBindings(ScriptContext.ENGINE_SCOPE);
    FileObject template = FileUtil.getConfigFile("Templates/Client/Client.java");
    OutputLogger.getInstance()
        .log(NbBundle.getMessage(ClientJavonTemplate.class, "MSG_ConfigureBindings")); // NOI18N
    Set<ClassData> returnTypes = service.getReturnTypes();
    Set<ClassData> parameterTypes = service.getParameterTypes();
    bind.put("mapping", mapping);
    bind.put("registry", mapping.getRegistry());
    bind.put("returnTypes", returnTypes);
    bind.put("parameterTypes", parameterTypes);
    bind.put("service", service);
    bind.put("utils", new Utils(mapping.getRegistry()));
    // Compute imports for JavaBeans
    Set<String> imports = new HashSet<String>();
    for (ClassData cd : parameterTypes) {
      // Unwrap array types down to their element type.
      while (cd.isArray()) {
        cd = cd.getComponentType();
      }
      // Primitives, java.lang and java.util.List need no explicit import.
      if (cd.isPrimitive()) continue;
      if (cd.getPackage().equals("java.lang")) continue;
      if (cd.getFullyQualifiedName().equals("java.util.List")) continue;
      imports.add(cd.getFullyQualifiedName());
    }
    for (ClassData cd : returnTypes) {
      while (cd.isArray()) {
        cd = cd.getComponentType();
      }
      if (cd.isPrimitive()) continue;
      if (cd.getPackage().equals("java.lang")) continue;
      if (cd.getFullyQualifiedName().equals("java.util.List")) continue;
      imports.add(cd.getFullyQualifiedName());
    }
    bind.put("imports", imports);
    OutputLogger.getInstance()
        .log(
            MessageFormat.format(
                NbBundle.getMessage(ClientBeanGeneratorTemplate.class, "MSG_GenerateJavonClient"),
                FileUtil.toFile(outputFile))); // NOI18N
    Writer w = null;
    Reader is = null;
    try {
      try {
        // Render the template into a string first, then push it through the
        // formatter so the generated source is consistently formatted.
        w = new StringWriter();
        is = new InputStreamReader(template.getInputStream());
        eng.getContext().setWriter(w);
        eng.getContext()
            .setAttribute(FileObject.class.getName(), template, ScriptContext.ENGINE_SCOPE);
        eng.getContext()
            .setAttribute(
                ScriptEngine.FILENAME, template.getNameExt(), ScriptContext.ENGINE_SCOPE);
        eng.eval(is);
      } catch (FileNotFoundException e) {
        OutputLogger.getInstance().log(e);
        ErrorManager.getDefault().notify(e);
        return false;
      } catch (ScriptException e) {
        OutputLogger.getInstance().log(e);
        ErrorManager.getDefault().notify(e);
        return false;
      } finally {
        // Flush whatever was rendered and release all I/O resources.
        if (w != null) {
          off.write(w.toString());
          // System.err.println( "" + w.toString());
          w.close();
        }
        if (is != null) is.close();
        off.close();
      }
    } catch (IOException e) {
      generationFailed(e, FileUtil.toFile(outputFile));
      return false;
    }
    OutputLogger.getInstance()
        .log(
            MessageFormat.format(
                NbBundle.getMessage(ClientJavonTemplate.class, "MSG_ClientGenerated"),
                FileUtil.toFile(outputFile)));
  }
  return true;
}
/**
 * Executes given JPQL query and returns the result.
 *
 * @param jpql the query
 * @param pu persistence unit used to build the {@code EntityManagerFactory}
 * @param pe persistence environment
 * @param maxRowCount maximum number of rows fetched for SELECT-style queries
 * @param ph progress handle, bumped as execution proceeds
 * @return JPQLResult containing the execution result (including any errors).
 */
public JPQLResult execute(
    String jpql,
    PersistenceUnit pu,
    PersistenceEnvironment pe,
    int maxRowCount,
    ProgressHandle ph) {
  JPQLResult result = new JPQLResult();
  try {
    ph.progress(60);
    // Load the JPA entry point through the context class loader so the
    // project's own provider (not the IDE's copy) is used.
    Class pClass =
        Thread.currentThread().getContextClassLoader().loadClass("javax.persistence.Persistence");
    javax.persistence.Persistence p = (javax.persistence.Persistence) pClass.newInstance();
    EntityManagerFactory emf = p.createEntityManagerFactory(pu.getName());
    EntityManager em = emf.createEntityManager();
    Query query = em.createQuery(jpql);
    //
    Provider provider = ProviderUtil.getProvider(pu);
    String queryStr = null;
    if (provider.equals(ProviderUtil.ECLIPSELINK_PROVIDER)) { // NOI18N
      // Pull the generated SQL out of EclipseLink's query implementation via
      // reflection, avoiding a compile-time dependency on EclipseLink.
      Class qClass = Thread.currentThread().getContextClassLoader().loadClass(ECLIPSELINK_QUERY);
      if (qClass != null) {
        Method method = qClass.getMethod(ECLIPSELINK_QUERY_SQL0);
        if (method != null) {
          Object dqOject = method.invoke(query);
          Method method2 =
              (dqOject != null ? dqOject.getClass().getMethod(ECLIPSELINK_QUERY_SQL1) : null);
          if (method2 != null) {
            queryStr = (String) method2.invoke(dqOject);
          }
        }
      }
    }
    //        else if (provider.equals(ProviderUtil.HIBERNATE_PROVIDER2_0)){//NOI18N
    //            Class qClass =
    //                Thread.currentThread().getContextClassLoader().loadClass(HIBERNATE_QUERY);
    //            if(qClass !=null) {
    //                Method method = qClass.getMethod(HIBERNATE_QUERY_SQL);
    //                if(method != null){
    //                    queryStr = (String) method.invoke(query);
    //                }
    //            }
    //        } else if (provider.getProviderClass().contains("openjpa")){//NOI18N
    //            Class qClass =
    //                Thread.currentThread().getContextClassLoader().loadClass(OPENJPA_QUERY);
    //            if(qClass !=null) {
    //                Method method = qClass.getMethod(OPENJPA_QUERY_SQL);
    //                if(method != null){
    //                    queryStr = (String) method.invoke(query);
    //                }
    //            }
    //        }
    result.setSqlQuery(queryStr);
    //
    ph.progress(70);
    query.setMaxResults(maxRowCount);
    jpql = jpql.trim();
    // Uppercase only for the statement-type check below; the Query object was
    // already built from the original text above.
    jpql = jpql.toUpperCase();
    if (jpql.startsWith("UPDATE") || jpql.startsWith("DELETE")) { // NOI18N
      result.setUpdateOrDeleteResult(query.executeUpdate());
    } else {
      result.setQueryResults(query.getResultList());
    }
  } catch (Exception e) {
    // All failures (class loading, reflection, query execution) are collected
    // on the result rather than propagated to the caller.
    result.getExceptions().add(e);
  }
  return result;
}
/**
 * Background worker loop: drains the file-ingest scheduler queue and runs
 * every enqueued file through each file-level ingest module, updating a
 * cancellable progress bar as it goes.
 */
@Override
protected Object doInBackground() throws Exception {
  logger.log(Level.INFO, "Starting background ingest file processor");
  logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
  stats.start();
  // notify main thread modules started
  for (IngestModuleAbstractFile s : abstractFileModules) {
    IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), s.getName());
  }
  final String displayName = "File Ingest";
  progress =
      ProgressHandleFactory.createHandle(
          displayName,
          new Cancellable() {
            @Override
            public boolean cancel() {
              // User-initiated cancel: rename the bar and cancel this worker.
              logger.log(Level.INFO, "Filed ingest cancelled by user.");
              if (progress != null) {
                progress.setDisplayName(displayName + " (Cancelling...)");
              }
              return IngestAbstractFileProcessor.this.cancel(true);
            }
          });
  final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();
  // initialize the progress bar
  progress.start();
  progress.switchToIndeterminate();
  // set initial totals and processed (to be updated as we process or new files are scheduled)
  int totalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();
  progress.switchToDeterminate(totalEnqueuedFiles);
  int processedFiles = 0;
  // process AbstractFiles queue
  while (fileScheduler.hasNext()) {
    final ProcessTask fileTask = fileScheduler.next();
    final PipelineContext<IngestModuleAbstractFile> filepipelineContext = fileTask.context;
    final ScheduledTask<IngestModuleAbstractFile> fileIngestTask =
        filepipelineContext.getScheduledTask();
    final AbstractFile fileToProcess = fileTask.file;
    // clear return values from modules for last file
    synchronized (abstractFileModulesRetValues) {
      abstractFileModulesRetValues.clear();
    }
    // logger.log(Level.INFO, "IngestManager: Processing: {0}", fileToProcess.getName());
    for (IngestModuleAbstractFile module : fileIngestTask.getModules()) {
      // process the file with every file module
      if (isCancelled()) {
        logger.log(Level.INFO, "Terminating file ingest due to cancellation.");
        return null;
      }
      progress.progress(fileToProcess.getName() + " (" + module.getName() + ")", processedFiles);
      try {
        stats.logFileModuleStartProcess(module);
        IngestModuleAbstractFile.ProcessResult result =
            module.process(filepipelineContext, fileToProcess);
        stats.logFileModuleEndProcess(module);
        // store the result for subsequent modules for this file
        synchronized (abstractFileModulesRetValues) {
          abstractFileModulesRetValues.put(module.getName(), result);
        }
      } catch (Exception e) {
        // A single module failure must not abort the remaining modules/files.
        logger.log(
            Level.SEVERE, "Error: unexpected exception from module: " + module.getName(), e);
        stats.addError(module);
      } catch (OutOfMemoryError e) {
        logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e);
        stats.addError(module);
      }
    } // end for every module
    // free the internal file resource after done with every module
    fileToProcess.close();
    int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();
    if (newTotalEnqueuedFiles > totalEnqueuedFiles) {
      // update if new enqueued
      totalEnqueuedFiles = newTotalEnqueuedFiles + 1; // + processedFiles + 1;
      // processedFiles = 0; // reset
      progress.switchToIndeterminate();
      progress.switchToDeterminate(totalEnqueuedFiles);
    }
    if (processedFiles < totalEnqueuedFiles) {
      // fix for now to handle the same datasource Content enqueued
      // twice
      ++processedFiles;
    }
    // --totalEnqueuedFiles;
  } // end of for every AbstractFile
  logger.log(Level.INFO, "IngestManager: Finished processing files");
  return null;
}
/**
 * Enqueues every input data source with the selected ingest modules: each
 * data-source-level module gets a fresh instance per input, while file-level
 * modules are shared singletons; both lists are then handed to their
 * respective schedulers.
 */
private void queueAll(List<IngestModuleAbstract> modules, final List<Content> inputs) {
  int processed = 0;
  for (Content input : inputs) {
    final String inputName = input.getName();
    final List<IngestModuleDataSource> dataSourceMods = new ArrayList<IngestModuleDataSource>();
    final List<IngestModuleAbstractFile> fileMods = new ArrayList<IngestModuleAbstractFile>();
    for (IngestModuleAbstract module : modules) {
      if (isCancelled()) {
        logger.log(Level.INFO, "Terminating ingest queueing due to cancellation.");
        return;
      }
      final String moduleName = module.getName();
      progress.progress(moduleName + " " + inputName, processed);
      switch (module.getType()) {
        case DataSource:
          // Data-source modules are stateful: instantiate one per input.
          final IngestModuleDataSource newModuleInstance =
              (IngestModuleDataSource) moduleLoader.getNewIngestModuleInstance(module);
          if (newModuleInstance != null) {
            dataSourceMods.add(newModuleInstance);
          } else {
            logger.log(
                Level.INFO,
                "Error loading module and adding input "
                    + inputName
                    + " with module "
                    + module.getName());
          }
          break;
        case AbstractFile:
          // enqueue the same singleton AbstractFile module
          logger.log(
              Level.INFO,
              "Adding input " + inputName + " for AbstractFileModule " + module.getName());
          fileMods.add((IngestModuleAbstractFile) module);
          break;
        default:
          logger.log(Level.SEVERE, "Unexpected module type: " + module.getType().name());
      }
    } // for modules
    // queue to schedulers
    // queue to datasource-level ingest pipeline(s)
    final boolean processUnalloc = getProcessUnallocSpace();
    final ScheduledTask<IngestModuleDataSource> dataSourceTask =
        new ScheduledTask<IngestModuleDataSource>(input, dataSourceMods);
    final PipelineContext<IngestModuleDataSource> dataSourcePipelineContext =
        new PipelineContext<IngestModuleDataSource>(dataSourceTask, processUnalloc);
    logger.log(Level.INFO, "Queing data source ingest task: " + dataSourceTask);
    progress.progress("DataSource Ingest" + " " + inputName, processed);
    final IngestScheduler.DataSourceScheduler dataSourceScheduler =
        scheduler.getDataSourceScheduler();
    dataSourceScheduler.schedule(dataSourcePipelineContext);
    progress.progress("DataSource Ingest" + " " + inputName, ++processed);
    // queue to file-level ingest pipeline
    final ScheduledTask<IngestModuleAbstractFile> fTask = new ScheduledTask(input, fileMods);
    final PipelineContext<IngestModuleAbstractFile> filepipelineContext =
        new PipelineContext<IngestModuleAbstractFile>(fTask, processUnalloc);
    logger.log(Level.INFO, "Queing file ingest task: " + fTask);
    progress.progress("File Ingest" + " " + inputName, processed);
    final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();
    fileScheduler.schedule(filepipelineContext);
    progress.progress("File Ingest" + " " + inputName, ++processed);
  } // for data sources
  // logger.log(Level.INFO, AbstractFileQueue.printQueue());
}
/**
 * Runs one keyword-search pass: executes every configured keyword query,
 * diffs the hits against results already reported in this ingest run, writes
 * only the new hits to the blackboard and posts inbox messages for lists that
 * request them. Serialized against other searchers via {@code searcherLock}.
 */
@Override
protected Object doInBackground() throws Exception {
  logger.log(Level.INFO, "Pending start of new searcher");
  final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : "");
  progress =
      ProgressHandleFactory.createHandle(
          displayName + (" (Pending)"),
          new Cancellable() {
            @Override
            public boolean cancel() {
              logger.log(Level.INFO, "Cancelling the searcher by user.");
              if (progress != null) {
                progress.setDisplayName(displayName + " (Cancelling...)");
              }
              return Searcher.this.cancel(true);
            }
          });
  progress.start();
  progress.switchToIndeterminate();
  // block to ensure previous searcher is completely done with doInBackground()
  // even after previous searcher cancellation, we need to check this
  searcherLock.lock();
  try {
    logger.log(Level.INFO, "Started a new searcher");
    progress.setDisplayName(displayName);
    // make sure other searchers are not spawned
    searcherDone = false;
    runSearcher = false;
    if (searchTimer.isRunning()) {
      searchTimer.stop();
    }
    int numSearched = 0;
    updateKeywords();
    progress.switchToDeterminate(keywords.size());
    for (Keyword keywordQuery : keywords) {
      if (this.isCancelled()) {
        logger.log(
            Level.INFO,
            "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery());
        return null;
      }
      final String queryStr = keywordQuery.getQuery();
      final KeywordSearchList list = keywordToList.get(queryStr);
      final String listName = list.getName();
      // DEBUG
      // logger.log(Level.INFO, "Searching: " + queryStr);
      progress.progress(queryStr, numSearched);
      // Literal keywords go through Lucene (escaped); regexes use term queries.
      KeywordSearchQuery del = null;
      boolean isRegex = !keywordQuery.isLiteral();
      if (!isRegex) {
        del = new LuceneQuery(keywordQuery);
        del.escape();
      } else {
        del = new TermComponentQuery(keywordQuery);
      }
      Map<String, List<ContentHit>> queryResult = null;
      try {
        queryResult = del.performQuery();
      } catch (NoOpenCoreException ex) {
        logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
        // no reason to continue with next query if recovery failed
        // or wait for recovery to kick in and run again later
        // likely case has closed and threads are being interrupted
        return null;
      } catch (CancellationException e) {
        logger.log(
            Level.INFO,
            "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
        return null;
      } catch (Exception e) {
        logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
        continue;
      }
      // calculate new results but substracting results already obtained in this run
      Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();
      for (String termResult : queryResult.keySet()) {
        List<ContentHit> queryTermResults = queryResult.get(termResult);
        Keyword termResultK = new Keyword(termResult, !isRegex);
        List<ContentHit> curTermResults = currentResults.get(termResultK);
        if (curTermResults == null) {
          // First hit for this term in this run: everything is new.
          currentResults.put(termResultK, queryTermResults);
          newResults.put(termResultK, queryTermResults);
        } else {
          // some AbstractFile hits already exist for this keyword
          for (ContentHit res : queryTermResults) {
            if (!previouslyHit(curTermResults, res)) {
              // add to new results
              List<ContentHit> newResultsFs = newResults.get(termResultK);
              if (newResultsFs == null) {
                newResultsFs = new ArrayList<ContentHit>();
                newResults.put(termResultK, newResultsFs);
              }
              newResultsFs.add(res);
              curTermResults.add(res);
            }
          }
        }
      }
      if (!newResults.isEmpty()) {
        // write results to BB
        // new artifacts created, to report to listeners
        Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();
        for (final Keyword hitTerm : newResults.keySet()) {
          List<ContentHit> contentHitsAll = newResults.get(hitTerm);
          Map<AbstractFile, Integer> contentHitsFlattened =
              ContentHit.flattenResults(contentHitsAll);
          for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
            String snippet = null;
            final String snippetQuery =
                KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery(), true, false);
            int chunkId = contentHitsFlattened.get(hitFile);
            try {
              snippet =
                  LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true);
            } catch (NoOpenCoreException e) {
              logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
              // no reason to continue
              return null;
            } catch (Exception e) {
              logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
              continue;
            }
            KeywordWriteResult written =
                del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
            if (written == null) {
              logger.log(
                  Level.WARNING,
                  "BB artifact for keyword hit not written, file: "
                      + hitFile
                      + ", hit: "
                      + hitTerm.toString());
              continue;
            }
            newArtifacts.add(written.getArtifact());
            // generate a data message for each artifact
            StringBuilder subjectSb = new StringBuilder();
            StringBuilder detailsSb = new StringBuilder();
            // final int hitFiles = newResults.size();
            if (!keywordQuery.isLiteral()) {
              subjectSb.append("RegExp hit: ");
            } else {
              subjectSb.append("Keyword hit: ");
            }
            // subjectSb.append("<");
            String uniqueKey = null;
            BlackboardAttribute attr =
                written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
            if (attr != null) {
              final String keyword = attr.getValueString();
              subjectSb.append(keyword);
              uniqueKey = keyword.toLowerCase();
            }
            // subjectSb.append(">");
            // String uniqueKey = queryStr;
            // details
            detailsSb.append("<table border='0' cellpadding='4' width='280'>");
            // hit
            detailsSb.append("<tr>");
            detailsSb.append("<th>Keyword hit</th>");
            detailsSb
                .append("<td>")
                .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                .append("</td>");
            detailsSb.append("</tr>");
            // preview
            attr =
                written.getAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
            if (attr != null) {
              detailsSb.append("<tr>");
              detailsSb.append("<th>Preview</th>");
              detailsSb
                  .append("<td>")
                  .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                  .append("</td>");
              detailsSb.append("</tr>");
            }
            // file
            detailsSb.append("<tr>");
            detailsSb.append("<th>File</th>");
            if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) {
              detailsSb
                  .append("<td>")
                  .append(((FsContent) hitFile).getParentPath())
                  .append(hitFile.getName())
                  .append("</td>");
            } else {
              detailsSb.append("<td>").append(hitFile.getName()).append("</td>");
            }
            detailsSb.append("</tr>");
            // list
            attr =
                written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
            detailsSb.append("<tr>");
            detailsSb.append("<th>List</th>");
            detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
            detailsSb.append("</tr>");
            // regex
            if (!keywordQuery.isLiteral()) {
              attr =
                  written.getAttribute(
                      BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
              if (attr != null) {
                detailsSb.append("<tr>");
                detailsSb.append("<th>RegEx</th>");
                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                detailsSb.append("</tr>");
              }
            }
            detailsSb.append("</table>");
            // check if should send messages on hits on this list
            if (list.getIngestMessages()) // post ingest inbox msg
            {
              managerProxy.postMessage(
                  IngestMessage.createDataMessage(
                      ++messageID,
                      instance,
                      subjectSb.toString(),
                      detailsSb.toString(),
                      uniqueKey,
                      written.getArtifact()));
            }
          } // for each term hit
        } // for each file hit
        // update artifact browser
        if (!newArtifacts.isEmpty()) {
          IngestManager.fireServiceDataEvent(
              new ServiceDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
        }
      }
      progress.progress(queryStr, ++numSearched);
    }
  } // end try block
  catch (Exception ex) {
    logger.log(Level.WARNING, "searcher exception occurred", ex);
  } finally {
    // Always release the lock and clean up so the next searcher can run.
    finalizeSearcher();
    searcherLock.unlock();
  }
  return null;
}
/**
 * Generates the Javon bridge (proxy stubs and supporting Java files) for the given E2E data
 * object, reporting progress through a {@code ProgressHandle} and the {@code OutputLogger}.
 *
 * <p>Failure notifications before generation starts are shown via dialog windows; from the
 * progress handle onward, failures are reported via {@code StatusDisplayer} and the output log.
 *
 * <p>NOTE(review): this method currently always returns {@code null} — the code that built and
 * returned a {@code ServiceGeneratorResult} is commented out at the bottom. Confirm that no
 * caller relies on a non-null result before removing the dead code.
 *
 * @param dataObject the E2E data object describing the connection to generate for
 * @return always {@code null} in the current implementation (both on failure and on success)
 */
public static synchronized ServiceGeneratorResult generate(final E2EDataObject dataObject) {
  // Server project not opened yet: tell the user, then try to open it ourselves.
  if (dataObject.getServerProject() == null) {
    final NotifyDescriptor.Message dd =
        new NotifyDescriptor.Message(
            NbBundle.getMessage(
                E2EDataObject.class,
                "ERR_ServerProjectNotOpened", // NOI18N
                dataObject.getConfiguration().getServerConfigutation().getProjectName()));
    DialogDisplayer.getDefault().notify(dd);
    if (Util.openProject(dataObject.getConfiguration().getServerConfigutation().getProjectPath())
        == null) {
      // It is OK don't notify user here. All notifications
      // are already inside <code>openProject</code> method
      return null;
    }
  }

  // Call save before generate so the generator sees the latest on-disk state.
  final SaveCookie saveCookie = dataObject.getCookie(SaveCookie.class);
  if (saveCookie != null) {
    try {
      saveCookie.save();
    } catch (IOException ex) {
      ErrorManager.getDefault().notify(ex);
    }
  }

  // Get configuration; a null configuration means the config file is corrupted.
  final Configuration config = dataObject.getConfiguration();
  if (config == null) {
    final NotifyDescriptor.Message dd =
        new NotifyDescriptor.Message(
            NbBundle.getMessage(E2EDataObject.class, "ERR_ConfigurationFileCorrupted")); // NOI18N
    DialogDisplayer.getDefault().notify(dd);
    return null;
  }

  // WSDL-based services need the server project built first (wsimport output must exist).
  if (Configuration.WSDLCLASS_TYPE.equals(config.getServiceType())) {
    final FileObject fo =
        dataObject
            .getServerProject()
            .getProjectDirectory()
            .getFileObject("build/generated/wsimport/"); // NOI18N
    if (fo == null) {
      DialogDisplayer.getDefault()
          .notify(
              new NotifyDescriptor.Message(
                  NbBundle.getMessage(
                      ConnectionGenerator.class, "MSG_WebProjectNotBuilt"))); // NOI18N
      return null;
    }
  }

  /*
   * All failures notifications was done via dialog windows.
   * Starting now logging will be done via OutputLogger class.
   */
  // Clicking the progress component opens the output log.
  final ProgressHandle ph =
      ProgressHandleFactory.createHandle(
          NbBundle.getMessage(ConnectionGenerator.class, "MSG_GeneratingJavonBridge"), // NOI18N
          new AbstractAction() {
            public void actionPerformed(ActionEvent e) {
              OutputLogger.getInstance().open();
            }
          });
  ph.start();
  ph.switchToIndeterminate();

  String message = NbBundle.getMessage(ConnectionGenerator.class, "MSG_GeneratingProxyStubs"); // NOI18N
  ph.progress(message);
  OutputLogger.getInstance().log(message);

  // FIXME: check for proper type
  // config.getServices();
  //
  // For WSDL services, generate the proxy class first and record it in the configuration.
  if (Configuration.WSDLCLASS_TYPE.equals(config.getServiceType())) {
    ph.progress(NbBundle.getMessage(ConnectionGenerator.class, "MSG_GeneratingProxyStubs"));
    final ProxyGenerator pg = new ProxyGenerator(dataObject);
    final String className = pg.generate();
    if (className == null) {
      // Proxy generation failed; finish the handle here since the try/finally below
      // has not been entered yet.
      ph.finish();
      StatusDisplayer.getDefault()
          .setStatusText(NbBundle.getMessage(ConnectionGenerator.class, "MSG_Failure")); // NOI18N
      return null;
    }
    config.getServices().get(0).getData().get(0).setProxyClassType(className);
  }

  // JavonOutput[] outputs;
  // Type type = null;
  //
  // final InputOutput io = IOProvider.getDefault().getIO(
  // NbBundle.getMessage( ConnectionGenerator.class, "LBL_JavonTab" ) // NOI18N
  // , true);
  // final OutputWriter ow = io.getOut();
  try {
    String scanning =
        NbBundle.getMessage(ConnectionGenerator.class, "MSG_ScanningDataStructures"); // NOI18N
    ph.progress(scanning);
    OutputLogger.getInstance().log(scanning);

    final JavonMappingImpl mapping = dataObject.getMapping();
    // Tag the mapping with the service kind so downstream generation can branch on it.
    if (Configuration.WSDLCLASS_TYPE.equals(config.getServiceType())) {
      mapping.setProperty("serviceType", "WSDL");
    } else {
      mapping.setProperty("serviceType", "CLASS");
    }
    //
    // //ph.progress(70);
    String creating =
        NbBundle.getMessage(ConnectionGenerator.class, "MSG_CreatingJavaFiles"); // NOI18N
    ph.progress(creating);
    OutputLogger.getInstance().log(creating);
    //
    // Run the actual Javon generation; success/failure is reported to the status line
    // and the output log only (no dialogs from here on).
    Javon javon = new Javon(mapping);
    if (javon.generate(ph)) {
      StatusDisplayer.getDefault()
          .setStatusText(
              NbBundle.getMessage(ConnectionGenerator.class, "MSG_SuccessGenerated")); // NOI18N
      OutputLogger.getInstance()
          .log(NbBundle.getMessage(ConnectionGenerator.class, "TXT_GenerationSuccess")); // NOI18N
    } else {
      StatusDisplayer.getDefault()
          .setStatusText(NbBundle.getMessage(ConnectionGenerator.class, "MSG_Failure")); // NOI18N
      OutputLogger.getInstance()
          .log(
              NbBundle.getMessage(
                  ConnectionGenerator.class, "TXT_GenerationUnsuccess")); // NOI18N
    }
    // Streams.setOut(ow);
    // Streams.setErr(ow);
    // outputs = new Main().run( mapping, "" ); // NOI18N
    //
    // for( int j = 0; j < outputs.length; j++ ) {
    // final String list[] = outputs[j].getCreatedFiles();
    // for( int i = 0; i < list.length; i++ ) {
    // final File f = new File(list[i]);
    // final FileObject fo = FileUtil.toFileObject(FileUtil.normalizeFile(f));
    // fo.refresh();
    // JavaModel.getResource(fo);
    // }
    // }
    // //add servlet to container
    // Util.addServletToWebProject(dataObject.getServerProject(),
    // dataObject.getConfiguration().getServerConfigutation().getClassDescriptor().getType());
    //
    // final ClassDescriptor clientClassDescriptor =
    // dataObject.getConfiguration().getClientConfiguration().getClassDescriptor();
    // final Sources s = ProjectUtils.getSources(dataObject.getClientProject());
    // final SourceGroup sourceGroup = Util.getPreselectedGroup(
    // s.getSourceGroups( JavaProjectConstants.SOURCES_TYPE_JAVA ),
    // clientClassDescriptor.getLocation());
    // final FileObject srcDirectory = sourceGroup.getRootFolder();
    // final ClassPath cp = ClassPath.getClassPath(srcDirectory,ClassPath.SOURCE);
    // JavaModel.getJavaRepository().beginTrans(false);
    // try {
    // JavaModel.setClassPath(cp);
    // type = JavaModel.getDefaultExtent().getType().resolve(clientClassDescriptor.getType());
    // } catch (Exception e){
    // ErrorManager.getDefault().notify(e);
    // } finally {
    // JavaModel.getJavaRepository().endTrans();
    // }
  } finally {
    // Always finish the progress handle and close the log, even if generation threw.
    ph.finish();
    OutputLogger.getInstance().close();
  }
  // if (type != null){
  // //ow.println("Run / Redeploy Web Project to get changes reflected!");
  // StatusDisplayer.getDefault().setStatusText(NbBundle.getMessage(
  // ConnectionGenerator.class, "MSG_SuccessGenerated" )); // NOI18N
  // } else {
  // StatusDisplayer.getDefault().setStatusText(NbBundle.getMessage(
  // ConnectionGenerator.class, "MSG_Failure" )); // NOI18N
  // return null;
  // }
  // final JavaClass resultClass = (JavaClass)type;
  // final List<Feature> features = resultClass.getFeatures();
  // final List<Method> methods = new ArrayList<Method>();
  // for ( final Feature elem : features ) {
  // if (elem instanceof Method){
  // final Method m = (Method)elem;
  // if ("getGroupedResults".equals(m.getName())){ //NOI18N //not supported
  // continue;
  // }
  // if (m.getName().endsWith("Grouped")){ //NOI18N //not supported
  // continue;
  // }
  // if ( Modifier.isPublic(m.getModifiers()) ){
  // methods.add(m);
  // }
  // }
  // }
  // return new ServiceGeneratorResult(resultClass,
  // methods.toArray(new Method[methods.size()]),
  // Util.getServerURL(dataObject.getServerProject(),
  // dataObject.getConfiguration()));
  return null;
}
/** * Unpack the file to local folder and return a list of derived files * * @param pipelineContext current ingest context * @param archiveFile file to unpack * @return list of unpacked derived files */ void unpack(AbstractFile archiveFile) { String archiveFilePath; try { archiveFilePath = archiveFile.getUniquePath(); } catch (TskCoreException ex) { archiveFilePath = archiveFile.getParentPath() + archiveFile.getName(); } // check if already has derived files, skip try { if (archiveFile.hasChildren()) { // check if local unpacked dir exists if (new File(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)).exists()) { logger.log( Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: {0}", archiveFilePath); // NON-NLS return; } } } catch (TskCoreException e) { logger.log( Level.INFO, "Error checking if file already has been processed, skipping: {0}", archiveFilePath); // NON-NLS return; } List<AbstractFile> unpackedFiles = Collections.<AbstractFile>emptyList(); // recursion depth check for zip bomb final long archiveId = archiveFile.getId(); SevenZipExtractor.ArchiveDepthCountTree.Archive parentAr = archiveDepthCountTree.findArchive(archiveId); if (parentAr == null) { parentAr = archiveDepthCountTree.addArchive(null, archiveId); } else if (parentAr.getDepth() == MAX_DEPTH) { String msg = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb", archiveFile.getName()); String details = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb", parentAr.getDepth(), archiveFilePath); // MessageNotifyUtil.Notify.error(msg, details); services.postMessage( IngestMessage.createWarningMessage( EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); return; } boolean hasEncrypted = false; boolean fullEncryption = true; ISevenZipInArchive inArchive = null; SevenZipContentReadStream stream = null; 
final ProgressHandle progress = ProgressHandleFactory.createHandle( NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.moduleName")); int processedItems = 0; boolean progressStarted = false; try { stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile)); // for RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP archive // inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality. // All other archive formats are still opened using 7zip built-in auto-detect functionality. ArchiveFormat options = get7ZipOptions(archiveFile); inArchive = SevenZip.openInArchive(options, stream); int numItems = inArchive.getNumberOfItems(); logger.log( Level.INFO, "Count of items in archive: {0}: {1}", new Object[] {archiveFilePath, numItems}); // NON-NLS progress.start(numItems); progressStarted = true; final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); // setup the archive local root folder final String uniqueArchiveFileName = EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile); final String localRootAbsPath = getLocalRootAbsPath(uniqueArchiveFileName); final File localRoot = new File(localRootAbsPath); if (!localRoot.exists()) { try { localRoot.mkdirs(); } catch (SecurityException e) { logger.log( Level.SEVERE, "Error setting up output path for archive root: {0}", localRootAbsPath); // NON-NLS // bail return; } } // initialize tree hierarchy to keep track of unpacked file structure SevenZipExtractor.UnpackedTree unpackedTree = new SevenZipExtractor.UnpackedTree( moduleDirRelative + "/" + uniqueArchiveFileName, archiveFile); long freeDiskSpace = services.getFreeDiskSpace(); // unpack and process every item in archive int itemNumber = 0; for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) { String pathInArchive = item.getPath(); if (pathInArchive == null || pathInArchive.isEmpty()) { // some formats 
(.tar.gz) may not be handled correctly -- file in archive has no name/path // handle this for .tar.gz and tgz but assuming the child is tar, // otherwise, unpack using itemNumber as name // TODO this should really be signature based, not extension based String archName = archiveFile.getName(); int dotI = archName.lastIndexOf("."); String useName = null; if (dotI != -1) { String base = archName.substring(0, dotI); String ext = archName.substring(dotI); switch (ext) { case ".gz": // NON-NLS useName = base; break; case ".tgz": // NON-NLS useName = base + ".tar"; // NON-NLS break; } } if (useName == null) { pathInArchive = "/" + archName + "/" + Integer.toString(itemNumber); } else { pathInArchive = "/" + useName; } String msg = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg", archiveFilePath, pathInArchive); logger.log(Level.WARNING, msg); } ++itemNumber; logger.log(Level.INFO, "Extracted item path: {0}", pathInArchive); // NON-NLS // check if possible zip bomb if (isZipBombArchiveItemCheck(archiveFile, item)) { continue; // skip the item } // find this node in the hierarchy, create if needed SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode = unpackedTree.addNode(pathInArchive); String fileName = unpackedNode.getFileName(); // update progress bar progress.progress(archiveFile.getName() + ": " + fileName, processedItems); final boolean isEncrypted = item.isEncrypted(); final boolean isDir = item.isFolder(); if (isEncrypted) { logger.log( Level.WARNING, "Skipping encrypted file in archive: {0}", pathInArchive); // NON-NLS hasEncrypted = true; continue; } else { fullEncryption = false; } final Long size = item.getSize(); if (size == null) { // If the size property cannot be determined, out-of-disk-space // situations cannot be ascertained. // Hence skip this file. logger.log( Level.WARNING, "Size cannot be determined. 
Skipping file in archive: {0}", pathInArchive); // NON-NLS continue; } // check if unpacking this file will result in out of disk space // this is additional to zip bomb prevention mechanism if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) { // if known free space and file not empty long newDiskSpace = freeDiskSpace - size; if (newDiskSpace < MIN_FREE_DISK_SPACE) { String msg = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg", archiveFilePath, fileName); String details = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details"); // MessageNotifyUtil.Notify.error(msg, details); services.postMessage( IngestMessage.createErrorMessage( EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); logger.log( Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new Object[] {archiveFilePath, fileName}); // NON-NLS logger.log( Level.INFO, "Available disk space: {0}", new Object[] {freeDiskSpace}); // NON-NLS continue; // skip this file } else { // update est. 
disk space during this archive, so we don't need to poll for every file // extracted freeDiskSpace = newDiskSpace; } } final String uniqueExtractedName = uniqueArchiveFileName + File.separator + (item.getItemIndex() / 1000) + File.separator + item.getItemIndex() + new File(pathInArchive).getName(); // final String localRelPath = unpackDir + File.separator + localFileRelPath; final String localRelPath = moduleDirRelative + File.separator + uniqueExtractedName; final String localAbsPath = moduleDirAbsolute + File.separator + uniqueExtractedName; // create local dirs and empty files before extracted File localFile = new java.io.File(localAbsPath); // cannot rely on files in top-bottom order if (!localFile.exists()) { try { if (isDir) { localFile.mkdirs(); } else { localFile.getParentFile().mkdirs(); try { localFile.createNewFile(); } catch (IOException ex) { logger.log( Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), ex); // NON-NLS } } } catch (SecurityException e) { logger.log( Level.SEVERE, "Error setting up output path for unpacked file: {0}", pathInArchive); // NON-NLS // TODO consider bail out / msg to the user } } // skip the rest of this loop if we couldn't create the file if (localFile.exists() == false) { continue; } final Date createTime = item.getCreationTime(); final Date accessTime = item.getLastAccessTime(); final Date writeTime = item.getLastWriteTime(); final long createtime = createTime == null ? 0L : createTime.getTime() / 1000; final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000; final long accesstime = accessTime == null ? 
0L : accessTime.getTime() / 1000; // record derived data in unode, to be traversed later after unpacking the archive unpackedNode.addDerivedInfo( size, !isDir, 0L, createtime, accesstime, modtime, localRelPath); // unpack locally if a file if (!isDir) { SevenZipExtractor.UnpackStream unpackStream = null; try { unpackStream = new SevenZipExtractor.UnpackStream(localAbsPath); item.extractSlow(unpackStream); } catch (Exception e) { // could be something unexpected with this file, move on logger.log( Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); // NON-NLS } finally { if (unpackStream != null) { unpackStream.close(); } } } // update units for progress bar ++processedItems; } // add them to the DB. We wait until the end so that we have the metadata on all of the // intermediate nodes since the order is not guaranteed try { unpackedTree.addDerivedFilesToCase(); unpackedFiles = unpackedTree.getAllFileObjects(); // check if children are archives, update archive depth tracking for (AbstractFile unpackedFile : unpackedFiles) { if (isSevenZipExtractionSupported(unpackedFile)) { archiveDepthCountTree.addArchive(parentAr, unpackedFile.getId()); } } } catch (TskCoreException e) { logger.log( Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure"); // NON-NLS // TODO decide if anything to cleanup, for now bailing } } catch (SevenZipException ex) { logger.log(Level.SEVERE, "Error unpacking file: " + archiveFile, ex); // NON-NLS // inbox message // print a message if the file is allocated if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { String msg = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg", archiveFile.getName()); String details = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details", archiveFilePath, ex.getMessage()); services.postMessage( 
IngestMessage.createErrorMessage( EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } } finally { if (inArchive != null) { try { inArchive.close(); } catch (SevenZipException e) { logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); // NON-NLS } } if (stream != null) { try { stream.close(); } catch (IOException ex) { logger.log( Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); // NON-NLS } } // close progress bar if (progressStarted) { progress.finish(); } } // create artifact and send user message if (hasEncrypted) { String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; try { BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED); artifact.addAttribute( new BlackboardAttribute( BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType)); services.fireModuleDataEvent( new ModuleDataEvent( EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED)); } catch (TskCoreException ex) { logger.log( Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFilePath, ex); // NON-NLS } String msg = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg"); String details = NbBundle.getMessage( this.getClass(), "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details", archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName()); services.postMessage( IngestMessage.createWarningMessage( EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } // adding unpacked extracted derived files to the job after closing relevant resources. 
if (!unpackedFiles.isEmpty()) { // currently sending a single event for all new files services.fireModuleContentEvent(new ModuleContentEvent(archiveFile)); context.addFilesToJob(unpackedFiles); } }