@After
public void afterSqlEntitiyProcessorTestCase() throws Exception {
  useSimpleCaches = false;
  countryEntity = false;
  countryCached = false;
  countryZipper = false;
  sportsEntity = false;
  sportsCached = false;
  sportsZipper = false;
  wrongPeopleOrder = false;
  wrongSportsOrder = false;
  wrongCountryOrder = false;
  rootTransformerName = null;
  countryTransformer = false;
  sportsTransformer = false;
  underlyingDataModified = false;

  // If an Assume was tripped while setting up the test,
  // the file might not ever have been created...
  if (fileLocation != null) {
    Files.deleteIfExists(new File(fileLocation + File.separatorChar + fileName).toPath());
    Files.deleteIfExists(new File(fileLocation).toPath());
  }
}
public Element exec(Element params, ServiceContext context) throws Exception {
  Path harvestingLogoDirectory = Resources.locateHarvesterLogosDir(context);
  Path nodeLogoDirectory = Resources.locateLogosDir(context);

  String file = Util.getParam(params, Params.FNAME);
  String asFavicon = Util.getParam(params, Params.FAVICON, "0");

  if (file.contains("..")) {
    throw new BadParameterEx("Invalid character found in resource name.", file);
  }

  if ("".equals(file)) {
    throw new Exception("Logo name is not defined.");
  }

  SettingManager settingMan = context.getBean(SettingManager.class);
  String nodeUuid = settingMan.getSiteId();

  try {
    Path logoFilePath = harvestingLogoDirectory.resolve(file);
    if (!Files.exists(logoFilePath)) {
      logoFilePath = context.getAppPath().resolve("images/harvesting/" + file);
    }

    try (InputStream inputStream = Files.newInputStream(logoFilePath)) {
      BufferedImage source = ImageIO.read(inputStream);

      if ("1".equals(asFavicon)) {
        createFavicon(source, nodeLogoDirectory.resolve("favicon.png"));
      } else {
        Path logo = nodeLogoDirectory.resolve(nodeUuid + ".png");
        Path defaultLogo = nodeLogoDirectory.resolve("logo.png");

        if (!file.endsWith(".png")) {
          try (OutputStream logoOut = Files.newOutputStream(logo);
              OutputStream defLogoOut = Files.newOutputStream(defaultLogo)) {
            ImageIO.write(source, "png", logoOut);
            ImageIO.write(source, "png", defLogoOut);
          }
        } else {
          Files.deleteIfExists(logo);
          IO.copyDirectoryOrFile(logoFilePath, logo, false);
          Files.deleteIfExists(defaultLogo);
          IO.copyDirectoryOrFile(logoFilePath, defaultLogo, false);
        }
      }
    }
  } catch (Exception e) {
    throw new Exception(
        "Unable to move uploaded thumbnail to destination directory. Error: " + e.getMessage());
  }

  Element response = new Element("response");
  response.addContent(new Element("status").setText("Logo set."));
  return response;
}
/**
 * Method to save the selected Experiment, in xml and xls (if possible), in a temporary folder.
 * The files will be contained inside a zip.
 *
 * @return String with the path of the zip file.
 * @throws IOException if writing the temporary files or the zip fails.
 */
private String saveExpInTempFolder() throws IOException {
  String toRet = "";

  // Create temporary directory
  String filePath = Files.createTempDirectory("").toString() + "/";
  filePath = filePath.replace("\\", "/");

  // Get bioID
  String bioID = selExp.getBioID();
  if (bioID.isEmpty()) {
    bioID = "bio_";
  }

  DataToFile.saveXMLData(selExp, filePath + "xml");
  // Only save in XLS if the Experiment is an IntraExperiment
  if (!isInter) {
    DataToFile.saveXLSData(selExp, filePath + "xls");
  }

  // Generate ZIP file with Java 7
  Map<String, String> zipProperties = new HashMap<>();
  zipProperties.put("create", "true");
  zipProperties.put("encoding", "UTF-8");

  // Create zip file
  URI zipDisk;
  toRet = filePath + bioID + ".zip";
  if (toRet.startsWith("/")) {
    zipDisk = URI.create("jar:file:" + toRet);
  } else {
    zipDisk = URI.create("jar:file:/" + toRet);
  }

  // Adding files to zip
  try (FileSystem zipfs = FileSystems.newFileSystem(zipDisk, zipProperties)) {
    // Create file inside zip
    Path zipFilePath = zipfs.getPath(bioID + ".xml");
    // Path where the file to be added resides
    Path addNewFile = Paths.get(filePath + "xml.xml");
    // Append file to ZIP File
    Files.copy(addNewFile, zipFilePath);

    if (!isInter) {
      // Now go for the xls file
      zipFilePath = zipfs.getPath(bioID + ".xls");
      addNewFile = Paths.get(filePath + "xls.xls");
      Files.copy(addNewFile, zipFilePath);
    }
  }

  // Delete temp files
  Files.deleteIfExists(Paths.get(filePath + "xml.xml"));
  Files.deleteIfExists(Paths.get(filePath + "xls.xls"));

  return toRet;
}
private void deleteTestFiles() throws Exception {
  FileBasedEventStore eventStore = (FileBasedEventStore) appSensorServer.getEventStore();
  FileBasedAttackStore attackStore = (FileBasedAttackStore) appSensorServer.getAttackStore();
  FileBasedResponseStore responseStore = (FileBasedResponseStore) appSensorServer.getResponseStore();

  Files.deleteIfExists(eventStore.getPath());
  Files.deleteIfExists(attackStore.getPath());
  Files.deleteIfExists(responseStore.getPath());
}
@Override
public void close() {
  try {
    if (debug) {
      log.debug("Closing tlog" + this);
    }

    synchronized (this) {
      if (fos != null) {
        fos.flush();
        fos.close();

        // dereference these variables for GC
        fos = null;
        os = null;
        channel = null;
        raf = null;
      }
    }

    if (deleteOnClose) {
      try {
        Files.deleteIfExists(tlogFile.toPath());
      } catch (IOException e) {
        // TODO: should this class care if a file couldn't be deleted?
        // This just emulates previous behavior, where only SecurityException would be handled.
      }
    }
  } catch (IOException e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
  } finally {
    assert ObjectReleaseTracker.release(this);
  }
}
protected void close() {
  try {
    if (debug) {
      log.debug("Closing tlog" + this);
    }

    synchronized (this) {
      fos.flush();
      fos.close();
    }

    if (deleteOnClose) {
      try {
        Files.deleteIfExists(tlogFile.toPath());
      } catch (IOException e) {
        // TODO: should this class care if a file couldn't be deleted?
        // This just emulates previous behavior, where only SecurityException would be handled.
      }
    }
  } catch (IOException e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
  } finally {
    assert ObjectReleaseTracker.release(this);
  }
}
/**
 * Marshals the object to the given path that must represent a path to the file.
 *
 * <p>This method is capable of setting the schema version to the object being marshalled.
 *
 * @param path Path to file
 * @param object Object to marshal
 * @param noNamespaceSchemaLocation NoNamespaceSchemaLocation to set. If it's <code>null</code> no
 *     location will be set.
 * @param schemaVersion If schema version is set and object to marshall is instance of {@link
 *     ISchemaVersionAware} then the given schema version will be set to the object. Use <code>0
 *     </code> to ignore setting of schema version.
 * @throws JAXBException If {@link JAXBException} occurs.
 * @throws IOException If {@link IOException} occurs.
 */
public void marshall(
    Path path, Object object, String noNamespaceSchemaLocation, int schemaVersion)
    throws JAXBException, IOException {
  if (Files.isDirectory(path)) {
    throw new IOException("Can not marshal object to the path that represents the directory");
  }

  Files.deleteIfExists(path);
  Files.createDirectories(path.getParent());

  JAXBContext context = JAXBContext.newInstance(object.getClass());
  Marshaller marshaller = context.createMarshaller();
  marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
  if (null != noNamespaceSchemaLocation) {
    marshaller.setProperty(
        Marshaller.JAXB_NO_NAMESPACE_SCHEMA_LOCATION, noNamespaceSchemaLocation);
  }

  // set schema version if needed
  if ((object instanceof ISchemaVersionAware) && (0 != schemaVersion)) {
    ((ISchemaVersionAware) object).setSchemaVersion(schemaVersion);
  }

  try (OutputStream outputStream = Files.newOutputStream(path, StandardOpenOption.CREATE_NEW)) {
    marshaller.marshal(object, outputStream);
  }
}
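/*
 * A hedged usage sketch (not part of the original class): calling marshall(...) for a
 * hypothetical JAXB-annotated Settings type. The Settings class, the schema file name and
 * the version number are illustrative assumptions. Per the javadoc above, passing null
 * skips the noNamespaceSchemaLocation and 0 skips schema-version stamping; the earlier
 * deleteIfExists call is what lets the CREATE_NEW open option succeed on a second run.
 */
public void writeSettingsExample(Path configDir, Settings settings)
    throws JAXBException, IOException {
  Path target = configDir.resolve("settings.xml"); // hypothetical target file
  marshall(target, settings, "settings.xsd", 2);   // or marshall(target, settings, null, 0)
}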
public static Result processFile(Path prev, Path cur, Path outDir, int num, int gzipFrom)
    throws IOException {
  Result deltaHashes = new Result();

  Path deltaFile = outDir.resolve(cur.getFileName().toString() + ".bpatch");
  deleteIfExists(deltaFile);
  deltaHashes.path = deltaFile;

  boolean isGzipping = num >= gzipFrom;
  try (HashingOutputStream hashingStream =
      new HashingOutputStream(
          Hashing.sha256(),
          new BufferedOutputStream(newOutputStream(deltaFile, StandardOpenOption.CREATE_NEW)))) {
    GZIPOutputStream zipStream = null;
    GDiffWriter writer;
    if (isGzipping) {
      // Just constructing this object writes to the stream.
      zipStream = new GZIPOutputStream(hashingStream);
      writer = new GDiffWriter(zipStream);
    } else {
      writer = new GDiffWriter(hashingStream);
    }

    Delta delta = new Delta();
    deltaHashes.preHash = sha256(readAllBytes(prev));
    delta.compute(prev.toFile(), cur.toFile(), writer);
    if (isGzipping) zipStream.close();

    deltaHashes.patchHash = hashingStream.hash().asBytes();
    deltaHashes.postHash = sha256(readAllBytes(cur));
  }

  long size = Files.size(deltaFile);
  deltaHashes.patchSize = size;
  println("... done: %s (%.2fkb) %s", deltaFile, size / 1024.0, isGzipping ? "zipped" : "");
  return deltaHashes;
}
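/*
 * A hedged usage sketch (not from the original project): driving processFile(...) over a
 * sorted list of versioned files so that version N is patched from version N-1. The method
 * name, the list argument and the loop are assumptions for illustration; gzipFrom marks the
 * first version index whose .bpatch output is gzip-compressed, as in the method above.
 */
static List<Result> buildPatchChain(List<Path> sortedVersions, Path outDir, int gzipFrom)
    throws IOException {
  List<Result> results = new ArrayList<>();
  for (int num = 1; num < sortedVersions.size(); num++) {
    Path prev = sortedVersions.get(num - 1);
    Path cur = sortedVersions.get(num);
    // Each call writes <cur filename>.bpatch into outDir and records its hashes and size.
    results.add(processFile(prev, cur, outDir, num, gzipFrom));
  }
  return results;
}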
public void testInvalidFormat(ImageResultVerifier... verifiers) throws IOException {
  Path tempFile = null;
  try {
    tempFile = Files.createTempFile("jtrim", ".test");

    final ContextAwareTaskExecutor taskExecutor =
        TaskExecutors.contextAware(SyncTaskExecutor.getSimpleExecutor());
    AsyncDataLink<ImageResult> link = create(tempFile, taskExecutor);

    ImageCollectorListener listener = new ImageCollectorListener(taskExecutor);
    AsyncDataController controller = link.getData(Cancellation.UNCANCELABLE_TOKEN, listener);
    assertNotNull(controller.getDataState());
    controller.controlData(null);

    AsyncReport report = listener.getReport();
    ImageResult lastResult = listener.getLastResult();
    long imageCount = listener.getImageCount();

    listener.verifyNoTrivialErrors();
    assertEquals("Should not have received an image.", 0L, imageCount);

    ImageResultVerifier verifier =
        combineVerifiers(new FailedVerifier(Throwable.class), combineVerifiers(verifiers));
    verifier.verifyImageResult(imageCount, lastResult, report);
  } finally {
    if (tempFile != null) {
      Files.deleteIfExists(tempFile);
    }
  }
}
@Test
public void readAirlineCompanyCSVFileWithOneRecordTest() throws IOException, ParseException {
  Files.deleteIfExists(Paths.get(projectPath + CSVFileTest.AIRLINECOMPANYCSV.value()));

  AirlineCompany airlineCompany = new AirlineCompany();
  airlineCompany.setAirline("Cathay Pacific Airways");
  ArrayList<AirlineCompany> airlineCompanies = new ArrayList<AirlineCompany>();
  airlineCompanies.add(airlineCompany);

  SourceWriter<List<AirlineCompany>> airlineCompanyCSVFileWriter =
      new AirlineCompanyCSVFileWriter(projectPath + CSVFileTest.AIRLINECOMPANYCSV.value());
  airlineCompanyCSVFileWriter.write(airlineCompanies);
  airlineCompanyCSVFileWriter.close();

  SourceReader<AirlineCompany> airlineCompanyCSVFileReader =
      new AirlineCompanyCSVFileReader(projectPath + CSVFileTest.AIRLINECOMPANYCSV.value());
  List<AirlineCompany> airlineCompanyResultList =
      airlineCompanyCSVFileReader.read(new AirlineCompanyParser());
  airlineCompanyCSVFileReader.close();

  assertThat(airlineCompany.getAirline(), is(airlineCompanyResultList.get(0).getAirline()));
  assertThat(
      airlineCompany.getFlights().size(),
      is(airlineCompanyResultList.get(0).getFlights().size()));
}
public void doStreamed(
    final EntityManager em, final ArtifactEntity ae, final ThrowingConsumer<Path> fileConsumer)
    throws IOException {
  final Path tmp = StreamServiceHelper.createTempFile(ae.getName());

  try {
    try (OutputStream os = new BufferedOutputStream(new FileOutputStream(tmp.toFile()))) {
      streamArtifact(
          em,
          ae,
          (ai, in) -> {
            ByteStreams.copy(in, os);
          });
    }

    try {
      fileConsumer.accept(tmp);
    } catch (final IOException e) {
      throw e;
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }
  } finally {
    Files.deleteIfExists(tmp);
  }
}
private void loadHistory() {
  history.clear();

  if (!Files.exists(streamPath, LinkOption.NOFOLLOW_LINKS)) {
    logger.debug("no stream exists ({})", streamPath);
    return;
  }

  logger.info("load history ({})", streamPath);

  try (InputStream inStream = Files.newInputStream(streamPath, StandardOpenOption.READ)) {
    try (ObjectInputStream oStream = new ObjectInputStream(inStream)) {
      Object o = null;
      while (inStream.available() > 0 && (o = oStream.readObject()) != null) {
        HistoryEntry dev = (HistoryEntry) o;
        logger.debug("read dev: {}", dev);
        if (!history.contains(dev)) {
          history.add(dev);
        } else {
          logger.error("device already exists ({})", dev);
        }
      }
    }
  } catch (Exception e) {
    logger.error(e.getMessage(), e);
    try {
      Files.deleteIfExists(streamPath);
    } catch (IOException e1) {
      // best-effort cleanup of the unreadable history file; nothing more to do here
    }
  }
}
@After
public void tearDown() throws IOException {
  String jarName =
      PropsUtil.get(PropsKeys.SETUP_LIFERAY_POOL_PROVIDER_JAR_NAME, new Filter("hikaricp"));

  Files.deleteIfExists(Paths.get(PropsValues.LIFERAY_LIB_PORTAL_DIR, jarName));
}
public static void outputDoctorSentimentReviewsToJson(
    Map<Integer, List<SentimentReview>> docToSentimentReviews, String updatedDatasetPath) {
  ObjectMapper mapper = new ObjectMapper();

  try {
    Files.deleteIfExists(Paths.get(updatedDatasetPath));
  } catch (IOException e1) {
    e1.printStackTrace();
  }

  int count = 1;
  for (Integer docId : docToSentimentReviews.keySet()) {
    try (BufferedWriter writer =
        Files.newBufferedWriter(
            Paths.get(updatedDatasetPath),
            StandardOpenOption.CREATE,
            StandardOpenOption.APPEND)) {
      if (count > 1) writer.newLine();
      DoctorSentimentReview doctorReview =
          new DoctorSentimentReview(docId, docToSentimentReviews.get(docId));
      mapper.writeValue(writer, doctorReview);
      count++;
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
  // count starts at 1, so subtract it back out when reporting how many docIDs were written
  System.out.println("Num of outputted docIDs: " + (count - 1));
}
public void test() throws Exception {
  new Plant();
  try {
    Path dbPath = Paths.get("vcow.db");
    Files.deleteIfExists(dbPath);
    int maxRootBlockSize = 1000;
    try (Db db = new Db(new BaseRegistry(), dbPath, maxRootBlockSize)) {
      Files.deleteIfExists(dbPath);
      db.registerTransaction("SecondaryTran", SecondaryTran.class);
      db.open(true);
      String timestampId = db.update("SecondaryTran").call();
    }
  } finally {
    Plant.close();
  }
}
public static void deleteFile(final Path filePath, boolean retry) throws IOException {
  try {
    Files.deleteIfExists(filePath);
  } catch (Exception e) {
    if (!retry) {
      throw e;
    }
    PathCallable pathCallable =
        new PathCallable(filePath) {
          @Override
          public Object call() throws Exception {
            FileTime fileTime = Files.getLastModifiedTime(filePath);
            if (fileTime.toMillis() <= getStartTime()) {
              Files.deleteIfExists(filePath);
            }
            return null;
          }
        };
    FileLockRetryUtil.registerPathCallable(pathCallable);
  }
}
/** Utility method to delete temporary files used by this test fixture. */
public static void deleteFile(String fName) {
  try {
    Files.deleteIfExists(Paths.get(fName));
  } catch (IOException ex) {
    Logger.getLogger(Flow3V2Test.class.getName()).log(Level.SEVERE, null, ex);
  }
}
@Test
public void testUnzip() throws Exception {
  AsciiDocMetaData asciiDocMetaData = new AsciiDocMetaData();
  File dataDir = asciiDocMetaData.disocverDataDir();
  File mathjax = new File(dataDir, "mathjax");
  if (mathjax.exists()) {
    deleteDir(mathjax);
  }
  Files.deleteIfExists(new File(dataDir, "asciidoctor.css").toPath());
  Files.deleteIfExists(new File(dataDir, "asciidoctor-coderay.css").toPath());
  Files.deleteIfExists(new File(dataDir, "markdown.css").toPath());
  assertEquals(3, dataDir.listFiles().length);
  asciiDocMetaData.extract();
  assertEquals(7, dataDir.listFiles().length);
}
private void deleteLocal(SmallModuleEntry sme) {
  Path localCopyPath = UploaderPlugin.getLocalPath(updateLocation, sme);
  try {
    Files.deleteIfExists(localCopyPath);
  } catch (IOException a) {
    a.printStackTrace();
  }
}
private static void deleteFile(final File file) {
  try {
    Files.deleteIfExists(file.toPath());
  } catch (final IOException e) {
    LOGGER.warning(
        "No se pudo borrar tras su uso el fichero " + file.getAbsolutePath()); //$NON-NLS-1$
  }
}
private int runTestsExternal(
    final CommandRunnerParams params,
    Build build,
    Iterable<String> command,
    Iterable<TestRule> testRules)
    throws InterruptedException, IOException {
  TestRunningOptions options = getTestRunningOptions(params);

  // Walk the test rules, collecting all the specs.
  List<ExternalTestRunnerTestSpec> specs = Lists.newArrayList();
  for (TestRule testRule : testRules) {
    if (!(testRule instanceof ExternalTestRunnerRule)) {
      params
          .getBuckEventBus()
          .post(
              ConsoleEvent.severe(
                  String.format(
                      "Test %s does not support external test running",
                      testRule.getBuildTarget())));
      return 1;
    }
    ExternalTestRunnerRule rule = (ExternalTestRunnerRule) testRule;
    specs.add(rule.getExternalTestRunnerSpec(build.getExecutionContext(), options));
  }

  // Serialize the specs to a file to pass into the test runner.
  Path infoFile =
      params
          .getCell()
          .getFilesystem()
          .resolve(BuckConstant.SCRATCH_PATH.resolve("external_runner_specs.json"));
  Files.createDirectories(infoFile.getParent());
  Files.deleteIfExists(infoFile);
  params.getObjectMapper().writerWithDefaultPrettyPrinter().writeValue(infoFile.toFile(), specs);

  // Launch and run the external test runner, forwarding its stdout/stderr to the console.
  // Wait for it to complete and then return its exit code.
  ListeningProcessExecutor processExecutor = new ListeningProcessExecutor();
  ProcessExecutorParams processExecutorParams =
      ProcessExecutorParams.builder()
          .addAllCommand(command)
          .addAllCommand(withDashArguments)
          .addCommand("--buck-test-info", infoFile.toString())
          .setDirectory(params.getCell().getFilesystem().getRootPath().toFile())
          .build();
  ForwardingProcessListener processListener =
      new ForwardingProcessListener(
          Channels.newChannel(params.getConsole().getStdOut()),
          Channels.newChannel(params.getConsole().getStdErr()));
  ListeningProcessExecutor.LaunchedProcess process =
      processExecutor.launchProcess(processExecutorParams, processListener);
  try {
    return processExecutor.waitForProcess(process, Long.MAX_VALUE, TimeUnit.DAYS);
  } finally {
    processExecutor.destroyProcess(process, /* force */ false);
    processExecutor.waitForProcess(process, Long.MAX_VALUE, TimeUnit.DAYS);
  }
}
private void assertCompDir(Path compDir, Optional<String> failure) throws Exception {
  ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot().toPath());
  CxxPlatform platform = DefaultCxxPlatforms.build(new CxxBuckConfig(new FakeBuckConfig()));

  // Build up the paths to various files the compile step will use.
  ImmutableList<String> compiler =
      platform.getCc().getCommandPrefix(new SourcePathResolver(new BuildRuleResolver()));
  Path output = filesystem.resolve(Paths.get("output.o"));
  Path relativeInput = Paths.get("input.c");
  Path input = filesystem.resolve(relativeInput);
  filesystem.writeContentsToPath("int main() {}", relativeInput);

  ImmutableList.Builder<String> preprocessorCommand = ImmutableList.builder();
  preprocessorCommand.addAll(compiler);

  ImmutableList.Builder<String> compilerCommand = ImmutableList.builder();
  compilerCommand.addAll(compiler);
  compilerCommand.add("-g");

  DebugPathSanitizer sanitizer =
      new DebugPathSanitizer(200, File.separatorChar, compDir, ImmutableBiMap.<Path, Path>of());

  // Build a compile step.
  CxxPreprocessAndCompileStep step =
      new CxxPreprocessAndCompileStep(
          CxxPreprocessAndCompileStep.Operation.COMPILE_MUNGE_DEBUGINFO,
          output,
          relativeInput,
          CxxSource.Type.C,
          Optional.of(preprocessorCommand.build()),
          Optional.of(compilerCommand.build()),
          ImmutableMap.<Path, Path>of(),
          sanitizer);

  // Execute the compile step and verify it ran successfully.
  ExecutionContext executionContext =
      TestExecutionContext.newBuilder()
          .setProjectFilesystem(new ProjectFilesystem(tmp.getRoot().toPath()))
          .build();
  TestConsole console = (TestConsole) executionContext.getConsole();
  int exitCode = step.execute(executionContext);
  if (failure.isPresent()) {
    assertNotEquals("compile step succeeded", 0, exitCode);
    assertThat(
        console.getTextWrittenToStdErr(),
        console.getTextWrittenToStdErr(),
        Matchers.containsString(failure.get()));
  } else {
    assertEquals("compile step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);
    // Verify that we find the expected compilation dir embedded in the file.
    String contents = new String(Files.readAllBytes(output));
    assertThat(contents, Matchers.containsString(sanitizer.getCompilationDirectory()));
  }

  // Cleanup.
  Files.delete(input);
  Files.deleteIfExists(output);
}
public void deleteFile(Path path) {
  Charset charset = Charset.forName("US-ASCII");
  Path filepath = Paths.get("resources/system/" + path);
  try {
    Files.deleteIfExists(filepath);
  } catch (IOException ioe) {
    JOptionPane.showMessageDialog(null, ioe);
  }
}
@Before
public void setUp() throws IOException {
  Files.deleteIfExists(Paths.get(OS.TARGET, _mapName));

  _testMap =
      clientAssetTree.acquireMap(_mapName + "?putReturnsNull=true", String.class, String.class);

  _testMap.clear();
}
@Override
public void deleteTempFile(Path path) {
  checkFileStructure();
  try {
    Files.deleteIfExists(path);
  } catch (IOException e) {
    FileSystemLogging.logger.log(
        Level.FINE, "IOException while trying to delete temp file: " + path, e);
  }
}
static void testCopyInputStreamToFile(int size) throws IOException {
  Path tmpdir = createTempDirectory("blah");
  Path source = tmpdir.resolve("source");
  Path target = tmpdir.resolve("target");
  try {
    boolean testReplaceExisting = rand.nextBoolean();

    // create source file
    byte[] b = new byte[size];
    rand.nextBytes(b);
    write(source, b);

    // target file might already exist
    if (testReplaceExisting && rand.nextBoolean()) {
      write(target, new byte[rand.nextInt(512)]);
    }

    // copy from stream to file
    InputStream in = new FileInputStream(source.toFile());
    try {
      long n;
      if (testReplaceExisting) {
        n = copy(in, target, StandardCopyOption.REPLACE_EXISTING);
      } else {
        n = copy(in, target);
      }
      assertTrue(in.read() == -1); // EOF
      assertTrue(n == size);
      assertTrue(size(target) == size);
    } finally {
      in.close();
    }

    // check file
    byte[] read = readAllBytes(target);
    assertTrue(Arrays.equals(read, b));
  } finally {
    deleteIfExists(source);
    deleteIfExists(target);
    delete(tmpdir);
  }
}
private boolean removeLogrotateFile() {
  boolean deleted = false;

  try {
    deleted = Files.deleteIfExists(getLogrotateConfPath());
  } catch (Throwable t) {
    log.trace("Couldn't delete {}", getLogrotateConfPath(), t);
    return false;
  }

  log.trace("Deleted {} : {}", getLogrotateConfPath(), deleted);
  return true;
}
private boolean canCreate(String pathPart) {
  try {
    Path tempFile = Files.createTempFile(pathPart, "canCreate");
    Files.deleteIfExists(tempFile);
    return true;
  } catch (Exception e) {
    logger.log(Level.SEVERE, "WARNING: Cannot create file: " + pathPart);
    return false;
  }
}
public ShowPlan(World world) {
  Path path = Paths.get("tmp.xml");
  try {
    world.toXml(path.toString());
    this.world = new World(path);
    Files.deleteIfExists(path);
  } catch (Exception e) {
    e.printStackTrace();
  }
}
/**
 * This method tries to write an empty file and moves it using an atomic move operation. This
 * method throws an {@link IllegalStateException} if this operation is not supported by the
 * filesystem. This test is executed on each of the data directories. This method cleans up all
 * files even in the case of an error.
 */
public void ensureAtomicMoveSupported() throws IOException {
  final NodePath[] nodePaths = nodePaths();
  for (NodePath nodePath : nodePaths) {
    assert Files.isDirectory(nodePath.path) : nodePath.path + " is not a directory";
    final Path src = nodePath.path.resolve("__es__.tmp");
    Files.createFile(src);
    final Path target = nodePath.path.resolve("__es__.final");
    try {
      Files.move(src, target, StandardCopyOption.ATOMIC_MOVE);
    } catch (AtomicMoveNotSupportedException ex) {
      throw new IllegalStateException(
          "atomic_move is not supported by the filesystem on path ["
              + nodePath.path
              + "] atomic_move is required for elasticsearch to work correctly.",
          ex);
    } finally {
      Files.deleteIfExists(src);
      Files.deleteIfExists(target);
    }
  }
}