@Test
public void testContinueOnSomeDbDirectoriesMissing() throws Exception {
  File targetDir1 = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
  File targetDir2 = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
  try {
    assertTrue(targetDir1.mkdirs());
    assertTrue(targetDir2.mkdirs());

    if (!targetDir1.setWritable(false, false)) {
      System.err.println(
          "Cannot execute 'testContinueOnSomeDbDirectoriesMissing' because cannot mark directory non-writable");
      return;
    }

    RocksDBStateBackend rocksDbBackend = new RocksDBStateBackend(TEMP_URI);
    rocksDbBackend.setDbStoragePaths(targetDir1.getAbsolutePath(), targetDir2.getAbsolutePath());

    try {
      rocksDbBackend.initializeForJob(getMockEnvironment(), "foobar", IntSerializer.INSTANCE);
    } catch (Exception e) {
      e.printStackTrace();
      fail("Backend initialization failed even though some paths were available");
    }
  } finally {
    //noinspection ResultOfMethodCallIgnored
    targetDir1.setWritable(true, false);
    FileUtils.deleteDirectory(targetDir1);
    FileUtils.deleteDirectory(targetDir2);
  }
}
public boolean pkgReset() {
  CommandInfo cmdInfo = cset.newCommandInfo(CommandInfo.CMD_RESET);
  if ("ask".equalsIgnoreCase(accept)) {
    accept = readConsole(
        "The reset will erase Marketplace packages history.\n"
            + "Do you want to continue (yes/no)? [yes] ",
        "yes");
  }
  if (!Boolean.parseBoolean(accept)) {
    cmdInfo.exitCode = 1;
    return false;
  }
  try {
    service.reset();
    log.info("Packages reset done: all packages were marked as DOWNLOADED");
    List<LocalPackage> localPackages = service.getPackages();
    for (LocalPackage localPackage : localPackages) {
      localPackage.getUninstallFile().delete();
      FileUtils.deleteDirectory(localPackage.getData().getEntry(LocalPackage.BACKUP_DIR));
      newPackageInfo(cmdInfo, localPackage);
    }
    service.getRegistry().delete();
    FileUtils.deleteDirectory(service.getBackupDir());
  } catch (PackageException e) {
    log.error(e);
    cmdInfo.exitCode = 1;
  } catch (IOException e) {
    log.error(e);
    cmdInfo.exitCode = 1;
  }
  return cmdInfo.exitCode == 0;
}
public static void prepareDocumentation(boolean online) throws IOException {
  // - Deletes all generated folders
  // - Creates the folders that do not exist yet
  File genFolder = new File(Constants.GEN_FOLDER);
  File testFolder = new File(Constants.TEST_FOLDER);
  if (genFolder.exists()) {
    FileUtils.deleteDirectory(genFolder);
  }
  if (testFolder.exists()) {
    FileUtils.deleteDirectory(testFolder);
  }
  genFolder.mkdir();
  new File(Constants.JAVA2XML_FOLDER).mkdirs();
  new File(Constants.XML2WIKI_FOLDER).mkdirs();
  new File(Constants.WIKI2WIKI_FOLDER).mkdirs();
  new File(Constants.HTML2XML_FOLDER).mkdirs();
  new File(Constants.PDF_FOLDER).mkdirs();
  new File(Constants.TEST_FOLDER).mkdirs();
  new File(Constants.TOC_GEN_FOLDER).mkdir();
  copyPythonTemplate();
}
/**
 * If the source file exists, it should be moved to the dest path. The destination path should be
 * created and the file must exist in the destination path, as a file.
 *
 * @throws Throwable
 */
@Test
public void testProcessWithReadCompleteAsTrue() throws Throwable {
  ActionEvent actionEvent = new ActionEvent();
  String tempDir = System.getProperty("java.io.tmpdir");
  File sourceFilePath = new File(tempDir + "tempsrc");
  sourceFilePath.mkdirs();
  File destPath = new File(tempDir + "tempdest");
  File sourceFile = File.createTempFile("temp-file-name-" + 0, ".tmp", sourceFilePath);

  actionEvent.getHeaders().put(ActionEventHeaderConstants.READ_COMPLETE, "true");
  actionEvent
      .getHeaders()
      .put(ActionEventHeaderConstants.SOURCE_FILE_PATH, sourceFile.getAbsolutePath());
  // actionEvent.getHeaders().put(ActionEventHeaderConstants.SOURCE_FILE_LOCATION,
  //     sourceFilePath.getAbsolutePath());

  FileArchiveHandler fileArchiveHandler = new FileArchiveHandler();
  ReflectionTestUtils.setField(fileArchiveHandler, "archivePath", destPath.getAbsolutePath());

  Assert.assertTrue(sourceFile.exists());
  Status status = invoke(actionEvent, fileArchiveHandler);
  Assert.assertEquals(status, Status.READY);
  Assert.assertFalse(new File(sourceFile.getAbsolutePath()).exists());
  Assert.assertTrue(new File(destPath.getAbsolutePath() + sourceFile.getAbsolutePath()).exists());
  Assert.assertFalse(
      new File(destPath.getAbsolutePath() + sourceFile.getAbsolutePath()).isDirectory());

  FileUtils.deleteDirectory(sourceFilePath);
  FileUtils.deleteDirectory(destPath);
}
private static void deleteFileImpl(String path, File file, File uploadsDir, Upload upload)
    throws IOException {
  checkUploadPath(file, uploadsDir);
  if (!file.exists()) {
    notFound(path);
  }
  Logger.info("delete: %s exists: %s", path, file.exists());
  if (uploadsDir.getCanonicalPath().equals(file.getCanonicalPath())) {
    // clear the entire repo
    for (File f : uploadsDir.listFiles()) {
      if (f.isDirectory()) {
        FileUtils.deleteDirectory(f);
      } else {
        f.delete();
      }
    }
    // let's be helpful and remove maven dependencies too
    for (MavenDependency md : upload.mavenDependencies) {
      md.delete();
    }
    flash("message", "Upload cleared");
  } else if (file.isDirectory()) {
    FileUtils.deleteDirectory(file);
    flash("message", "Directory deleted");
  } else {
    file.delete();
    flash("message", "File deleted");
  }
}
@AfterClass
public void shutDown() throws IOException {
  FileUtils.deleteDirectory(new File("./models/"));
  FileUtils.deleteDirectory(new File("./modelsTmp/"));
  Encog.getInstance().shutdown();
}
@After
public void after() throws Exception {
  File test = new File("dir");
  test.mkdir();
  FileUtils.deleteDirectory(test);
  FileUtils.deleteDirectory(new File("multidir"));
}
public static void receive_abort(Client client) throws IOException {
  File tempDir = getTempReceiveDir(client);
  File trashDir = getTrashReceiveDir(client);
  synchronized (ClientCatalog.class) {
    if (tempDir.exists()) FileUtils.deleteDirectory(tempDir);
    if (trashDir.exists()) FileUtils.deleteDirectory(trashDir);
  }
}
/** {@inheritDoc} */
public void execute() throws MojoExecutionException, MojoFailureException {
  try {
    FileUtils.deleteDirectory(getGeneratedSourcesDirectory());
    FileUtils.deleteDirectory(getGeneratedResourcesDirectory());
  } catch (IOException e) {
    throw new MojoFailureException(e.getMessage());
  }
}
@AfterMethod
public void tearDown() throws IOException {
  if (_mmapDir != null && _mmapDir.exists()) {
    FileUtils.deleteDirectory(_mmapDir);
  }
  if (_mmapBakDir != null && _mmapBakDir.exists()) {
    FileUtils.deleteDirectory(_mmapBakDir);
  }
}
/**
 * Creates a new RocksDB backed state and restores from the given backup directory. After
 * restoring, the backup directory is deleted.
 *
 * @param keySerializer The serializer for the keys.
 * @param namespaceSerializer The serializer for the namespace.
 * @param basePath The path on the local system where RocksDB data should be stored.
 * @param checkpointPath The path where checkpoint data should be stored.
 * @param restorePath The path to a backup directory from which to restore the RocksDB database.
 * @param options The RocksDB options used when opening the database.
 */
protected AbstractRocksDBState(
    TypeSerializer<K> keySerializer,
    TypeSerializer<N> namespaceSerializer,
    File basePath,
    String checkpointPath,
    String restorePath,
    Options options) {
  rocksDbPath = new File(basePath, "db" + UUID.randomUUID().toString());
  hadoopConfPath = new File(basePath, HADOOP_CONF_NAME);

  RocksDB.loadLibrary();

  // clean it, this will remove the last part of the path but RocksDB will recreate it
  try {
    if (rocksDbPath.exists()) {
      LOG.warn("Deleting already existing db directory {}.", rocksDbPath);
      FileUtils.deleteDirectory(rocksDbPath);
    }
  } catch (IOException e) {
    throw new RuntimeException("Error cleaning RocksDB data directory.", e);
  }

  try (BackupEngine backupEngine =
      BackupEngine.open(Env.getDefault(), new BackupableDBOptions(restorePath + "/"))) {
    backupEngine.restoreDbFromLatestBackup(
        rocksDbPath.getAbsolutePath(), rocksDbPath.getAbsolutePath(), new RestoreOptions(true));
  } catch (RocksDBException | IllegalArgumentException e) {
    throw new RuntimeException("Error while restoring RocksDB state from " + restorePath, e);
  } finally {
    try {
      FileUtils.deleteDirectory(new File(restorePath));
    } catch (IOException e) {
      LOG.error("Error cleaning up local restore directory " + restorePath, e);
    }
  }

  this.keySerializer = requireNonNull(keySerializer);
  this.namespaceSerializer = namespaceSerializer;
  this.basePath = basePath;
  this.checkpointPath = checkpointPath;

  if (!basePath.exists()) {
    if (!basePath.mkdirs()) {
      throw new RuntimeException("Could not create RocksDB data directory.");
    }
  }

  try {
    db = RocksDB.open(options, rocksDbPath.getAbsolutePath());
  } catch (RocksDBException e) {
    throw new RuntimeException("Error while opening RocksDB instance.", e);
  }

  writeHadoopConfig(hadoopConfPath);
}
private static void destroyWorkingDirs() throws IOException {
  if (workingDir != null) {
    FileUtils.deleteDirectory(workingDir.toFile());
    workingDir = null;
  }
  if (globalWorkingDir != null) {
    FileUtils.deleteDirectory(globalWorkingDir.toFile());
    globalWorkingDir = null;
  }
}
@Test
public void testNetCDFWithDifferentTimeDimensions() throws MalformedURLException, IOException {
  // Selection of the input file
  final File workDir = new File(TestData.file(this, "."), "times");
  if (!workDir.mkdir()) {
    FileUtils.deleteDirectory(workDir);
    assertTrue("Unable to create workdir:" + workDir, workDir.mkdir());
  }
  FileUtils.copyFile(TestData.file(this, "times.zip"), new File(workDir, "times.zip"));
  TestData.unzipFile(this, "times/times.zip");
  final File inputFile = TestData.file(this, "times/times.nc");

  // Get format
  final AbstractGridFormat format =
      (AbstractGridFormat) GridFormatFinder.findFormat(inputFile.toURI().toURL(), null);
  final NetCDFReader reader = new NetCDFReader(inputFile, null);
  Assert.assertNotNull(format);
  Assert.assertNotNull(reader);
  try {
    // Selection of all the Coverage names
    String[] names = reader.getGridCoverageNames();
    assertNotNull(names);
    assertEquals(2, names.length);

    // Parsing metadata values
    assertEquals("true", reader.getMetadataValue(names[0], "HAS_TIME_DOMAIN"));
    List<DimensionDescriptor> descriptors = reader.getDimensionDescriptors(names[0]);
    assertEquals(1, descriptors.size());
    DimensionDescriptor descriptor = descriptors.get(0);
    assertEquals("time", descriptor.getStartAttribute());
    assertEquals("TIME", descriptor.getName());

    descriptors = reader.getDimensionDescriptors(names[1]);
    assertEquals(1, descriptors.size());
    descriptor = descriptors.get(0);
    assertEquals("time1", descriptor.getStartAttribute());
    assertEquals("TIME", descriptor.getName());
    assertEquals("true", reader.getMetadataValue(names[1], "HAS_TIME_DOMAIN"));
  } finally {
    if (reader != null) {
      try {
        reader.dispose();
      } catch (Throwable t) {
        // Does nothing
      }
    }
    FileUtils.deleteDirectory(TestData.file(this, "times"));
  }
}
@Before
public void cleanup() {
  Path dataFolder1 = Paths.get(dataFolder, "data", "test");
  try {
    FileUtils.deleteDirectory(dataFolder1.toFile());
  } catch (IOException e) {
    // best-effort cleanup; ignore
  }
  Path dataFolder2 = Paths.get(dataFolder, "data", "test2");
  try {
    FileUtils.deleteDirectory(dataFolder2.toFile());
  } catch (IOException e) {
    // best-effort cleanup; ignore
  }
}
@Override
public void commitCompact(boolean force) throws IOException {
  this.close();
  FileUtils.deleteDirectory(new File(this.origFileName).getParentFile());
  SDFSLogger.getLog().info("Deleted " + new File(this.origFileName).getParent());
  new File(this.fileName).getParentFile().renameTo(new File(this.origFileName).getParentFile());
  SDFSLogger.getLog()
      .info(
          "moved "
              + new File(this.fileName).getParent()
              + " to "
              + new File(this.origFileName).getParent());
  FileUtils.deleteDirectory(new File(this.fileName).getParentFile());
  SDFSLogger.getLog().info("deleted " + new File(this.fileName).getParent());
}
/**
 * Clear the test result output directory.
 *
 * @param directory the output directory to clear
 * @throws MojoFailureException if the output directory cannot be deleted
 */
private void clearOutputDirectory(String directory) throws MojoFailureException {
  try {
    FileUtils.deleteDirectory(new File(directory));
  } catch (IOException e) {
    throw new MojoFailureException("Could not clear output directory.", e);
  }
}
private static void copyScss() throws IOException, InterruptedException {
  FileUtils.deleteDirectory(new File("webapp/stylesheets/css_sass"));
  new ProcessRunner()
      .command(jrubyPath(), "-S", "sass", "--update", ".:../stylesheets/css_sass/")
      .withWorkingDir("webapp/sass/")
      .run();
}
public static void deleteDirectory(File directory) {
  try {
    FileUtils.deleteDirectory(directory);
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
}
private void initializeWorkerStorage()
    throws IOException, FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException,
        TException {
  LOG.info("Initializing the worker storage.");
  if (!mLocalDataFolder.exists()) {
    LOG.info("Local folder " + mLocalDataFolder + " does not exist. Creating a new one.");
    mLocalDataFolder.mkdirs();
    mLocalUserFolder.mkdirs();
    CommonUtils.changeLocalFilePermission(mLocalDataFolder.getPath(), "775");
    CommonUtils.changeLocalFilePermission(mLocalUserFolder.getPath(), "775");
    return;
  }

  if (!mLocalDataFolder.isDirectory()) {
    String tmp = "Data folder " + mLocalDataFolder + " is not a folder!";
    LOG.error(tmp);
    throw new IllegalArgumentException(tmp);
  }

  if (mLocalUserFolder.exists()) {
    try {
      FileUtils.deleteDirectory(mLocalUserFolder);
    } catch (IOException e) {
      LOG.error(e.getMessage(), e);
    }
  }
  mLocalUserFolder.mkdir();
  CommonUtils.changeLocalFilePermission(mLocalUserFolder.getPath(), "775");

  mUnderfsOrphansFolder = mUnderfsWorkerFolder + "/orphans";
  if (!mUnderFs.exists(mUnderfsOrphansFolder)) {
    mUnderFs.mkdirs(mUnderfsOrphansFolder, true);
  }

  int cnt = 0;
  for (File tFile : mLocalDataFolder.listFiles()) {
    if (tFile.isFile()) {
      cnt++;
      LOG.info("File " + cnt + ": " + tFile.getPath() + " with size " + tFile.length() + " Bs.");
      long blockId = CommonUtils.getBlockIdFromFileName(tFile.getName());
      boolean success = mWorkerSpaceCounter.requestSpaceBytes(tFile.length());
      try {
        addFoundBlock(blockId, tFile.length());
      } catch (FileDoesNotExistException e) {
        LOG.error("BlockId: " + blockId + " becomes orphan for: \"" + e.message + "\"");
        LOG.info("Swapout File " + cnt + ": blockId: " + blockId + " to " + mUnderfsOrphansFolder);
        swapoutOrphanBlocks(blockId, tFile);
        freeBlock(blockId);
        continue;
      }
      mAddedBlockList.add(blockId);
      if (!success) {
        throw new RuntimeException("Pre-existing files exceed the local memory capacity.");
      }
    }
  }
}
@Test
public void testInitializeWithStorageDirNotExisting() throws Throwable {
  // make sure folder does not exist prior to initialization of host
  FileUtils.deleteDirectory(storageDir);
  host.initialize();
  assertThat(storageDir.exists(), is(true));
}
@Before
public void setUp() throws Exception {
  FileUtils.deleteDirectory(SVN_DIR);
  System.out.println("setup...");
  SVN_DIR.mkdirs();

  ProcessBuilder builder = new ProcessBuilder(SVNADMIN_EXEC, "create", SVN_DIR.getAbsolutePath());
  builder.redirectErrorStream(true);
  Process process = builder.start();
  process.waitFor();

  FileUtils.writeStringToFile(
      new File(SVN_DIR, "conf/svnserve.conf"), "[general]\npassword-db = passwd", null);
  FileUtils.writeStringToFile(new File(SVN_DIR, "conf/passwd"), "[users]\nguest = guest", null);
  System.out.println("setup ok.");

  writer = context.mock(LrdWriter.class);
  repositoryBean = new RepositoryBean();
  repositoryBean.setUrl(SVN_URL);
  repositoryBean.setUserName(SVN_USER);
  repositoryBean.setPassword(SVN_PASS);
}
@BeforeClass
public static void setup() throws Exception {
  // configure the location of the nifi properties
  File nifiPropertiesFile = new File("src/test/resources/access-control/nifi.properties");
  System.setProperty(NiFiProperties.PROPERTIES_FILE_PATH, nifiPropertiesFile.getAbsolutePath());

  NiFiProperties props = NiFiProperties.createBasicNiFiProperties(null, null);
  flowXmlPath = props.getProperty(NiFiProperties.FLOW_CONFIGURATION_FILE);

  // delete the database directory to avoid issues with re-registration in
  // testRequestAccessUsingToken
  FileUtils.deleteDirectory(props.getDatabaseRepositoryPath().toFile());

  // load extensions
  NarClassLoaders.getInstance()
      .init(props.getFrameworkWorkingDirectory(), props.getExtensionsWorkingDirectory());
  ExtensionManager.discoverExtensions(NarClassLoaders.getInstance().getExtensionClassLoaders());

  // start the server
  SERVER = new NiFiTestServer("src/main/webapp", CONTEXT_PATH, props);
  SERVER.startServer();
  SERVER.loadFlow();

  // get the base url
  BASE_URL = SERVER.getBaseUrl() + CONTEXT_PATH;

  // create the user
  final Client client = WebUtils.createClient(null, createTrustContext(props));
  TOKEN_USER = new NiFiTestUser(client, null);
}
public static void decodeResources(File input, File output) {
  try {
    Path temporaryDirectory = Files.createTempDirectory("apkresources");
    File directory = temporaryDirectory.toFile();
    Files.delete(temporaryDirectory);

    cmdDecode(input, directory);

    File original = new File(directory, "original");
    FileUtils.deleteDirectory(original);
    File apktool = new File(directory, "apktool.yml");
    apktool.delete();

    ZipUtil.pack(directory, output);
    FileUtils.deleteDirectory(directory);
  } catch (Exception e) {
    ExceptionHandler.handle(e);
  }
}
@Override
public void kill(DataSegment segment) throws SegmentLoadingException {
  final File path = getPath(segment);
  log.info("killing segment[%s] mapped to path[%s]", segment.getIdentifier(), path);
  try {
    if (path.getName().endsWith(".zip")) {
      // path format --> .../dataSource/interval/version/partitionNum/xxx.zip
      File partitionNumDir = path.getParentFile();
      FileUtils.deleteDirectory(partitionNumDir);

      // try to delete other directories if possible
      File versionDir = partitionNumDir.getParentFile();
      if (versionDir.delete()) {
        File intervalDir = versionDir.getParentFile();
        if (intervalDir.delete()) {
          File dataSourceDir = intervalDir.getParentFile();
          dataSourceDir.delete();
        }
      }
    } else {
      throw new SegmentLoadingException("Unknown file type[%s]", path);
    }
  } catch (IOException e) {
    throw new SegmentLoadingException(e, "Unable to kill segment");
  }
}
private Path executeWindupAgainstAppUntilRule(
    final String inputDir,
    final GraphContext grCtx,
    final Class<MavenizeRuleProvider> ruleToRunUpTo)
    throws IOException, IllegalAccessException, InstantiationException {
  Assume.assumeTrue("Exists: " + inputDir, new File(inputDir).exists());

  final Path outputPath =
      Paths.get(FileUtils.getTempDirectory().toString(), "Windup-Mavenization-output");
  FileUtils.deleteDirectory(outputPath.toFile());
  Files.createDirectories(outputPath);

  grCtx.getGraph().getBaseGraph().commit();

  // Configure Windup core
  final WindupConfiguration processorConfig = new WindupConfiguration();
  processorConfig.setRuleProviderFilter(new RuleProviderWithDependenciesPredicate(ruleToRunUpTo));
  processorConfig.setGraphContext(grCtx);
  processorConfig.addInputPath(Paths.get(inputDir));
  processorConfig.setOutputDirectory(outputPath);
  processorConfig.setOptionValue(ScanPackagesOption.NAME, Collections.singletonList(""));
  processorConfig.setOptionValue(SourceModeOption.NAME, false);
  processorConfig.setOptionValue(MavenizeOption.NAME, true);

  processor.execute(processorConfig);

  return outputPath;
}
private void deleteFiles(ArrayList<String> exportList) {
  // int count = 0;
  for (int i = 0; i < exportList.size(); i++) {
    File file = new File(exportList.get(i));
    boolean deleted;
    if (file.isDirectory()) {
      try {
        FileUtils.deleteDirectory(file);
        deleted = true;
      } catch (IOException e) {
        deleted = false;
        Logger.e(this.toString(), "ERROR: tried to delete a directory and failed");
      }
    } else {
      deleted = file.delete();
    }
    if (deleted) {
      String value = exportList.get(i).replace(currentDir + "/", "");
      for (int a = 0; a < tempItemList.size(); a++) {
        if (tempItemList.get(a).getName().equals(value)) {
          tempItemList.remove(a);
          // push the index past the list size to stop scanning once the item is removed
          a = tempItemList.size() + 2;
        }
      }
    }
  }
  removeUnusedVisualizationFiles(currentDir);
}
@Test
public void testFailWhenNoLocalStorageDir() throws Exception {
  File targetDir = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
  try {
    assertTrue(targetDir.mkdirs());

    if (!targetDir.setWritable(false, false)) {
      System.err.println(
          "Cannot execute 'testFailWhenNoLocalStorageDir' because cannot mark directory non-writable");
      return;
    }

    RocksDBStateBackend rocksDbBackend = new RocksDBStateBackend(TEMP_URI);
    rocksDbBackend.setDbStoragePath(targetDir.getAbsolutePath());

    try {
      rocksDbBackend.initializeForJob(getMockEnvironment(), "foobar", IntSerializer.INSTANCE);
    } catch (Exception e) {
      assertTrue(e.getMessage().contains("No local storage directories available"));
      assertTrue(e.getMessage().contains(targetDir.getAbsolutePath()));
    }
  } finally {
    //noinspection ResultOfMethodCallIgnored
    targetDir.setWritable(true, false);
    FileUtils.deleteDirectory(targetDir);
  }
}
@AfterMethod
public void tearDown() throws Throwable {
  if (host != null) {
    // some tests try to re-assign to this member and may fail
    host.destroy();
  }
  FileUtils.deleteDirectory(storageDir);
}
@Override
protected void setUp() throws Exception {
  super.setUp();
  mRootDir = getContext().getCacheDir().getPath() + "/FileWalkerTest/"; // 1
  FileUtils.deleteDirectory(new File(mRootDir));

  mFiles.add(mRootDir + "dir4"); // 2
  mFiles.add(mRootDir + "dir5/dir51"); // 3, 4
  mFiles.add(mRootDir + "dir6/dir61/dir611"); // 5, 6, 7
  mFiles.add(mRootDir + "dir6/dir62/dir621"); // 8, 9
  mFiles.add(mRootDir + "dir7"); // 10
  mFiles.add(mRootDir + "dir8"); // 11
  mFiles.add(mRootDir + "dir1/file1.1"); // 12, 13
  mFiles.add(mRootDir + "dir2/file2.1"); // 14, 15
  mFiles.add(mRootDir + "dir2/dir21/file21.1"); // 16, 17
  mFiles.add(mRootDir + "dir2/dir21/file21.2"); // 18
  mFiles.add(mRootDir + "dir3/dir31/dir311/file311.1"); // 19, 20, 21, 22
  mFiles.add(mRootDir + "dir3/dir31/dir311/file311.2"); // 23
  mFiles.add(mRootDir + "dir3/dir31/dir311/file311.3"); // 24
  mFiles.add(mRootDir + "file001.1"); // 25
  mFiles.add(mRootDir + "file001.2"); // 26
  mFiles.add(mRootDir + "file001.3"); // 27

  for (String path : mFiles) {
    final File file = new File(path);
    file.getParentFile().mkdirs();
    if (path.matches(".*file\\d+\\.\\d+$")) {
      file.createNewFile();
    } else {
      file.mkdir();
    }
  }
}
private void deleteDirectory() {
  try {
    FileUtils.deleteDirectory(new File(getDir()));
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}