@Before
public void setUp() {
    rs.setUp();
    FileUtils.removeRecursively(WORKING_ARCHIVE_DIR);
    FileUtils.removeRecursively(NEW_ARCHIVE_DIR);
    try {
        // Copy over the "existing" bit archive.
        TestFileUtils.copyDirectoryNonCVS(EXISTING_ARCHIVE_NAME, WORKING_ARCHIVE_DIR);
    } catch (IOFailure e) {
        throw new ExceptionInInitializerError(e);
    }
}
/** Remove files in FileUtils.getTempDir(). */
public void testThatMostTmpFilesGone() {
    File[] files = tmpdir.listFiles(new SvnFileFilter());
    for (File f : files) {
        FileUtils.removeRecursively(f);
    }
    File tmp = new File("tmp");
    File tmp1 = new File("tmp1");
    FileUtils.remove(tmp);
    FileUtils.remove(tmp1);
    for (String fileToDelete : dirsToClean) {
        File f = new File(fileToDelete);
        System.out.println("Ready to delete file " + f.getAbsolutePath());
        FileUtils.removeRecursively(f);
    }
}
/** @see TestCase#setUp() */
protected void setUp() throws Exception {
    super.setUp();
    processed = 0;
    warcBlaf = new BatchLocalFiles(new File[] {
            TestInfo.WARC_FILE1,
            TestInfo.WARC_FILE2,
            TestInfo.WARC_FILE3,
    });
    FileUtils.createDir(TestInfo.CDX_DIR);
}
/**
 * Create a new batch job that runs the loaded class.
 *
 * @param classFile the class file for the batch job we want to run.
 * @param arguments the arguments for the batch job. This can be null.
 * @throws ArgumentNotValid If the classFile is null.
 */
public LoadableFileBatchJob(File classFile, List<String> arguments) throws ArgumentNotValid {
    ArgumentNotValid.checkNotNull(classFile, "File classFile");
    fileContents = FileUtils.readBinaryFile(classFile);
    fileName = classFile.getName();
    if (arguments == null) {
        this.args = new ArrayList<String>();
    } else {
        this.args = arguments;
    }
    loadBatchJob();
}
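/*
 * Usage sketch, not taken from the original sources: running a
 * LoadableFileBatchJob over local files with BatchLocalFiles, mirroring the
 * warcBlaf.run(warcJob, os) pattern used in the tests in this section. The
 * class file name, input file, and output path below are hypothetical.
 */
public static void runBatchJobSketch() throws IOException {
    File classFile = new File("MyBatchJob.class"); // hypothetical compiled batch job
    LoadableFileBatchJob job =
            new LoadableFileBatchJob(classFile, Arrays.asList("arg1", "arg2"));
    BatchLocalFiles runner =
            new BatchLocalFiles(new File[] { new File("input.warc") }); // hypothetical input
    OutputStream os = new FileOutputStream(new File("batch-output.txt"));
    try {
        runner.run(job, os); // collects the job's output into the given stream
    } finally {
        os.close();
    }
}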
/**
 * Constructor. Start generating charts for data belonging to the given job.
 *
 * @param jobId a job id.
 */
StartedJobHistoryChartGen(long jobId) {
    super();
    this.outputFolder = new File(FileUtils.getTempDir() + File.separator + OUTPUT_REL_PATH);
    this.jobId = jobId;
    // Set the locale to the system default.
    this.locale = Locale.getDefault();
    genExec = new PeriodicTaskExecutor("ChartGen", new ChartGen(this), 0, GEN_INTERVAL);
}
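/*
 * Sketch only, based on how the constructor above reads (an assumption about
 * PeriodicTaskExecutor, whose source is not shown here): the ChartGen task
 * appears to run once immediately and then at a fixed interval. With the
 * standard library the scheduling would look roughly like this; the time unit
 * of GEN_INTERVAL is assumed to be seconds, which may not match the real class.
 */
private static ScheduledExecutorService scheduleChartGenSketch(
        Runnable chartGen, long intervalSeconds) {
    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    // Initial delay 0 matches the 0 passed to PeriodicTaskExecutor above.
    executor.scheduleAtFixedRate(chartGen, 0L, intervalSeconds, TimeUnit.SECONDS);
    return executor;
}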
public void testCombine() throws Exception {
    // Check that items are collected, null entries are ignored, and the
    // result is sorted.
    CDXIndexCache cache = new CDXIndexCache();
    Map<Long, File> files = new HashMap<Long, File>();
    files.put(4L, TestInfo.METADATA_FILE_4);
    files.put(3L, TestInfo.METADATA_FILE_3);
    Set<Long> requiredSet = new HashSet<Long>();
    requiredSet.add(3L);
    requiredSet.add(4L);
    cache.combine(files);
    File cacheFile = cache.getCacheFile(files.keySet());
    FileAsserts.assertFileNumberOfLines("Should have files 3 and 4",
            cacheFile,
            (int) FileUtils.countLines(TestInfo.METADATA_FILE_3)
                    + (int) FileUtils.countLines(TestInfo.METADATA_FILE_4));
    // Check that lines are sorted: the original metadata3 file has a
    // metadata line after the file 3 block 2 line.
    FileAsserts.assertFileContains("Must have lines sorted",
            "metadata file 3 block 2\nmetadata file 4 block 1", cacheFile);
}
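/*
 * Illustrative sketch, inferred from the assertions in the test above rather
 * than from the actual CDXIndexCache implementation: combining amounts to
 * concatenating the lines of all non-null index files and sorting the result.
 */
private static List<String> combineSketch(Collection<File> indexFiles) throws IOException {
    List<String> lines = new ArrayList<String>();
    for (File f : indexFiles) {
        if (f == null) {
            continue; // null entries are ignored
        }
        BufferedReader reader = new BufferedReader(new FileReader(f));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                lines.add(line);
            }
        } finally {
            reader.close();
        }
    }
    Collections.sort(lines); // the final cache file is sorted across all inputs
    return lines;
}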
public void testExtractCDXJobWithWarcfilesIncludeChecksum() throws Exception {
    warcJob = new ExtractCDXFromWarcJob(true);
    assertFalse("The to-be-generated file should not exist beforehand",
            TestInfo.CDX_FILE.exists());
    OutputStream os = new FileOutputStream(TestInfo.CDX_FILE);
    warcBlaf.run(warcJob, os);
    os.close();
    List<ExceptionOccurrence> exceptions = warcJob.getExceptions();
    for (ExceptionOccurrence eo : exceptions) {
        System.out.println("Exception: " + eo.getException());
    }
    // assertFalse(warcJob.getExceptions().isEmpty());
    System.out.println(FileUtils.readFile(TestInfo.CDX_FILE));
}
@After
public void tearDown() {
    HibernateUtil.getSession().getSessionFactory().close();
    FileUtils.removeRecursively(TestInfo.WORKING_DIR);
    if (oldClient != null) {
        System.setProperty(CommonSettings.ARC_REPOSITORY_CLIENT, oldClient);
    } else {
        System.setProperty(CommonSettings.ARC_REPOSITORY_CLIENT, "");
    }
    if (oldFileDir != null) {
        System.setProperty("settings.common.arcrepositoryClient.fileDir", oldFileDir);
    } else {
        System.setProperty("settings.common.arcrepositoryClient.fileDir", "");
    }
    rs.tearDown();
}
@Before
public void setUp() {
    rs.setUp();
    System.setProperty(WaybackSettings.HIBERNATE_HBM2DDL_AUTO, "create-drop");
    HibernateUtil.getSession().getSessionFactory().close();
    FileUtils.removeRecursively(TestInfo.WORKING_DIR);
    TestFileUtils.copyDirectoryNonCVS(TestInfo.ORIGINALS_DIR, TestInfo.WORKING_DIR);
    System.setProperty(CommonSettings.ARC_REPOSITORY_CLIENT,
            "dk.netarkivet.common.distribute.arcrepository.LocalArcRepositoryClient");
    System.setProperty("settings.common.arcrepositoryClient.fileDir",
            TestInfo.FILE_DIR.getAbsolutePath());
    System.setProperty(CommonSettings.REMOTE_FILE_CLASS,
            "dk.netarkivet.common.distribute.TestRemoteFile");
    assertTrue(ArcRepositoryClientFactory.getPreservationInstance()
            instanceof LocalArcRepositoryClient);
}
@Override
public void setUp() throws Exception {
    super.setUp();
    tmpdir = FileUtils.getTempDir();
}
@Override
public void tearDown() {
    FileUtils.removeRecursively(TestInfo.WORKING_DIR);
    FileUtils.remove(TestInfo.LOG_FILE);
}
@Override
public void setUp() {
    FileUtils.removeRecursively(TestInfo.WORKING_DIR);
    TestFileUtils.copyDirectoryNonCVS(TestInfo.ORIGINALS_DIR, TestInfo.WORKING_DIR);
    // System.out.println(DEDUP_CRAWL_STRING);
}
@After
public void tearDown() {
    FileUtils.removeRecursively(WORKING_ARCHIVE_DIR);
    FileUtils.removeRecursively(NEW_ARCHIVE_DIR);
    rs.tearDown();
}
@Override
public void run() {
    synchronized (gen) {
        gen.chartFile = null;
    }

    long jobId = gen.jobId;
    StartedJobInfo[] fullHistory = RunningJobsInfoDAO.getInstance().getFullJobHistory(jobId);

    LinkedList<Double> timeValues = new LinkedList<Double>();
    LinkedList<Double> progressValues = new LinkedList<Double>();
    LinkedList<Double> urlValues = new LinkedList<Double>();
    for (StartedJobInfo sji : fullHistory) {
        timeValues.add((double) sji.getElapsedSeconds());
        progressValues.add(sji.getProgress());
        urlValues.add((double) sji.getQueuedFilesCount());
    }

    // Refresh the history PNG image for the job.
    File pngFile = new File(gen.outputFolder, jobId + "-history.png");
    File newPngFile;
    try {
        newPngFile = File.createTempFile(jobId + "-history",
                "." + System.currentTimeMillis() + ".png");
    } catch (IOException e) {
        LOG.warn("Failed to create temp PNG file for job " + jobId);
        return;
    }

    long startTime = System.currentTimeMillis();
    gen.generatePngChart(
            newPngFile,
            CHART_RESOLUTION[0], CHART_RESOLUTION[1],
            null, // no chart title
            I18N.getString(gen.locale, "running.job.details.chart.legend.crawlTime"),
            new String[] {
                    I18N.getString(gen.locale, "running.job.details.chart.legend.progress"),
                    I18N.getString(gen.locale, "running.job.details.chart.legend.queuedUris")
            },
            NumberUtils.toPrimitiveArray(timeValues),
            new double[][] { new double[] { 0, 100 }, null },
            new double[][] {
                    NumberUtils.toPrimitiveArray(progressValues),
                    NumberUtils.toPrimitiveArray(urlValues)
            },
            new Color[] { Color.blue, Color.green.darker() },
            new String[] { "%", "" },
            false,
            Color.lightGray.brighter().brighter());
    long genTime = System.currentTimeMillis() - startTime;

    LOG.info("Generated history chart for job " + jobId + " in "
            + (genTime < TimeUtils.SECOND_IN_MILLIS
                    ? genTime + " ms"
                    : StringUtils.formatDuration(genTime / TimeUtils.SECOND_IN_MILLIS))
            + ".");

    synchronized (gen) {
        // Overwrite the old file, then delete the temp file.
        try {
            FileUtils.copyFile(newPngFile, pngFile);
            FileUtils.remove(newPngFile);
        } catch (IOFailure iof) {
            LOG.error("IOFailure while copying PNG file", iof);
        }
        gen.chartFile = pngFile;
    }
}
protected void tearDown() {
    FileUtils.removeRecursively(TestInfo.CDX_DIR);
}