void test(String[] opts, String className) throws Exception {
    count++;
    System.err.println("Test " + count + " " + Arrays.asList(opts) + " " + className);

    Path testSrcDir = Paths.get(System.getProperty("test.src"));
    Path testClassesDir = Paths.get(System.getProperty("test.classes"));
    Path classes = Paths.get("classes." + count);
    classes.createDirectory();

    Context ctx = new Context();
    PathFileManager fm = new JavacPathFileManager(ctx, true, null);
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    List<String> options = new ArrayList<String>();
    options.addAll(Arrays.asList(opts));
    options.addAll(Arrays.asList("-verbose", "-XDverboseCompilePolicy", "-d", classes.toString()));
    Iterable<? extends JavaFileObject> compilationUnits =
        fm.getJavaFileObjects(testSrcDir.resolve(className + ".java"));
    StringWriter sw = new StringWriter();
    PrintWriter out = new PrintWriter(sw);
    JavaCompiler.CompilationTask t =
        compiler.getTask(out, fm, null, options, null, compilationUnits);
    boolean ok = t.call();
    System.err.println(sw.toString());
    if (!ok) {
        throw new Exception("compilation failed");
    }

    File expect = new File("classes." + count + "/" + className + ".class");
    if (!expect.exists())
        throw new Exception("expected file not found: " + expect);
    long expectedSize = new File(testClassesDir.toString(), className + ".class").length();
    long actualSize = expect.length();
    if (expectedSize != actualSize)
        throw new Exception("wrong size found: " + actualSize + "; expected: " + expectedSize);
}
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer.getTempTaskOutputPath(tContext));

    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);

    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer.getTempTaskOutputPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());

    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME).toString());
    assertFalse("job temp dir still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString()).listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
private InputStream OpenMultiplePartsWithOffset(FileSystem fs, Path pt, long offset)
        throws IOException {
    RemoteIterator<LocatedFileStatus> rit = fs.listFiles(pt, false);
    Vector<FSDataInputStream> fileHandleList = new Vector<FSDataInputStream>();
    while (rit.hasNext()) {
        Path path = rit.next().getPath();
        String filename =
            path.toString().substring(path.getParent().toString().length(), path.toString().length());

        if (filename.startsWith("/part-")) {
            long filesize = fs.getFileStatus(path).getLen();
            if (offset < filesize) {
                FSDataInputStream handle = fs.open(path);
                if (offset > 0) {
                    handle.seek(offset);
                }
                fileHandleList.add(handle);
            }
            offset -= filesize;
        }
    }
    if (fileHandleList.size() == 1) {
        return fileHandleList.get(0);
    } else if (fileHandleList.size() > 1) {
        Enumeration<FSDataInputStream> enu = fileHandleList.elements();
        return new SequenceInputStream(enu);
    } else {
        System.err.println("Error, no source file loaded. Run genSeedDataset.sh first!");
        return null;
    }
}
private static void addFolder(FileSystem fs, Path p, JsonArray succeeded, JsonArray failed) {
    try {
        if (fs == null) return;
        for (FileStatus file : fs.listStatus(p)) {
            Path pfs = file.getPath();
            if (file.isDir()) {
                addFolder(fs, pfs, succeeded, failed);
            } else {
                Key k = Key.make(pfs.toString());
                long size = file.getLen();
                Value val = null;
                if (pfs.getName().endsWith(Extensions.JSON)) {
                    JsonParser parser = new JsonParser();
                    JsonObject json = parser.parse(new InputStreamReader(fs.open(pfs))).getAsJsonObject();
                    JsonElement v = json.get(Constants.VERSION);
                    if (v == null) throw new RuntimeException("Missing version");
                    JsonElement type = json.get(Constants.TYPE);
                    if (type == null) throw new RuntimeException("Missing type");
                    Class c = Class.forName(type.getAsString());
                    OldModel model = (OldModel) c.newInstance();
                    model.fromJson(json);
                } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file?
                    FSDataInputStream s = fs.open(pfs);
                    int sz = (int) Math.min(1L << 20, size); // Read up to the 1st meg
                    byte[] mem = MemoryManager.malloc1(sz);
                    s.readFully(mem);
                    // Convert to a ValueArray (hope it fits in 1Meg!)
                    ValueArray ary = new ValueArray(k, 0).read(new AutoBuffer(mem));
                    val = new Value(k, ary, Value.HDFS);
                } else if (size >= 2 * ValueArray.CHUNK_SZ) {
                    // ValueArray byte wrapper over a large file
                    val = new Value(k, new ValueArray(k, size), Value.HDFS);
                } else {
                    val = new Value(k, (int) size, Value.HDFS); // Plain Value
                    val.setdsk();
                }
                DKV.put(k, val);
                Log.info("PersistHdfs: DKV.put(" + k + ")");
                JsonObject o = new JsonObject();
                o.addProperty(Constants.KEY, k.toString());
                o.addProperty(Constants.FILE, pfs.toString());
                o.addProperty(Constants.VALUE_SIZE, file.getLen());
                succeeded.add(o);
            }
        }
    } catch (Exception e) {
        Log.err(e);
        JsonObject o = new JsonObject();
        o.addProperty(Constants.FILE, p.toString());
        o.addProperty(Constants.ERROR, e.getMessage());
        failed.add(o);
    }
}
/**
 * Add a file path to the current set of classpath entries. It adds the file to the cache as well.
 *
 * @param file Path of the file to be added
 * @param conf Configuration that contains the classpath setting
 */
public static void addFileToClassPath(Path file, Configuration conf) throws IOException {
    String classpath = conf.get("mapred.job.classpath.files");
    conf.set(
        "mapred.job.classpath.files",
        classpath == null
            ? file.toString()
            : classpath + System.getProperty("path.separator") + file.toString());
    URI uri = file.makeQualified(file.getFileSystem(conf)).toUri();
    addCacheFile(uri, conf);
}
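// Hedged usage sketch (not part of the original source): one way the addFileToClassPath
// helper above might be invoked during job setup. The jar location is a hypothetical example.
public static void exampleAddJarToClassPath(Configuration conf) throws IOException {
    Path dependencyJar = new Path("/user/hadoop/lib/my-dependency.jar"); // hypothetical path
    addFileToClassPath(dependencyJar, conf);
    // conf now lists the jar under "mapred.job.classpath.files" and its qualified
    // URI has been registered as a cache file via addCacheFile().
}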
// the method which actually copies the caches locally and unjars/unzips them
// and does chmod for the files
private static Path localizeCache(
        Configuration conf, URI cache, long confFileStamp, CacheStatus cacheStatus, boolean isArchive)
        throws IOException {
    FileSystem fs = getFileSystem(cache, conf);
    FileSystem localFs = FileSystem.getLocal(conf);
    Path parchive = null;
    if (isArchive) {
        parchive = new Path(cacheStatus.localizedLoadPath, new Path(cacheStatus.localizedLoadPath.getName()));
    } else {
        parchive = cacheStatus.localizedLoadPath;
    }
    if (!localFs.mkdirs(parchive.getParent())) {
        throw new IOException("Mkdirs failed to create directory " + cacheStatus.localizedLoadPath.toString());
    }
    String cacheId = cache.getPath();
    fs.copyToLocalFile(new Path(cacheId), parchive);
    if (isArchive) {
        String tmpArchive = parchive.toString().toLowerCase();
        File srcFile = new File(parchive.toString());
        File destDir = new File(parchive.getParent().toString());
        if (tmpArchive.endsWith(".jar")) {
            RunJar.unJar(srcFile, destDir);
        } else if (tmpArchive.endsWith(".zip")) {
            FileUtil.unZip(srcFile, destDir);
        } else if (isTarFile(tmpArchive)) {
            FileUtil.unTar(srcFile, destDir);
        }
        // else do nothing and keep the copied file in the dir as it is
    }
    long cacheSize = FileUtil.getDU(new File(parchive.getParent().toString()));
    cacheStatus.size = cacheSize;
    addCacheInfoUpdate(cacheStatus);

    // do chmod here
    try {
        // Setting recursive permission to grant everyone read and execute
        Path localDir = new Path(cacheStatus.localizedBaseDir, cacheStatus.uniqueParentDir);
        LOG.info("Doing chmod on localdir :" + localDir);
        FileUtil.chmod(localDir.toString(), "ugo+rx", true);
    } catch (InterruptedException e) {
        LOG.warn("Exception in chmod " + e.toString());
    }

    // update cacheStatus to reflect the newly cached file
    cacheStatus.mtime = getTimestamp(conf, cache);
    return cacheStatus.localizedLoadPath;
}
/** Copy FileSystem files to local files. */
public static boolean copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf)
        throws IOException {
    if (srcFS.getFileStatus(src).isDir()) {
        if (!dst.mkdirs()) {
            return false;
        }
        FileStatus contents[] = srcFS.listStatus(src);
        for (int i = 0; i < contents.length; i++) {
            copy(srcFS, contents[i].getPath(), new File(dst, contents[i].getPath().getName()), deleteSource, conf);
        }
    } else if (srcFS.isFile(src)) {
        InputStream in = srcFS.open(src);
        IOUtils.copyBytes(in, new FileOutputStream(dst), conf);
    } else {
        throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
        return srcFS.delete(src, true);
    } else {
        return true;
    }
}
// Mostly for setting up the symlinks. Note that when we set up the distributed
// cache, we didn't create the symlinks. This is done on a per-task basis
// by the currently executing task.
public static void setupWorkDir(JobConf conf) throws IOException {
    File workDir = new File(".").getAbsoluteFile();
    FileUtil.fullyDelete(workDir);
    if (DistributedCache.getSymlink(conf)) {
        URI[] archives = DistributedCache.getCacheArchives(conf);
        URI[] files = DistributedCache.getCacheFiles(conf);
        Path[] localArchives = DistributedCache.getLocalCacheArchives(conf);
        Path[] localFiles = DistributedCache.getLocalCacheFiles(conf);
        if (archives != null) {
            for (int i = 0; i < archives.length; i++) {
                String link = archives[i].getFragment();
                if (link != null) {
                    link = workDir.toString() + Path.SEPARATOR + link;
                    File flink = new File(link);
                    if (!flink.exists()) {
                        FileUtil.symLink(localArchives[i].toString(), link);
                    }
                }
            }
        }
        if (files != null) {
            for (int i = 0; i < files.length; i++) {
                String link = files[i].getFragment();
                if (link != null) {
                    link = workDir.toString() + Path.SEPARATOR + link;
                    File flink = new File(link);
                    if (!flink.exists()) {
                        FileUtil.symLink(localFiles[i].toString(), link);
                    }
                }
            }
        }
    }
    File jobCacheDir = null;
    if (conf.getJar() != null) {
        jobCacheDir = new File(new Path(conf.getJar()).getParent().toString());
    }

    // create symlinks for all the files in the job cache dir in the current
    // working dir for streaming
    try {
        DistributedCache.createAllSymlink(conf, jobCacheDir, workDir);
    } catch (IOException ie) {
        // Do not exit even if symlinks have not been created.
        LOG.warn(StringUtils.stringifyException(ie));
    }

    // add java.io.tmpdir given by mapred.child.tmp
    String tmp = conf.get("mapred.child.tmp", "./tmp");
    Path tmpDir = new Path(tmp);

    // if the temp directory path is not absolute, prepend it with workDir.
    if (!tmpDir.isAbsolute()) {
        tmpDir = new Path(workDir.toString(), tmp);
        FileSystem localFs = FileSystem.getLocal(conf);
        if (!localFs.mkdirs(tmpDir) && !localFs.getFileStatus(tmpDir).isDir()) {
            throw new IOException("Mkdirs failed to create " + tmpDir.toString());
        }
    }
}
public static void main(String[] args) throws Exception {
    Path dir1 = TestUtil.createTemporaryDirectory();
    try {
        // Same directory
        testCopyFileToFile(dir1, dir1, TestUtil.supportsLinks(dir1));
        testMove(dir1, dir1, TestUtil.supportsLinks(dir1));

        // Different directories. Use test.dir if possible as it might be
        // a different volume/file system and so improve test coverage.
        String testDir = System.getProperty("test.dir", ".");
        Path dir2 = TestUtil.createTemporaryDirectory(testDir);
        try {
            boolean testSymbolicLinks = TestUtil.supportsLinks(dir1) && TestUtil.supportsLinks(dir2);
            testCopyFileToFile(dir1, dir2, testSymbolicLinks);
            testMove(dir1, dir2, testSymbolicLinks);
        } finally {
            TestUtil.removeAll(dir2);
        }

        // Target is location associated with custom provider
        Path dir3 = PassThroughFileSystem.create().getPath(dir1.toString());
        testCopyFileToFile(dir1, dir3, false);
        testMove(dir1, dir3, false);

        // Test copy(InputStream,Path) and copy(Path,OutputStream)
        testCopyInputStreamToFile();
        testCopyFileToOuputStream();
    } finally {
        TestUtil.removeAll(dir1);
    }
}
private static void addFolder2(FileSystem fs, Path p, ArrayList<String> keys, ArrayList<String> failed) {
    try {
        if (fs == null) return;
        Futures futures = new Futures();
        for (FileStatus file : fs.listStatus(p)) {
            Path pfs = file.getPath();
            if (file.isDir()) {
                addFolder2(fs, pfs, keys, failed);
            } else {
                long size = file.getLen();
                Key res;
                if (pfs.getName().endsWith(Extensions.JSON)) {
                    throw H2O.unimpl();
                } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file?
                    throw H2O.unimpl();
                } else {
                    Key k = null;
                    keys.add((k = HdfsFileVec.make(file, futures)).toString());
                    Log.info("PersistHdfs: DKV.put(" + k + ")");
                }
            }
        }
    } catch (Exception e) {
        Log.err(e);
        failed.add(p.toString());
    }
}
public static FileDesc loadFile(Path root, Path file, int blocSize)
        throws NoSuchAlgorithmException, FileNotFoundException, IOException {
    MessageDigest md = MessageDigest.getInstance("SHA-512");
    MessageDigest fileMd = MessageDigest.getInstance("SHA-512");
    FileDesc desc = new FileDesc(file.toString(), null, null);
    List<Bloc> list = new ArrayList<Bloc>();
    try (FileInputStream fis = new FileInputStream(root.resolve(file).toString())) {
        byte[] buf = new byte[blocSize];
        byte[] h;
        int s;
        while ((s = fis.read(buf)) != -1) {
            int c;
            // top up the buffer if the first read came back short
            while (s < buf.length && (c = fis.read()) != -1) buf[s++] = (byte) c;
            fileMd.update(buf, 0, s);
            // padding
            byte p = 0;
            while (s < buf.length) buf[s++] = ++p;
            h = md.digest(buf);
            Bloc bloc = new Bloc(RollingChecksum.compute(buf), new Hash(h));
            list.add(bloc);
        }
        h = fileMd.digest();
        desc.fileHash = new Hash(h);
        desc.blocs = list.toArray(new Bloc[0]);
    }
    return desc;
}
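// Hedged usage sketch (not part of the original source): calling loadFile above to split a
// file into fixed-size blocs and report the per-file hash. The 1024-byte bloc size and the
// sample path are illustrative assumptions; fileHash and blocs are the fields set by loadFile.
public static void exampleLoadFile(Path root) throws Exception {
    FileDesc desc = loadFile(root, Paths.get("data/sample.bin"), 1024); // hypothetical file
    System.out.println("blocs: " + desc.blocs.length + ", file hash: " + desc.fileHash);
}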
/**
 * Check that the user has properly selected a target node for this tool
 *
 * <p>
 *
 * @return The phase progress message or <CODE>null</CODE> to abort early.
 * @throws PipelineException If unable to validate the given user input.
 */
public synchronized String collectPhaseInput() throws PipelineException {
    if ((pPrimary == null) || (pSelected.size() != 1))
        throw new PipelineException("Please select only one node.");

    // folder = nPath.getParent();
    // assetName = nPath.getName();

    Path nPath = new Path(pPrimary);
    nPath = nPath.getParentPath().getParentPath();
    if (!nPath.toString().matches(assetPattern))
        throw new PipelineException(
            "This tool only works on assets. " + nPath.toString() + " " + nPath.toOsString());

    // if(!pPrimary.matches(matPattern))
    //     throw new PipelineException("This tool will only work on a lgt node!");

    File errFile;
    try {
        errFile = File.createTempFile("MaterialsCheckOut", ".err", PackageInfo.sTempPath.toFile());
        err = new PrintWriter(errFile);
        FileCleaner.add(errFile);
    } catch (IOException e) {
        e.printStackTrace();
    }

    NodeStatus status = pSelected.get(pPrimary);
    NodeID nodeID = status.getNodeID();
    pUser = nodeID.getAuthor();
    pView = nodeID.getView();
    OverallNodeState state = status.getHeavyDetails().getOverallNodeState();
    JToolDialog tool = new JToolDialog("MaterialsCheckOut", new JPanel(), "Continue");

    if (!state.equals(OverallNodeState.Identical)) {
        JConfirmDialog dialog =
            new JConfirmDialog(
                tool,
                "This node is different from the checked-in node. Do you want to continue with this check out?");
        dialog.setVisible(true);
        if (!dialog.wasConfirmed()) {
            return null;
        } // end if
    } // end if

    return "...Sing Hallelujah!";
}
public static void addFolder2(Path p, ArrayList<String> keys, ArrayList<String> failed) throws IOException {
    FileSystem fs = FileSystem.get(p.toUri(), PersistHdfs.CONF);
    if (!fs.exists(p)) {
        failed.add("Path does not exist: '" + p.toString() + "'");
        return;
    }
    addFolder2(fs, p, keys, failed);
}
protected boolean isPathAcceptable(final Path pPath1) {
    String path = pPath1.toString().toLowerCase();
    if (path.startsWith("part-r-")) return true;
    String extension = getExtension();
    if (extension != null && path.endsWith(extension.toLowerCase())) return true;
    if (extension != null && path.endsWith(extension.toLowerCase() + ".gz")) return true;
    //noinspection SimplifiableIfStatement,RedundantIfStatement
    if (extension == null) return true;
    return false;
}
public static String readChild(Path parentDir, String childFileName) {
    try {
        final Path newFilePath = path(parentDir.toString(), childFileName);
        return read(newFilePath);
    } catch (Exception ex) {
        return Exceptions.handle(String.class, ex);
    }
}
public static void writeChild(Path parentDir, String childFileName, String childContents) {
    try {
        final Path newFilePath = path(parentDir.toString(), childFileName);
        write(newFilePath, childContents);
    } catch (Exception ex) {
        Exceptions.handle(ex);
    }
}
public static void addFolder(Path p, JsonArray succeeded, JsonArray failed) throws IOException {
    FileSystem fs = FileSystem.get(p.toUri(), PersistHdfs.CONF);
    if (!fs.exists(p)) {
        JsonObject o = new JsonObject();
        o.addProperty(Constants.FILE, p.toString());
        o.addProperty(Constants.ERROR, "Path does not exist!");
        failed.add(o);
        return;
    }
    addFolder(fs, p, succeeded, failed);
}
public static URL toURL(String str) {
    try {
        if (!Path.uriSchemeSpecified(str)) {
            Path cur = currentPath();
            Path path = cur.resolve(str);
            if (path.isAbsolute()) return path.toURL();
            str = path.toString();
        }
        return new URL(str);
    } catch (Throwable ex) {
        throw WrappedException.wrapIfNeeded(ex);
    }
}
public static void recursePath(Configuration conf, Path path, Job job) {
    try {
        FileSystem fs = path.getFileSystem(conf);
        FileStatus[] fstats = fs.listStatus(path);
        if (fstats != null) {
            for (FileStatus f : fstats) {
                Path p = f.getPath();
                if (fs.isFile(p)) {
                    // connection times out otherwise
                    System.err.println("file:" + p.toString());
                    FileInputFormat.addInputPath(job, p);
                } else {
                    System.err.println("dir:" + p.toString());
                    recursePath(conf, p, job);
                }
            }
        }
    } catch (IOException e) {
        // shouldn't be here
        throw new RuntimeException(e);
    }
}
/** Copy files between FileSystems. */
public static boolean copy(
        FileSystem srcFS,
        Path src,
        FileSystem dstFS,
        Path dst,
        boolean deleteSource,
        boolean overwrite,
        Configuration conf)
        throws IOException {
    dst = checkDest(src.getName(), dstFS, dst, overwrite);

    if (srcFS.getFileStatus(src).isDir()) {
        checkDependencies(srcFS, src, dstFS, dst);
        if (!dstFS.mkdirs(dst)) {
            return false;
        }
        FileStatus contents[] = srcFS.listStatus(src);
        for (int i = 0; i < contents.length; i++) {
            copy(
                srcFS,
                contents[i].getPath(),
                dstFS,
                new Path(dst, contents[i].getPath().getName()),
                deleteSource,
                overwrite,
                conf);
        }
    } else if (srcFS.isFile(src)) {
        InputStream in = null;
        OutputStream out = null;
        try {
            in = srcFS.open(src);
            out = dstFS.create(dst, overwrite);
            IOUtils.copyBytes(in, out, conf, true);
        } catch (IOException e) {
            IOUtils.closeStream(out);
            IOUtils.closeStream(in);
            throw e;
        }
    } else {
        throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
        return srcFS.delete(src, true);
    } else {
        return true;
    }
}
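// Hedged usage sketch (not part of the original source): copying a directory tree between
// file systems with the copy() helper above. The source and destination paths are assumptions.
public static void exampleCopy(Configuration conf) throws IOException {
    FileSystem srcFS = FileSystem.get(conf);
    FileSystem dstFS = FileSystem.getLocal(conf);
    Path src = new Path("/data/input");       // hypothetical source directory
    Path dst = new Path("/tmp/input-copy");   // hypothetical local destination
    boolean ok = copy(srcFS, src, dstFS, dst, false /* deleteSource */, true /* overwrite */, conf);
    if (!ok) {
        throw new IOException("copy failed for " + src);
    }
}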
public static Path createChildDirectory(Path parentDir, String childDir) {
    try {
        final Path newDir = path(parentDir.toString(), childDir);
        if (!Files.exists(newDir)) {
            Files.createDirectory(newDir);
        }
        return newDir;
    } catch (Exception ex) {
        return Exceptions.handle(Path.class, ex);
    }
}
@Override
public void run() {
    for (CacheStatus lcacheStatus : toBeDeletedCache) {
        synchronized (lcacheStatus) {
            Path fullUniqueParentDir = new Path(lcacheStatus.localizedBaseDir, lcacheStatus.uniqueParentDir);
            try {
                LOG.info("Deleting local cached path: " + fullUniqueParentDir.toString());
                deleteLocalPath(asyncDiskService, fs, fullUniqueParentDir);
                // decrement the size of the cache from baseDirSize
                deleteCacheInfoUpdate(lcacheStatus);
                LOG.info("Removed cache " + lcacheStatus.localizedLoadPath);
            } catch (IOException e) {
                LOG.warn("Error when deleting " + fullUniqueParentDir, e);
            }
        }
    }
}
private void laden(Path saveName) throws IOException {
    Properties prop = new Properties();
    FileInputStream in = new FileInputStream(saveName.toString());
    prop.load(in);
    for (int i = 0; prop.containsKey(String.format("quellMenu%d", i)); i++)
        quellListModel.addElement(
            new ListItem(
                Paths.get(prop.getProperty(String.format("quellMenu%d", i))),
                Paths.get(prop.getProperty(String.format("quellMenu%d", i)))));
    for (int i = 0; prop.containsKey(String.format("zielMenu%d", i)); i++)
        zielListModel.addElement(
            new ListItem(
                Paths.get(prop.getProperty(String.format("zielMenu%d", i))),
                Paths.get(prop.getProperty(String.format("zielMenu%d", i)))));
    in.close();
}
private int getNumberOfItems(Path quellOrdner) {
    int retValue = 0;
    try {
        DirectoryStream<Path> qstream = Files.newDirectoryStream(quellOrdner);
        for (Path qfile : qstream) {
            if (Files.isDirectory(qfile)) {
                getNumberOfItems(Paths.get(quellOrdner.toString() + "/" + qfile.getFileName()));
            }
            i++;
        }
        qstream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    retValue = i;
    return retValue;
}
private static void createSymlink(
        Configuration conf,
        URI cache,
        CacheStatus cacheStatus,
        boolean isArchive,
        Path currentWorkDir,
        boolean honorSymLinkConf)
        throws IOException {
    boolean doSymlink = honorSymLinkConf && DistributedCache.getSymlink(conf);
    if (cache.getFragment() == null) {
        doSymlink = false;
    }
    String link = currentWorkDir.toString() + Path.SEPARATOR + cache.getFragment();
    File flink = new File(link);
    if (doSymlink) {
        if (!flink.exists()) {
            FileUtil.symLink(cacheStatus.localizedLoadPath.toString(), link);
        }
    }
}
private static List<String> listFromDefaultClassLoader(String s) {
    List<String> result = new ArrayList<>();
    String newPath = s;
    final List<Path> resources = Classpaths.resources(IO.class, newPath);
    for (Path resourcePath : resources) {
        if (Files.isDirectory(resourcePath)) {
            result.addAll(IO.list(resourcePath));
        } else {
            result.add(resourcePath.toString());
        }
    }
    // for (int index = 0; index < result.size(); index++) {
    //     result.set(index, "classpath:" + result.get(index));
    // }
    return result;
}
/**
 * Open a file for reading.
 *
 * @param src !null path - probably of an existing file
 * @return !null stream
 */
@Override
public InputStream openFileForRead(Path src) {
    if (isRunningAsUser()) {
        return super.openFileForRead(src);
    }
    String hdfsPath = src.toString();
    if (isFileNameLocal(hdfsPath)) {
        try {
            return new FileInputStream(hdfsPath); // better be local
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
    }
    if (true) throw new UnsupportedOperationException("Fix This"); // ToDo
    final FileSystem fs = getDFS();
    try {
        return fs.open(src);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
@SuppressWarnings("unchecked") public void testCommitter() throws Exception { JobConf job = new JobConf(); setConfForFileOutputCommitter(job); JobContext jContext = new JobContextImpl(job, taskID.getJobID()); TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID); FileOutputCommitter committer = new FileOutputCommitter(); FileOutputFormat.setWorkOutputPath(job, committer.getTempTaskOutputPath(tContext)); committer.setupJob(jContext); committer.setupTask(tContext); String file = "test.txt"; // A reporter that does nothing Reporter reporter = Reporter.NULL; // write output FileSystem localFs = FileSystem.getLocal(job); TextOutputFormat theOutputFormat = new TextOutputFormat(); RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs, job, file, reporter); writeOutput(theRecordWriter, reporter); // do commit committer.commitTask(tContext); committer.commitJob(jContext); // validate output File expectedFile = new File(new Path(outDir, file).toString()); StringBuffer expectedOutput = new StringBuffer(); expectedOutput.append(key1).append('\t').append(val1).append("\n"); expectedOutput.append(val1).append("\n"); expectedOutput.append(val2).append("\n"); expectedOutput.append(key2).append("\n"); expectedOutput.append(key1).append("\n"); expectedOutput.append(key2).append('\t').append(val2).append("\n"); String output = UtilsForTests.slurp(expectedFile); assertEquals(output, expectedOutput.toString()); FileUtil.fullyDelete(new File(outDir.toString())); }
private void speichern(Path saveName) {
    Properties prop = new Properties();
    if (!quellListModel.isEmpty())
        for (int i = 0; i < quellListModel.getSize(); i++)
            prop.setProperty(
                String.format("quellMenu%d", i),
                quellListModel.getElementAt(i).getValueMember().toString());
    if (!zielListModel.isEmpty())
        for (int i = 0; i < zielListModel.getSize(); i++)
            prop.setProperty(
                String.format("zielMenu%d", i),
                zielListModel.getElementAt(i).getValueMember().toString());
    try {
        FileOutputStream out = new FileOutputStream(saveName.toString());
        prop.store(out, null);
        out.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
public void downloadFile(String relPath) {
    VOSync.debug("Downloading file from storage: " + relPath);
    Path filePath = FileSystems.getDefault().getPath(startDir.toString(), relPath.substring(1));
    try {
        WatchKey key = filePath.getParent().register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
        keys.remove(key);
        key.cancel();

        FileOutputStream outp = new FileOutputStream(filePath.toFile());
        DropboxFileInfo info = api.getFile(relPath, null, outp, null);
        outp.close();

        key = filePath.getParent().register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
        keys.put(key, filePath.getParent());

        MetaHandler.setFile(relPath, filePath.toFile(), info.getMetadata().rev);
    } catch (IOException ex) {
        logger.error("Error downloading file " + relPath + ": " + ex.getMessage());
    } catch (DropboxException ex) {
        ex.printStackTrace();
        logger.error("Error downloading file " + relPath + ": " + ex.getMessage());
    }
}