@Override
public void writeXml2ResultFileBody(
    final File file, final List<Defect> allDefectList, final String sourceFileFullPath)
    throws IOException {
  // Pre-size the builder; use a long so a very large defect list cannot overflow the estimate.
  final long estimatedSize = (long) allDefectList.size() * 1024;
  final StringBuilder m = new StringBuilder((int) Math.min(estimatedSize, Integer.MAX_VALUE));

  m.append("\t<error filename=\"").append(sourceFileFullPath).append("\">\n");

  for (Defect defect : allDefectList) {
    m.append("\t\t<defect checker=\"").append(defect.getCheckerCode()).append("\">\n");

    for (Occurence o : defect.getOccurences()) {
      m.append("\t\t\t<occurence startLine=\"")
          .append(o.getStartLine())
          .append("\" ")
          .append("endLine=\"")
          .append(o.getEndLine())
          .append("\" ")
          .append("message=\"")
          .append(o.getMessage())
          .append("\" />\n");
    }

    m.append("\t\t</defect>\n");
  }

  m.append("\t</error>\n");
  Files.append(m.toString(), file, Charsets.UTF_8);
}
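The body writer above copies occurrence messages straight into XML attribute values without escaping, so a message containing quotes or angle brackets would break the output. A minimal sketch of attribute escaping with Guava's XmlEscapers, shown only as an illustration and not part of the original code:

import com.google.common.escape.Escaper;
import com.google.common.xml.XmlEscapers;

// Hypothetical helper (not in the original source): makes a value safe for use
// inside a double-quoted XML attribute.
private static String attr(final Object value) {
  final Escaper escaper = XmlEscapers.xmlAttributeEscaper();
  return escaper.escape(String.valueOf(value));
}

// Assumed usage inside the loop above: .append(attr(o.getMessage()))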
@Override
public void writeJsonResultFileBody(final File file, final List<Defect> allDefectList)
    throws IOException {
  for (Defect defect : allDefectList) {
    Files.append(defect.toJson(), file, Charsets.UTF_8);
    // Note: this also appends a separator after the last element, so whatever closes the
    // JSON array has to tolerate or strip the trailing ",\n".
    Files.append(",\n", file, Charsets.UTF_8);
  }
}
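Because the loop above writes ",\n" after every element, the last defect leaves a trailing comma inside the JSON array opened by writeJsonResultFilePrefix. A trailing-comma-free variant, a sketch only and not the project's actual implementation, emits the separator before every element except the first:

// Sketch (assumption): only correct when the whole body is written in a single call.
@Override
public void writeJsonResultFileBody(final File file, final List<Defect> allDefectList)
    throws IOException {
  boolean first = true;
  for (Defect defect : allDefectList) {
    if (!first) {
      Files.append(",\n", file, Charsets.UTF_8);
    }
    Files.append(defect.toJson(), file, Charsets.UTF_8);
    first = false;
  }
}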
@Override
public void putEntry(FileLike fileLike) throws IOException {
  String name = fileLike.getRelativePath();
  // Tracks unique entry names and avoids duplicates. This is, believe it or not, how
  // proguard seems to handle merging multiple -injars into a single -outjar.
  if (!containsEntry(fileLike)) {
    entryNames.add(name);
    outStream.putNextEntry(new ZipEntry(name));
    try (InputStream in = fileLike.getInput()) {
      ByteStreams.copy(in, outStream);
    }

    // Make sure FileLike#getSize didn't lie (or we forgot to call canPutEntry).
    DalvikStatsTool.Stats stats = dalvikStatsCache.getStats(fileLike);
    Preconditions.checkState(
        !isEntryTooBig(fileLike),
        "Putting entry %s (%s) exceeded maximum size of %s",
        name,
        stats.estimatedLinearAllocSize,
        linearAllocLimit);
    currentLinearAllocSize += stats.estimatedLinearAllocSize;
    currentMethodReferences.addAll(stats.methodReferences);

    String report =
        String.format(
            "%d %d %s\n", stats.estimatedLinearAllocSize, stats.methodReferences.size(), name);
    Files.append(report, reportFile, Charsets.UTF_8);
  }
}
public static void main(String[] args) throws IOException {
  Closer closer = Closer.create();

  // Copy a file line by line, letting the Closer manage both streams.
  File origin = new File("join_temp");
  File copy = new File("target_temp");
  try {
    BufferedReader reader = closer.register(new BufferedReader(new FileReader(origin)));
    BufferedWriter writer = closer.register(new BufferedWriter(new FileWriter(copy)));
    String line;
    while ((line = reader.readLine()) != null) {
      writer.write(line);
      writer.newLine(); // readLine() strips line terminators, so write them back
    }
  } catch (IOException e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
  Files.copy(origin, copy);

  // Moving / renaming a file
  File moved = new File("moved");
  Files.move(copy, moved);

  // Working with files as strings
  List<String> lines = Files.readLines(origin, Charsets.UTF_8);
  HashCode hashCode = Files.hash(origin, Hashing.md5());
  System.out.println(hashCode);

  // File write and append
  String hamlet = "To be, or not to be: that is the question\n";
  File write_and_append = new File("write_and_append");
  Files.write(hamlet, write_and_append, Charsets.UTF_8);
  Files.append(hamlet, write_and_append, Charsets.UTF_8);
  // write_and_append.deleteOnExit();
  Files.write("OverWrite the file", write_and_append, Charsets.UTF_8);

  // ByteSource / ByteSink
  ByteSource fileBytes = Files.asByteSource(write_and_append);
  byte[] readBytes = fileBytes.read(); // equivalent to Files.toByteArray(write_and_append)
  ByteSink fileByteSink = Files.asByteSink(write_and_append);
  fileByteSink.write(Files.toByteArray(write_and_append));

  BaseEncoding base64 = BaseEncoding.base64();
  System.out.println(base64.encode("123456".getBytes()));
}
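The Closer-managed copy loop above can also be written with Guava's CharSource/CharSink, which open and close the underlying streams themselves. A short sketch using the same origin and copy files:

// Stream management is handled by Guava; no Closer or explicit readers needed.
Files.asCharSource(origin, Charsets.UTF_8).copyTo(Files.asCharSink(copy, Charsets.UTF_8));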
public void writeDrawing(Container container) {
  try {
    Files.append(
        container.toString(), file, Charset.forName(DrawingFileConstants.CHARSET_UTF8));
  } catch (IOException e) {
    e.printStackTrace();
  }
}
public void append(String... lines) throws IOException {
  StringBuilder buffer = new StringBuilder();
  for (String line : lines) {
    buffer.append(line).append("\n");
  }
  Files.append(buffer, policyFile, Charsets.UTF_8);
}
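The same result can be had with Guava's Joiner; a sketch, assuming the trailing newline of the loop-based version should be kept:

public void append(String... lines) throws IOException {
  // Joiner inserts "\n" between lines; the final "\n" preserves the trailing newline.
  Files.append(Joiner.on('\n').join(lines) + "\n", policyFile, Charsets.UTF_8);
}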
/**
 * Writes content to the bomFile, creating intermediate directories.
 *
 * @param content to write
 * @throws IOException when the target directory could not be created or the content could not
 *     be written.
 */
void write(final String content) throws IOException {
  final File bomFile = calculateBillOfMaterialsFile();
  final File parentDirectory = bomFile.getParentFile();

  if (!createParentDirectory(parentDirectory)) {
    throw new IOException("Could not create parent directory for " + bomFile);
  }

  Files.append(content, bomFile, Charsets.UTF_8);
}
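The createParentDirectory helper is not shown here. Guava's own Files.createParentDirs covers the same need and throws an IOException on failure, so a variant of the method could lean on it; a sketch under that assumption:

void write(final String content) throws IOException {
  final File bomFile = calculateBillOfMaterialsFile();
  // Assumption: the unshown createParentDirectory helper can be replaced by Guava's
  // Files.createParentDirs, which throws IOException if the directories cannot be created.
  Files.createParentDirs(bomFile);
  Files.append(content, bomFile, Charsets.UTF_8);
}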
public void logTime(final String method, final int itemsCount) {
  try {
    end();
    StringBuilder value = prepareLog(method, itemsCount, time);
    Files.append(value, LOG_FILE, Charset.defaultCharset());
  } catch (IOException e) {
    logger.error("Error during log time. ", e);
  }
}
@Before
public void setUp() throws Exception {
  repositoryDirectory = new File("target/localrepository");
  repositoryDirectory.mkdirs();
  repository = new LocalRepositoryImpl(repositoryDirectory);
  File artifact =
      new File(repositoryDirectory, "com/bygg/bygg-test-artifact/1.2/bygg-test-artifact-1.2.jar");
  Files.createParentDirs(artifact);
  Files.append("lite data", artifact, Charset.forName("UTF-8"));
}
@Override
public void writeXml2ResultFilePrefix(final File file) throws IOException {
  Files.append(
      "<dexter-result created=\"" + DexterUtil.currentDateTime() + "\">\n",
      file,
      Charsets.UTF_8);
}
@Override
public void writeJsonResultFilePrefix(final File file) throws IOException {
  Files.append("[\n", file, Charsets.UTF_8);
}
@Override
public void writeXml2ResultFilePostfix(final File file) throws IOException {
  Files.append("</dexter-result>", file, Charsets.UTF_8);
}
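The prefix, body, and postfix writers compose into one <dexter-result> document per run. An illustrative call order; writer, resultFile, the defect lists, and the paths are hypothetical names, not taken from the original source:

writer.writeXml2ResultFilePrefix(resultFile);
writer.writeXml2ResultFileBody(resultFile, defectsOfFileA, "/path/to/FileA.java");
writer.writeXml2ResultFileBody(resultFile, defectsOfFileB, "/path/to/FileB.java");
writer.writeXml2ResultFilePostfix(resultFile);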
public static void appendToTextFile(String fileName, String content) throws IOException {
  Files.append(content, new File(fileName), Charset.defaultCharset());
}
public static void append(CharSequence from, File to, Charset charset) throws IOException {
  com.google.common.io.Files.append(from, to, charset);
}
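Recent Guava releases deprecate Files.append(CharSequence, File, Charset) in favor of a CharSink opened in append mode; the same wrapper on that API would look roughly like this:

public static void append(CharSequence from, File to, Charset charset) throws IOException {
  // A CharSink in append mode writes the sequence to the end of the file.
  com.google.common.io.Files.asCharSink(to, charset, com.google.common.io.FileWriteMode.APPEND)
      .write(from);
}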