@Test
public void createByteSourceFromFileTest() throws Exception {
  File f1 = new File("src/main/resources/sample.pdf");
  ByteSource byteSource = Files.asByteSource(f1);
  byte[] readBytes = byteSource.read();
  assertThat(readBytes, is(Files.toByteArray(f1)));
}
public static void main(String[] args) throws IOException {
  Closer closer = Closer.create();

  // copy a file
  File origin = new File("join_temp");
  File copy = new File("target_temp");
  try {
    BufferedReader reader = new BufferedReader(new FileReader("join_temp"));
    BufferedWriter writer = new BufferedWriter(new FileWriter("target_temp"));
    closer.register(reader);
    closer.register(writer);
    String line;
    while ((line = reader.readLine()) != null) {
      writer.write(line);
    }
  } catch (IOException e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
  Files.copy(origin, copy);

  // move / rename the file
  File moved = new File("moved");
  Files.move(copy, moved);

  // work with the file contents as strings
  List<String> lines = Files.readLines(origin, Charsets.UTF_8);
  HashCode hashCode = Files.hash(origin, Hashing.md5());
  System.out.println(hashCode);

  // write to and append to a file
  String hamlet = "To be, or not to be it is a question\n";
  File write_and_append = new File("write_and_append");
  Files.write(hamlet, write_and_append, Charsets.UTF_8);
  Files.append(hamlet, write_and_append, Charsets.UTF_8);
  // write_and_append.deleteOnExit();
  Files.write("OverWrite the file", write_and_append, Charsets.UTF_8);

  // ByteSource and ByteSink
  ByteSource fileBytes = Files.asByteSource(write_and_append);
  byte[] readBytes = fileBytes.read();
  // equivalent to Files.toByteArray(write_and_append)
  ByteSink fileByteSink = Files.asByteSink(write_and_append);
  fileByteSink.write(Files.toByteArray(write_and_append));

  BaseEncoding base64 = BaseEncoding.base64();
  System.out.println(base64.encode("123456".getBytes()));
}
private void checkThatTheFileWasReceivedSuccessfully(final File file) throws IOException {
  assertTrue("Should exist: " + file, file.exists());
  final ByteSource originalFile = Files.asByteSource(image.getFile());
  final ByteSource savedFile = Files.asByteSource(file);
  assertTrue(originalFile.contentEquals(savedFile));
}
@Override
public long size() throws IOException {
  long result = 0L;
  for (ByteSource source : sources) {
    result += source.size();
  }
  return result;
}
@Override
public boolean isEmpty() throws IOException {
  for (ByteSource source : sources) {
    if (!source.isEmpty()) {
      return false;
    }
  }
  return true;
}
/**
 * Reads at most {@code limit} bytes from {@code inputFile} and returns them as a byte array.
 *
 * @throws IOException if there was an error.
 */
public static byte[] readContentWithLimit(Path inputFile, int limit) throws IOException {
  Preconditions.checkArgument(limit >= 0, "limit needs to be >=0, but it is %s", limit);
  ByteSource byteSource = asByteSource(inputFile);
  byte[] buffer = new byte[limit];
  try (InputStream inputStream = byteSource.openBufferedStream()) {
    int read = ByteStreams.read(inputStream, buffer, 0, limit);
    return Arrays.copyOf(buffer, read);
  }
}
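A minimal usage sketch (not from the original source), assuming readContentWithLimit above is statically imported and that the asByteSource call inside it refers to Guava's MoreFiles.asByteSource(Path):

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical caller, for illustration only.
public class ReadLimitDemo {
  public static void main(String[] args) throws Exception {
    Path tmp = Files.createTempFile("read-limit", ".txt");
    Files.write(tmp, "hello byte sources".getBytes(StandardCharsets.UTF_8));
    // Returns at most 5 bytes; a file shorter than the limit yields a shorter array.
    byte[] head = readContentWithLimit(tmp, 5);
    System.out.println(new String(head, StandardCharsets.UTF_8)); // "hello"
  }
}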
@Test
public void copyToByteSinkTest() throws Exception {
  File dest = new File("src/test/resources/sampleCompany.pdf");
  dest.deleteOnExit();
  File source = new File("src/main/resources/sample.pdf");
  ByteSource byteSource = Files.asByteSource(source);
  ByteSink byteSink = Files.asByteSink(dest);
  byteSource.copyTo(byteSink);
  assertThat(Files.toByteArray(dest), is(Files.toByteArray(source)));
}
/**
 * Read the contents of the source into a byte array.
 *
 * @param source the byte array source
 * @return the byte[] read from the source or null
 */
private byte[] consumeByteSourceOrNull(final ByteSource source) {
  try {
    if (source == null || source.isEmpty()) {
      return null;
    }
    return source.read();
  } catch (final IOException e) {
    logger.warn("Could not consume the byte array source", e);
    return null;
  }
}
private ByteSource createUsersMock(Map<String, String> users) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  for (Map.Entry<String, String> user : users.entrySet()) {
    BatchInput.User.Builder builder = BatchInput.User.newBuilder();
    builder.setLogin(user.getKey()).setName(user.getValue()).build().writeDelimitedTo(out);
  }
  ByteSource source = mock(ByteSource.class);
  when(source.openStream()).thenReturn(new ByteArrayInputStream(out.toByteArray()));
  return source;
}
@Override
public Optional<Long> sizeIfKnown() {
  long result = 0L;
  for (ByteSource source : sources) {
    Optional<Long> sizeIfKnown = source.sizeIfKnown();
    if (!sizeIfKnown.isPresent()) {
      return Optional.absent();
    }
    result += sizeIfKnown.get();
  }
  return Optional.of(result);
}
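The size, isEmpty, and sizeIfKnown overrides above appear to belong to a concatenating ByteSource; as a small illustration, the same semantics are observable through the public ByteSource.concat API:

import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative sketch, not part of the original snippets.
public class ConcatDemo {
  public static void main(String[] args) throws IOException {
    ByteSource first = ByteSource.wrap("abc".getBytes(StandardCharsets.UTF_8));
    ByteSource second = ByteSource.wrap("de".getBytes(StandardCharsets.UTF_8));
    ByteSource joined = ByteSource.concat(first, second);

    System.out.println(joined.size());     // 5: sum of the parts
    System.out.println(joined.isEmpty());  // false: at least one part is non-empty
    Optional<Long> known = joined.sizeIfKnown();
    System.out.println(known.isPresent()); // true for wrapped byte arrays
  }
}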
@BeforeClass
public void setup() throws InvalidKeySpecException, NoSuchAlgorithmException, IOException {
  KeyFactory keyfactory = KeyFactory.getInstance("RSA");
  PrivateKey privateKey =
      keyfactory.generatePrivate(
          Pems.privateKeySpec(ByteSource.wrap(PRIVATE_KEY.getBytes(Charsets.UTF_8))));
  PublicKey publicKey =
      keyfactory.generatePublic(
          Pems.publicKeySpec(ByteSource.wrap(PUBLIC_KEY.getBytes(Charsets.UTF_8))));
  keyPair = new KeyPair(publicKey, privateKey);
  openSshKey = SshKeys.encodeAsOpenSSH(RSAPublicKey.class.cast(publicKey));
}
public ExampleRecordCursor(List<ExampleColumnHandle> columnHandles, ByteSource byteSource) {
  this.columnHandles = columnHandles;

  fieldToColumnIndex = new int[columnHandles.size()];
  for (int i = 0; i < columnHandles.size(); i++) {
    ExampleColumnHandle columnHandle = columnHandles.get(i);
    fieldToColumnIndex[i] = columnHandle.getOrdinalPosition();
  }

  try (CountingInputStream input = new CountingInputStream(byteSource.openStream())) {
    lines = byteSource.asCharSource(UTF_8).readLines().iterator();
    totalBytes = input.getCount();
  } catch (IOException e) {
    throw Throwables.propagate(e);
  }
}
protected Payload doSlice(byte[] content, long offset, long length) {
  Payload returnVal;
  checkArgument(offset <= Integer.MAX_VALUE, "offset is too big for an array");
  checkArgument(length <= Integer.MAX_VALUE, "length is too big for an array");
  returnVal = new InputStreamSupplierPayload(ByteSource.wrap(content).slice(offset, length));
  return returnVal;
}
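A self-contained sketch of the wrap-and-slice pattern used above (InputStreamSupplierPayload belongs to the surrounding codebase and is not needed for the illustration):

import com.google.common.io.ByteSource;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative sketch, not part of the original snippets.
public class SliceDemo {
  public static void main(String[] args) throws IOException {
    byte[] content = "0123456789".getBytes(StandardCharsets.UTF_8);
    // A view of 4 bytes starting at offset 2; no copy happens until read().
    ByteSource slice = ByteSource.wrap(content).slice(2, 4);
    System.out.println(new String(slice.read(), StandardCharsets.UTF_8)); // "2345"
  }
}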
@Test
public void testReadFetchResponse() throws IOException {
  final String base64EncodedData =
      "AAAALgAAAAEAIDAwMDAwMDAwMDEwMDAwMDAwMDAwMDA4MDAwMDAwMDAwAAAAANcwdr5kYXRh";
  final RuleKey ruleKey = new RuleKey("00000000010000000000008000000000");
  final String data = "data";

  byte[] expectedData;
  try (ByteArrayOutputStream out = new ByteArrayOutputStream();
      DataOutputStream dataOut = new DataOutputStream(out)) {
    byte[] metadata =
        HttpArtifactCacheBinaryProtocol.createMetadataHeader(
            ImmutableSet.of(ruleKey),
            ImmutableMap.<String, String>of(),
            ByteSource.wrap(data.getBytes(Charsets.UTF_8)));
    dataOut.writeInt(metadata.length);
    dataOut.write(metadata);
    dataOut.write(data.getBytes(Charsets.UTF_8));
    expectedData = out.toByteArray();
  }
  assertThat(expectedData, Matchers.equalTo(BaseEncoding.base64().decode(base64EncodedData)));

  try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(expectedData))) {
    FetchResponseReadResult result =
        HttpArtifactCacheBinaryProtocol.readFetchResponse(inputStream, outputStream);
    assertThat(result.getRuleKeys(), Matchers.contains(ruleKey));
    assertThat(outputStream.toByteArray(), Matchers.equalTo(data.getBytes(Charsets.UTF_8)));
    assertThat(result.getActualHashCode(), Matchers.equalTo(HashCode.fromString("d73076be")));
    assertThat(result.getExpectedHashCode(), Matchers.equalTo(HashCode.fromString("d73076be")));
    assertThat(result.getMetadata(), Matchers.anEmptyMap());
    assertThat(result.getResponseSizeBytes(), Matchers.equalTo(4L));
  }
}
/**
 * Checks that the contents of this byte source are equal to the contents of the given byte
 * source.
 *
 * @throws IOException if an I/O error occurs in the process of reading from this source or
 *     {@code other}
 */
public boolean contentEquals(ByteSource other) throws IOException {
  checkNotNull(other);

  byte[] buf1 = new byte[BUF_SIZE];
  byte[] buf2 = new byte[BUF_SIZE];

  Closer closer = Closer.create();
  try {
    InputStream in1 = closer.register(openStream());
    InputStream in2 = closer.register(other.openStream());
    while (true) {
      int read1 = ByteStreams.read(in1, buf1, 0, BUF_SIZE);
      int read2 = ByteStreams.read(in2, buf2, 0, BUF_SIZE);
      if (read1 != read2 || !Arrays.equals(buf1, buf2)) {
        return false;
      } else if (read1 != BUF_SIZE) {
        return true;
      }
    }
  } catch (Throwable e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
}
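A short usage sketch for contentEquals, using wrapped in-memory sources (illustrative only):

import com.google.common.io.ByteSource;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class ContentEqualsDemo {
  public static void main(String[] args) throws IOException {
    ByteSource a = ByteSource.wrap("same bytes".getBytes(StandardCharsets.UTF_8));
    ByteSource b = ByteSource.wrap("same bytes".getBytes(StandardCharsets.UTF_8));
    ByteSource c = ByteSource.wrap("different".getBytes(StandardCharsets.UTF_8));

    System.out.println(a.contentEquals(b)); // true: byte-for-byte identical
    System.out.println(a.contentEquals(c)); // false
  }
}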
@Override
public void run() {
  for (Map.Entry<String, ConfigFileInfo> entry : watchedFileMap.entrySet()) {
    String filePath = entry.getKey();
    ConfigFileInfo configFileInfo = entry.getValue();
    try {
      File file = new File(filePath);
      long lastModified = file.lastModified();
      Preconditions.checkArgument(lastModified > 0L);
      if (lastModified != configFileInfo.lastModifiedTimestampMillis) {
        configFileInfo.lastModifiedTimestampMillis = lastModified;
        ByteSource byteSource = Files.asByteSource(file);
        HashCode newContentHash = byteSource.hash(HASH_FUNCTION);
        if (!newContentHash.equals(configFileInfo.contentHash)) {
          configFileInfo.contentHash = newContentHash;
          LOG.info("File {} was modified at {}, notifying watchers.", filePath, lastModified);
          byte[] newContents = byteSource.read();
          for (Function<byte[], Void> watchers : configFileInfo.changeWatchers) {
            try {
              watchers.apply(newContents);
            } catch (Exception e) {
              LOG.error(
                  "Exception in watcher callback for {}, ignoring. New file contents were: {}",
                  filePath,
                  new String(newContents, Charsets.UTF_8),
                  e);
            }
          }
        } else {
          LOG.info(
              "File {} was modified at {} but content hash is unchanged.",
              filePath,
              lastModified);
        }
      } else {
        LOG.debug("File {} not modified since {}", filePath, lastModified);
      }
    } catch (Exception e) {
      // We catch and log exceptions related to the update of any specific file, but
      // move on so others aren't affected. Issues can happen for example if the watcher
      // races with an external file replace operation; in that case, the next run should
      // pick up the update.
      // TODO: Consider adding a metric to track this so we can alert on failures.
      LOG.error("Config update check failed for {}", filePath, e);
    }
  }
}
@Test
public void testInputStreamError() throws IOException {
  ByteSource source = mock(ByteSource.class);

  WSLoaderResult<ByteSource> res = new WSLoaderResult<>(source, true);
  when(wsLoader.loadSource("/batch/users?logins=fmallet,sbrandhof")).thenReturn(res);

  InputStream errorInputStream = mock(InputStream.class);
  Mockito.doThrow(IOException.class).when(errorInputStream).read();
  when(source.openStream()).thenReturn(errorInputStream);

  exception.expect(IllegalStateException.class);
  exception.expectMessage("Unable to get user details from server");

  userRepo.load(Arrays.asList("fmallet", "sbrandhof"));
}
private static void tryCopyIfNeeded(ByteSource supplier, File rcFile) {
  if (!rcFile.exists()) {
    try {
      supplier.copyTo(Files.asByteSink(rcFile));
    } catch (IOException e) {
      rcFile.delete();
    }
  }
}
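For reference, a self-contained sketch of the ByteSource-to-ByteSink copy performed above, writing some default content into a temporary file (names and content are illustrative):

import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class CopyToSinkDemo {
  public static void main(String[] args) throws IOException {
    File target = File.createTempFile("rc-copy", ".tmp");
    ByteSource defaults = ByteSource.wrap("default settings".getBytes(StandardCharsets.UTF_8));
    // Stream the in-memory source into the file-backed sink.
    defaults.copyTo(Files.asByteSink(target));
    System.out.println(new String(Files.toByteArray(target), StandardCharsets.UTF_8));
  }
}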
/**
 * Adds a watch on the specified file. The file must exist, otherwise a FileNotFoundException is
 * thrown. If the file is deleted after a watch is established, the watcher will log errors but
 * continue to monitor it, and resume watching if it is recreated.
 *
 * @param filePath path to the file to watch.
 * @param onUpdate function to call when a change is detected to the file. The entire contents of
 *     the file will be passed in to the function. Note that onUpdate will be called once before
 *     this call completes, which facilitates the initial load of data. This callback is executed
 *     synchronously on the watcher thread - it is important that the function be non-blocking.
 */
public synchronized void addWatch(String filePath, Function<byte[], Void> onUpdate)
    throws IOException {
  MorePreconditions.checkNotBlank(filePath);
  Preconditions.checkNotNull(onUpdate);

  // Read the file and make the initial onUpdate call.
  File file = new File(filePath);
  ByteSource byteSource = Files.asByteSource(file);
  onUpdate.apply(byteSource.read());

  // Add the file to our map if it isn't already there, and register the new change watcher.
  ConfigFileInfo configFileInfo = watchedFileMap.get(filePath);
  if (configFileInfo == null) {
    configFileInfo = new ConfigFileInfo(file.lastModified(), byteSource.hash(HASH_FUNCTION));
    watchedFileMap.put(filePath, configFileInfo);
  }
  configFileInfo.changeWatchers.add(onUpdate);
}
private static Map<String, String> content_for(
    Set<String> relevant_files, Map<String, Vertabrae> file_names) {
  Map<String, String> result = new HashMap<>();
  for (String file : relevant_files) {
    String index = file.substring(file.length() - 1);
    Vertabrae vertabrae = file_names.get(file);
    ByteSource bs = Files.asByteSource(vertabrae.path().toFile());
    String html;
    try {
      html = md.process(bs.openBufferedStream());
      result.put(index, html);
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
  return result;
}
private static void generate_markup(
    Path abs_path_from, Path abs_path_to, Map<String, Vertabrae> file_names) throws IOException {
  Function<Vertabrae, String> title = Vertabrae::title;
  Map<String, String> toc = Maps.transformValues(file_names, title);
  Set<String> files = file_names.keySet();
  Set<String> filtered_files =
      files.stream().filter(file -> !file.contains("_")).collect(Collectors.toSet());
  for (String key : filtered_files) {
    Vertabrae vertabrae = file_names.get(key);
    ByteSource bs = Files.asByteSource(vertabrae.path().toFile());
    String html = md.process(bs.openBufferedStream());
    Set<String> relevant_files =
        files.stream().filter(file -> file.contains(key + "_")).collect(Collectors.toSet());
    String template = pick_template(abs_path_from, vertabrae.file_name());
    VelocityContext velocity_context = new VelocityContext();
    velocity_context.put("name", vertabrae.title());
    velocity_context.put("content", html);
    velocity_context.put("pages", toc);
    Map<String, String> content_files = content_for(relevant_files, file_names);
    for (String index : content_files.keySet()) {
      velocity_context.put("content_" + index, content_files.get(index));
    }
    System.out.println("[SPINE] Template: " + template);
    String page = merge(template, velocity_context);
    File to_file = new File(abs_path_to + File.separator + vertabrae.file_name() + ".html");
    Files.write(page, to_file, Charsets.UTF_8);
  }
}
@Override
public String createTask(Graph<Operation, EdgeType> graph, String hostname) {
  logger.info("create task script for host: " + hostname);
  List<ByteSource> byteSources = Lists.newArrayList();
  TreeSet<Operation> imports =
      Sets.newTreeSet(
          new Comparator<Operation>() {
            @Override
            public int compare(Operation o1, Operation o2) {
              return o1.getName().compareTo(o2.getName());
            }
          });
  StringBuilder builder = new StringBuilder();
  String changeLine = System.getProperty("line.separator");
  builder.append(changeLine + "node '" + hostname + "'{" + changeLine);
  String definedContent = opBuilder.createDefineContent(graph, imports, null);
  builder.append(definedContent);
  builder.append(changeLine + "}");
  logger.debug(builder.toString());
  imports.addAll(dependedOperation(imports));
  try {
    for (Operation operation : imports) {
      if (operation.getDefineMd5() != null) {
        byteSources.add(fileService.findFile(operation.getDefineMd5()));
      }
    }
    byteSources.add(ByteSource.wrap(builder.toString().getBytes()));
    String md5Key = fileService.saveFile(ByteSource.concat(byteSources));
    logger.info("the md5 key of this task script is " + md5Key);
    logger.info("create task script successful");
    return md5Key;
  } catch (IOException e) {
    logger.error("store task script file failed");
    e.printStackTrace();
  }
  return null;
}
@Override
public int execute(ExecutionContext context) {
  try (InputStream sourceStream = source.openStream()) {
    context
        .getProjectFilesystem()
        .copyToPath(sourceStream, outputPath, StandardCopyOption.REPLACE_EXISTING);
    return 0;
  } catch (IOException e) {
    LOG.error(e, "Couldn't copy bytes to %s", outputPath);
    e.printStackTrace(context.getStdErr());
    return 1;
  }
}
@Test
public void testCreateMetadataHeader() throws IOException {
  final String base64EncodedData =
      "AAAAAQAgMDAwMDAwMDAwMTAwMDAwMDAwMDAwMDgwMDAwMDAwMDAAAAABAANrZXkAAAAFdmFsdWVc/GBY";
  final RuleKey ruleKey = new RuleKey("00000000010000000000008000000000");
  final String data = "data";
  byte[] metadata =
      HttpArtifactCacheBinaryProtocol.createMetadataHeader(
          ImmutableSet.of(ruleKey),
          ImmutableMap.of("key", "value"),
          ByteSource.wrap(data.getBytes(Charsets.UTF_8)));
  assertThat(metadata, Matchers.equalTo(BaseEncoding.base64().decode(base64EncodedData)));
}
@Test
void testToBlob() {
  assertNull(ToBlob.INSTANCE.apply(null));

  BlobStore blobStore = getBlobStore();
  blobStore.createContainerInLocation(null, "container");
  blobStore.createDirectory("container", "one");
  blobStore.putBlob(
      "container",
      blobStore.blobBuilder("myblob").payload(ByteSource.wrap("testcontent".getBytes())).build());

  Blob representation = ToBlob.INSTANCE.apply(blobStore.getBlob("container", "myblob"));
  assertNotNull(representation);
  assertNotNull(representation.getBlobMetadata());
}
/**
 * Joins multiple {@link InputStream} suppliers into a single supplier. Streams returned from the
 * supplier will contain the concatenated data from the streams of the underlying suppliers.
 *
 * <p>Only one underlying input stream will be open at a time. Closing the joined stream will
 * close the open underlying stream.
 *
 * <p>Reading from the joined stream will throw a {@link NullPointerException} if any of the
 * suppliers are null or return null.
 *
 * @param suppliers the suppliers to concatenate
 * @return a supplier that will return a stream containing the concatenated stream data
 */
public static InputSupplier<InputStream> join(
    final Iterable<? extends InputSupplier<? extends InputStream>> suppliers) {
  checkNotNull(suppliers);
  Iterable<ByteSource> sources =
      Iterables.transform(
          suppliers,
          new Function<InputSupplier<? extends InputStream>, ByteSource>() {
            @Override
            public ByteSource apply(InputSupplier<? extends InputStream> input) {
              return asByteSource(input);
            }
          });
  return asInputSupplier(ByteSource.concat(sources));
}
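On Guava versions where the legacy InputSupplier API is no longer available, the same concatenation can be expressed directly with ByteSource.concat; a minimal sketch:

import com.google.common.io.ByteSource;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class JoinDemo {
  public static void main(String[] args) throws IOException {
    ByteSource head = ByteSource.wrap("Hello, ".getBytes(StandardCharsets.UTF_8));
    ByteSource tail = ByteSource.wrap("world".getBytes(StandardCharsets.UTF_8));
    // Only one underlying stream is open at a time while reading the joined stream.
    try (InputStream joined = ByteSource.concat(head, tail).openStream()) {
      byte[] all = ByteStreams.toByteArray(joined);
      System.out.println(new String(all, StandardCharsets.UTF_8)); // "Hello, world"
    }
  }
}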
public ByteSource asByteSource(ReadOnlyTransaction txn) throws IOException {
  ByteBuffer snapshotHeader = ByteBuffer.allocate(HEADER_SIZE);
  {
    ByteBuffer header = buildHeader();
    assert header.remaining() == MASTERPAGE_HEADER_SIZE;
    snapshotHeader.put(header);
  }

  // We build a master header with just the current snapshot transaction
  for (int i = 0; i < MASTERPAGE_SLOTS; i++) {
    int position = MASTERPAGE_HEADER_SIZE + (i * MASTERPAGE_SLOT_SIZE);
    ByteBuffer mmap = ByteBuffer.allocate(MASTERPAGE_SLOT_SIZE);
    assert mmap.position() == 0;
    int rootPage = txn.rootPageId;
    int transactionPage = txn.transactionPageId;
    long transactionId = txn.snapshotTransactionId;
    MasterPage.create(mmap, rootPage, transactionPage, transactionId);
    mmap.position(MASTERPAGE_SLOT_SIZE);
    mmap.flip();
    assert mmap.remaining() == MASTERPAGE_SLOT_SIZE;
    snapshotHeader.position(position);
    snapshotHeader.put(mmap);
  }
  snapshotHeader.position(0);

  ByteSource dataByteSource = asByteSource0();
  dataByteSource = dataByteSource.slice(HEADER_SIZE, Long.MAX_VALUE);
  return ByteSource.concat(new ByteBufferByteSource(snapshotHeader), dataByteSource);
}
private void setupMedia() throws Exception {
  final Attachment attachment =
      Attachment.create().name("logo.png").mimeType("image/png").label("small").build();
  final Media content = createMedia("123456", "path/to/content", attachment);

  Mockito.when(this.contentService.getById(Mockito.eq(content.getId()))).thenReturn(content);
  Mockito.when(this.contentService.getByPath(Mockito.eq(content.getPath()))).thenReturn(content);

  this.mediaBytes = ByteSource.wrap(new byte[0]);
  Mockito.when(
          this.contentService.getBinary(
              Mockito.isA(ContentId.class), Mockito.isA(BinaryReference.class)))
      .thenReturn(this.mediaBytes);
}
private static HashCode buildHashedPayload(HttpRequest request) {
  HashingInputStream his = null;
  try {
    his =
        new HashingInputStream(
            Hashing.sha256(),
            request.getPayload() == null
                ? ByteSource.empty().openStream()
                : request.getPayload().openStream());
    ByteStreams.copy(his, ByteStreams.nullOutputStream());
    return his.hash();
  } catch (IOException e) {
    throw new HttpException("Error signing request", e);
  } finally {
    closeQuietly(his);
  }
}
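As a related sketch: when the payload is already available as a ByteSource, the same SHA-256 digest can be obtained directly via ByteSource.hash, without wiring up a HashingInputStream (HttpRequest and Payload above belong to the surrounding codebase):

import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteSource;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class HashPayloadDemo {
  public static void main(String[] args) throws IOException {
    ByteSource payload = ByteSource.wrap("sign me".getBytes(StandardCharsets.UTF_8));
    // hash() streams the source through the hash function and returns the digest.
    HashCode sha256 = payload.hash(Hashing.sha256());
    System.out.println(sha256); // hex-encoded SHA-256 of the payload bytes
  }
}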
@Test(groups = "Integration") public void testCopyResource() throws Exception { File tempDest = new File(tempDataDir, "tempDest.txt"); String tempLocalContent = "abc"; File tempLocal = new File(tempDataDir, "tempLocal.txt"); Files.write(tempLocalContent, tempLocal, Charsets.UTF_8); localhost.setConfig(BrooklynConfigKeys.ONBOX_BASE_DIR, tempDataDir.getAbsolutePath()); MyService entity = app.createAndManageChild(EntitySpec.create(MyService.class)); app.start(ImmutableList.of(localhost)); // Copy local file entity.getDriver().copyResource(tempLocal, tempDest.getAbsolutePath()); assertEquals(Files.readLines(tempDest, Charsets.UTF_8), ImmutableList.of(tempLocalContent)); tempDest.delete(); // Copy local file using url entity.getDriver().copyResource(tempLocal.toURI().toString(), tempDest.getAbsolutePath()); assertEquals(Files.readLines(tempDest, Charsets.UTF_8), ImmutableList.of(tempLocalContent)); tempDest.delete(); // Copy reader entity.getDriver().copyResource(new StringReader(tempLocalContent), tempDest.getAbsolutePath()); assertEquals(Files.readLines(tempDest, Charsets.UTF_8), ImmutableList.of(tempLocalContent)); tempDest.delete(); // Copy stream entity .getDriver() .copyResource( ByteSource.wrap(tempLocalContent.getBytes()).openStream(), tempDest.getAbsolutePath()); assertEquals(Files.readLines(tempDest, Charsets.UTF_8), ImmutableList.of(tempLocalContent)); tempDest.delete(); // Copy known-size stream entity .getDriver() .copyResource( new KnownSizeInputStream( Streams.newInputStreamWithContents(tempLocalContent), tempLocalContent.length()), tempDest.getAbsolutePath()); assertEquals(Files.readLines(tempDest, Charsets.UTF_8), ImmutableList.of(tempLocalContent)); tempDest.delete(); }