@Override
public F.Promise<Void> store(Path path, String key) {
    Promise<Void> promise = Futures.promise();
    TransferManager transferManager = new TransferManager(credentials);
    Upload upload = transferManager.upload(bucketName, key, path.toFile());
    // complete the Scala promise when the transfer reports completion or failure
    upload.addProgressListener(
        (ProgressListener) progressEvent -> {
            if (progressEvent.getEventType().isTransferEvent()) {
                if (progressEvent.getEventType().equals(ProgressEventType.TRANSFER_COMPLETED_EVENT)) {
                    transferManager.shutdownNow();
                    promise.success(null);
                } else if (progressEvent.getEventType().equals(ProgressEventType.TRANSFER_FAILED_EVENT)) {
                    transferManager.shutdownNow();
                    promise.failure(new Exception("Upload failed"));
                }
            }
        });
    // bridge the Scala future back to a Play promise
    return F.Promise.wrap(promise.future());
}
@Override
protected UploadResult doInBackground(File... files) {
    try {
        // only the first file is uploaded: the method returns from inside the loop
        for (File file : files) {
            TransferManager transferManager = new TransferManager(amazonS3);
            Upload upload = transferManager.upload(BUCKET, file.getName(), file);
            return upload.waitForUploadResult();
        }
    } catch (InterruptedException e) {
        Log.wtf("UPLOAD", e);
    }
    return null;
}
private void createAmazonS3Bucket() {
    try {
        if (!tx.getAmazonS3Client().doesBucketExist(bucketName)) {
            tx.getAmazonS3Client().createBucket(bucketName);
        }
    } catch (AmazonClientException ace) {
        JOptionPane.showMessageDialog(
            frame,
            "Unable to create a new Amazon S3 bucket: " + ace.getMessage(),
            "Error Creating Bucket",
            JOptionPane.ERROR_MESSAGE);
    }
}
@Override
public PutObjectResult putObject(PutObjectRequest req)
        throws AmazonClientException, AmazonServiceException {
    if (!multipartUpload) {
        return super.putObject(req);
    }

    final long contentLen = TransferManagerUtils.getContentLength(req);

    // upload to a temporary key first, then copy to the final key and delete the temp object
    String tempFilename = req.getKey() + ".tmp";
    String origFilename = req.getKey();
    req.setKey(tempFilename);

    XProgressListener progressListener = new XProgressListener();
    req.setProgressListener(new ProgressListenerChain(progressListener));
    progressListener.setContentLen(contentLen);
    progressListener.setUpload(transferManager.upload(req));

    try {
        progressListener.getUpload().waitForCompletion();
    } catch (InterruptedException e) {
        throw new AmazonClientException(e.getMessage(), e);
    }

    CopyObjectRequest copyReq =
        new CopyObjectRequest(req.getBucketName(), tempFilename, req.getBucketName(), origFilename);
    copyObject(copyReq);
    deleteObject(new DeleteObjectRequest(req.getBucketName(), tempFilename));

    return null;
}
private static void moveToS3(String outputFilePath, String existingBucketName)
        throws InterruptedException {
    System.setProperty("aws.accessKeyId", AWS_ACCESS_KEY_ID);
    System.setProperty("aws.secretKey", AWS_ACCESS_SECRET_KEY);

    File outputFile = new File(outputFilePath);
    String keyName = outputFile.getName();

    TransferManager tm = new TransferManager(new SystemPropertiesCredentialsProvider());

    // TransferManager processes all transfers asynchronously,
    // so this call will return immediately.
    Upload upload = tm.upload(existingBucketName, keyName, outputFile);

    try {
        // Or you can block and wait for the upload to finish
        upload.waitForCompletion();
        System.out.println("Upload complete.");
    } catch (AmazonClientException amazonClientException) {
        System.out.println("Unable to upload file, upload was aborted.");
        amazonClientException.printStackTrace();
    }
}
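A minimal call-site sketch for the helper above; the local path and bucket name are placeholders, and the AWS_ACCESS_KEY_ID / AWS_ACCESS_SECRET_KEY constants are assumed to be defined elsewhere in the class.

public static void main(String[] args) throws InterruptedException {
    // hypothetical example: uploads /tmp/report.csv to "example-output-bucket"
    // under the key "report.csv" (the file name), blocking until the transfer finishes
    moveToS3("/tmp/report.csv", "example-output-bucket");
}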
public PrestoS3OutputStream(
        AmazonS3 s3, TransferManagerConfiguration config, String host, String key, File tempFile)
        throws IOException {
    super(new BufferedOutputStream(new FileOutputStream(checkNotNull(tempFile, "tempFile is null"))));

    transferManager = new TransferManager(checkNotNull(s3, "s3 is null"));
    transferManager.setConfiguration(checkNotNull(config, "config is null"));

    this.host = checkNotNull(host, "host is null");
    this.key = checkNotNull(key, "key is null");
    this.tempFile = tempFile;

    log.debug("OutputStream for key '%s' using file: %s", key, tempFile);
}
@Override
public void close() throws IOException {
    if (closed) {
        return;
    }
    closed = true;

    try {
        super.close();
        uploadObject();
    } finally {
        if (!tempFile.delete()) {
            log.warn("Could not delete temporary file: %s", tempFile);
        }
        // close transfer manager but keep underlying S3 client open
        transferManager.shutdownNow(false);
    }
}
private void uploadObject() throws IOException {
    try {
        log.debug(
            "Starting upload for host: %s, key: %s, file: %s, size: %s",
            host, key, tempFile, tempFile.length());
        Upload upload = transferManager.upload(host, key, tempFile);

        if (log.isDebugEnabled()) {
            upload.addProgressListener(createProgressListener(upload));
        }

        upload.waitForCompletion();
        log.debug("Completed upload for host: %s, key: %s", host, key);
    } catch (AmazonClientException e) {
        throw new IOException(e);
    } catch (InterruptedException e) {
        // restore the interrupt flag and surface the interruption as an I/O error
        Thread.currentThread().interrupt();
        throw new InterruptedIOException();
    }
}
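The createProgressListener(upload) helper is not shown in this snippet. A plausible sketch, assuming it only logs transfer progress at debug level, might look like the following; the log field and the percentage formatting are assumptions, not taken from the source.

private ProgressListener createProgressListener(Transfer transfer) {
    return progressEvent -> {
        // TransferProgress tracks bytes transferred and overall percentage for the transfer
        TransferProgress progress = transfer.getProgress();
        log.debug("Upload progress: %s / %s bytes (%.1f%%)",
            progress.getBytesTransferred(),
            progress.getTotalBytesToTransfer(),
            progress.getPercentTransferred());
    };
}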
@Override
public ListenableFuture<Void> upload(
        String bucketName, String name, InputStream input, ObjectMetadata meta) throws IOException {
    SettableFuture<Void> future = SettableFuture.create();
    LOG.info("Starting upload, bucket={}, key={}", bucketName, name);
    final Upload up = tx.upload(bucketName, name, input, meta);
    up.addProgressListener(
        (ProgressEvent progressEvent) -> {
            // each case must break, otherwise a failure would fall through to the success branch
            switch (progressEvent.getEventType()) {
                case CLIENT_REQUEST_FAILED_EVENT:
                    future.setException(
                        new IOException(
                            "Failed to complete upload after "
                                + progressEvent.getBytesTransferred()
                                + " bytes"));
                    break;
                case CLIENT_REQUEST_SUCCESS_EVENT:
                    LOG.info("Upload completed, bucket={}, key={}", bucketName, name);
                    future.set(null);
                    break;
                default:
                    break;
            }
        });
    return future;
}
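A hedged sketch of a consumer for the future returned above, using Guava's Futures.addCallback; the bucket, key, input, and meta values are placeholders, and the logger calls assume an SLF4J-style LOG field.

ListenableFuture<Void> result = upload("example-bucket", "backups/data.bin", input, meta);
Futures.addCallback(result, new FutureCallback<Void>() {
    @Override
    public void onSuccess(Void ignored) {
        LOG.info("Upload future completed");
    }

    @Override
    public void onFailure(Throwable t) {
        LOG.error("Upload future failed", t);
    }
}, MoreExecutors.directExecutor());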
public void deleteMultiparts(String bucketName, Date since) {
    // abort any in-progress multipart uploads in the bucket that were initiated before 'since'
    transferManager.abortMultipartUploads(bucketName, since);
}
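A possible call site, assuming the goal is to clean up incomplete multipart uploads older than a week; the bucket name is a placeholder, and java.util.Date plus java.util.concurrent.TimeUnit are assumed to be imported.

// abort multipart uploads in "example-bucket" that were initiated more than 7 days ago
Date cutoff = new Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(7));
deleteMultiparts("example-bucket", cutoff);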
protected void init() {
    transferManager = new TransferManager(this);
    TransferManagerConfiguration configuration = new TransferManagerConfiguration();
    // use multipart uploads for objects larger than 100 KB
    configuration.setMultipartUploadThreshold(100 * Constants.KB);
    transferManager.setConfiguration(configuration);
}
public static List<ExternalConfigurationModel> buildFromConfig(String configFilePath)
        throws Exception {
    List<ExternalConfigurationModel> response = new ArrayList<>();

    // reference the config file as a full path
    File configFile = new File(configFilePath);
    if (!configFile.exists()) {
        // try to load the file from the classpath
        InputStream classpathConfig =
            ExternalConfigurationModel.class.getClassLoader().getResourceAsStream(configFilePath);
        if (classpathConfig != null && classpathConfig.available() > 0) {
            configFile =
                new File(
                    ExternalConfigurationModel.class
                        .getResource((configFilePath.startsWith("/") ? "" : "/") + configFilePath)
                        .toURI());
            LOG.info(String.format("Loaded Configuration %s from Classpath", configFilePath));
        } else {
            if (configFilePath.startsWith("s3://")) {
                AmazonS3 s3Client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());
                TransferManager tm = new TransferManager(s3Client);

                // parse the config path to get the bucket name and prefix
                final String s3ProtoRegex = "s3:\\/\\/";
                String bucket = configFilePath.replaceAll(s3ProtoRegex, "").split("/")[0];
                String prefix =
                    configFilePath.replaceAll(String.format("%s%s\\/", s3ProtoRegex, bucket), "");

                // download the file using TransferManager
                configFile = File.createTempFile(configFilePath, null);
                Download download = tm.download(bucket, prefix, configFile);
                download.waitForCompletion();

                // shut down the transfer manager
                tm.shutdownNow();

                LOG.info(
                    String.format(
                        "Loaded Configuration from Amazon S3 %s/%s to %s",
                        bucket, prefix, configFile.getAbsolutePath()));
            } else {
                // load the file from external URL
                try {
                    configFile = File.createTempFile(configFilePath, null);
                    FileUtils.copyURLToFile(new URL(configFilePath), configFile, 1000, 1000);
                    LOG.info(
                        String.format(
                            "Loaded Configuration from %s to %s",
                            configFilePath, configFile.getAbsolutePath()));
                } catch (IOException e) {
                    // handle the timeouts and so on with a generalised
                    // config file not found handler later
                }
            }
        }
    } else {
        LOG.info(String.format("Loaded Configuration from Filesystem %s", configFilePath));
    }

    // if we haven't been able to load a config file, then bail
    if (configFile == null || !configFile.exists()) {
        throw new InvalidConfigurationException(
            String.format("Unable to Load Config File from %s", configFilePath));
    }

    JsonNode document = StreamAggregatorUtils.asJsonNode(configFile);

    ExternalConfigurationModel config = null;

    Iterator<JsonNode> i = document.elements();
    while (i.hasNext()) {
        config = new ExternalConfigurationModel();

        JsonNode section = i.next();

        // set generic properties
        config.setNamespace(StreamAggregatorUtils.readValueAsString(section, "namespace"));
        config.setDateFormat(StreamAggregatorUtils.readValueAsString(section, "dateFormat"));
        addTimeHorizons(section, config);
        setAggregatorType(section, config);

        // set the label items
        JsonNode labelItems = StreamAggregatorUtils.readJsonValue(section, "labelItems");
        if (labelItems != null && labelItems.size() > 0) {
            Iterator<JsonNode> iterator = labelItems.elements();
            while (iterator.hasNext()) {
                JsonNode n = iterator.next();
                config.addLabelItems(n.asText());
            }
        }
        config.setLabelAttributeAlias(
            StreamAggregatorUtils.readValueAsString(section, "labelAttributeAlias"));

        config.setDateItem(StreamAggregatorUtils.readValueAsString(section, "dateItem"));
        config.setDateAttributeAlias(
            StreamAggregatorUtils.readValueAsString(section, "dateAttributeAlias"));

        JsonNode summaryItems = StreamAggregatorUtils.readJsonValue(section, "summaryItems");
        if (summaryItems != null && summaryItems.size() > 0) {
            Iterator<JsonNode> iterator = summaryItems.elements();
            while (iterator.hasNext()) {
                JsonNode n = iterator.next();
                config.addSummaryItem(n.asText());
            }
        }

        config.setTableName(StreamAggregatorUtils.readValueAsString(section, "tableName"));

        String readIO = StreamAggregatorUtils.readValueAsString(section, "readIOPS");
        if (readIO != null) {
            config.setReadIOPs(Long.parseLong(readIO));
        }
        String writeIO = StreamAggregatorUtils.readValueAsString(section, "writeIOPS");
        if (writeIO != null) {
            config.setWriteIOPs(Long.parseLong(writeIO));
        }

        // configure tolerance of data extraction problems
        String failOnDataExtraction =
            StreamAggregatorUtils.readValueAsString(section, "failOnDataExtraction");
        if (failOnDataExtraction != null) {
            config.setFailOnDataExtraction(Boolean.parseBoolean(failOnDataExtraction));
        }

        // configure whether metrics should be emitted
        String emitMetrics = StreamAggregatorUtils.readValueAsString(section, "emitMetrics");
        String metricsEmitterClassname =
            StreamAggregatorUtils.readValueAsString(section, "metricsEmitterClass");
        if (emitMetrics != null || metricsEmitterClassname != null) {
            if (metricsEmitterClassname != null) {
                config.setMetricsEmitter(
                    (Class<IMetricsEmitter>)
                        ClassLoader.getSystemClassLoader().loadClass(metricsEmitterClassname));
            } else {
                config.setEmitMetrics(Boolean.parseBoolean(emitMetrics));
            }
        }

        // configure the data store class
        String dataStoreClass = StreamAggregatorUtils.readValueAsString(section, "IDataStore");
        if (dataStoreClass != null) {
            Class<IDataStore> dataStore =
                (Class<IDataStore>) ClassLoader.getSystemClassLoader().loadClass(dataStoreClass);
            config.setDataStore(dataStore);
        }

        // get the data extractor configuration, so we know what other json
        // elements to retrieve from the configuration document
        String useExtractor = null;
        try {
            useExtractor = StreamAggregatorUtils.readValueAsString(section, "dataExtractor");
            config.setDataExtractor(DataExtractor.valueOf(useExtractor));
        } catch (Exception e) {
            throw new Exception(
                String.format("Unable to configure aggregator with Data Extractor %s", useExtractor));
        }

        switch (config.getDataExtractor()) {
            case CSV:
                configureStringCommon(section, config);
                configureCsv(section, config);
                break;
            case JSON:
                configureStringCommon(section, config);
                break;
            case OBJECT:
                configureObject(section, config);
                break;
            case REGEX:
                configureRegex(section, config);
        }

        response.add(config);
    }
    return response;
}
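A minimal call-site sketch for the loader above, assuming buildFromConfig is a static method on ExternalConfigurationModel; the path is a placeholder and, per the branches in the method, could equally be a filesystem path, a classpath resource, an s3:// location, or an external URL.

// hypothetical example: load aggregator configuration shipped alongside the application
List<ExternalConfigurationModel> aggregators =
    ExternalConfigurationModel.buildFromConfig("conf/aggregators.json");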