Code example #1
  @Override
  protected void getResource(
      String resourceName, File destination, TransferProgress transferProgress)
      throws TransferFailedException, ResourceDoesNotExistException {
    InputStream in = null;
    OutputStream out = null;
    try {
      S3Object s3Object = this.amazonS3.getObject(this.bucketName, getKey(resourceName));

      in = s3Object.getObjectContent();
      out = new TransferProgressFileOutputStream(destination, transferProgress);

      IoUtils.copy(in, out);
    } catch (AmazonServiceException e) {
      throw new ResourceDoesNotExistException(
          String.format("'%s' does not exist", resourceName), e);
    } catch (FileNotFoundException e) {
      throw new TransferFailedException(String.format("Cannot write file to '%s'", destination), e);
    } catch (IOException e) {
      throw new TransferFailedException(
          String.format("Cannot read from '%s' and write to '%s'", resourceName, destination), e);
    } finally {
      IoUtils.closeQuietly(in, out);
    }
  }
Code example #2
    @Override
    public PointSet load(String pointSetId) throws Exception {

      File cachedFile;

      if (!workOffline) {
        // get pointset metadata from S3
        cachedFile = new File(POINT_DIR, pointSetId + ".json");
        if (!cachedFile.exists()) {
          POINT_DIR.mkdirs();

          S3Object obj = s3.getObject(pointsetBucket, pointSetId + ".json.gz");
          ObjectMetadata objMet = obj.getObjectMetadata();
          FileOutputStream fos = new FileOutputStream(cachedFile);
          GZIPInputStream gis = new GZIPInputStream(obj.getObjectContent());
          try {
            ByteStreams.copy(gis, fos);
          } finally {
            fos.close();
            gis.close();
          }
        }
      } else {
        cachedFile = new File(POINT_DIR, pointSetId + ".json");
      }

      // grab it from the cache

      return PointSet.fromGeoJson(cachedFile);
    }
Code example #3
 public UrlList getUrlList(String url) {
   UrlList urlList = null;
   try {
     String key = Hash.hashKey(url);
     GetObjectRequest req = new GetObjectRequest(bucketName, key);
     S3Object s3Object = s3client.getObject(req);
     InputStream objectData = s3Object.getObjectContent();
     BufferedReader reader = new BufferedReader(new InputStreamReader(objectData));
     StringBuilder s3Content = new StringBuilder();
     String line;
     while ((line = reader.readLine()) != null) {
       s3Content.append(line + "\r\n");
     }
     reader.close();
     objectData.close();
     ObjectMapper mapper = new ObjectMapper();
     mapper.setVisibility(PropertyAccessor.ALL, Visibility.NONE);
     mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
     urlList = mapper.readValue(s3Content.toString(), UrlList.class);
   } catch (AmazonS3Exception ase) {
     System.out.println("S3UrlListDA : document does not exist");
     ase.printStackTrace();
   } catch (IOException e) {
     System.out.println("S3UrlListDA : IOException while fetching document from S3");
     e.printStackTrace();
   } catch (NoSuchAlgorithmException e) {
      // Hash.hashKey could not find the hashing algorithm used to derive the S3 key.
     e.printStackTrace();
   }
   return urlList;
 }
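
The snippet above buffers the whole document in a StringBuilder before handing it to Jackson. A shorter variant, offered only as a sketch (it is not part of the original project and reuses the same s3client, bucketName, Hash.hashKey, and UrlList from the code above, and assumes an SDK version in which S3Object implements Closeable): Jackson can read the content stream directly, and try-with-resources guarantees the HTTP connection is released.

 public UrlList getUrlListStreaming(String url) throws IOException, NoSuchAlgorithmException {
   String key = Hash.hashKey(url);
   ObjectMapper mapper = new ObjectMapper();
   mapper.setVisibility(PropertyAccessor.ALL, Visibility.NONE);
   mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
   // The try-with-resources block closes the object (and its connection) even if
   // deserialization fails; readValue consumes the stream without an intermediate String.
   try (S3Object s3Object = s3client.getObject(new GetObjectRequest(bucketName, key));
       InputStream objectData = s3Object.getObjectContent()) {
     return mapper.readValue(objectData, UrlList.class);
   }
 }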
Code example #4
File: App.java Project: Shahbaz03/java-amazonS3
  /**
   * Fetches the specified file from the Amazon S3 bucket.
   *
   * @throws IOException
   */
  public void getObjectFromS3() throws IOException {
    /**
     * Initializing the GetObjectRequest of Amazon S3. It is used to read files stored in an Amazon
     * S3 bucket. It is initialized with the name of the bucket in which the file is stored and the
     * name of the file we want to read.
     */
    GetObjectRequest getObj = new GetObjectRequest("AWS_BUCKET_NAME", "fileName");

    /**
     * Use the Amazon S3 client and the GetObjectRequest to fetch the files and hold it in the
     * S3Object container.
     */
    S3Object s3FileObj = getS3Client().getObject(getObj);
    /**
     * Creating a temp file for writing the file content.
     *
     * <p>The Amazon S3Object does not directly convert to a File, nor does it have any built-in
     * function to do so. Hence we use IOUtils from commons-io to write the input stream to a
     * file. We could do the same by hand, but IOUtils provides a built-in function for it, which
     * saves work.
     */
    File tempJsFile = File.createTempFile("temp", ".js");
    FileOutputStream out = new FileOutputStream(tempJsFile);
    IOUtils.copy(s3FileObj.getObjectContent(), out);
    out.close();
  }
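
A minimal variant of the same download, offered as a sketch rather than the project's actual code: try-with-resources closes both the S3 content stream and the object itself even if the copy fails (assuming an SDK version in which S3Object implements Closeable), and java.nio's Files.copy replaces the commons-io dependency. The bucket and key names are the same placeholders used above.

  public void getObjectFromS3Safely() throws IOException {
    GetObjectRequest getObj = new GetObjectRequest("AWS_BUCKET_NAME", "fileName");
    File tempJsFile = File.createTempFile("temp", ".js");
    // Both the S3Object and its content stream are closed even if the copy throws,
    // which releases the underlying HTTP connection back to the pool.
    try (S3Object s3FileObj = getS3Client().getObject(getObj);
        InputStream in = s3FileObj.getObjectContent()) {
      Files.copy(in, tempJsFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
  }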
Code example #5
File: S3NotebookRepo.java Project: conker84/zeppelin
  private Note getNote(String key) throws IOException {
    GsonBuilder gsonBuilder = new GsonBuilder();
    gsonBuilder.setPrettyPrinting();
    Gson gson =
        gsonBuilder.registerTypeAdapter(Date.class, new NotebookImportDeserializer()).create();

    S3Object s3object;
    try {
      s3object = s3client.getObject(new GetObjectRequest(bucketName, key));
    } catch (AmazonClientException ace) {
      throw new IOException("Unable to retrieve object from S3: " + ace, ace);
    }

    Note note;
    try (InputStream ins = s3object.getObjectContent()) {
      String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING));
      note = gson.fromJson(json, Note.class);
    }

    for (Paragraph p : note.getParagraphs()) {
      if (p.getStatus() == Status.PENDING || p.getStatus() == Status.RUNNING) {
        p.setStatus(Status.ABORT);
      }
    }

    return note;
  }
Code example #6
 public InputStream downloadRange(long start, long end, String file) {
   GetObjectRequest rangeObjectRequest = new GetObjectRequest(bucketName, file);
   rangeObjectRequest.setRange(start, end); // byte range is inclusive on both ends
   S3Object objectPortion = s3client.getObject(rangeObjectRequest);
   InputStream objectData = objectPortion.getObjectContent();
   return objectData;
 }
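
Since the method hands back a live object-content stream, the caller is responsible for draining and closing it. A minimal usage sketch (an assumption about the calling side, not shown in the source) that reads the first ten bytes of an object:

 public byte[] readFirstTenBytes(String file) throws IOException {
   // setRange is inclusive on both ends, so (0, 9) covers exactly ten bytes.
   try (InputStream in = downloadRange(0, 9, file)) {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
     byte[] chunk = new byte[1024];
     int n;
     while ((n = in.read(chunk)) != -1) {
       buffer.write(chunk, 0, n);
     }
     return buffer.toByteArray();
   }
 }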
Code example #7
  /**
   * Downloads an S3Object, as returned from {@link
   * AmazonS3Client#getObject(com.amazonaws.services.s3.model.GetObjectRequest)}, to the specified
   * file.
   *
   * @param s3Object The S3Object containing a reference to an InputStream containing the object's
   *     data.
   * @param destinationFile The file to store the object's data in.
   * @param performIntegrityCheck Boolean value indicating whether to perform the integrity check
   */
  public static void downloadObjectToFile(
      S3Object s3Object, File destinationFile, boolean performIntegrityCheck) {

    // attempt to create the parent if it doesn't exist
    File parentDirectory = destinationFile.getParentFile();
    if (parentDirectory != null && !parentDirectory.exists()) {
      parentDirectory.mkdirs();
    }

    OutputStream outputStream = null;
    try {
      outputStream = new BufferedOutputStream(new FileOutputStream(destinationFile));
      byte[] buffer = new byte[1024 * 10];
      int bytesRead;
      while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) {
        outputStream.write(buffer, 0, bytesRead);
      }
    } catch (IOException e) {
      try {
        s3Object.getObjectContent().abort();
      } catch (IOException abortException) {
        log.warn("Couldn't abort stream", abortException);
      }
      throw new AmazonClientException(
          "Unable to store object contents to disk: " + e.getMessage(), e);
    } finally {
      try {
        outputStream.close();
      } catch (Exception e) {
      }
      try {
        s3Object.getObjectContent().close();
      } catch (Exception e) {
      }
    }

    byte[] clientSideHash = null;
    byte[] serverSideHash = null;
    try {
      // Multipart Uploads don't have an MD5 calculated on the service side
      if (ServiceUtils.isMultipartUploadETag(s3Object.getObjectMetadata().getETag()) == false) {
        clientSideHash = Md5Utils.computeMD5Hash(new FileInputStream(destinationFile));
        serverSideHash = BinaryUtils.fromHex(s3Object.getObjectMetadata().getETag());
      }
    } catch (Exception e) {
      log.warn("Unable to calculate MD5 hash to validate download: " + e.getMessage(), e);
    }

    if (performIntegrityCheck
        && clientSideHash != null
        && serverSideHash != null
        && !Arrays.equals(clientSideHash, serverSideHash)) {
      throw new AmazonClientException(
          "Unable to verify integrity of data download.  "
              + "Client calculated content hash didn't match hash calculated by Amazon S3.  "
              + "The data stored in '"
              + destinationFile.getAbsolutePath()
              + "' may be corrupt.");
    }
  }
Code example #8
  /**
   * Return the graph for the given unique identifier for graph builder inputs on S3. If this is the
   * same as the last graph built, just return the pre-built graph. If not, build the graph from the
   * inputs, fetching them from S3 to the local cache as needed.
   */
  public synchronized Graph getGraph(String graphId) {

    LOG.info("Finding a graph for ID {}", graphId);

    if (graphId.equals(currGraphId)) {
      LOG.info("GraphID has not changed. Reusing the last graph that was built.");
      return currGraph;
    }

    // The location of the inputs that will be used to build this graph
    File graphDataDirectory = new File(GRAPH_CACHE_DIR, graphId);

    // If we don't have a local copy of the inputs, fetch graph data as a ZIP from S3 and unzip it
    if (!graphDataDirectory.exists() || graphDataDirectory.list().length == 0) {
      LOG.info("Downloading graph input files.");
      graphDataDirectory.mkdirs();
      S3Object graphDataZipObject = s3.getObject(graphBucket, graphId + ".zip");
      ZipInputStream zis = new ZipInputStream(graphDataZipObject.getObjectContent());
      try {
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
          File entryDestination = new File(graphDataDirectory, entry.getName());
          // Are both these mkdirs calls necessary?
          entryDestination.getParentFile().mkdirs();
          if (entry.isDirectory()) entryDestination.mkdirs();
          else {
            OutputStream entryFileOut = new FileOutputStream(entryDestination);
            IOUtils.copy(zis, entryFileOut);
            entryFileOut.close();
          }
        }
        zis.close();
      } catch (Exception e) {
        // TODO delete graph cache dir which is probably corrupted
        LOG.info("Error retrieving graph files", e);
      }
    } else {
      LOG.info("Graph input files were found locally. Using these files from the cache.");
    }

    // Now we have a local copy of these graph inputs. Make a graph out of them.
    CommandLineParameters params = new CommandLineParameters();
    params.build = new File(GRAPH_CACHE_DIR, graphId);
    params.inMemory = true;
    GraphBuilder graphBuilder = GraphBuilder.forDirectory(params, params.build);
    graphBuilder.run();
    Graph graph = graphBuilder.getGraph();
    graph.routerId = graphId;
    graph.index(new DefaultStreetVertexIndexFactory());
    graph.index.clusterStopsAsNeeded();
    this.currGraphId = graphId;
    this.currGraph = graph;
    return graph;
  }
Code example #9
  /**
   * Returns an updated object where the object content input stream contains the decrypted
   * contents.
   *
   * @param object The object whose contents are to be decrypted.
   * @param instruction The instruction that will be used to decrypt the object data.
   * @return The updated object where the object content input stream contains the decrypted
   *     contents.
   * @deprecated no longer used and will be removed in the future
   */
  @Deprecated
  public static S3Object decryptObjectUsingInstruction(
      S3Object object, EncryptionInstruction instruction) {
    S3ObjectInputStream objectContent = object.getObjectContent();

    InputStream decryptedInputStream =
        new RepeatableCipherInputStream(objectContent, instruction.getCipherFactory());
    object.setObjectContent(
        new S3ObjectInputStream(decryptedInputStream, objectContent.getHttpRequest()));
    return object;
  }
Code example #10
  /** Data cleansing method */
  public void cleanseData(AmazonS3 client) throws Exception {

    AwsDataLoader loader = new AwsDataLoader();
    CSVReader reader = null;
    String prefix = loader.getS3Prefix(source);
    client.setEndpoint(S3_ENDPOINT);
    S3Object object = client.getObject(new GetObjectRequest(BUCKET_NM, prefix));
    reader =
        new CSVReader(
            new BufferedReader(new InputStreamReader(object.getObjectContent())),
            CSVParser.DEFAULT_SEPARATOR,
            CSVParser.DEFAULT_QUOTE_CHARACTER,
            CSVParser.DEFAULT_ESCAPE_CHARACTER,
            HEADERS_LINE);
    ColumnPositionMappingStrategy<ProductLanguage> strat =
        new ColumnPositionMappingStrategy<ProductLanguage>();
    strat.setType(ProductLanguage.class);
    String[] columns =
        new String[] {"refId", "code", "name", "locale", "state", "format", "displayNameLanguage"};
    strat.setColumnMapping(columns);
    CsvToBean<ProductLanguage> csv = new CsvToBean<ProductLanguage>();
    list = csv.parse(strat, reader);

    System.out.println("ProductLanguageCleanser input size: " + list.size());

    this.updateDataset(list);

    BeanToCsv<ProductLanguage> csvWriter = new BeanToCsv<ProductLanguage>();
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(os), ',', '"');
    // strat.setColumnMapping(columns);
    log.info("List size: " + list.size());
    csvWriter.write(strat, writer, list);
    writer.flush();
    String dataset = os.toString();
    String outPrefix = PREFIX + OUTPUT_KEY + source + ".csv";

    client.setEndpoint(S3_ENDPOINT);
    ObjectMetadata omd = new ObjectMetadata();

    try {
      byte[] content = dataset.getBytes(StandardCharsets.UTF_8);
      ByteArrayInputStream input = new ByteArrayInputStream(content);
      BufferedReader buffReader = new BufferedReader(new InputStreamReader(input));
      buffReader.readLine();
      InputStream inputObj = new ReaderInputStream(buffReader);
      // omd.setContentLength(IOUtils.toByteArray(input).length);
      client.putObject(BUCKET_NM, outPrefix, inputObj, omd);
      input.close();
    } catch (IOException e) {
      log.error("Axon data write to s3 failed: " + e.getMessage());
    }
  }
Code example #11
  /**
   * Builds an instruction object from the contents of an instruction file.
   *
   * @param instructionFile A non-null instruction file retrieved from S3 that contains encryption
   *     information
   * @param materialsProvider The non-null encryption materials provider to be used to encrypt and
   *     decrypt data.
   * @param cryptoProvider The crypto provider whose encryption implementation will be used to
   *     encrypt and decrypt data. Null is ok and uses the preferred provider from
   *     Security.getProviders().
   * @return A non-null instruction object containing encryption information
   * @deprecated no longer used and will be removed in the future
   */
  @Deprecated
  public static EncryptionInstruction buildInstructionFromInstructionFile(
      S3Object instructionFile,
      EncryptionMaterialsProvider materialsProvider,
      Provider cryptoProvider) {
    JSONObject instructionJSON = parseJSONInstruction(instructionFile);
    try {
      // Get fields from instruction object
      String encryptedSymmetricKeyB64 = instructionJSON.getString(Headers.CRYPTO_KEY);
      String ivB64 = instructionJSON.getString(Headers.CRYPTO_IV);
      String materialsDescriptionString =
          instructionJSON.tryGetString(Headers.MATERIALS_DESCRIPTION);
      Map<String, String> materialsDescription = convertJSONToMap(materialsDescriptionString);

      // Decode from Base 64 to standard binary bytes
      byte[] encryptedSymmetricKey = Base64.decode(encryptedSymmetricKeyB64);
      byte[] iv = Base64.decode(ivB64);

      if (encryptedSymmetricKey == null || iv == null) {
        // If necessary encryption info was not found in the instruction file, throw an exception.
        throw new AmazonClientException(
            String.format(
                "Necessary encryption info not found in the instruction file '%s' in bucket '%s'",
                instructionFile.getKey(), instructionFile.getBucketName()));
      }

      EncryptionMaterials materials =
          retrieveOriginalMaterials(materialsDescription, materialsProvider);
      // If we're unable to retrieve the original encryption materials, we can't decrypt the
      // object, so throw an exception.
      if (materials == null) {
        throw new AmazonClientException(
            String.format(
                "Unable to retrieve the encryption materials that originally "
                    + "encrypted object corresponding to instruction file '%s' in bucket '%s'.",
                instructionFile.getKey(), instructionFile.getBucketName()));
      }

      // Decrypt the symmetric key and create the symmetric cipher
      SecretKey symmetricKey =
          getDecryptedSymmetricKey(encryptedSymmetricKey, materials, cryptoProvider);
      CipherFactory cipherFactory =
          new CipherFactory(symmetricKey, Cipher.DECRYPT_MODE, iv, cryptoProvider);

      return new EncryptionInstruction(
          materialsDescription, encryptedSymmetricKey, symmetricKey, cipherFactory);
    } catch (JSONException e) {
      throw new AmazonClientException(
          "Unable to parse retrieved instruction file : " + e.getMessage());
    }
  }
Code example #12
 private S3Object getConfigObject() throws Exception {
   try {
     S3Object object = s3Client.getObject(arguments.getBucket(), arguments.getKey());
     if (object.getObjectMetadata().getContentLength() > 0) {
       return object;
     }
   } catch (AmazonS3Exception e) {
     if (!isNotFoundError(e)) {
       throw e;
     }
   }
   return null;
 }
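
The helper isNotFoundError is referenced but not shown above. A plausible implementation (an assumption, not taken from the original project) treats an HTTP 404 from S3 as "the key simply isn't there yet" and lets every other error propagate:

 private boolean isNotFoundError(AmazonS3Exception e) {
   // S3 reports a missing key as HTTP 404 (error code "NoSuchKey").
   return e.getStatusCode() == 404;
 }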
Code example #13
File: S3Storage.java Project: carriercomm/cosbench
  @Override
  public InputStream getObject(String container, String object, Config config) {
    super.getObject(container, object, config);
    InputStream stream;
    try {

      S3Object s3Obj = client.getObject(container, object);
      stream = s3Obj.getObjectContent();

    } catch (Exception e) {
      throw new StorageException(e);
    }
    return stream;
  }
Code example #14
File: S3Tool.java Project: DDMAL/jmei-search
 /**
  * Reads a file from S3 or from the filesystem.
  *
  * @param filename an S3 URL of the form s3://bucket/key, or a local file path
  * @return the file contents as a string, or null if reading fails
  */
 public static String readFile(String filename) {
   try {
     if (isS3File(filename)) {
       String[] parts = filename.split("/");
       String[] keyArray = Arrays.copyOfRange(parts, 3, parts.length);
       String key = StringUtils.join(keyArray, "/");
       S3Object object = getInstance().getObject(new GetObjectRequest(parts[2], key));
       return IOUtils.toString(object.getObjectContent());
     } else {
       return FileUtils.readFileToString(new File(filename));
     }
   } catch (IOException e) {
     return null;
   }
 }
Code example #15
File: S3Test.java Project: koniecznypatrick/wuic
  /**
   * Tests the S3 access.
   *
   * @throws Exception if test fails
   */
  @Test
  public void s3Test() throws Exception {
    // Create a real object and mock its initClient method
    final S3NutDao dao = spy(new S3NutDao("/path", false, null, -1, "wuic", "login", "pwd", false));

    // Build client mock
    final AmazonS3Client client = mock(AmazonS3Client.class);
    when(dao.initClient()).thenReturn(client);

    // List returned by client
    final ObjectListing list = mock(ObjectListing.class);
    final S3ObjectSummary summary = mock(S3ObjectSummary.class);
    when(summary.getKey()).thenReturn("[cloud].css");
    final S3ObjectSummary summarBis = mock(S3ObjectSummary.class);
    when(summarBis.getKey()).thenReturn("cloud.css");
    when(client.listObjects(any(ListObjectsRequest.class))).thenReturn(list);
    when(list.getObjectSummaries()).thenReturn(Arrays.asList(summary, summarBis));

    // Bytes returned by mocked S3
    final byte[] array = ".cloud { text-align : justify;}".getBytes();
    final S3Object object = mock(S3Object.class);
    when(object.getObjectContent())
        .thenReturn(new S3ObjectInputStream(new ByteArrayInputStream(array), null));
    when(client.getObject(anyString(), anyString())).thenReturn(object);

    // TODO : problem here : we specify '[cloud.css]' but getNuts() returns 'cloud.css' because
    // regex are always activated !
    final NutsHeap nutsHeap = new NutsHeap(Arrays.asList("[cloud].css"), dao, "heap");
    Assert.assertEquals(nutsHeap.getNuts().size(), 1);

    final Engine compressor = new CssYuiCompressorEngine(true, "UTF-8", -1);
    final Engine cacheEngine = new EhCacheEngine(false, null);
    final Engine aggregator = new CGTextAggregatorEngine(true);
    cacheEngine.setNext(compressor);
    compressor.setNext(aggregator);

    final List<Nut> group =
        cacheEngine.parse(new EngineRequest("", "", nutsHeap, new HashMap<NutType, Engine>()));

    Assert.assertFalse(group.isEmpty());
    InputStream is;

    for (Nut res : group) {
      is = res.openStream();
      Assert.assertTrue(IOUtils.readString(new InputStreamReader(is)).length() > 0);
      is.close();
    }
  }
Code example #16
  /**
   * Gets an object stored in S3 and downloads it into the specified file. This method includes the
   * one-time retry mechanism after integrity check failure on the downloaded file. It will also
   * return immediately after getting null valued S3Object (when getObject request does not meet the
   * specified constraints).
   *
   * @param file The file to store the object's data in.
   * @param retryableS3DownloadTask The implementation of the RetryableS3DownloadTask interface
   *     which allows the user to get access to all the visible variables at the calling site of
   *     this method.
   */
  public static S3Object retryableDownloadS3ObjectToFile(
      File file, RetryableS3DownloadTask retryableS3DownloadTask) {
    boolean hasRetried = false;
    boolean needRetry;
    S3Object s3Object;
    do {
      needRetry = false;
      s3Object = retryableS3DownloadTask.getS3ObjectStream();
      if (s3Object == null) return null;

      try {
        ServiceUtils.downloadObjectToFile(
            s3Object, file, retryableS3DownloadTask.needIntegrityCheck());
      } catch (AmazonClientException ace) {
        // Determine whether an immediate retry is needed according to the captured
        // AmazonClientException.
        // (There are three cases when downloadObjectToFile() throws AmazonClientException:
        //   1) SocketException or SSLProtocolException when writing to disk (e.g. when the user
        //      aborts the download)
        //   2) Other IOException when writing to disk
        //   3) MD5 hashes don't match
        // The current code will retry the download only when case 2) or 3) happens.
        if (ace.getCause() instanceof SocketException
            || ace.getCause() instanceof SSLProtocolException) {
          throw ace;
        } else {
          needRetry = true;
          if (hasRetried) throw ace;
          else {
            log.info(
                "Retry the download of object "
                    + s3Object.getKey()
                    + " (bucket "
                    + s3Object.getBucketName()
                    + ")",
                ace);
            hasRetried = true;
          }
        }
      } finally {
        try {
          s3Object.getObjectContent().abort();
        } catch (IOException e) {
        }
      }
    } while (needRetry);
    return s3Object;
  }
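
A usage sketch for the method above (an assumption about the calling side, not shown in the source): the caller supplies the actual GetObject call and the integrity-check decision through the RetryableS3DownloadTask callback, and the helper handles the single retry.

  public static void fetchWithRetry(final AmazonS3 s3, final String bucket, final String key, File dest) {
    retryableDownloadS3ObjectToFile(
        dest,
        new RetryableS3DownloadTask() {
          @Override
          public S3Object getS3ObjectStream() {
            // Issued once per attempt, so a retry gets a fresh content stream.
            return s3.getObject(bucket, key);
          }

          @Override
          public boolean needIntegrityCheck() {
            return true;
          }
        });
  }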
Code example #17
 @Override
 public void download(String key, File file) throws IOException {
   S3Object s3Object;
   try {
     s3Object = s3Client.getObject(Namespaces.get().getBucket(), key);
   } catch (AmazonClientException ace) {
     throw new IOException(ace);
   }
   file.delete();
   InputStream in = s3Object.getObjectContent();
   try {
     IOUtils.copyStreamToFile(in, file);
   } finally {
     in.close();
   }
 }
Code example #18
 /**
  * Converts and returns the underlying S3 object as a JSON string.
  *
  * @throws AmazonClientException if failed in JSON conversion.
  */
 String toJsonString() {
   try {
     return from(s3obj.getObjectContent());
   } catch (Exception e) {
     throw new AmazonClientException("Error parsing JSON: " + e.getMessage());
   }
 }
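
The from(InputStream) helper called above is not shown. A plausible shape for it (an assumption, not the library's actual implementation) drains the content stream into a UTF-8 string and always closes it:

 private static String from(InputStream is) throws IOException {
   try (InputStream in = is;
       ByteArrayOutputStream out = new ByteArrayOutputStream()) {
     byte[] buf = new byte[8192];
     int n;
     while ((n = in.read(buf)) != -1) {
       out.write(buf, 0, n);
     }
     return new String(out.toByteArray(), StandardCharsets.UTF_8);
   }
 }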
Code example #19
 /**
  * Returns true if the specified S3Object contains encryption info in its metadata, false
  * otherwise.
  *
  * @param retrievedObject An S3Object
  * @return True if the specified S3Object contains encryption info in its metadata, false
  *     otherwise.
  * @deprecated no longer used and will be removed in the future
  */
 @Deprecated
 public static boolean isEncryptionInfoInMetadata(S3Object retrievedObject) {
   Map<String, String> metadata = retrievedObject.getObjectMetadata().getUserMetadata();
   return metadata != null
       && metadata.containsKey(Headers.CRYPTO_IV)
       && metadata.containsKey(Headers.CRYPTO_KEY);
 }
Code example #20
 /**
  * Returns true if this S3 object has the encryption information stored as user meta data; false
  * otherwise.
  */
 final boolean hasEncryptionInfo() {
   ObjectMetadata metadata = s3obj.getObjectMetadata();
   Map<String, String> userMeta = metadata.getUserMetadata();
   return userMeta != null
       && userMeta.containsKey(Headers.CRYPTO_IV)
       && (userMeta.containsKey(Headers.CRYPTO_KEY_V2)
           || userMeta.containsKey(Headers.CRYPTO_KEY));
 }
Code example #21
  @Override
  public BufferedReader streamFrom(String key) throws IOException {

    S3Object s3Object;
    try {
      s3Object = s3Client.getObject(Namespaces.get().getBucket(), key);
    } catch (AmazonClientException ace) {
      throw new IOException(ace);
    }

    String[] contentTypeEncoding = guessContentTypeEncodingFromExtension(key);
    String encoding = contentTypeEncoding[1];

    InputStream in = s3Object.getObjectContent();
    try {
      if ("gzip".equals(encoding)) {
        in = new GZIPInputStream(in);
      } else if ("zip".equals(encoding)) {
        in = new ZipInputStream(in);
      } else if ("deflate".equals(encoding)) {
        in = new InflaterInputStream(in);
      } else if ("bzip2".equals(encoding)) {
        in = new BZip2CompressorInputStream(in);
      } else {
        ObjectMetadata metadata = s3Object.getObjectMetadata();
        if (metadata != null) {
          String contentEncoding = metadata.getContentEncoding();
          if ("gzip".equals(contentEncoding)) {
            in = new GZIPInputStream(in);
          } else if ("zip".equals(contentEncoding)) {
            in = new ZipInputStream(in);
          } else if ("deflate".equals(contentEncoding)) {
            in = new InflaterInputStream(in);
          } else if ("bzip2".equals(contentEncoding)) {
            in = new BZip2CompressorInputStream(in);
          }
        }
      }
    } catch (IOException ioe) {
      // Be extra sure this doesn't leak in case of an error
      in.close();
      throw ioe;
    }

    return new BufferedReader(new InputStreamReader(in, Charsets.UTF_8), 1 << 20); // ~1MB
  }
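
The guessContentTypeEncodingFromExtension helper drives the decompressor selection above but is not shown. A hedged sketch of what it might look like (an assumption, not the project's actual code), mapping a few common key extensions to a {contentType, encoding} pair:

  private static String[] guessContentTypeEncodingFromExtension(String key) {
    String lower = key.toLowerCase(Locale.ROOT);
    if (lower.endsWith(".gz") || lower.endsWith(".gzip")) {
      return new String[] {"application/gzip", "gzip"};
    }
    if (lower.endsWith(".zip")) {
      return new String[] {"application/zip", "zip"};
    }
    if (lower.endsWith(".bz2")) {
      return new String[] {"application/x-bzip2", "bzip2"};
    }
    if (lower.endsWith(".deflate")) {
      return new String[] {"application/octet-stream", "deflate"};
    }
    // Unknown extension: fall back to the object's own Content-Encoding metadata (handled above).
    return new String[] {null, null};
  }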
Code example #22
  /** @see com.amazonaws.http.HttpResponseHandler#handle(com.amazonaws.http.HttpResponse) */
  public AmazonWebServiceResponse<S3Object> handle(HttpResponse response) throws Exception {
    /*
     * TODO: It'd be nice to set the bucket name and key here, but the
     *       information isn't easy to pull out of the response/request
     *       currently.
     */
    S3Object object = new S3Object();
    AmazonWebServiceResponse<S3Object> awsResponse = parseResponseMetadata(response);
    if (response.getHeaders().get(Headers.REDIRECT_LOCATION) != null) {
      object.setRedirectLocation(response.getHeaders().get(Headers.REDIRECT_LOCATION));
    }
    ObjectMetadata metadata = object.getObjectMetadata();
    populateObjectMetadata(response, metadata);
    boolean hasServerSideCalculatedChecksum =
        !ServiceUtils.isMultipartUploadETag(metadata.getETag());
    boolean responseContainsEntireObject = response.getHeaders().get("Content-Range") == null;

    if (hasServerSideCalculatedChecksum && responseContainsEntireObject) {
      byte[] expectedChecksum = BinaryUtils.fromHex(metadata.getETag());
      object.setObjectContent(
          new S3ObjectInputStream(
              new ChecksumValidatingInputStream(
                  response.getContent(),
                  expectedChecksum,
                  object.getBucketName() + "/" + object.getKey()),
              response.getHttpRequest()));
    } else {
      object.setObjectContent(
          new S3ObjectInputStream(response.getContent(), response.getHttpRequest()));
    }

    awsResponse.setResult(object);
    return awsResponse;
  }
Code example #23
  public static String scaleImage(String bucket, String key) throws IOException {
    if (key.startsWith(PREFIX)) {
      System.out.println("Target image is already scaled");
      return "Nothing";
    }

    Optional<String> optionalImageType = getImageType(key);
    if (!optionalImageType.isPresent()) {
      return "";
    }
    String imageType = optionalImageType.get();

    // Download the image from S3 into a stream
    AmazonS3 s3Client = new AmazonS3Client();
    S3Object s3Object = s3Client.getObject(new GetObjectRequest(bucket, key));
    InputStream objectData = s3Object.getObjectContent();

    // Read the source image
    BufferedImage srcImage = ImageIO.read(objectData);
    int srcHeight = srcImage.getHeight();
    int srcWidth = srcImage.getWidth();

    for (int scaleDimension : SCALE_DIMENSIONS) {
      // Infer the scaling factor to avoid stretching the image
      // unnaturally
      float scalingFactor =
          Math.min((float) scaleDimension / srcWidth, (float) scaleDimension / srcHeight);
      int width = (int) (scalingFactor * srcWidth);
      int height = (int) (scalingFactor * srcHeight);

      BufferedImage resizedImage =
          getHighQualityScaledInstance(
              srcImage, width, height, RenderingHints.VALUE_INTERPOLATION_BICUBIC);

      BufferedImage squaredImage =
          getSquaredImage(resizedImage, RenderingHints.VALUE_INTERPOLATION_BICUBIC);

      String dstKeyResized = PREFIX + "-" + scaleDimension + "-" + key;
      String dstKeyResizedSquared = PREFIX + "-" + scaleDimension + "-squared-" + key;

      saveToS3(bucket, key, imageType, s3Client, dstKeyResized, resizedImage);
      saveToS3(bucket, key, imageType, s3Client, dstKeyResizedSquared, squaredImage);
    }
    return "Ok";
  }
Code example #24
 /**
  * Parses instruction data retrieved from S3 and returns a JSONObject representing the instruction
  */
 @Deprecated
 private static JSONObject parseJSONInstruction(S3Object instructionObject) {
   try {
     String instructionString = convertStreamToString(instructionObject.getObjectContent());
     return new JSONObject(instructionString);
   } catch (Exception e) {
     throw new AmazonClientException("Error parsing JSON instruction file: " + e.getMessage());
   }
 }
Code example #25
  @Override
  public void downloadBackup(
      Exhibitor exhibitor,
      BackupMetaData backup,
      File destination,
      Map<String, String> configValues)
      throws Exception {
    S3Object object = s3Client.getObject(configValues.get(CONFIG_BUCKET.getKey()), toKey(backup));

    long startMs = System.currentTimeMillis();
    RetryPolicy retryPolicy = makeRetryPolicy(configValues);
    int retryCount = 0;
    boolean done = false;
    while (!done) {
      Throttle throttle = makeThrottle(configValues);
      InputStream in = null;
      FileOutputStream out = null;
      try {
        out = new FileOutputStream(destination);
        in = object.getObjectContent();

        FileChannel channel = out.getChannel();
        CompressorIterator compressorIterator = compressor.decompress(in);
        for (; ; ) {
          ByteBuffer bytes = compressorIterator.next();
          if (bytes == null) {
            break;
          }

          throttle.throttle(bytes.limit());
          channel.write(bytes);
        }

        done = true;
      } catch (Exception e) {
        if (!retryPolicy.allowRetry(retryCount++, System.currentTimeMillis() - startMs)) {
          done = true;
        }
      } finally {
        Closeables.closeQuietly(in);
        Closeables.closeQuietly(out);
      }
    }
  }
Code example #26
 /**
  * Adjusts the retrieved S3Object so that the object contents contain only the range of bytes
  * desired by the user. Since encrypted contents can only be retrieved in CIPHER_BLOCK_SIZE (16
  * bytes) chunks, the S3Object potentially contains more bytes than desired, so this method
  * adjusts the contents range.
  *
  * @param object The S3Object retrieved from S3 that could possibly contain more bytes than
  *     desired by the user.
  * @param range A two-element array of longs corresponding to the start and finish (inclusive) of
  *     a desired range of bytes.
  * @return The S3Object with adjusted object contents containing only the range desired by the
  *     user. If the range specified is invalid, then the S3Object is returned without any
  *     modifications.
  * @deprecated no longer used and will be removed in the future
  */
 @Deprecated
 public static S3Object adjustOutputToDesiredRange(S3Object object, long[] range) {
   if (range == null || range[0] > range[1]) {
     // Make no modifications if range is invalid.
     return object;
   } else {
     try {
       S3ObjectInputStream objectContent = object.getObjectContent();
       InputStream adjustedRangeContents =
           new AdjustedRangeInputStream(objectContent, range[0], range[1]);
       object.setObjectContent(
           new S3ObjectInputStream(adjustedRangeContents, objectContent.getHttpRequest()));
       return object;
     } catch (IOException e) {
       throw new AmazonClientException(
           "Error adjusting output to desired byte range: " + e.getMessage());
     }
   }
 }
Code example #27
  @Override
  public LoadedInstanceConfig loadConfig() throws Exception {
    Date lastModified;
    Properties properties = new Properties();
    S3Object object = getConfigObject();
    if (object != null) {
      try {
        lastModified = object.getObjectMetadata().getLastModified();
        properties.load(object.getObjectContent());
      } finally {
        Closeables.closeQuietly(object.getObjectContent());
      }
    } else {
      lastModified = new Date(0L);
    }

    PropertyBasedInstanceConfig config = new PropertyBasedInstanceConfig(properties, defaults);
    return new LoadedInstanceConfig(config, lastModified.getTime());
  }
Code example #28
 /**
  * Returns the original crypto scheme used for encryption, which may differ from the crypto scheme
  * used for decryption during, for example, a range-get operation.
  *
  * @param instructionFile the instruction file of the s3 object; or null if there is none.
  */
 ContentCryptoScheme encryptionSchemeOf(Map<String, String> instructionFile) {
   if (instructionFile != null) {
     String cekAlgo = instructionFile.get(Headers.CRYPTO_CEK_ALGORITHM);
     return ContentCryptoScheme.fromCEKAlgo(cekAlgo);
   }
   ObjectMetadata meta = s3obj.getObjectMetadata();
   Map<String, String> userMeta = meta.getUserMetadata();
   String cekAlgo = userMeta.get(Headers.CRYPTO_CEK_ALGORITHM);
   return ContentCryptoScheme.fromCEKAlgo(cekAlgo);
 }
Code example #29
  /**
   * Builds an instruction object from the object metadata.
   *
   * @param object A non-null object that contains encryption information in its headers
   * @param materialsProvider The non-null encryption materials provider to be used to encrypt and
   *     decrypt data.
   * @param cryptoProvider The crypto provider whose encryption implementation will be used to
   *     encrypt and decrypt data. Null is ok and uses the preferred provider from
   *     Security.getProviders().
   * @return A non-null instruction object containing encryption information
   * @throws AmazonClientException if encryption information is missing in the metadata, or the
   *     encryption materials used to encrypt the object are not available via the materials
   *     Accessor
   * @deprecated no longer used and will be removed in the future
   */
  @Deprecated
  public static EncryptionInstruction buildInstructionFromObjectMetadata(
      S3Object object, EncryptionMaterialsProvider materialsProvider, Provider cryptoProvider) {
    ObjectMetadata metadata = object.getObjectMetadata();

    // Get encryption info from metadata.
    byte[] encryptedSymmetricKeyBytes = getCryptoBytesFromMetadata(Headers.CRYPTO_KEY, metadata);
    byte[] initVectorBytes = getCryptoBytesFromMetadata(Headers.CRYPTO_IV, metadata);
    String materialsDescriptionString =
        getStringFromMetadata(Headers.MATERIALS_DESCRIPTION, metadata);
    Map<String, String> materialsDescription = convertJSONToMap(materialsDescriptionString);

    if (encryptedSymmetricKeyBytes == null || initVectorBytes == null) {
      // If necessary encryption info was not found in the object metadata, throw an exception.
      throw new AmazonClientException(
          String.format(
              "Necessary encryption info not found in the headers of file '%s' in bucket '%s'",
              object.getKey(), object.getBucketName()));
    }

    EncryptionMaterials materials =
        retrieveOriginalMaterials(materialsDescription, materialsProvider);
    // If we're unable to retrieve the original encryption materials, we can't decrypt the
    // object, so throw an exception.
    if (materials == null) {
      throw new AmazonClientException(
          String.format(
              "Unable to retrieve the encryption materials that originally "
                  + "encrypted file '%s' in bucket '%s'.",
              object.getKey(), object.getBucketName()));
    }

    // Decrypt the symmetric key and create the symmetric cipher
    SecretKey symmetricKey =
        getDecryptedSymmetricKey(encryptedSymmetricKeyBytes, materials, cryptoProvider);
    CipherFactory cipherFactory =
        new CipherFactory(symmetricKey, Cipher.DECRYPT_MODE, initVectorBytes, cryptoProvider);

    return new EncryptionInstruction(
        materialsDescription, encryptedSymmetricKeyBytes, symmetricKey, cipherFactory);
  }
Code example #30
  /* (non-Javadoc)
   * @see com.amazonaws.services.s3.AmazonS3#getObject(com.amazonaws.services.s3.model.GetObjectRequest, java.io.File)
   */
  @Override
  public ObjectMetadata getObject(GetObjectRequest getObjectRequest, File destinationFile)
      throws AmazonClientException, AmazonServiceException {

    assertParameterNotNull(
        destinationFile,
        "The destination file parameter must be specified when downloading an object directly to a file");

    S3Object s3Object = getObject(getObjectRequest);
    // getObject can return null if constraints were specified but not met
    if (s3Object == null) return null;

    OutputStream outputStream = null;
    try {
      outputStream = new BufferedOutputStream(new FileOutputStream(destinationFile));
      byte[] buffer = new byte[1024 * 10];
      int bytesRead;
      while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) {
        outputStream.write(buffer, 0, bytesRead);
      }
    } catch (IOException e) {
      throw new AmazonClientException(
          "Unable to store object contents to disk: " + e.getMessage(), e);
    } finally {
      try {
        outputStream.close();
      } catch (Exception e) {
      }
      try {
        s3Object.getObjectContent().close();
      } catch (Exception e) {
      }
    }

    /*
     * Unlike the standard Amazon S3 Client, the Amazon S3 Encryption Client does not do an MD5 check
     * here because the contents stored in S3 and the contents we just retrieved are different.  In
     * S3, the stored contents are encrypted, and locally, the retrieved contents are decrypted.
     */

    return s3Object.getObjectMetadata();
  }