  /**
   * output with neither streaming nor staging.
   *
   * @throws Exception if failed
   */
  @Test
  public void output_nomove() throws Exception {
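    // staging and streaming are both disabled, so the output should appear at
    // the final destination on attempt commit rather than on transaction
    // commit (see the assertions below); the local temporary directory is
    // configured because non-streaming output presumably spools there first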
    profile.setOutputStaging(false);
    profile.setOutputStreaming(false);
    profile
        .getLocalFileSystem()
        .getConf()
        .set(HadoopDataSourceUtil.KEY_LOCAL_TEMPDIR, localtemp.getPath());
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    setup(core);
    ModelOutput<StringBuilder> output =
        core.openOutput(context, StringBuilder.class, format, "output", "file.txt", counter);
    try {
      output.write(new StringBuilder("Hello, world!"));
    } finally {
      output.close();
    }
    assertThat(counter.get(), is(greaterThan(0L)));

    File target = new File(mapping, "output/file.txt");
    assertThat(target.exists(), is(false));

    commitAttempt(core);
    assertThat(target.exists(), is(true));

    commitTransaction(core);
    assertThat(target.exists(), is(true));

    assertThat(get(target), is(Arrays.asList("Hello, world!")));
  }
  /**
   * simple delete.
   *
   * @throws Exception if failed
   */
  @Test
  public void delete() throws Exception {
    File file = new File(mapping, "delete/file.txt");
    put(file, "Hello, world!");

    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);

    assertThat(file.exists(), is(true));
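    // "**/*" matches every file under the "delete" component path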
    boolean result = core.delete("delete", FilePattern.compile("**/*"), true, counter);

    assertThat(result, is(true));
    assertThat(file.exists(), is(false));
  }
  /**
   * delete all files on the data source root.
   *
   * @throws Exception if failed
   */
  @Test
  public void delete_all() throws Exception {
    File file = new File(mapping, "file.txt");
    put(file, "Hello, world!");

    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);

    assertThat(file.exists(), is(true));
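    // "**" matches everything under the root, but the root itself must survive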
    boolean result = core.delete("", FilePattern.compile("**"), true, counter);

    assertThat(result, is(true));
    assertThat(file.exists(), is(false));
    assertThat("the root directory must not be deleted", mapping.exists(), is(true));
  }
  /**
   * rollback output.
   *
   * @throws Exception if failed
   */
  @Test
  public void output_rollback() throws Exception {
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    setup(core);
    ModelOutput<StringBuilder> output =
        core.openOutput(context, StringBuilder.class, format, "output", "file.txt", counter);
    try {
      output.write(new StringBuilder("Hello, world!"));
    } finally {
      output.close();
    }
    // cleaning up without committing must discard the attempt output
    cleanup(core);
    assertThat(new File(mapping, "output/file.txt").exists(), is(false));
  }
  /**
   * simple input.
   *
   * @throws Exception if failed
   */
  @Test
  public void input() throws Exception {
    put(new File(mapping, "input/file.txt"), "Hello, world!");
    profile.setMinimumFragmentSize(-1);

    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    List<DirectInputFragment> fragments =
        core.findInputFragments(StringBuilder.class, format, "input", FilePattern.compile("**"));
    assertThat(fragments.size(), is(1));

    List<String> results = consume(core, fragments);
    assertThat(counter.get(), is(greaterThan(0L)));
    assertThat(results.size(), is(1));
    assertThat(results, hasItem("Hello, world!"));
  }
  /**
   * input a large file which is split into multiple fragments.
   *
   * @throws Exception if failed
   */
  @Test
  public void input_large() throws Exception {
    long fragmentSize = 1 * 1024 * 1024;
    int fragmentCount = 20;
    put(new File(mapping, "input/file.txt"), fragmentSize * fragmentCount);
    profile.setMinimumFragmentSize(1);
    profile.setPreferredFragmentSize(fragmentSize);
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    List<DirectInputFragment> fragments =
        core.findInputFragments(StringBuilder.class, format, "input", FilePattern.compile("**"));
    // each fragment should stay close to the preferred fragment size
    assertThat(fragments.size(), is(greaterThanOrEqualTo(fragmentCount / 2)));
    for (DirectInputFragment fragment : fragments) {
      assertThat(fragment.getSize(), is(greaterThanOrEqualTo(fragmentSize / 2)));
      assertThat(fragment.getSize(), is(lessThanOrEqualTo(fragmentSize * 2)));
    }
  }
  /**
   * delete multiple files.
   *
   * @throws Exception if failed
   */
  @Test
  public void delete_multifile() throws Exception {
    File[] files = {
      new File(mapping, "delete/file.txt"),
      new File(mapping, "delete/file2.txt"),
      new File(mapping, "delete/a/file.txt"),
      new File(mapping, "delete/a/b/file.txt"),
    };
    for (File file : files) {
      put(file, "Hello, world!");
    }
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);

    for (File file : files) {
      assertThat(file.exists(), is(true));
    }
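    // the recursive pattern must also cover files in nested subdirectories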
    boolean result = core.delete("delete", FilePattern.compile("**/*"), true, counter);

    assertThat(result, is(true));
    for (File file : files) {
      assertThat(file.exists(), is(false));
    }
  }
  /**
   * output multiple files.
   *
   * @throws Exception if failed
   */
  @Test
  public void output_multifile() throws Exception {
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    setup(core);
    // write a different number of records into each of three files
    for (int i = 0; i < 3; i++) {
      ModelOutput<StringBuilder> output =
          core.openOutput(
              context, StringBuilder.class, format, "output", "file" + i + ".txt", counter);
      try {
        for (int j = 0; j < i + 1; j++) {
          output.write(new StringBuilder("Hello" + j));
        }
      } finally {
        output.close();
      }
    }
    commit(core);
    assertThat(get(new File(mapping, "output/file0.txt")), is(Arrays.asList("Hello0")));
    assertThat(get(new File(mapping, "output/file1.txt")), is(Arrays.asList("Hello0", "Hello1")));
    assertThat(
        get(new File(mapping, "output/file2.txt")),
        is(Arrays.asList("Hello0", "Hello1", "Hello2")));
  }
  /**
   * output multiple records.
   *
   * @throws Exception if failed
   */
  @Test
  public void output_multirecord() throws Exception {
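    // write several records into a single file within one attempt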
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    setup(core);
    ModelOutput<StringBuilder> output =
        core.openOutput(context, StringBuilder.class, format, "output", "file.txt", counter);
    try {
      output.write(new StringBuilder("Hello, world!"));
      output.write(new StringBuilder("Hello2, world!"));
      output.write(new StringBuilder("Hello3, world!"));
    } finally {
      output.close();
    }

    File target = new File(mapping, "output/file.txt");
    assertThat(target.exists(), is(false));
    commitAttempt(core);

    // with the default staging enabled, the output must stay invisible
    // until the whole transaction is committed
    assertThat(target.exists(), is(false));
    commitTransaction(core);

    assertThat(target.exists(), is(true));

    assertThat(
        get(target), is(Arrays.asList("Hello, world!", "Hello2, world!", "Hello3, world!")));
  }
  /**
   * delete keeping the shared temporary area intact.
   *
   * @throws Exception if failed
   */
  @Test
  public void delete_sharetemp() throws Exception {
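    // the data source profile below places its temporary working area (_TEMP)
    // inside the production file system path; delete must leave it intact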
    HadoopDataSourceProfile shareTempProfile =
        new HadoopDataSourceProfile(
            conf,
            profile.getId(),
            profile.getContextPath(),
            profile.getFileSystemPath(),
            new Path(profile.getFileSystemPath(), "_TEMP"));
    HadoopDataSourceCore core = new HadoopDataSourceCore(shareTempProfile);

    File onProd = new File(mapping, "file.txt");
    File onTemp = new File(mapping, "_TEMP/temp.txt");
    put(onProd, "production");
    put(onTemp, "temporary");

    assertThat(onProd.exists(), is(true));
    assertThat(onTemp.exists(), is(true));

    boolean result = core.delete("", FilePattern.compile("**/*"), true, counter);
    assertThat(result, is(true));
    assertThat(onProd.exists(), is(false));
    assertThat(onTemp.exists(), is(true));
  }
  private List<String> consume(HadoopDataSourceCore core, List<DirectInputFragment> fragments)
      throws IOException, InterruptedException {
    // drains every fragment through the core and collects the records as strings
    List<String> results = new ArrayList<String>();
    for (DirectInputFragment fragment : fragments) {
      ModelInput<StringBuilder> input =
          core.openInput(StringBuilder.class, format, fragment, counter);
      try {
        StringBuilder buf = new StringBuilder();
        while (input.readTo(buf)) {
          results.add(buf.toString());
        }
      } finally {
        input.close();
      }
    }
    return results;
  }
  private void cleanup(HadoopDataSourceCore core) throws IOException, InterruptedException {
    // discards both the attempt output and the transaction output
    core.cleanupAttemptOutput(context);
    core.cleanupTransactionOutput(context.getTransactionContext());
  }
  private void commitAttempt(HadoopDataSourceCore core) throws IOException, InterruptedException {
    // commits the attempt output, then disposes of the attempt working area
    core.commitAttemptOutput(context);
    core.cleanupAttemptOutput(context);
 }