/**
 * output without streaming or staging.
 *
 * @throws Exception if failed
 */
@Test
public void output_nomove() throws Exception {
    profile.setOutputStaging(false);
    profile.setOutputStreaming(false);
    profile.getLocalFileSystem().getConf().set(
            HadoopDataSourceUtil.KEY_LOCAL_TEMPDIR, localtemp.getPath());
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    setup(core);
    ModelOutput<StringBuilder> output = core.openOutput(
            context, StringBuilder.class, format, "output", "file.txt", counter);
    try {
        output.write(new StringBuilder("Hello, world!"));
    } finally {
        output.close();
    }
    assertThat(counter.get(), is(greaterThan(0L)));

    // the output file must not become visible before the attempt is committed
    File target = new File(mapping, "output/file.txt");
    assertThat(target.exists(), is(false));

    commitAttempt(core);
    assertThat(target.exists(), is(true));

    commitTransaction(core);
    assertThat(target.exists(), is(true));
    assertThat(get(target), is(Arrays.asList("Hello, world!")));
}
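// setup / commitAttempt / commitTransaction above are convenience wrappers defined elsewhere
// in this test class. A minimal sketch follows, assuming the data source exposes
// setup/commit/cleanup operations for attempt and transaction outputs and that the attempt
// context can provide its transaction context; the exact method names and signatures are an
// assumption, not confirmed by this excerpt.
private void setup(HadoopDataSourceCore core) throws Exception {
    core.setupTransactionOutput(context.getTransactionContext());
    core.setupAttemptOutput(context);
}

private void commitAttempt(HadoopDataSourceCore core) throws Exception {
    core.commitAttemptOutput(context);
    core.cleanupAttemptOutput(context);
}

private void commitTransaction(HadoopDataSourceCore core) throws Exception {
    core.commitTransactionOutput(context.getTransactionContext());
    core.cleanupTransactionOutput(context.getTransactionContext());
}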
/**
 * input of a large file which is split into multiple fragments.
 *
 * @throws Exception if failed
 */
@Test
public void input_large() throws Exception {
    long fragmentSize = 1 * 1024 * 1024;
    int fragmentCount = 20;
    put(new File(mapping, "input/file.txt"), fragmentSize * fragmentCount);
    profile.setMinimumFragmentSize(1);
    profile.setPreferredFragmentSize(fragmentSize);
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    List<DirectInputFragment> fragments = core.findInputFragments(
            StringBuilder.class, format, "input", FilePattern.compile("**"));

    // each fragment should be close to the preferred fragment size
    assertThat(fragments.size(), is(greaterThanOrEqualTo(fragmentCount / 2)));
    for (DirectInputFragment fragment : fragments) {
        assertThat(fragment.getSize(), is(greaterThanOrEqualTo(fragmentSize / 2)));
        assertThat(fragment.getSize(), is(lessThanOrEqualTo(fragmentSize * 2)));
    }
}
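// put(File, long) above creates an input file of roughly the requested size so that
// findInputFragments has something to split. A minimal sketch, assuming plain text content is
// acceptable to the format; the real helper in this class may generate different data.
private void put(File target, long size) throws Exception {
    target.getParentFile().mkdirs();
    try (java.io.PrintWriter writer = new java.io.PrintWriter(
            new java.io.BufferedWriter(new java.io.FileWriter(target)))) {
        String line = "0123456789abcdefghijklmnopqrstuvwxyz"; // 36 characters + line separator
        long written = 0;
        while (written < size) {
            writer.println(line);
            written += line.length() + 1;
        }
    }
}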
/**
 * Initializes the test.
 *
 * @throws Exception if some errors occurred
 */
@Before
public void setUp() throws Exception {
    conf = new Configuration(true);
    if (format instanceof Configurable) {
        ((Configurable) format).setConf(conf);
    }
    mapping = new File(temp.getRoot(), "mapping").getCanonicalFile();
    temporary = new File(temp.getRoot(), "temporary").getCanonicalFile();
    localtemp = new File(temp.getRoot(), "localtemp").getCanonicalFile();
    profile = new HadoopDataSourceProfile(
            conf,
            "testing", "testing",
            new Path(mapping.toURI()),
            new Path(temporary.toURI()));
    context = new OutputAttemptContext("tx", "atmpt", profile.getId(), new Counter());
}
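// The put / get helpers used throughout these tests are defined elsewhere in this class.
// Minimal sketches follow, assuming the test data are plain text files with one record per
// line; fully-qualified java.io types keep the sketches self-contained.
private void put(File target, String... lines) throws Exception {
    target.getParentFile().mkdirs();
    try (java.io.PrintWriter writer = new java.io.PrintWriter(
            new java.io.BufferedWriter(new java.io.FileWriter(target)))) {
        for (String line : lines) {
            writer.println(line);
        }
    }
}

private List<String> get(File target) throws Exception {
    List<String> results = new java.util.ArrayList<>();
    try (java.io.BufferedReader reader = new java.io.BufferedReader(
            new java.io.FileReader(target))) {
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            results.add(line);
        }
    }
    return results;
}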
/**
 * delete when the temporary area is placed under the production area.
 *
 * @throws Exception if failed
 */
@Test
public void delete_sharetemp() throws Exception {
    HadoopDataSourceProfile shareTempProfile = new HadoopDataSourceProfile(
            conf,
            profile.getId(),
            profile.getContextPath(),
            profile.getFileSystemPath(),
            new Path(profile.getFileSystemPath(), "_TEMP"));
    HadoopDataSourceCore core = new HadoopDataSourceCore(shareTempProfile);

    File onProd = new File(mapping, "file.txt");
    File onTemp = new File(mapping, "_TEMP/temp.txt");
    put(onProd, "production");
    put(onTemp, "temporary");
    assertThat(onProd.exists(), is(true));
    assertThat(onTemp.exists(), is(true));

    // the production file is deleted, but the shared temporary area must be left intact
    boolean result = core.delete("", FilePattern.compile("**/*"), true, counter);
    assertThat(result, is(true));
    assertThat(onProd.exists(), is(false));
    assertThat(onTemp.exists(), is(true));
}
/**
 * simple input.
 *
 * @throws Exception if failed
 */
@Test
public void input() throws Exception {
    put(new File(mapping, "input/file.txt"), "Hello, world!");
    profile.setMinimumFragmentSize(-1);
    HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
    List<DirectInputFragment> fragments = core.findInputFragments(
            StringBuilder.class, format, "input", FilePattern.compile("**"));
    assertThat(fragments.size(), is(1));

    List<String> results = consume(core, fragments);
    assertThat(counter.get(), is(greaterThan(0L)));
    assertThat(results.size(), is(1));
    assertThat(results, hasItem("Hello, world!"));
}
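// consume(core, fragments) above reads every record out of each fragment. A minimal sketch,
// assuming this API version exposes core.openInput(Class, DataFormat, DirectInputFragment,
// Counter) analogously to openOutput above; the exact signature is an assumption.
private List<String> consume(
        HadoopDataSourceCore core, List<DirectInputFragment> fragments) throws Exception {
    List<String> results = new java.util.ArrayList<>();
    for (DirectInputFragment fragment : fragments) {
        ModelInput<StringBuilder> input = core.openInput(StringBuilder.class, format, fragment, counter);
        try {
            StringBuilder buf = new StringBuilder();
            while (input.readTo(buf)) {
                results.add(buf.toString());
                buf = new StringBuilder();
            }
        } finally {
            input.close();
        }
    }
    return results;
}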