/**
  * input a large file and check that it is split into fragments near the preferred size.
  *
  * @throws Exception if failed
  */
 @Test
 public void input_large() throws Exception {
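    // 1 MiB preferred fragment size, 20 MiB of total input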
   long fragmentSize = 1 * 1024 * 1024;
   int fragmentCount = 20;
   put(new File(mapping, "input/file.txt"), fragmentSize * fragmentCount);
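    // accept fragments of any size (minimum 1 byte), but prefer splitting at 1 MiB boundaries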
   profile.setMinimumFragmentSize(1);
   profile.setPreferredFragmentSize(fragmentSize);
   HadoopDataSourceCore core = new HadoopDataSourceCore(profile);
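    // compute input fragments for every file under the "input" directory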
   List<DirectInputFragment> fragments =
       core.findInputFragments(StringBuilder.class, format, "input", FilePattern.compile("**"));
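    // the file should be split into at least half the ideal number of fragments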
   assertThat(fragments.size(), is(greaterThanOrEqualTo(fragmentCount / 2)));
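    // each fragment should stay within a factor of two of the preferred size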
   for (DirectInputFragment fragment : fragments) {
     assertThat(fragment.getSize(), is(greaterThanOrEqualTo(fragmentSize / 2)));
     assertThat(fragment.getSize(), is(lessThanOrEqualTo(fragmentSize * 2)));
   }
 }