@Test
  public void testReadingDirectory() throws Exception {
    // Builds an index/bucket directory tree containing two CSV files, runs the
    // MapReduce job over the tree root, and verifies the output contains every
    // line of both files keyed by the byte offset of the line in its file.
    // (File2's offsets continue after file1's because the job reads the bucket's
    // files as one logical input — presumably in listing order; confirm if the
    // filesystem ever returns them unordered.)
    Path index =
        directoryTreeFactory.addIndex(directoryTreeFactory.getIndexerPaths().get(0), "Index1");
    Path bucket = directoryTreeFactory.addBucket(index, "db_0_1_idx");

    String file1Content =
        "header,_raw\n"
            + "\"ImCESTvlhu\",\"LOjZxHYGZy\"\n"
            + "\"kqYkcFhbSB\",\"RRLjuHCHze\"\n"
            + "\"kmeEaOcKTx\",\"mvIPrMSOSS\"\n"
            + "\"lzzPLYFGFU\",\"sGHTPVsYlF\"\n"
            + "\"zpUxVlTaAq\",\"ysoUYuyZKO\"";
    String file2Content =
        "header,_raw\n"
            + "\"fkBkKfCkuT\",\"BRlSkqHmHe\"\n"
            + "\"dWDJViEuot\",\"LcdkTQBLmu\"\n"
            + "\"ovQoDFATdn\",\"YewByxPXqN\"\n"
            + "\"tKBxjsSZmV\",\"luuOivALWj\"\n"
            + "\"mssAbiUnub\",\"NeYnIlDMdW\"";

    Path file1 = HadoopFileTestUtils.createPath(bucket.toString(), "file1.csv");
    Path file2 = HadoopFileTestUtils.createPath(bucket.toString(), "file2.csv");

    HadoopFileTestUtils.createFileWithContent(fs, file1, file1Content);
    HadoopFileTestUtils.createFileWithContent(fs, file2, file2Content);

    Path outputPath = new Path(FileTestUtils.getRandomTestFilepath());
    assertTrue(
        runJob(
            new Configuration(),
            new String[] {directoryTreeFactory.getRoot().toString(), outputPath.toString()}));

    // Expected output: <byte offset>\t<unquoted csv line> for every input line.
    String expectedContent =
        "0\theader,_raw\n"
            + "12\tImCESTvlhu,LOjZxHYGZy\n"
            + "38\tkqYkcFhbSB,RRLjuHCHze\n"
            + "64\tkmeEaOcKTx,mvIPrMSOSS\n"
            + "90\tlzzPLYFGFU,sGHTPVsYlF\n"
            + "116\tzpUxVlTaAq,ysoUYuyZKO\n"
            + "153\tfkBkKfCkuT,BRlSkqHmHe\n"
            + "179\tdWDJViEuot,LcdkTQBLmu\n"
            + "205\tovQoDFATdn,YewByxPXqN\n"
            + "231\ttKBxjsSZmV,luuOivALWj\n"
            + "257\tmssAbiUnub,NeYnIlDMdW\n";

    String actualResults = HadoopFileTestUtils.readMapReduceOutputFile(fs, outputPath);

    assertEquals(expectedContent, actualResults);
  }
// Example 2
  @Test
  public void testFinder() throws Exception {
    // Registers one index under each of two indexer roots and verifies that
    // IndexFinder discovers both, with names in registration order.
    ShuttlDirectoryTreeFactory directoryTreeFactory = new ShuttlDirectoryTreeFactory();
    List<Path> indexers = directoryTreeFactory.getIndexerPaths();

    directoryTreeFactory.addIndex(indexers.get(0), "index1");
    directoryTreeFactory.addIndex(indexers.get(1), "index2");

    IndexFinder finder = new IndexFinder(fileSystem, directoryTreeFactory.getRoot());

    List<Index> indexes = finder.find();

    // JUnit assertEquals takes (expected, actual); the original had them
    // swapped, which produces misleading failure messages.
    assertEquals(2, indexes.size());
    assertEquals("index1", indexes.get(0).getName());
    assertEquals("index2", indexes.get(1).getName());
  }
  // Shared driver: writes {@code content} into a fresh CSV file inside a newly
  // created index/bucket tree, runs the job over the tree root, and asserts the
  // job output equals the offset-keyed expectation derived from the content.
  private void runTestOnContent(String content, int[] offsets) throws Exception {

    Path indexPath =
        directoryTreeFactory.addIndex(directoryTreeFactory.getIndexerPaths().get(0), "Index1");
    Path bucketPath = directoryTreeFactory.addBucket(indexPath, "db_0_1_idx");

    // Random file name avoids collisions across repeated invocations.
    Path csvFile =
        HadoopFileTestUtils.createPath(
            bucketPath.toString(), FileTestUtils.getRandomFilename("csv"));
    HadoopFileTestUtils.createFileWithContent(fs, csvFile, content);

    Path jobOutput = new Path(FileTestUtils.getRandomTestFilepath());
    String[] jobArgs = {directoryTreeFactory.getRoot().toString(), jobOutput.toString()};
    assertTrue(runJob(new Configuration(), jobArgs));

    String actual = HadoopFileTestUtils.readMapReduceOutputFile(fs, jobOutput);
    assertEquals(getExpectedResult(content, offsets), actual);
  }