public void testInFlow() throws Exception {
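    // Remove any leftover test data from previous runs.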
    FileSystem fs = FileSystem.get(new Configuration());
    fs.delete(new Path("/tmp/input"), true);
    fs.delete(new Path("/tmp/output"), true);

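    // Write two serialized Person messages to the input tap via ProtobufScheme.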
    Hfs inTap = new Hfs(new ProtobufScheme("value", Example.Person.class), "/tmp/input");
    TupleEntryCollector collector = inTap.openForWrite(new HadoopFlowProcess());
    collector.add(new TupleEntry(new Fields("value"), new Tuple(BRYAN.build())));
    collector.add(new TupleEntry(new Fields("value"), new Tuple(LUCAS.build())));
    collector.close();

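    // ExpandProto unpacks each Person message into one tuple field per proto field.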
    Pipe inPipe = new Pipe("in");
    Pipe p =
        new Each(
            inPipe,
            new Fields("value"),
            new ExpandProto(Example.Person.class),
            new Fields("id", "name", "email", "position"));

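    // Run the flow, writing the expanded tuples to a TextLine sink.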
    Hfs sink = new Hfs(new TextLine(), "/tmp/output");
    new HadoopFlowConnector().connect(inTap, sink, p).complete();

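    // Read the results back and collect them for the assertions below.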
    TupleEntryIterator iter = sink.openForRead(new HadoopFlowProcess());
    List<Tuple> results = new ArrayList<Tuple>();
    while (iter.hasNext()) {
      results.add(iter.next().getTupleCopy());
    }
    iter.close();
    assertEquals(2, results.size());

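    // TextLine reads (offset, line) tuples; comparing toString() works because
    // Tuple.toString() tab-joins its elements, matching the written line with
    // its byte offset prepended (0 for the first line, 25 for the second).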
    assertEquals(
        new Tuple(0, 1, "bryan", "*****@*****.**", Example.Person.Position.CEO.getNumber())
            .toString(),
        results.get(0).toString());
    assertEquals(new Tuple(25, 2, "lucas", null, null).toString(), results.get(1).toString());
  }

  public void testSimple() throws Exception {
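    // With no field list given, ExpandProto expands every field of Person.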
    ExpandProto allFields = new ExpandProto(Example.Person.class);
    List<Tuple> results =
        operateFunction(allFields, new TupleEntry(new Fields("value"), new Tuple(BRYAN.build())));
    Tuple result = results.get(0);
    assertEquals(
        new Tuple(1, "bryan", "*****@*****.**", Example.Person.Position.CEO.getNumber()), result);

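    // LUCAS has no email or position set, so those fields expand to null.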
    results =
        operateFunction(allFields, new TupleEntry(new Fields("value"), new Tuple(LUCAS.build())));
    result = results.get(0);
    assertEquals(new Tuple(2, "lucas", null, null), result);
  }