/**
 * Resets the cluster file system to a clean state and seeds the collector
 * directory for {@code testStream} with the first two test data files.
 */
public void setUp() throws Exception {
  readMessages = new ArrayList<Message>();
  fs = rootDir.getFileSystem(conf);
  defaultConf = new JobConf(new Configuration());
  dataFiles = new String[] {TestUtil.files[0], TestUtil.files[1]};
  setUpCluster();

  // Start from a clean slate: wipe the cluster root, then recreate the
  // stream and collector directories.
  fs.delete(new Path(cluster.getRootDir()), true);
  Path streamPath = new Path(cluster.getDataDir(), testStream);
  fs.delete(streamPath, true);
  fs.mkdirs(streamPath);
  collectorDir = new Path(streamPath, collectors);
  fs.delete(collectorDir, true);
  fs.mkdirs(collectorDir);

  // Seed the collector directory with the two data files.
  TestUtil.setUpFiles(cluster, collectors, dataFiles, null, null, 1, 0);
}
@BeforeTest public void setup() throws Exception { testRootDir = TestUtil.getConfiguredRootDir(); conusmerNumber = 1; // initialize config cluster = TestUtil.setupLocalCluster( this.getClass().getSimpleName(), testStream, new PartitionId(clusterName, collectors[0]), files, null, databusFiles1, 0, 3, testRootDir); TestUtil.setUpFiles(cluster, collectors[1], files, null, databusFiles2, 0, 3); conf = cluster.getHadoopConf(); fsUri = FileSystem.get(conf).getUri().toString(); partitionMinList = new TreeSet<Integer>(); for (int i = 0; i < 60; i++) { partitionMinList.add(i); } chkPoints = new TreeMap<Integer, PartitionCheckpoint>(); partitionCheckpointList = new PartitionCheckpointList(chkPoints); }
@BeforeTest public void setup() throws Exception { // setup cluster consumerNumber = 1; cluster = TestUtil.setupLocalCluster( this.getClass().getSimpleName(), testStream, partitionId, files, null, databusFiles, 4, TestUtil.getConfiguredRootDir()); collectorDir = DatabusUtil.getCollectorStreamDir( new Path(cluster.getRootDir()), testStream, collectorName); streamsLocalDir = DatabusUtil.getStreamDir(StreamType.LOCAL, new Path(cluster.getRootDir()), testStream); fs = FileSystem.get(cluster.getHadoopConf()); fsUri = fs.getUri().toString(); }
/**
 * Reads the stream from the very beginning and verifies that all three
 * minute files from each of the two collectors are consumed, checking the
 * reader metrics afterwards.
 */
@Test
public void testReadFromStart() throws Exception {
  PartitionReaderStatsExposer metrics = new PartitionReaderStatsExposer(
      testStream, "c1", partitionId.toString(), conusmerNumber, fsUri);
  reader = new DatabusStreamWaitingReader(partitionId,
      FileSystem.get(cluster.getHadoopConf()),
      TestUtil.getStreamsDir(cluster, testStream),
      DatabusInputFormat.class.getCanonicalName(), conf, 1000, metrics,
      false, partitionMinList, partitionCheckpointList, null);

  // Build from the first collector file's timestamp and position the reader
  // at the start of the stream.
  reader.build(CollectorStreamReader.getDateFromCollectorFile(files[0]));
  reader.initFromStart();
  Assert.assertNotNull(reader.getCurrentFile());
  reader.openStream();

  // Files from the two collector sets are read interleaved, minute by minute.
  for (int idx = 0; idx < 3; idx++) {
    TestAbstractDatabusWaitingReader.readFile(reader, idx, 0,
        databusFiles1[idx], encoded);
    TestAbstractDatabusWaitingReader.readFile(reader, idx, 0,
        databusFiles2[idx], encoded);
  }
  reader.close();

  // 600 messages total are expected across the six files, with no errors.
  Assert.assertEquals(metrics.getHandledExceptions(), 0);
  Assert.assertEquals(metrics.getMessagesReadFromSource(), 600);
  Assert.assertTrue(metrics.getListOps() > 0);
  Assert.assertTrue(metrics.getOpenOps() == 0);
  Assert.assertTrue(metrics.getFileStatusOps() > 0);
  Assert.assertTrue(metrics.getExistsOps() > 0);
  Assert.assertTrue(metrics.getNumberRecordReaders() > 0);
}