/**
 * Verifies the feed instance listing when only empty directories exist for every instance:
 * all 5 instances should be reported as empty (none missing, partial, or available).
 */
@Test
public void testFeedListingWhenAllEmpty() throws Exception {
    bundles[0].setProcessValidity("2010-01-02T01:00Z", "2010-01-02T01:21Z");
    bundles[0].setProcessConcurrency(1);
    bundles[0].submitFeedsScheduleProcess(prism);
    InstanceUtil.waitTillInstancesAreCreated(clusterOC, bundles[0].getProcessData(), 0);

    // Create the dependency directories (left empty) so the listing can inspect them.
    OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0);
    InstanceUtil.waitTillInstanceReachState(
        clusterOC, processName, 1, CoordinatorAction.Status.RUNNING, EntityType.PROCESS, 5);

    String feedName = Util.readEntityName(bundles[0].getDataSets().get(0));
    FeedInstanceResult listing =
        prism.getFeedHelper()
            .getFeedInstanceListing(
                feedName, "?start=2010-01-02T01:00Z&end=2010-01-02T01:21Z");

    // Expected: 5 total, 0 missing, 5 empty, 0 partial, 0 available.
    validateResponse(listing, 5, 0, 5, 0, 0);
}
/**
 * Verifies the feed instance listing when data exists for every instance:
 * all 5 instances should be reported as available.
 */
@Test
public void testFeedListingWhenAllAvailable() throws Exception {
    bundles[0].setProcessValidity("2010-01-02T01:00Z", "2010-01-02T01:21Z");
    bundles[0].setProcessConcurrency(1);
    bundles[0].submitFeedsScheduleProcess(prism);
    InstanceUtil.waitTillInstancesAreCreated(clusterOC, bundles[0].getProcessData(), 0);

    // Materialize dependency directories, then drop a data file into the
    // last instance's folder so its data is present.
    List<List<String>> missingDeps =
        OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0);
    List<String> lastInstanceDeps = missingDeps.get(missingDeps.size() - 1);
    HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.SINGLE_FILE, lastInstanceDeps);

    InstanceUtil.waitTillInstanceReachState(
        clusterOC, processName, 1, CoordinatorAction.Status.RUNNING, EntityType.PROCESS, 5);

    String feedName = Util.readEntityName(bundles[0].getDataSets().get(0));
    FeedInstanceResult listing =
        prism.getFeedHelper()
            .getFeedInstanceListing(
                feedName, "?start=2010-01-02T01:00Z&end=2010-01-02T01:21Z");

    // Expected: 5 total, 0 missing, 0 empty, 0 partial, 5 available.
    validateResponse(listing, 5, 0, 0, 0, 5);
}
/** * Data is created for the feed, so instance status is available. Then, change the data path and * update the feed. The instance status should change to partial. */ @Test public void testFeedListingAfterFeedDataPathUpdate() throws Exception { bundles[0].setProcessValidity("2010-01-02T01:00Z", "2010-01-02T01:21Z"); bundles[0].setProcessConcurrency(1); bundles[0].submitFeedsScheduleProcess(prism); InstanceUtil.waitTillInstancesAreCreated(clusterOC, bundles[0].getProcessData(), 0); List<List<String>> missingDependencies = OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0); List<String> missingDependencyLastInstance = missingDependencies.get(missingDependencies.size() - 1); HadoopUtil.flattenAndPutDataInFolder( clusterFS, OSUtil.SINGLE_FILE, missingDependencyLastInstance); InstanceUtil.waitTillInstanceReachState( clusterOC, processName, 1, CoordinatorAction.Status.RUNNING, EntityType.PROCESS, 5); FeedInstanceResult r = prism .getFeedHelper() .getFeedInstanceListing( Util.readEntityName(bundles[0].getDataSets().get(0)), "?start=2010-01-02T01:00Z&end=2010-01-02T01:21Z"); validateResponse(r, 5, 0, 0, 0, 5); String inputFeed = bundles[0].getInputFeedFromBundle(); bundles[0].setInputFeedDataPath(baseTestDir + "/inputNew" + MINUTE_DATE_PATTERN); ServiceResponse serviceResponse = prism.getFeedHelper().update(inputFeed, bundles[0].getInputFeedFromBundle()); AssertUtil.assertSucceeded(serviceResponse); // Since we have not created directories for new path, the feed instance status should be // missing r = prism .getFeedHelper() .getFeedInstanceListing( Util.readEntityName(bundles[0].getDataSets().get(0)), "?start=2010-01-02T01:00Z&end=2010-01-02T01:21Z"); validateResponse(r, 5, 5, 0, 0, 0); }