/**
 * One-time fixture setup: stages the oozie aggregate workflow on HDFS and seeds
 * minute-granularity input feed data covering the test's processing window.
 */
@BeforeClass(alwaysRun = true)
public void createTestData() throws Exception {
    LOGGER.info("in @BeforeClass");
    // Push the shared oozie workflow resources onto the cluster filesystem.
    HadoopUtil.uploadDir(clusterFS, aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);

    Bundle bundle = BundleUtil.readELBundle();
    bundle.generateUniqueBundle();
    bundle = new Bundle(bundle, cluster);

    final String rangeStart = "2010-01-01T23:20Z";
    final String rangeEnd = "2010-01-02T01:21Z";
    bundle.setInputFeedDataPath(feedInputPath);

    // Wipe any stale feed data before laying down a fresh copy.
    final String feedDataPrefix = bundle.getFeedDataPathPrefix();
    HadoopUtil.deleteDirIfExists(feedDataPrefix.substring(1), clusterFS);

    // Generate minute-stamped folders (20-minute padding on each side) and fill them.
    final List<String> minuteDates =
        TimeUtil.getMinuteDatesOnEitherSide(rangeStart, rangeEnd, 20);
    HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.NORMAL_INPUT, feedDataPrefix, minuteDates);
}
/**
 * Verifies hive disaster-recovery replication of one source table to TWO distinct
 * destination clusters via two separate recipe submissions. Post-bootstrap rows
 * inserted on the source must appear on both targets after the DR instances succeed.
 */
@Test
public void drTwoDstTablesTwoRequests() throws Exception {
    final RecipeExecLocation execLocation = RecipeExecLocation.TargetCluster;
    setUp(execLocation);

    // Prepare the second destination (cluster3): fresh hdr_sdb1 database.
    final HCatClient hcClient3 = cluster3.getClusterHelper().getHCatClient();
    final Connection jdbc3 = cluster3.getClusterHelper().getHiveJdbcConnection();
    runSql(jdbc3, "drop database if exists hdr_sdb1 cascade");
    runSql(jdbc3, "create database hdr_sdb1");
    runSql(jdbc3, "use hdr_sdb1");

    final String tableName = "vanillaTable";

    // First recipe: source -> cluster2 (default target from setUp).
    recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tableName);
    final String firstRecipeName = recipeMerlin.getName();
    final List<String> firstSubmitCmd = recipeMerlin.getSubmissionCommand();

    // Second recipe: re-point the same merlin at cluster3 as the target.
    final Bundle thirdClusterBundle = new Bundle(BundleUtil.readHCatBundle(), cluster3);
    thirdClusterBundle.generateUniqueBundle(this);
    thirdClusterBundle.submitClusters(prism);
    recipeMerlin
        .withTargetCluster(thirdClusterBundle.getClusterElement())
        .withRecipeCluster(
            execLocation.getRecipeCluster(
                bundles[0].getClusterElement(), thirdClusterBundle.getClusterElement()));
    recipeMerlin.setUniqueName(getClass().getSimpleName());
    final List<String> secondSubmitCmd = recipeMerlin.getSubmissionCommand();
    final String secondRecipeName = recipeMerlin.getName();

    // Create the table on the source, then bootstrap-copy it to both destinations.
    runSql(connection, "create table " + tableName + "(comment string)");
    bootstrapCopy(connection, clusterFS, tableName, connection2, clusterFS2, tableName);
    bootstrapCopy(connection, clusterFS, tableName, connection3, clusterFS3, tableName);

    // This row exists only post-bootstrap; DR replication must carry it over.
    runSql(
        connection,
        "insert into table " + tableName + " values"
            + "('this string has been added post bootstrap - should appear after dr')");

    // Submit both recipes and wait for one successful instance of each.
    Assert.assertEquals(Bundle.runFalconCLI(firstSubmitCmd), 0, "Recipe submission failed.");
    Assert.assertEquals(Bundle.runFalconCLI(secondSubmitCmd), 0, "Recipe submission failed.");
    InstanceUtil.waitTillInstanceReachState(
        execLocation.getRecipeOC(clusterOC, clusterOC2),
        firstRecipeName, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
    InstanceUtil.waitTillInstanceReachState(
        execLocation.getRecipeOC(clusterOC, clusterOC3),
        secondRecipeName, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);

    // Soft-assert table equality against BOTH destinations, then report everything at once.
    final NotifyingAssert softAssert = new NotifyingAssert(true);
    HiveAssert.assertTableEqual(
        cluster, clusterHC.getTable(DB_NAME, tableName),
        cluster2, clusterHC2.getTable(DB_NAME, tableName), softAssert);
    HiveAssert.assertTableEqual(
        cluster, clusterHC.getTable(DB_NAME, tableName),
        cluster3, hcClient3.getTable(DB_NAME, tableName), softAssert);
    softAssert.assertAll();
}