@AfterClass
  public void tearDown() throws Exception {
    cleanupHiveMetastore(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME);
    cleanupHiveMetastore(targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME);

    cleanupStagingDirs(sourceContext.getCluster().getCluster(), SOURCE_DATABASE_NAME);
    cleanupStagingDirs(targetContext.getCluster().getCluster(), TARGET_DATABASE_NAME);
    TestContext.deleteEntitiesFromStore();
  }
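
  // sets up the source and target clusters, copies test data to HDFS and
  // bootstraps both Hive metastores before the replication tests run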
  @BeforeClass
  public void setUp() throws Exception {
    TestContext.cleanupStore();

    Map<String, String> overlay = sourceContext.getUniqueOverlay();
    String sourceFilePath =
        TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
    sourceContext.setCluster(sourceFilePath);

    final Cluster sourceCluster = sourceContext.getCluster().getCluster();
    String sourceStorageUrl = ClusterHelper.getStorageUrl(sourceCluster);

    // copy test data to HDFS under the source cluster's storage path
    final String sourcePath = sourceStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
    FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", sourcePath);

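    // set up the source Hive metastore and load the test partition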
    sourceMetastoreUrl =
        ClusterHelper.getInterface(sourceCluster, Interfacetype.REGISTRY).getEndpoint();
    setupHiveMetastore(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME);
    HiveTestUtils.loadData(
        sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, sourcePath, PARTITION_VALUE);

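    // set up the target (bcp) cluster and its Hive metastore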
    String targetFilePath =
        TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
    targetContext.setCluster(targetFilePath);

    final Cluster targetCluster = targetContext.getCluster().getCluster();
    targetMetastoreUrl =
        ClusterHelper.getInterface(targetCluster, Interfacetype.REGISTRY).getEndpoint();
    setupHiveMetastore(targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME);

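    // copy the required libraries to the target cluster's HDFS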
    copyLibsToHDFS(targetCluster);
  }
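
  // redirects FeedEvictor output and rebuilds the test database and tables
  // (drops any leftovers, then recreates them)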
  @BeforeClass
  public void setUp() throws Exception {
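    // capture FeedEvictor output in the test's stream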
    FeedEvictor.OUT.set(stream);

    client = TestContext.getHCatClient(METASTORE_URL);

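    // drop tables and the database left over from previous runs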
    HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, EXTERNAL_TABLE_NAME);
    HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, TABLE_NAME);
    HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_EXTERNAL_TABLE_NAME);
    HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_TABLE_NAME);
    HiveTestUtils.dropDatabase(METASTORE_URL, DATABASE_NAME);

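    // recreate the database and the tables partitioned by (ds, region)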
    HiveTestUtils.createDatabase(METASTORE_URL, DATABASE_NAME);
    final List<String> partitionKeys = Arrays.asList("ds", "region");
    HiveTestUtils.createTable(METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionKeys);
    HiveTestUtils.createExternalTable(
        METASTORE_URL, DATABASE_NAME, EXTERNAL_TABLE_NAME, partitionKeys, EXTERNAL_TABLE_LOCATION);

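    // tables partitioned by (year, month, day, region) to cover multi-column dated partitions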
    final List<String> multiColDatedPartitionKeys = Arrays.asList("year", "month", "day", "region");
    HiveTestUtils.createTable(
        METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_TABLE_NAME, multiColDatedPartitionKeys);
    HiveTestUtils.createExternalTable(
        METASTORE_URL,
        DATABASE_NAME,
        MULTI_COL_DATED_EXTERNAL_TABLE_NAME,
        multiColDatedPartitionKeys,
        MULTI_COL_DATED_EXTERNAL_TABLE_LOCATION);
  }
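
  // pre-creates the partition on the target so the replication test can verify
  // that the import drops it before re-importing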
  private void addPartitionToTarget() throws Exception {
    final Cluster targetCluster = targetContext.getCluster().getCluster();
    String targetStorageUrl = ClusterHelper.getStorageUrl(targetCluster);

    // copy test data to HDFS under the target cluster's storage path
    final String targetPath = targetStorageUrl + "/falcon/test/input/" + PARTITION_VALUE;
    FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", targetPath);

    HiveTestUtils.loadData(
        targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, targetPath, PARTITION_VALUE);
  }
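
  // deletes the submitted entities from the store after the tests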
  @AfterClass
  public void tearDown() throws Exception {
    TestContext.deleteEntitiesFromStore();
  }
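
  // replicates a table feed when the partition already exists on the target; disabled by default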
  @Test(enabled = false)
  public void testTableReplicationWithExistingTargetPartition() throws Exception {
    final String feedName = "customer-table-replicating-feed";
    final Map<String, String> overlay = sourceContext.getUniqueOverlay();
    String filePath =
        TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);

    filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submit -type cluster -file " + filePath), 0);

    HCatPartition sourcePartition =
        HiveTestUtils.getPartition(
            sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(sourcePartition);

    addPartitionToTarget();
    // verify that the partition exists on the target before replication starts,
    // so the test can confirm the import drops it before re-importing the partition
    HCatPartition targetPartition =
        HiveTestUtils.getPartition(
            targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(targetPartition);

    filePath =
        TestContext.overlayParametersOverTemplate(
            "/table/customer-table-replicating-feed.xml", overlay);
    Assert.assertEquals(
        TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath), 0);

    // wait until the workflow job completes
    WorkflowJob jobInfo =
        OozieTestUtils.getWorkflowJob(
            targetContext.getCluster().getCluster(),
            OozieClient.FILTER_NAME + "=FALCON_FEED_REPLICATION_" + feedName);
    Assert.assertEquals(jobInfo.getStatus(), WorkflowJob.Status.SUCCEEDED);

    // verify that the partition exists on the target after replication
    targetPartition =
        HiveTestUtils.getPartition(
            targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
    Assert.assertNotNull(targetPartition);

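    // check the running-instances API on the target for the replication feed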
    InstancesResult response =
        targetContext
            .getService()
            .path("api/instance/running/feed/" + feedName)
            .header("Cookie", targetContext.getAuthenticationToken())
            .accept(MediaType.APPLICATION_JSON)
            .get(InstancesResult.class);
    Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);

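    // clean up the feed and cluster entities submitted by this test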
    TestContext.executeWithURL("entity -delete -type feed -name customer-table-replicating-feed");
    TestContext.executeWithURL("entity -delete -type cluster -name primary-cluster");
    TestContext.executeWithURL("entity -delete -type cluster -name bcp-cluster");
  }