  /** Eviction runs whose base paths resolve to no instances should report "instances=NULL". */
  @Test
  public void testEviction4() throws Exception {
    try {
      Configuration conf = cluster.getConf();
      FileSystem fs = FileSystem.get(conf);
      fs.delete(new Path("/"), true);
      stream.clear();

      Pair<List<String>, List<String>> pair = createTestData("/data");
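      // First run: no instances should match the feed3 pattern, so expect "instances=NULL".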
      FeedEvictor.main(
          new String[] {
            "-feedBasePath",
                LocationType.DATA.name()
                    + "="
                    + cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
                    + "/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}",
            "-retentionType", "instance",
            "-retentionLimit", "months(5)",
            "-timeZone", "UTC",
            "-frequency", "hourly",
            "-logFile",
                conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
                    + "/falcon/staging/feed/2012-01-01-04-00",
            "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
          });
      Assert.assertEquals("instances=NULL", stream.getBuffer());

      stream.clear();
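      // Second run: a literal base path with no ?{...} date variables also yields no instances.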
      String dataPath = "/data/YYYY/feed4/dd/MM/02/more/hello";
      String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-02-00.csv";
      FeedEvictor.main(
          new String[] {
            "-feedBasePath",
                LocationType.DATA.name()
                    + "="
                    + cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
                    + dataPath,
            "-retentionType", "instance",
            "-retentionLimit", "hours(5)",
            "-timeZone", "UTC",
            "-frequency", "hourly",
            "-logFile", logFile,
            "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
          });
      Assert.assertEquals("instances=NULL", stream.getBuffer());

      // The evictor's log file should record the same instance paths.
      Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));

      // Verify which generated test paths were deleted and which were retained.
      assertFailures(fs, pair);
    } catch (Exception e) {
      Assert.fail("Unknown exception", e);
    }
  }
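
  /**
   * Eviction should remove an instance's empty parent directories but leave the feed base
   * path and non-eligible empty directories intact.
   */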
  @Test(enabled = false)
  public void testEvictionWithEmptyDirs() throws Exception {
    try {
      Configuration conf = cluster.getConf();
      FileSystem fs = FileSystem.get(conf);
      fs.delete(new Path("/"), true);
      stream.clear();

      // Generate instance directories; the trailing 'false' presumably leaves them empty
      // (no data files), which is what this test exercises.
      Pair<List<String>, List<String>> pair =
          generateInstances(
              fs, "feed1", "yyyy/MM/dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data", false);
      final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
      String dataPath =
          LocationType.DATA.name()
              + "="
              + storageUrl
              + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}/?{MONTH}/?{DAY}/more/?{YEAR}";
      String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";
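      // Snapshot the directory count so we can verify how many empty directories get removed.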
      long beforeDelCount =
          fs.getContentSummary(new Path("/data/YYYY/feed1/mmHH/dd/MM/")).getDirectoryCount();

      FeedEvictor.main(
          new String[] {
            "-feedBasePath", dataPath,
            "-retentionType", "instance",
            "-retentionLimit", "days(10)",
            "-timeZone", "UTC",
            "-frequency", "daily",
            "-logFile", logFile,
            "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
          });

      compare(map.get("feed1"), stream.getBuffer());

      String expectedInstancePaths = getExpectedInstancePaths(dataPath.replaceAll(storageUrl, ""));
      Assert.assertEquals(readLogFile(new Path(logFile)), expectedInstancePaths);

      String deletedPath = expectedInstancePaths.split(",")[0].split("=")[1];
      Assert.assertFalse(fs.exists(new Path(deletedPath)));
      // Empty parent directories of the deleted instance are removed as well.
      Assert.assertFalse(fs.exists(new Path(deletedPath).getParent()));
      Assert.assertFalse(fs.exists(new Path(deletedPath).getParent().getParent()));
      // The feed base path itself must never be deleted.
      Assert.assertTrue(fs.exists(new Path("/data/YYYY/feed1/mmHH/dd/MM/")));
      // Non-eligible empty directories remain: exactly 19 directories should have been removed.
      long afterDelCount =
          fs.getContentSummary(new Path("/data/YYYY/feed1/mmHH/dd/MM/")).getDirectoryCount();
      Assert.assertEquals(beforeDelCount - afterDelCount, 19);
      for (String path : pair.second) {
        Assert.assertTrue(fs.exists(new Path(path)));
      }

    } catch (Exception e) {
      Assert.fail("Unknown exception", e);
    }
  }
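
  /**
   * Basic instance eviction: expired instances and their empty parents are deleted while
   * the feed base path survives.
   */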
  @Test
  public void testEviction2() throws Exception {
    try {
      Configuration conf = cluster.getConf();
      FileSystem fs = FileSystem.get(conf);
      fs.delete(new Path("/"), true);
      stream.clear();

      Pair<List<String>, List<String>> pair =
          createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
      final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
      String dataPath =
          LocationType.DATA.name()
              + "="
              + storageUrl
              + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
      String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";

      // Instances older than the days(10) retention limit should be evicted.
      FeedEvictor.main(
          new String[] {
            "-feedBasePath", dataPath,
            "-retentionType", "instance",
            "-retentionLimit", "days(10)",
            "-timeZone", "UTC",
            "-frequency", "daily",
            "-logFile", logFile,
            "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
          });

      assertFailures(fs, pair);
      compare(map.get("feed1"), stream.getBuffer());

      String expectedInstancePaths = getExpectedInstancePaths(dataPath);
      Assert.assertEquals(readLogFile(new Path(logFile)), expectedInstancePaths);

      String deletedPath = expectedInstancePaths.split(",")[0].split("=")[1];
      Assert.assertFalse(fs.exists(new Path(deletedPath)));
      // Empty parent directories of the deleted instance are removed as well.
      Assert.assertFalse(fs.exists(new Path(deletedPath).getParent()));
      Assert.assertFalse(fs.exists(new Path(deletedPath).getParent().getParent()));
      // The feed base path itself must never be deleted.
      Assert.assertTrue(fs.exists(new Path("/data/YYYY/feed1/mmHH/dd/MM/")));

    } catch (Exception e) {
      Assert.fail("Unknown exception", e);
    }
  }
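
  /** Eviction across the DATA, STATS and META locations of a feed in a single run. */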
  @Test
  public void testEviction6() throws Exception {
    try {
      Configuration conf = cluster.getConf();
      FileSystem fs = FileSystem.get(conf);
      fs.delete(new Path("/"), true);
      stream.clear();

      // Create identical instance layouts under /data, /stats and /meta.
      Pair<List<String>, List<String>> pair =
          createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/data");
      createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/stats");
      createTestData("feed1", "yyyy-MM-dd/'more'/yyyy", 10, TimeUnit.DAYS, "/meta");

      final String storageUrl = cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
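      // A single '#'-separated feedBasePath covers all three location types.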
      String dataPath =
          "DATA="
              + storageUrl
              + "/data/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
              + "#STATS="
              + storageUrl
              + "/stats/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}"
              + "#META="
              + storageUrl
              + "/meta/YYYY/feed1/mmHH/dd/MM/?{YEAR}-?{MONTH}-?{DAY}/more/?{YEAR}";
      String logFile = hdfsUrl + "/falcon/staging/feed/instancePaths-2012-01-01-01-00.csv";

      FeedEvictor.main(
          new String[] {
            "-feedBasePath", dataPath,
            "-retentionType", "instance",
            "-retentionLimit", "days(10)",
            "-timeZone", "UTC",
            "-frequency", "daily",
            "-logFile", logFile,
            "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
          });

      assertFailures(fs, pair);

      Assert.assertEquals(readLogFile(new Path(logFile)), getExpectedInstancePaths(dataPath));

    } catch (Exception e) {
      Assert.fail("Unknown exception", e);
    }
  }