  /** Spins up the analytics data service and an embedded Spark executor before any test runs. */
  @BeforeClass
  public void setup() throws NamingException, AnalyticsException, IOException {
    GenericUtils.clearGlobalCustomDataSourceRepo();
    System.setProperty(
        GenericUtils.WSO2_ANALYTICS_CONF_DIRECTORY_SYS_PROP, "src/test/resources/conf1");
    // Run without Hazelcast clustering and force indexing for the test run.
    AnalyticsServiceHolder.setHazelcastInstance(null);
    AnalyticsServiceHolder.setAnalyticsClusterManager(new AnalyticsClusterManagerImpl());
    System.setProperty(AnalyticsServiceHolder.FORCE_INDEXING_ENV_PROP, Boolean.TRUE.toString());
    this.service = ServiceHolder.getAnalyticsDataService();
    // Start a local Spark executor (port 0) against the test configuration directory.
    ServiceHolder.setAnalyticskExecutor(
        new SparkAnalyticsExecutor("localhost", 0, "src/test/resources/conf1"));
    ServiceHolder.getAnalyticskExecutor().startSparkServer("src/test/resources/conf1");
  }
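
  /** Returns the executor's partition count hint, or the Spark default when no executor is set. */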
  private int computePartitions() throws AnalyticsException {
    if (ServiceHolder.getAnalyticskExecutor() != null) {
      return ServiceHolder.getAnalyticskExecutor().getNumPartitionsHint();
    }
    return AnalyticsConstants.SPARK_DEFAULT_PARTITION_COUNT;
  }
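
  /** Stops the Spark executor, destroys the data service, and clears the forced-indexing flag. */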
  @AfterClass
  public void done() throws NamingException, AnalyticsException, IOException {
    ServiceHolder.getAnalyticskExecutor().stop();
    this.service.destroy();
    System.clearProperty(AnalyticsServiceHolder.FORCE_INDEXING_ENV_PROP);
  }
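
  /*
   * Minimal usage sketch, not part of the original class: it shows how a test could use the
   * executor and data service wired up in setup(). It assumes the executeQuery(tenantId, query)
   * and AnalyticsQueryResult.getRows() APIs on the Spark executor and the CarbonAnalytics
   * relation provider used by this module's tests; the tenant id, the table name "Log", and its
   * schema are illustrative only.
   */
  @Test
  public void exampleQueryOverEmptyAnalyticsTable() throws AnalyticsException {
    SparkAnalyticsExecutor ex = ServiceHolder.getAnalyticskExecutor();
    this.service.deleteTable(1, "Log");
    this.service.createTable(1, "Log");
    // Expose the analytics table to Spark SQL through the (assumed) CarbonAnalytics provider.
    ex.executeQuery(1, "CREATE TEMPORARY TABLE Log USING CarbonAnalytics "
        + "OPTIONS (tableName \"Log\", schema \"server_name STRING, ip STRING\")");
    AnalyticsQueryResult result = ex.executeQuery(1, "SELECT * FROM Log");
    // Nothing was inserted, so the query should come back empty.
    Assert.assertEquals(result.getRows().size(), 0);
    this.service.deleteTable(1, "Log");
  }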