private Path executeWindupAgainstAppUntilRule(
      final String inputDir,
      final GraphContext grCtx,
      final Class<MavenizeRuleProvider> ruleToRunUpTo)
      throws IOException, IllegalAccessException, InstantiationException {
    Assume.assumeTrue("Exists: " + inputDir, new File(inputDir).exists());

    final Path outputPath =
        Paths.get(FileUtils.getTempDirectory().toString(), "Windup-Mavenization-output");
    FileUtils.deleteDirectory(outputPath.toFile());
    Files.createDirectories(outputPath);

    grCtx.getGraph().getBaseGraph().commit();

    // Configure Windup core
    final WindupConfiguration processorConfig = new WindupConfiguration();
    processorConfig.setRuleProviderFilter(new RuleProviderWithDependenciesPredicate(ruleToRunUpTo));
    processorConfig.setGraphContext(grCtx);
    processorConfig.addInputPath(Paths.get(inputDir));
    processorConfig.setOutputDirectory(outputPath);
    processorConfig.setOptionValue(ScanPackagesOption.NAME, Collections.singletonList(""));
    processorConfig.setOptionValue(SourceModeOption.NAME, false);
    processorConfig.setOptionValue(MavenizeOption.NAME, true);

    processor.execute(processorConfig);

    return outputPath;
  }
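A minimal sketch of how a test might call this helper. The graph-context factory, the input application path, and the asserted output layout are assumptions for illustration, not taken from the snippet above.

  @Test
  public void testMavenizeRule() throws Exception {
    // Hypothetical input app and context factory.
    try (GraphContext context = createGraphContext()) {
      Path output =
          executeWindupAgainstAppUntilRule(
              "../test-files/jee-example-app-1.0.0.ear", context, MavenizeRuleProvider.class);
      // The exact mavenized output layout is an assumption here.
      Assert.assertTrue(Files.exists(output.resolve("mavenized")));
    }
  }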
Example #2
 /** Take a screenshot of the current browser and store it in the output directory. */
 public File takeScreenshot(final String prefix) {
   File screenshot = null;
   if (driverCanTakeSnapshots()) {
     try {
       byte[] screenshotData = ((TakesScreenshot) driver).getScreenshotAs(OutputType.BYTES);
       if (screenshotData != null) {
         File temporaryFolder = FileUtils.getTempDirectory();
         String snapshotName = getTemporarySnapshotName();
         screenshot = new File(temporaryFolder, snapshotName);
         // try-with-resources closes the stream even if the write fails
         try (OutputStream stream = new FileOutputStream(screenshot)) {
           stream.write(screenshotData);
         }
       }
       if ((screenshot != null) && screenshot.exists()) {
         return saveScreenshoot(prefix, screenshot);
       } else if (!isAMock(driver)) {
         getLogger().warn("Failed to write screenshot (possibly an out of memory error)");
       }
     } catch (Throwable e) {
       getLogger()
           .warn(
               "Failed to write screenshot (possibly an out of memory error): " + e.getMessage());
     }
   }
   return screenshot;
 }
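A hypothetical call site; the prefix is arbitrary. Note the method may return null, or a File that was never actually written if the capture failed, so callers should check exists() before use.

   File shot = takeScreenshot("login-page");
   if (shot != null && shot.exists()) {
     getLogger().info("Screenshot saved to " + shot.getAbsolutePath());
   }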
 private MultiFilesResult buildMockMultiFilesCallResult() throws IOException {
   final MultiFilesResult multiFilesResult = mock(MultiFilesResult.class);
   when(multiFilesResult.getApplicationName()).thenReturn("test_app_name");
   when(multiFilesResult.getPayload()).thenReturn(new File[0]);
   when(multiFilesResult.getTemporaryDirectory()).thenReturn(FileUtils.getTempDirectory());
   return multiFilesResult;
 }
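A minimal sketch of a test exercising the mock above; it touches only the stubbed getters, so it should run as written under Mockito and JUnit.

  @Test
  public void mockExposesStubbedValues() throws IOException {
    final MultiFilesResult result = buildMockMultiFilesCallResult();
    assertEquals("test_app_name", result.getApplicationName());
    assertEquals(0, result.getPayload().length);
    assertTrue(result.getTemporaryDirectory().exists());
  }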
  /**
   * Downloads the file specified by urlSource to the given urlDestination. Gzip-compressed
   * sources are downloaded to a temporary location, decompressed, and then moved to the
   * destination.
   *
   * @param urlSource String
   * @param urlDestination String
   * @throws Exception
   */
  @Override
  public void downloadFile(String urlSource, String urlDestination) throws Exception {

    // sanity check
    if (urlSource == null
        || urlSource.length() == 0
        || urlDestination == null
        || urlDestination.length() == 0) {
      throw new IllegalArgumentException(
          "downloadFile(): urlSource or urlDestination argument is null...");
    }

    // URLs for given parameters
    URL source = new URL(urlSource);
    URL destination = new URL(urlDestination);

    // we have a compressed file
    if (GzipUtils.isCompressedFilename(urlSource)) {
      // download to a temporary destination
      File tempDestinationFile =
          org.apache.commons.io.FileUtils.getFile(
              org.apache.commons.io.FileUtils.getTempDirectory(),
              new File(source.getFile()).getName());
      if (LOG.isInfoEnabled()) {
        LOG.info("downloadFile(), " + urlSource + ", this may take a while...");
      }
      org.apache.commons.io.FileUtils.copyURLToFile(source, tempDestinationFile);
      if (LOG.isInfoEnabled()) {
        LOG.info("downloadFile(), gunzip: we have compressed file, decompressing...");
      }
      // decompress the file
      gunzip(tempDestinationFile.getCanonicalPath());
      if (LOG.isInfoEnabled()) {
        LOG.info("downloadFile(), gunzip complete...");
      }
      // move temp/decompressed file to final destination
      File destinationFile = new File(destination.getFile());
      if (destinationFile.exists()) {
        org.apache.commons.io.FileUtils.forceDelete(destinationFile);
      }
      org.apache.commons.io.FileUtils.moveFile(
          org.apache.commons.io.FileUtils.getFile(
              GzipUtils.getUncompressedFilename(tempDestinationFile.getCanonicalPath())),
          destinationFile);

      // clean up after ourselves - remove the compressed file
      tempDestinationFile.delete();
    }
    // uncompressed file, download directly to urlDestination
    else {
      if (LOG.isInfoEnabled()) {
        LOG.info("downloadFile(), " + urlSource + ", this may take a while...");
      }
      org.apache.commons.io.FileUtils.copyURLToFile(
          source, org.apache.commons.io.FileUtils.getFile(destination.getFile()));
    }
  }
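A hypothetical invocation; the receiver and both URLs are placeholders. A source name ending in ".gz" takes the download-decompress-move path; anything else is copied straight to the destination.

    fetcher.downloadFile(
        "http://example.org/data/mutations.maf.gz", "file:///tmp/mutations.maf");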
  public AndroidApp createSelendroidServer(AndroidApp aut)
      throws IOException, ShellCommandException, AndroidSdkException {
    log.info("create SelendroidServer for apk: " + aut.getAbsolutePath());
    init(aut);
    cleanUpPrebuildServer();
    File selendroidServer = createAndAddCustomizedAndroidManifestToSelendroidServer();
    File outputFile =
        new File(
            FileUtils.getTempDirectory(),
            String.format(
                "selendroid-server-%s-%s.apk",
                applicationUnderTest.getBasePackage(), getJarVersionNumber()));

    return signTestServer(selendroidServer, outputFile);
  }
  /**
   * Creates a staging file for mutation data (and meta file) with contents from the given
   * DataMatrix. This is called when the mutation file needs to be run through the Oncotator and
   * Mutation Assessor Tools.
   *
   * @param portalMetadata PortalMetadata
   * @param cancerStudy CancerStudyMetadata
   * @param datatypeMetadata DatatypeMetadata
   * @param dataMatrix DataMatrix
   * @throws Exception
   */
  @Override
  public void writeMutationStagingFile(
      PortalMetadata portalMetadata,
      CancerStudyMetadata cancerStudyMetadata,
      DatatypeMetadata datatypeMetadata,
      DataMatrix dataMatrix)
      throws Exception {

    // we only have a data matrix at this point; we need to create a temp file with its contents
    File oncotatorInputFile =
        org.apache.commons.io.FileUtils.getFile(
            org.apache.commons.io.FileUtils.getTempDirectory(), "oncotatorInputFile");
    FileOutputStream out = org.apache.commons.io.FileUtils.openOutputStream(oncotatorInputFile);
    dataMatrix.write(out);
    IOUtils.closeQuietly(out);

    // output should be the path/name of staging file
    String stagingFilename = datatypeMetadata.getStagingFilename();
    stagingFilename =
        stagingFilename.replaceAll(
            DatatypeMetadata.CANCER_STUDY_TAG, cancerStudyMetadata.toString());
    File stagingFile =
        org.apache.commons.io.FileUtils.getFile(
            portalMetadata.getStagingDirectory(),
            cancerStudyMetadata.getStudyPath(),
            stagingFilename);

    // call oncotateMAF
    oncotateMAF(
        FileUtils.FILE_URL_PREFIX + oncotatorInputFile.getCanonicalPath(),
        FileUtils.FILE_URL_PREFIX + stagingFile.getCanonicalPath());

    // clean up
    if (oncotatorInputFile.exists()) {
      org.apache.commons.io.FileUtils.forceDelete(oncotatorInputFile);
    }

    // meta file
    if (datatypeMetadata.requiresMetafile()) {
      if (LOG.isInfoEnabled()) {
        LOG.info(
            "writingMutationStagingFile(), creating metadata file for staging file: "
                + stagingFile);
      }
      writeMetadataFile(portalMetadata, cancerStudyMetadata, datatypeMetadata, dataMatrix);
    }
  }
Example #7
  @Override
  public boolean execute() throws Exception {
    if (_controllerHost == null) {
      _controllerHost = NetUtil.getHostAddress();
    }

    // Create a temp working directory.
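    // createTempFile() reserves a unique name; the file is deleted and re-created as a directory.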
    File tmpDir = File.createTempFile(SEGMENT_UPLOADER, null, FileUtils.getTempDirectory());
    FileUtils.deleteQuietly(tmpDir);
    tmpDir.mkdir();

    try {
      LOGGER.info("Executing command: " + toString());
      File dir = new File(_segmentDir);
      File[] files = dir.listFiles();

      for (File file : files) {
        File tgzFile = file;

        if (file.isDirectory()) {
          LOGGER.info("Compressing segment {}", file.getName());

          String srcDir = file.getAbsolutePath();
          String tgzFileName =
              TarGzCompressionUtils.createTarGzOfDirectory(
                  srcDir, tmpDir.getAbsolutePath() + File.separator + file.getName() + TAR_GZIP);
          tgzFile = new File(tgzFileName);
        }

        LOGGER.info("Uploading segment {}", tgzFile.getName());
        FileUploadUtils.sendSegmentFile(
            _controllerHost,
            _controllerPort,
            tgzFile.getName(),
            new FileInputStream(tgzFile),
            tgzFile.length());
      }
    } catch (Exception e) {
      LOGGER.error("Exception caught while uploading segment {}", _segmentDir, e);
    } finally {
      // Delete the temporary working directory.
      FileUtils.deleteQuietly(tmpDir);
    }
    return true;
  }
  @Test
  public void createByUrl() throws IOException {
    InputStream in = getClass().getResourceAsStream("/testdata.csv");
    File csvFile = new File(FileUtils.getTempDirectory(), "testdata.csv");
    FileCopyUtils.copy(in, new FileOutputStream(csvFile));
    String url = "csv://" + csvFile.getAbsolutePath();

    EntitySource entitySource = new CsvEntitySource(url, null);
    try {
      assertEquals(entitySource.getUrl(), url);

      Iterator<String> it = entitySource.getEntityNames().iterator();
      assertNotNull(it);
      assertTrue(it.hasNext());
      assertEquals(it.next(), "testdata");
      assertFalse(it.hasNext());

      Repository<? extends Entity> repo = entitySource.getRepositoryByEntityName("testdata");
      assertNotNull(repo);

      Iterator<AttributeMetaData> itMeta = repo.getAttributes().iterator();
      assertNotNull(itMeta);
      assertTrue(itMeta.hasNext());

      AttributeMetaData col1 = itMeta.next();
      assertNotNull(col1);
      assertEquals(col1.getName(), "col1");

      AttributeMetaData col2 = itMeta.next();
      assertNotNull(col2);
      assertEquals(col2.getName(), "col2");
      assertFalse(itMeta.hasNext());

      Iterator<? extends Entity> itEntity = repo.iterator();
      assertNotNull(itEntity);
      assertTrue(itEntity.hasNext());

      Entity entity = itEntity.next();
      assertNotNull(entity);
      assertEquals(entity.get("col1"), "val1");
      assertEquals(entity.get("col2"), "val2");
    } finally {
      entitySource.close();
    }
  }
  @Test
  public void testNestedCondition() throws IOException {
    try (GraphContext context = factory.create()) {
      ProjectModel pm = context.getFramed().addVertex(null, ProjectModel.class);
      pm.setName("Main Project");
      FileModel inputPath = context.getFramed().addVertex(null, FileModel.class);
      inputPath.setFilePath("src/test/resources/");

      Path outputPath =
          Paths.get(
              FileUtils.getTempDirectory().toString(), "windup_" + UUID.randomUUID().toString());
      FileUtils.deleteDirectory(outputPath.toFile());
      Files.createDirectories(outputPath);

      inputPath.setProjectModel(pm);
      pm.setRootFileModel(inputPath);

      WindupConfiguration windupConfiguration =
          new WindupConfiguration()
              .setRuleProviderFilter(
                  new NotPredicate(
                      new RuleProviderPhasePredicate(
                          MigrationRulesPhase.class, ReportGenerationPhase.class)))
              .setGraphContext(context);
      windupConfiguration.setInputPath(Paths.get(inputPath.getFilePath()));
      windupConfiguration.setOutputDirectory(outputPath);
      processor.execute(windupConfiguration);

      GraphService<ClassificationModel> classificationService =
          new GraphService<>(context, ClassificationModel.class);

      Assert.assertEquals(1, provider.getXmlFileMatches().size());
      List<ClassificationModel> classifications = Iterators.asList(classificationService.findAll());
      for (ClassificationModel model : classifications) {
        Assert.assertEquals("Spring File", model.getClassification());
      }
      Assert.assertEquals(1, classifications.size());
      Iterator<FileModel> iterator = classifications.get(0).getFileModels().iterator();
      Assert.assertNotNull(iterator.next());
      Assert.assertNotNull(iterator.next());
      Assert.assertFalse(iterator.hasNext());
    }
  }
Example #10
  public static void main(String[] args) throws Exception {
    File tmpDir = new File(FileUtils.getTempDirectory(), "macc-" + UUID.randomUUID().toString());

    try {
      MiniAccumuloCluster la =
          new MiniAccumuloCluster(tmpDir, "pass1234", new HashMap<String, String>());
      la.start();

      System.out.println(
          "\n   ---- Running Accumulo App against accumulo-" + la.getAccumuloVersion() + "\n");

      run(la.getInstanceName(), la.getZookeepers(), new PasswordToken("pass1234"), args);

      System.out.println("\n   ---- Ran Accumulo App\n");

      la.stop();
    } finally {
      FileUtils.deleteQuietly(tmpDir);
    }
  }
  /**
   * Runs all MAFs for the given dataSourcesMetadata through the Oncotator and OMA tools.
   *
   * @param dataSourcesMetadata DataSourcesMetadata
   * @throws Exception
   */
  @Override
  public void oncotateAllMAFs(DataSourcesMetadata dataSourcesMetadata) throws Exception {

    // iterate over datasource download directory and process all MAFs
    String[] extensions = new String[] {DatatypeMetadata.MAF_FILE_EXT};
    for (File maf :
        listFiles(new File(dataSourcesMetadata.getDownloadDirectory()), extensions, true)) {
      // create a temp file for the given MAF
      File oncotatorInputFile =
          org.apache.commons.io.FileUtils.getFile(
              org.apache.commons.io.FileUtils.getTempDirectory(), "oncotatorInputFile");
      org.apache.commons.io.FileUtils.copyFile(maf, oncotatorInputFile);
      // input is the temp file we just created; output goes back into the original MAF
      oncotateMAF(
          FileUtils.FILE_URL_PREFIX + oncotatorInputFile.getCanonicalPath(),
          FileUtils.FILE_URL_PREFIX + maf.getCanonicalPath());
      // clean up
      org.apache.commons.io.FileUtils.forceDelete(oncotatorInputFile);
    }
  }
  @Test
  public void testRunWindupSourceMode() throws Exception {
    Path userPath =
        FileUtils.getTempDirectory()
            .toPath()
            .resolve("Windup")
            .resolve("windupuserscriptsdir_" + RandomStringUtils.randomAlphanumeric(6));
    try {
      Files.createDirectories(userPath);
      try (InputStream is = getClass().getResourceAsStream(EXAMPLE_USERSCRIPT_INPUT);
          OutputStream os =
              new FileOutputStream(userPath.resolve(EXAMPLE_USERSCRIPT_OUTPUT).toFile())) {
        IOUtils.copy(is, os);
      }
      try (InputStream is = getClass().getResourceAsStream("/exampleconversion.xsl");
          OutputStream os = new FileOutputStream(userPath.resolve(XSLT_OUTPUT_NAME).toFile())) {
        IOUtils.copy(is, os);
      }

      try (GraphContext context = createGraphContext()) {
        // The test-files folder in the project root dir.
        List<String> includeList = Collections.emptyList();
        List<String> excludeList = Collections.emptyList();
        super.runTest(
            context,
            "../test-files/src_example",
            userPath.toFile(),
            true,
            includeList,
            excludeList);

        validateWebXmlReferences(context);
        validatePropertiesModels(context);
        validateReports(context);
        validateCompatibleReport(context);
      }
    } finally {
      FileUtils.deleteDirectory(userPath.toFile());
    }
  }
  @Test
  public void testSimpleSchema() throws Exception {
    CompiledDatum result = DatumCompiler.generate(MyDatumTemplate.class);

    File baseDir = FileUtils.getTempDirectory();
    FileUtils.deleteDirectory(baseDir);
    File srcDir = new File(baseDir, result.getPackageName().replaceAll("\\.", "/"));
    assertTrue(srcDir.mkdirs());

    File codeFile = new File(srcDir, result.getClassName() + ".java");
    try (OutputStream os = new FileOutputStream(codeFile)) {
      IOUtils.write(result.getClassCode(), os, "UTF-8");
    }

    // Compile the generated source with Janino and try loading it. Janino is
    // available as a transitive Cascading dependency, but an explicit test
    // dependency on it would be safer.

    ClassLoader cl =
        new JavaSourceClassLoader(
            this.getClass().getClassLoader(), // parentClassLoader
            new File[] {baseDir}, // optionalSourcePath
            (String) null // optionalCharacterEncoding
            );

    // WARNING - we have to use xxxDatumTemplate as the base name, so that the code returned
    // by the compiler is for type xxxDatum. Otherwise when we try to load the class here,
    // we'll likely get the base (template) class, which will mask our generated class.
    Class<?> clazz = cl.loadClass(result.getPackageName() + "." + result.getClassName());
    assertEquals("MyDatum", clazz.getSimpleName());

    // Verify that we have a constructor which takes all of the fields.
    //        private String _name;
    //        private int ageAndRisk;
    //        private Date _date;
    //        private Tuple _aliases;

    Constructor<?> c = clazz.getConstructor(String.class, int.class, Date.class, Tuple.class);
    Object datum = c.newInstance("robert", 25, new Date(), new Tuple("bob", "rob"));
    assertNotNull(datum);
  }
  /**
   * Reads the script from the resource, processes the variables and writes it to the file system so
   * it can be accessed by PhantomJS.
   *
   * @throws IOException if reading the template or writing the file failed
   */
  private void prepareScript() throws IOException {
    InputStream is = null;
    InputStream fis = null;
    OutputStream os = null;
    try {
      // Create the temporary directory for everything PhantomJS
      phantomTmpDir = new File(FileUtils.getTempDirectory(), "phantomjs");
      if (!phantomTmpDir.isDirectory() && !phantomTmpDir.mkdirs()) {
        logger.error("Unable to create temp directory for PhantomJS at {}", phantomTmpDir);
        throw new IOException("Unable to create temp directory for PhantomJS at " + phantomTmpDir);
      }

      // Create the script
      is = PhantomJsPagePreviewGenerator.class.getResourceAsStream(SCRIPT_FILE);
      scriptTemplate = IOUtils.toString(is);
      scriptFile = new File(phantomTmpDir, "pagepreview.js");

      // Process templates
      Map<String, String> properties = new HashMap<String, String>();
      properties.put(PARAM_PREPARE_SCRIPT, "return true;");
      String script = ConfigurationUtils.processTemplate(scriptTemplate, properties);

      // Write the processed script to disk
      fis = IOUtils.toInputStream(script);
      os = new FileOutputStream(scriptFile);
      IOUtils.copy(fis, os);

    } catch (IOException e) {
      logger.error("Error reading phantomjs script template from " + SCRIPT_FILE, e);
      FileUtils.deleteQuietly(scriptFile);
      throw e;
    } finally {
      IOUtils.closeQuietly(is);
      IOUtils.closeQuietly(fis);
      IOUtils.closeQuietly(os);
    }
  }
Example #15
  private File unpack(File dest) throws IOException {
    File basedir = null;
    File tmp = FileUtils.getTempDirectory();
    // try-with-resources ensures the zip is closed even if extraction fails
    try (ZipFile zfile = new ZipFile(this.getZip())) {
      Enumeration<? extends ZipEntry> entries = zfile.entries();
      while (entries.hasMoreElements()) {
        ZipEntry entry = entries.nextElement();
        File f = new File(tmp, entry.getName());
        if (f.exists()) FileUtils.deleteQuietly(f);

        if (entry.isDirectory()) FileUtils.forceMkdir(f);
        else FileUtils.copyInputStreamToFile(zfile.getInputStream(entry), f);

        // track the top-most directory entry; it becomes the base directory to move
        if (entry.isDirectory() && (basedir == null || basedir.getName().startsWith(f.getName())))
          basedir = f;
      }
    }

    if (basedir == null) {
      throw new IOException("Zip contains no directory entries; nothing to move");
    }
    if (dest.exists()) FileUtils.deleteDirectory(dest);
    FileUtils.moveDirectory(basedir, dest);

    return dest;
  }
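A hedged usage sketch; getZip() supplies the archive and the destination path below is a placeholder. The archive is assumed to contain a single top-level directory, which ends up relocated to dest.

    File appDir = unpack(new File("/opt/myapp"));  // hypothetical destination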
Example #16
/** Oct 14, 2014 */
public class QueriesSentinelTest {
  private static final Logger LOGGER = LoggerFactory.getLogger(QueriesSentinelTest.class);
  private static ReduceService REDUCE_SERVICE = new DefaultReduceService();

  private static final PQLCompiler REQUEST_COMPILER =
      new PQLCompiler(new HashMap<String, String[]>());
  private final String AVRO_DATA = "data/test_data-mv.avro";
  private static File INDEX_DIR =
      new File(FileUtils.getTempDirectory(), "QueriesSentinelTest");
  private static AvroQueryGenerator AVRO_QUERY_GENERATOR;
  private static QueryExecutor QUERY_EXECUTOR;
  private static TestingServerPropertiesBuilder CONFIG_BUILDER;
  private String segmentName;

  @BeforeClass
  public void setup() throws Exception {
    CONFIG_BUILDER = new TestingServerPropertiesBuilder("testTable");

    setupSegmentFor("testTable");
    setUpTestQueries("testTable");

    final PropertiesConfiguration serverConf = CONFIG_BUILDER.build();
    serverConf.setDelimiterParsingDisabled(false);

    final FileBasedInstanceDataManager instanceDataManager =
        FileBasedInstanceDataManager.getInstanceDataManager();
    instanceDataManager.init(
        new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
    instanceDataManager.start();

    System.out.println(
        "************************** : " + new File(INDEX_DIR, "segment").getAbsolutePath());
    File segmentFile = new File(INDEX_DIR, "segment").listFiles()[0];
    segmentName = segmentFile.getName();
    final IndexSegment indexSegment = ColumnarSegmentLoader.load(segmentFile, ReadMode.heap);
    instanceDataManager.getTableDataManager("testTable");
    instanceDataManager.getTableDataManager("testTable").addSegment(indexSegment);

    QUERY_EXECUTOR = new ServerQueryExecutorV1Impl(false);
    QUERY_EXECUTOR.init(
        serverConf.subset("pinot.server.query.executor"),
        instanceDataManager,
        new ServerMetrics(new MetricsRegistry()));
  }

  @AfterClass
  public void tearDown() {
    FileUtils.deleteQuietly(INDEX_DIR);
  }

  private void runApproximationQueries(
      List<? extends AvroQueryGenerator.TestAggreationQuery> queries, double precision)
      throws Exception {
    boolean isAccurate = true;
    Object accurateValue = null;

    int counter = 0;
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    for (final AvroQueryGenerator.TestAggreationQuery query : queries) {
      LOGGER.info("**************************");
      LOGGER.info("running " + counter + " : " + query.getPql());
      final BrokerRequest brokerRequest =
          RequestConverter.fromJSON(REQUEST_COMPILER.compile(query.getPql()));
      InstanceRequest instanceRequest = new InstanceRequest(counter++, brokerRequest);
      instanceRequest.setSearchSegments(new ArrayList<String>());
      instanceRequest.getSearchSegments().add(segmentName);
      final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
      instanceResponseMap.clear();
      instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
      final BrokerResponse brokerResponse =
          REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
      LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));

      // compute value
      Object val;
      if (query instanceof AvroQueryGenerator.TestSimpleAggreationQuery) {
        val = Double.parseDouble(brokerResponse.getAggregationResults().get(0).getString("value"));
      } else {
        val = brokerResponse.getAggregationResults().get(0).getJSONArray("groupByResult");
      }

      if (isAccurate) {
        // store accurate value
        accurateValue = val;
        isAccurate = false;
      } else {
        // compare value with accurate value
        // it's estimation so we need to test its result within error bound
        if (query instanceof AvroQueryGenerator.TestSimpleAggreationQuery) {
          TestUtils.assertApproximation((Double) val, (Double) accurateValue, precision);
        } else {
          TestUtils.assertJSONArrayApproximation(
              (JSONArray) val, (JSONArray) accurateValue, precision);
        }
        isAccurate = true;
      }
    }
  }

  /**
   * Console output of the last statement may not appear, possibly as a result of IntelliJ IDEA's
   * test console redirection. To avoid this, always add assert clauses and do not rely on console
   * output.
   *
   * @throws Exception
   */
  @Test
  public void testDistinctCountHLLNoGroupBy() throws Exception {
    final List<TestSimpleAggreationQuery> aggCalls = new ArrayList<TestSimpleAggreationQuery>();
    // distinct count(*) does not work
    for (int i = 1; i <= 5; i++) {
      aggCalls.add(
          new TestSimpleAggreationQuery(
              "select distinctcount(column" + i + ") from testTable limit 0", 0.0));
      aggCalls.add(
          new TestSimpleAggreationQuery(
              "select distinctcounthll(column" + i + ") from testTable limit 0", 0.0));
    }

    runApproximationQueries(aggCalls, 0.1);
  }

  @Test
  public void testDistinctCountHLLGroupBy() throws Exception {
    final List<TestGroupByAggreationQuery> groupByCalls =
        new ArrayList<TestGroupByAggreationQuery>();
    for (int i = 1; i <= 5; i++) {
      if (i == 2) {
        continue;
      }
      groupByCalls.add(
          new TestGroupByAggreationQuery(
              "select distinctcount(column2) from testTable group by column" + i + " limit 0",
              null));
      groupByCalls.add(
          new TestGroupByAggreationQuery(
              "select distinctcounthll(column2) from testTable group by column" + i + " limit 0",
              null));
    }

    runApproximationQueries(groupByCalls, 0.1);
  }

  @Test
  public void testPercentileNoGroupBy() throws Exception {
    final List<TestSimpleAggreationQuery> aggCalls = new ArrayList<TestSimpleAggreationQuery>();

    // of the 5 single-value columns, only columns 1 and 2 are numeric (column 3 is String type)
    for (int i = 1; i <= 2; i++) {
      aggCalls.add(
          new TestSimpleAggreationQuery(
              "select percentile50(column" + i + ") from testTable limit 0", 0.0));
      aggCalls.add(
          new TestSimpleAggreationQuery(
              "select percentileest50(column" + i + ") from testTable limit 0", 0.0));
    }

    runApproximationQueries(aggCalls, 0.1);
  }

  @Test
  public void testPercentileGroupBy() throws Exception {
    final List<TestGroupByAggreationQuery> groupByCalls =
        new ArrayList<TestGroupByAggreationQuery>();
    final int top = 1000;
    for (int i = 2; i <= 2; i++) {
      if (i == 2) {
        // continue;
      }
      groupByCalls.add(
          new TestGroupByAggreationQuery(
              "select percentile50(column1) from testTable group by column"
                  + i
                  + " top "
                  + top
                  + " limit 0",
              null));
      groupByCalls.add(
          new TestGroupByAggreationQuery(
              "select percentileest50(column1) from testTable group by column"
                  + i
                  + " top "
                  + top
                  + " limit 0",
              null));
    }

    runApproximationQueries(groupByCalls, 0.1);
  }

  @Test
  public void testAggregation() throws Exception {
    int counter = 0;
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    final List<TestSimpleAggreationQuery> aggCalls =
        AVRO_QUERY_GENERATOR.giveMeNSimpleAggregationQueries(10000);
    for (final TestSimpleAggreationQuery aggCall : aggCalls) {
      LOGGER.info("running " + counter + " : " + aggCall.pql);
      final BrokerRequest brokerRequest =
          RequestConverter.fromJSON(REQUEST_COMPILER.compile(aggCall.pql));
      InstanceRequest instanceRequest = new InstanceRequest(counter++, brokerRequest);
      instanceRequest.setSearchSegments(new ArrayList<String>());
      instanceRequest.getSearchSegments().add(segmentName);
      final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
      instanceResponseMap.clear();
      instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
      final BrokerResponse brokerResponse =
          REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
      LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
      LOGGER.info("Result from avro is : " + aggCall.result);
      try {
        Assert.assertEquals(
            Double.parseDouble(brokerResponse.getAggregationResults().get(0).getString("value")),
            aggCall.result);
      } catch (AssertionError e) {
        System.out.println(aggCall.pql);
        System.out.println(
            "from broker : "
                + Double.parseDouble(
                    brokerResponse.getAggregationResults().get(0).getString("value")));
        System.out.println("from precomp : " + aggCall.result);
        throw new AssertionError(e);
      }
    }
  }

  @Test
  public void testAggregationGroupBy() throws Exception {
    final List<TestGroupByAggreationQuery> groupByCalls =
        AVRO_QUERY_GENERATOR.giveMeNGroupByAggregationQueries(10000);
    int counter = 0;
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    for (final TestGroupByAggreationQuery groupBy : groupByCalls) {
      LOGGER.info("running " + counter + " : " + groupBy.pql);
      final BrokerRequest brokerRequest =
          RequestConverter.fromJSON(REQUEST_COMPILER.compile(groupBy.pql));
      InstanceRequest instanceRequest = new InstanceRequest(counter++, brokerRequest);
      instanceRequest.setSearchSegments(new ArrayList<String>());
      instanceRequest.getSearchSegments().add(segmentName);
      final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
      instanceResponseMap.clear();
      instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
      final BrokerResponse brokerResponse =
          REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
      LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
      LOGGER.info("Result from avro is : " + groupBy.groupResults);

      try {
        assertGroupByResults(
            brokerResponse.getAggregationResults().get(0).getJSONArray("groupByResult"),
            groupBy.groupResults);
      } catch (AssertionError e) {
        System.out.println(groupBy.pql);
        System.out.println(
            "from broker : "
                + brokerResponse
                    .getAggregationResults()
                    .get(0)
                    .getJSONArray("groupByResult")
                    .toString());
        System.out.println("from precomp : " + groupBy.groupResults);
        throw new AssertionError(e);
      }
    }
  }

  private void assertGroupByResults(JSONArray jsonArray, Map<Object, Double> groupResultsFromAvro)
      throws JSONException {
    final Map<String, Double> groupResultsFromQuery = new HashMap<String, Double>();
    if (groupResultsFromAvro.size() > 10) {
      Assert.assertEquals(jsonArray.length(), 10);
    } else {
      Assert.assertTrue(jsonArray.length() >= groupResultsFromAvro.size());
    }
    for (int i = 0; i < jsonArray.length(); ++i) {
      groupResultsFromQuery.put(
          jsonArray.getJSONObject(i).getJSONArray("group").getString(0),
          jsonArray.getJSONObject(i).getDouble("value"));
    }

    for (final Object key : groupResultsFromAvro.keySet()) {
      String keyString;
      if (key == null) {
        keyString = "null";
      } else {
        keyString = key.toString();
      }
      if (!groupResultsFromQuery.containsKey(keyString)) {
        continue;
      }
      final double actual = groupResultsFromQuery.get(keyString);
      // System.out.println("Result from query - group:" + keyString + ", value:" + actual);
      final double expected = groupResultsFromAvro.get(key);
      // System.out.println("Result from avro - group:" + keyString + ", value:" + expected);
      Assert.assertEquals(actual, expected);
    }
  }

  private void setUpTestQueries(String table) throws FileNotFoundException, IOException {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    System.out.println(filePath);
    final List<String> dims = new ArrayList<String>();
    dims.add("column1");
    dims.add("column2");
    dims.add("column3");
    dims.add("column4");
    dims.add("column5");
    dims.add("column6");
    dims.add("column7");
    dims.add("column8");
    dims.add("column9");
    dims.add("column10");
    dims.add("weeksSinceEpochSunday");
    dims.add("daysSinceEpoch");
    dims.add("count");

    final List<String> mets = new ArrayList<String>();
    mets.add("count");

    final String time = "minutesSinceEpoch";
    AVRO_QUERY_GENERATOR = new AvroQueryGenerator(new File(filePath), dims, mets, time, table);
    AVRO_QUERY_GENERATOR.init();
    AVRO_QUERY_GENERATOR.generateSimpleAggregationOnSingleColumnFilters();
  }

  private void setupSegmentFor(String table) throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));

    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }
    INDEX_DIR.mkdir();

    final SegmentGeneratorConfig config =
        SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath),
            new File(INDEX_DIR, "segment"),
            "daysSinceEpoch",
            TimeUnit.DAYS,
            table);

    final SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();

    driver.init(config);
    driver.build();

    System.out.println("built at : " + INDEX_DIR.getAbsolutePath());
  }

  @Test
  public void testSingleQuery() throws RecognitionException, Exception {
    String query;
    query = "select count(*) from testTable where column5='kCMyNVGCASKYDdQbftOPaqVMWc'";
    // query= "select sum('count') from testTable where column1='660156454'";
    LOGGER.info("running  : " + query);
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    final BrokerRequest brokerRequest = RequestConverter.fromJSON(REQUEST_COMPILER.compile(query));
    InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    instanceRequest.getSearchSegments().add(segmentName);
    final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
    instanceResponseMap.clear();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponse brokerResponse =
        REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
  }

  @Test
  public void testMatchAllQuery() throws RecognitionException, Exception {
    String query = "select count(*),sum(count) from testTable  ";
    LOGGER.info("running  : " + query);
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    final BrokerRequest brokerRequest = RequestConverter.fromJSON(REQUEST_COMPILER.compile(query));
    InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    instanceRequest.getSearchSegments().add(segmentName);
    final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
    instanceResponseMap.clear();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponse brokerResponse =
        REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(1));

    Assert.assertEquals(brokerResponse.getAggregationResults().get(0).getInt("value"), 100000);
    Assert.assertEquals(
        brokerResponse.getAggregationResults().get(1).getDouble("value"), 8.90662862E13);
    Assert.assertEquals(brokerResponse.getNumDocsScanned(), 100000);
  }

  @Test
  public void testRangeQuery() throws RecognitionException, Exception {
    String query =
        "select count(*) from testTable where column1 in ('999983251', '510705831', '1000720716', '1001058817', '1001099410')";
    LOGGER.info("running  : " + query);
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    final BrokerRequest brokerRequest = RequestConverter.fromJSON(REQUEST_COMPILER.compile(query));
    InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    instanceRequest.getSearchSegments().add(segmentName);
    final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
    instanceResponseMap.clear();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponse brokerResponse =
        REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    Assert.assertEquals(brokerResponse.getAggregationResults().get(0).getInt("value"), 14);
    Assert.assertEquals(brokerResponse.getNumDocsScanned(), 14);
  }

  @Test
  public void testTrace() throws RecognitionException, Exception {
    String query = "select count(*) from testTable where column1='186154188'";
    LOGGER.info("running  : " + query);
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    final BrokerRequest brokerRequest = RequestConverter.fromJSON(REQUEST_COMPILER.compile(query));
    brokerRequest.setEnableTrace(true);
    InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
    instanceRequest.setEnableTrace(true); // TODO: add trace settings consistency
    instanceRequest.setSearchSegments(new ArrayList<String>());
    instanceRequest.getSearchSegments().add(segmentName);
    final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(instanceRequest);
    instanceResponseMap.clear();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponse brokerResponse =
        REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    LOGGER.info("TraceInfo is " + brokerResponse.getTraceInfo()); //
  }
}
public class AggregationGroupByWithDictionaryAndTrieTreeOperatorTest {

  protected static Logger LOGGER =
      LoggerFactory.getLogger(AggregationGroupByWithDictionaryAndTrieTreeOperatorTest.class);
  private final String AVRO_DATA = "data/test_sample_data.avro";
  private static File INDEX_DIR =
      new File(
          FileUtils.getTempDirectory(),
          "TestAggregationGroupByWithDictionaryAndTrieTreeOperator");
  private static File INDEXES_DIR =
      new File(
          FileUtils.getTempDirectory(),
          "TestAggregationGroupByWithDictionaryAndTrieTreeOperatorList");

  public static IndexSegment _indexSegment;
  private static List<SegmentDataManager> _indexSegmentList;

  public static AggregationInfo _paramsInfo;
  public static List<AggregationInfo> _aggregationInfos;
  public static int _numAggregations = 6;

  public Map<String, ColumnMetadata> _medataMap;
  public static GroupBy _groupBy;

  @BeforeClass
  public void setup() throws Exception {
    setupSegment();
    setupQuery();
    _indexSegmentList = new ArrayList<SegmentDataManager>();
  }

  @AfterClass
  public void tearDown() {
    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
  }

  private void setupSegment() throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }

    final SegmentGeneratorConfig config =
        SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "time_day", TimeUnit.DAYS, "test");

    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();

    LOGGER.debug("built at : {}", INDEX_DIR.getAbsolutePath());
    final File indexSegmentDir = new File(INDEX_DIR, driver.getSegmentName());
    _indexSegment = ColumnarSegmentLoader.load(indexSegmentDir, ReadMode.heap);
    _medataMap =
        ((SegmentMetadataImpl) ((IndexSegmentImpl) _indexSegment).getSegmentMetadata())
            .getColumnMetadataMap();
  }

  public void setupQuery() {
    _aggregationInfos = getAggregationsInfo();
    final List<String> groupbyColumns = new ArrayList<String>();
    groupbyColumns.add("column11");
    _groupBy = new GroupBy();
    _groupBy.setColumns(groupbyColumns);
    _groupBy.setTopN(10);
  }

  private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
      final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

      final SegmentGeneratorConfig config =
          SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
              new File(filePath), segmentDir, "time_day", TimeUnit.DAYS, "test");

      config.setSegmentNamePostfix(String.valueOf(i));

      final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
      driver.init(config);
      driver.build();

      LOGGER.debug("built at : {}", segmentDir.getAbsolutePath());
      final File indexSegmentDir = new File(segmentDir, driver.getSegmentName());
      _indexSegmentList.add(
          new OfflineSegmentDataManager(
              ColumnarSegmentLoader.load(indexSegmentDir, ReadMode.heap)));
    }
  }

  @Test
  public void testAggregationGroupBys() {
    final MAggregationGroupByOperator aggregationGroupByOperator = getmAggregationGroupByOperator();

    LOGGER.debug("running query: ");
    final IntermediateResultsBlock block =
        (IntermediateResultsBlock) aggregationGroupByOperator.nextBlock();
    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Result: {}", block.getAggregationGroupByOperatorResult().get(i));
    }
  }

  private MAggregationGroupByOperator getmAggregationGroupByOperator() {
    final List<AggregationFunctionGroupByOperator> aggregationFunctionGroupByOperatorList =
        new ArrayList<AggregationFunctionGroupByOperator>();
    Operator filterOperator =
        new MatchEntireSegmentOperator(_indexSegment.getSegmentMetadata().getTotalRawDocs());
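    // MatchEntireSegmentOperator matches every document in the segment (an unfiltered scan).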
    final BReusableFilteredDocIdSetOperator docIdSetOperator =
        new BReusableFilteredDocIdSetOperator(
            filterOperator, _indexSegment.getSegmentMetadata().getTotalRawDocs(), 5000);
    final Map<String, DataSource> dataSourceMap = getDataSourceMap();
    final MProjectionOperator projectionOperator =
        new MProjectionOperator(dataSourceMap, docIdSetOperator);

    for (int i = 0; i < _numAggregations; ++i) {
      final MAggregationFunctionGroupByWithDictionaryAndTrieTreeOperator
          aggregationFunctionGroupByOperator =
              new MAggregationFunctionGroupByWithDictionaryAndTrieTreeOperator(
                  _aggregationInfos.get(i),
                  _groupBy,
                  new UReplicatedProjectionOperator(projectionOperator),
                  true);
      aggregationFunctionGroupByOperatorList.add(aggregationFunctionGroupByOperator);
    }

    return new MAggregationGroupByOperator(
        _indexSegment,
        _aggregationInfos,
        _groupBy,
        projectionOperator,
        aggregationFunctionGroupByOperatorList);
  }

  @Test
  public void testAggregationGroupBysWithCombine() {
    final MAggregationGroupByOperator aggregationGroupByOperator = getmAggregationGroupByOperator();

    final IntermediateResultsBlock block =
        (IntermediateResultsBlock) aggregationGroupByOperator.nextBlock();

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Result1: {}", block.getAggregationGroupByOperatorResult().get(i));
    }

    ////////////////////////////////////////////////////////////////////////////////
    final MAggregationGroupByOperator aggregationGroupByOperator1 =
        getmAggregationGroupByOperator();

    final IntermediateResultsBlock block1 =
        (IntermediateResultsBlock) aggregationGroupByOperator1.nextBlock();

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Result 2: {}", block1.getAggregationGroupByOperatorResult().get(i));
    }

    CombineService.mergeTwoBlocks(getAggregationGroupByNoFilterBrokerRequest(), block, block1);

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Combined Result: {}", block.getAggregationGroupByOperatorResult().get(i));
    }
  }

  @Test
  public void testAggregationGroupBysWithDataTableEncodeAndDecode() throws Exception {
    final MAggregationGroupByOperator aggregationGroupByOperator = getmAggregationGroupByOperator();

    final IntermediateResultsBlock block =
        (IntermediateResultsBlock) aggregationGroupByOperator.nextBlock();

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Result 1: {}", block.getAggregationGroupByOperatorResult().get(i));
    }

    ////////////////////////////////////////////////////////////////////////////////
    final MAggregationGroupByOperator aggregationGroupByOperator1 =
        getmAggregationGroupByOperator();

    final IntermediateResultsBlock block1 =
        (IntermediateResultsBlock) aggregationGroupByOperator1.nextBlock();

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Result 2: {}", block1.getAggregationGroupByOperatorResult().get(i));
    }

    CombineService.mergeTwoBlocks(getAggregationGroupByNoFilterBrokerRequest(), block, block1);

    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Combined Result: {}", block.getAggregationGroupByOperatorResult().get(i));
    }

    final DataTable dataTable = block.getAggregationGroupByResultDataTable();

    final List<Map<String, Serializable>> results =
        AggregationGroupByOperatorService.transformDataTableToGroupByResult(dataTable);
    for (int i = 0; i < _numAggregations; ++i) {
      LOGGER.debug("Decoded Result: {}", results.get(i));
    }
  }

  @Test
  public void testInnerSegmentPlanMakerForAggregationGroupByOperatorNoFilter() throws Exception {
    final BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    final PlanMaker instancePlanMaker = new InstancePlanMakerImplV1();
    final PlanNode rootPlanNode =
        instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    final MAggregationGroupByOperator operator = (MAggregationGroupByOperator) rootPlanNode.run();
    final IntermediateResultsBlock resultBlock = (IntermediateResultsBlock) operator.nextBlock();
    LOGGER.debug("RunningTime : {}", resultBlock.getTimeUsedMs());
    LOGGER.debug("NumDocsScanned : {}", resultBlock.getNumDocsScanned());
    LOGGER.debug("TotalDocs : {}", resultBlock.getTotalRawDocs());
    LOGGER.debug("Result: {}", resultBlock.getAggregationGroupByResultDataTable());

    final AggregationGroupByOperatorService aggregationGroupByOperatorService =
        new AggregationGroupByOperatorService(_aggregationInfos, brokerRequest.getGroupBy());

    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(
        new ServerInstance("localhost:0000"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:1111"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:2222"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:3333"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:4444"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:5555"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:6666"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:7777"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:8888"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:9999"), resultBlock.getAggregationGroupByResultDataTable());
    final List<Map<String, Serializable>> reducedResults =
        aggregationGroupByOperatorService.reduceGroupByOperators(instanceResponseMap);
    final List<JSONObject> jsonResult =
        aggregationGroupByOperatorService.renderGroupByOperators(reducedResults);
    LOGGER.debug("Json Result: {}", jsonResult);
  }

  @Test
  public void testInnerSegmentPlanMakerForAggregationGroupByOperatorWithFilter() throws Exception {
    final BrokerRequest brokerRequest = getAggregationGroupByWithFilterBrokerRequest();
    final PlanMaker instancePlanMaker = new InstancePlanMakerImplV1();
    final PlanNode rootPlanNode =
        instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    final MAggregationGroupByOperator operator = (MAggregationGroupByOperator) rootPlanNode.run();
    final IntermediateResultsBlock resultBlock = (IntermediateResultsBlock) operator.nextBlock();
    LOGGER.debug("RunningTime : {}", resultBlock.getTimeUsedMs());
    LOGGER.debug("NumDocsScanned : {}", resultBlock.getNumDocsScanned());
    LOGGER.debug("TotalDocs : {}", resultBlock.getTotalRawDocs());
    LOGGER.debug("Result: {}", resultBlock.getAggregationGroupByResultDataTable());
    Assert.assertEquals(resultBlock.getNumDocsScanned(), 582);
    Assert.assertEquals(resultBlock.getTotalRawDocs(), 10001);

    final AggregationGroupByOperatorService aggregationGroupByOperatorService =
        new AggregationGroupByOperatorService(_aggregationInfos, brokerRequest.getGroupBy());

    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(
        new ServerInstance("localhost:0000"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:1111"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:2222"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:3333"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:4444"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:5555"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:6666"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:7777"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:8888"), resultBlock.getAggregationGroupByResultDataTable());
    instanceResponseMap.put(
        new ServerInstance("localhost:9999"), resultBlock.getAggregationGroupByResultDataTable());
    final List<Map<String, Serializable>> reducedResults =
        aggregationGroupByOperatorService.reduceGroupByOperators(instanceResponseMap);

    final List<JSONObject> jsonResult =
        aggregationGroupByOperatorService.renderGroupByOperators(reducedResults);
    LOGGER.debug("Result: {}", jsonResult);
  }

  @Test
  public void testInterSegmentAggregationGroupByPlanMakerAndRun() throws Exception {
    final int numSegments = 20;
    setupSegmentList(numSegments);
    final PlanMaker instancePlanMaker = new InstancePlanMakerImplV1();
    final BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    final BrokerResponse brokerResponse = getBrokerResponse(instancePlanMaker, brokerRequest);
    assertBrokerResponse(numSegments, brokerResponse);
  }

  private BrokerResponse getBrokerResponse(
      PlanMaker instancePlanMaker, BrokerRequest brokerRequest) {
    final ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    final Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            _indexSegmentList, brokerRequest, executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    final DataTable instanceResponse = globalPlan.getInstanceResponse();
    LOGGER.debug("Instance Response: {}", instanceResponse);

    final DefaultReduceService defaultReduceService = new DefaultReduceService();
    final Map<ServerInstance, DataTable> instanceResponseMap =
        new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponse brokerResponse =
        defaultReduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug("Broker Response: {}", new JSONArray(brokerResponse.getAggregationResults()));
    LOGGER.debug("Time used : {}", brokerResponse.getTimeUsedMs());
    return brokerResponse;
  }

  @Test
  public void testEmptyQueryResultsForInterSegmentAggregationGroupBy() throws Exception {
    final int numSegments = 20;
    setupSegmentList(numSegments);
    final PlanMaker instancePlanMaker = new InstancePlanMakerImplV1();
    final BrokerRequest brokerRequest = getAggregationGroupByWithEmptyFilterBrokerRequest();
    final BrokerResponse brokerResponse = getBrokerResponse(instancePlanMaker, brokerRequest);
    assertEmptyBrokerResponse(brokerResponse);
  }

  private void assertBrokerResponse(int numSegments, BrokerResponse brokerResponse)
      throws JSONException {
    Assert.assertEquals(10001 * numSegments, brokerResponse.getNumDocsScanned());
    final int groupSize = 15;
    assertBrokerResponse(brokerResponse, groupSize);

    // Assertion on Aggregation Results
    final List<double[]> aggregationResult = getAggregationResult(numSegments);
    final List<String[]> groupByResult = getGroupResult();
    for (int j = 0; j < _numAggregations; ++j) {
      final double[] aggResult = aggregationResult.get(j);
      final String[] groupResult = groupByResult.get(j);
      for (int i = 0; i < 15; ++i) {
        Assert.assertEquals(
            0,
            DoubleComparisonUtil.defaultDoubleCompare(
                aggResult[i],
                brokerResponse
                    .getAggregationResults()
                    .get(j)
                    .getJSONArray("groupByResult")
                    .getJSONObject(i)
                    .getDouble("value")));
        if ((i < groupSize - 1 && aggResult[i] == aggResult[i + 1])
            || (i > 0 && aggResult[i] == aggResult[i - 1])) {
          // Skip the group assertion: when adjacent aggregation values tie,
          // the ordering of the tied groups is not deterministic.
        } else {
          Assert.assertEquals(
              groupResult[i],
              brokerResponse
                  .getAggregationResults()
                  .get(j)
                  .getJSONArray("groupByResult")
                  .getJSONObject(i)
                  .getString("group"));
        }
      }
    }
  }

  private void assertBrokerResponse(BrokerResponse brokerResponse, int groupSize)
      throws JSONException {
    Assert.assertEquals(_numAggregations, brokerResponse.getAggregationResults().size());
    for (int i = 0; i < _numAggregations; ++i) {
      Assert.assertEquals(
          "[\"column11\",\"column10\"]",
          brokerResponse.getAggregationResults().get(i).getJSONArray("groupByColumns").toString());
      Assert.assertEquals(
          groupSize,
          brokerResponse.getAggregationResults().get(i).getJSONArray("groupByResult").length());
    }

    // Assertion on the aggregation function names
    Assert.assertEquals(
        "count_star", brokerResponse.getAggregationResults().get(0).getString("function"));
    Assert.assertEquals(
        "sum_met_impressionCount",
        brokerResponse.getAggregationResults().get(1).getString("function"));
    Assert.assertEquals(
        "max_met_impressionCount",
        brokerResponse.getAggregationResults().get(2).getString("function"));
    Assert.assertEquals(
        "min_met_impressionCount",
        brokerResponse.getAggregationResults().get(3).getString("function"));
    Assert.assertEquals(
        "avg_met_impressionCount",
        brokerResponse.getAggregationResults().get(4).getString("function"));
    Assert.assertEquals(
        "distinctCount_column12",
        brokerResponse.getAggregationResults().get(5).getString("function"));
  }
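
  // The names asserted above follow a visible convention:
  // <aggregationType> + "_" + <column>, with count over "*" rendered as
  // "count_star". The helper below is a hypothetical restatement of that
  // rule for readability, not a quote of Pinot internals.
  private static String expectedFunctionName(String type, String column) {
    return type + "_" + ("*".equals(column) ? "star" : column);
  }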

  private void assertEmptyBrokerResponse(BrokerResponse brokerResponse) throws JSONException {
    Assert.assertEquals(0, brokerResponse.getNumDocsScanned());
    final int groupSize = 0;
    assertBrokerResponse(brokerResponse, groupSize);
  }

  private static List<double[]> getAggregationResult(int numSegments) {
    final List<double[]> aggregationResultList = new ArrayList<double[]>();
    aggregationResultList.add(getCountResult(numSegments));
    aggregationResultList.add(getSumResult(numSegments));
    aggregationResultList.add(getMaxResult());
    aggregationResultList.add(getMinResult());
    aggregationResultList.add(getAvgResult());
    aggregationResultList.add(getDistinctCountResult());
    return aggregationResultList;
  }

  private static List<String[]> getGroupResult() {
    final List<String[]> groupResults = new ArrayList<String[]>();
    groupResults.add(getCountGroupResult());
    groupResults.add(getSumGroupResult());
    groupResults.add(getMaxGroupResult());
    groupResults.add(getMinGroupResult());
    groupResults.add(getAvgGroupResult());
    groupResults.add(getDistinctCountGroupResult());
    return groupResults;
  }

  private static double[] getCountResult(int numSegments) {
    return new double[] {
      1450 * numSegments,
      620 * numSegments,
      517 * numSegments,
      422 * numSegments,
      365 * numSegments,
      340 * numSegments,
      321 * numSegments,
      296 * numSegments,
      286 * numSegments,
      273 * numSegments,
      271 * numSegments,
      268 * numSegments,
      234 * numSegments,
      210 * numSegments,
      208 * numSegments
    };
  }

  private static String[] getCountGroupResult() {
    return new String[] {
      "[\"i\",\"\"]",
      "[\"D\",\"\"]",
      "[\"i\",\"CqC\"]",
      "[\"i\",\"QMl\"]",
      "[\"i\",\"bVnY\"]",
      "[\"i\",\"iV\"]",
      "[\"i\",\"zZe\"]",
      "[\"i\",\"xDLG\"]",
      "[\"i\",\"VsKz\"]",
      "[\"i\",\"mNh\"]",
      "[\"i\",\"ez\"]",
      "[\"i\",\"rNcu\"]",
      "[\"i\",\"EXYv\"]",
      "[\"i\",\"gpyD\"]",
      "[\"i\",\"yhq\"]"
    };
  }

  private static double[] getSumResult(int numSegments) {
    return new double[] {
      194232989695956150000000.00000,
      82874083725452570000000.00000,
      69188102307666020000000.00000,
      57011594945268800000000.00000,
      49669069292549060000000.00000,
      45658425435674350000000.00000,
      42733154649942075000000.00000,
      39374565823833550000000.00000,
      38376043393352970000000.00000,
      36944406922141550000000.00000,
      36562112604244086000000.00000,
      36141768458849143000000.00000,
      31259578136918286000000.00000,
      27679187240218786000000.00000,
      27524721980723073000000.00000
    };
  }

  private static String[] getSumGroupResult() {
    return new String[] {
      "[\"i\",\"\"]",
      "[\"D\",\"\"]",
      "[\"i\",\"CqC\"]",
      "[\"i\",\"QMl\"]",
      "[\"i\",\"bVnY\"]",
      "[\"i\",\"iV\"]",
      "[\"i\",\"zZe\"]",
      "[\"i\",\"xDLG\"]",
      "[\"i\",\"VsKz\"]",
      "[\"i\",\"mNh\"]",
      "[\"i\",\"ez\"]",
      "[\"i\",\"rNcu\"]",
      "[\"i\",\"EXYv\"]",
      "[\"i\",\"yhq\"]",
      "[\"i\",\"gpyD\"]"
    };
  }

  private static double[] getMaxResult() {
    return new double[] {
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000,
      8637957270245934100.00000
    };
  }

  private static String[] getMaxGroupResult() {
    return new String[] {
      "[\"i\",\"yH\"]",
      "[\"U\",\"mNh\"]",
      "[\"i\",\"OYMU\"]",
      "[\"D\",\"opm\"]",
      "[\"D\",\"Gac\"]",
      "[\"i\",\"ZQa\"]",
      "[\"i\",\"gpyD\"]",
      "[\"D\",\"Pcb\"]",
      "[\"i\",\"mNh\"]",
      "[\"U\",\"LjAS\"]",
      "[\"U\",\"bVnY\"]",
      "[\"D\",\"aN\"]",
      "[\"D\",\"iV\"]",
      "[\"U\",\"Vj\"]",
      "[\"D\",\"KsKZ\"]"
    };
  }

  private static double[] getMinResult() {
    return new double[] {
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      614819680033322500.00000,
      1048718684474966140.00000,
      1048718684474966140.00000,
      1048718684474966140.00000,
      3703896352903212000.00000,
      3703896352903212000.00000,
      3703896352903212000.00000,
      3703896352903212000.00000
    };
  }

  private static String[] getMinGroupResult() {
    return new String[] {
      "[\"D\",\"Gac\"]",
      "[\"i\",\"mNh\"]",
      "[\"i\",\"VsKz\"]",
      "[\"D\",\"\"]",
      "[\"i\",\"yhq\"]",
      "[\"D\",\"CqC\"]",
      "[\"U\",\"\"]",
      "[\"i\",\"jb\"]",
      "[\"D\",\"bVnY\"]",
      "[\"i\",\"\"]",
      "[\"i\",\"QMl\"]",
      "[\"i\",\"Pcb\"]",
      "[\"i\",\"EXYv\"]",
      "[\"i\",\"bVnY\"]",
      "[\"i\",\"zZe\"]"
    };
  }

  private static double[] getAvgResult() {
    return new double[] {
      7768390271561314300.00000,
      7215319188094814200.00000,
      7105513810764889100.00000,
      7094438547504759800.00000,
      7004199482369404900.00000,
      6991851055242935300.00000,
      6987779156890090500.00000,
      6973627660796153900.00000,
      6970558938737374200.00000,
      6964262042984379400.00000,
      6912897688920598500.00000,
      6906152143309600800.00000,
      6888134675143909400.00000,
      6880505863259489300.00000,
      6878447250928267300.00000
    };
  }

  private static String[] getAvgGroupResult() {
    return new String[] {
      "[\"U\",\"yhq\"]",
      "[\"U\",\"mNh\"]",
      "[\"U\",\"Vj\"]",
      "[\"U\",\"OYMU\"]",
      "[\"U\",\"zZe\"]",
      "[\"U\",\"jb\"]",
      "[\"D\",\"aN\"]",
      "[\"U\",\"bVnY\"]",
      "[\"U\",\"iV\"]",
      "[\"i\",\"LjAS\"]",
      "[\"D\",\"xDLG\"]",
      "[\"U\",\"EXYv\"]",
      "[\"D\",\"iV\"]",
      "[\"D\",\"Gac\"]",
      "[\"D\",\"QMl\"]"
    };
  }

  private static double[] getDistinctCountResult() {
    return new double[] {128, 109, 100, 99, 84, 81, 77, 76, 75, 74, 71, 67, 67, 62, 57};
  }

  private static String[] getDistinctCountGroupResult() {
    return new String[] {
      "[\"i\",\"\"]",
      "[\"D\",\"\"]",
      "[\"i\",\"zZe\"]",
      "[\"i\",\"QMl\"]",
      "[\"i\",\"bVnY\"]",
      "[\"i\",\"iV\"]",
      "[\"i\",\"VsKz\"]",
      "[\"i\",\"CqC\"]",
      "[\"i\",\"EXYv\"]",
      "[\"i\",\"xDLG\"]",
      "[\"i\",\"yhq\"]",
      "[\"U\",\"\"]",
      "[\"D\",\"EXYv\"]",
      "[\"D\",\"LjAS\"]",
      "[\"i\",\"rNcu\"]"
    };
  }

  private static BrokerRequest getAggregationGroupByNoFilterBrokerRequest() {
    final BrokerRequest brokerRequest = new BrokerRequest();
    setAggregationsInfo(brokerRequest);
    return brokerRequest;
  }

  private static List<AggregationInfo> getAggregationsInfo() {
    final List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountAggregationInfo("column12"));
    return aggregationsInfo;
  }

  private static Map<String, DataSource> getDataSourceMap() {
    final Map<String, DataSource> dataSourceMap = new HashMap<String, DataSource>();
    dataSourceMap.put("column11", _indexSegment.getDataSource("column11"));
    dataSourceMap.put("column12", _indexSegment.getDataSource("column12"));
    dataSourceMap.put("met_impressionCount", _indexSegment.getDataSource("met_impressionCount"));
    return dataSourceMap;
  }

  private static AggregationInfo getCountAggregationInfo() {
    final String type = "count";
    final Map<String, String> params = new HashMap<String, String>();
    params.put("column", "*");
    final AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getSumAggregationInfo() {
    final String type = "sum";
    return getAggregationInfo(type);
  }

  private static AggregationInfo getAggregationInfo(String type) {
    final Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met_impressionCount");
    final AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getMaxAggregationInfo() {
    final String type = "max";
    return getAggregationInfo(type);
  }

  private static AggregationInfo getMinAggregationInfo() {
    final String type = "min";
    return getAggregationInfo(type);
  }

  private static AggregationInfo getAvgAggregationInfo() {
    final String type = "avg";
    return getAggregationInfo(type);
  }

  private static AggregationInfo getDistinctCountAggregationInfo(String dim) {
    final String type = "distinctCount";
    final Map<String, String> params = new HashMap<String, String>();
    params.put("column", dim);

    final AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static GroupBy getGroupBy() {
    final GroupBy groupBy = new GroupBy();
    final List<String> columns = new ArrayList<String>();
    columns.add("column11");
    columns.add("column10");
    groupBy.setColumns(columns);
    groupBy.setTopN(15);
    return groupBy;
  }

  private static BrokerRequest getAggregationGroupByWithFilterBrokerRequest() {
    final BrokerRequest brokerRequest = new BrokerRequest();
    setAggregationsInfo(brokerRequest);
    setFilterQuery(brokerRequest);
    return brokerRequest;
  }

  private static void setAggregationsInfo(BrokerRequest brokerRequest) {
    final List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountAggregationInfo("column12"));
    brokerRequest.setAggregationsInfo(aggregationsInfo);
    brokerRequest.setGroupBy(getGroupBy());
  }

  private static BrokerRequest setFilterQuery(BrokerRequest brokerRequest) {
    final String filterColumn = "column11";
    final String filterVal = "U";
    generateFilter(brokerRequest, filterColumn, filterVal);
    return brokerRequest;
  }

  private static void generateFilter(
      BrokerRequest brokerRequest, String filterColumn, String filterVal) {
    FilterQueryTree filterQueryTree;
    if (filterColumn.contains(",")) {
      final String[] filterColumns = filterColumn.split(",");
      final String[] filterValues = filterVal.split(",");
      final List<FilterQueryTree> nested = new ArrayList<FilterQueryTree>();
      for (int i = 0; i < filterColumns.length; i++) {

        final List<String> vals = new ArrayList<String>();
        vals.add(filterValues[i]);
        final FilterQueryTree d =
            new FilterQueryTree(i + 1, filterColumns[i], vals, FilterOperator.EQUALITY, null);
        nested.add(d);
      }
      filterQueryTree = new FilterQueryTree(0, null, null, FilterOperator.AND, nested);
    } else {
      final List<String> vals = new ArrayList<String>();
      vals.add(filterVal);
      filterQueryTree = new FilterQueryTree(0, filterColumn, vals, FilterOperator.EQUALITY, null);
    }
    RequestUtils.generateFilterFromTree(filterQueryTree, brokerRequest);
  }
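
  // Shape of the tree generateFilter builds, restated as a hypothetical
  // string-rendering helper (illustration only): a comma-separated spec such
  // as "c1,c2" / "x,y" becomes AND(EQ(c1,[x]), EQ(c2,[y])), while a single
  // column such as "column11" / "U" becomes the lone node EQ(column11,[U]).
  private static String describeFilterShape(String filterColumn, String filterVal) {
    if (!filterColumn.contains(",")) {
      return "EQ(" + filterColumn + ",[" + filterVal + "])";
    }
    final String[] cols = filterColumn.split(",");
    final String[] vals = filterVal.split(",");
    final StringBuilder shape = new StringBuilder("AND(");
    for (int i = 0; i < cols.length; i++) {
      shape.append(i == 0 ? "" : ", ")
          .append("EQ(").append(cols[i]).append(",[").append(vals[i]).append("])");
    }
    return shape.append(")").toString();
  }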

  private static BrokerRequest getAggregationGroupByWithEmptyFilterBrokerRequest() {
    final BrokerRequest brokerRequest = new BrokerRequest();
    setAggregationsInfo(brokerRequest);
    setEmptyFilterQuery(brokerRequest);
    return brokerRequest;
  }

  private static BrokerRequest setEmptyFilterQuery(BrokerRequest brokerRequest) {
    // "uuuu" matches no value in column11, so the filter selects zero documents.
    final String filterColumn = "column11";
    final String filterVal = "uuuu";
    generateFilter(brokerRequest, filterColumn, filterVal);
    return brokerRequest;
  }
}
Example #18
 private Path getDefaultPath() {
   return FileUtils.getTempDirectory()
       .toPath()
       .resolve("Windup")
       .resolve("windupgraph_javaclasstest_" + RandomStringUtils.randomAlphanumeric(6));
 }
Example #19
 /**
  * Builds a File handle with a random name under the system temp directory and returns it.
  * Note: this method does not create the file on disk.
  *
  * @author adli, July 19, 2015, 5:56:56 PM
  * @return a File pointing at a randomly named path in the temp directory
  */
 private static File getTmpFile() {
   File tmpDir = FileUtils.getTempDirectory();
   String tmpFileName = (Double.toString(Math.random() * 10000.0D)).replace(".", "");
   return new File(tmpDir, tmpFileName);
 }
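A caveat on this example: a name derived from Math.random() can, in principle, collide with an existing file. A minimal collision-safe sketch (same imports as above; the method name is illustrative) using the JDK's File.createTempFile, which creates a uniquely named file atomically:

 private static File getTmpFileSafely() throws IOException {
   // createTempFile guarantees a fresh, uniquely named file in the given directory
   return File.createTempFile("tmp", null, FileUtils.getTempDirectory());
 }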
Example #20
 private Path getDefaultPath() {
   return FileUtils.getTempDirectory()
       .toPath()
       .resolve("Windup")
       .resolve("windupgraph_nonnamespacedmavendisc_" + RandomStringUtils.randomAlphanumeric(6));
 }
Example #21
  /**
   * Runs a MAF file through the Oncotator and OMA tools.
   *
   * @param inputMAFURL String
   * @param outputMAFURL String
   * @throws Exception
   */
  @Override
  public void oncotateMAF(String inputMAFURL, String outputMAFURL) throws Exception {

    // sanity check
    if (inputMAFURL == null
        || inputMAFURL.length() == 0
        || outputMAFURL == null
        || outputMAFURL.length() == 0) {
      throw new IllegalArgumentException(
          "oncotateMAF(): inputMAFURL or outputMAFURL argument is null...");
    }

    URL inputMAF = new URL(inputMAFURL);
    URL outputMAF = new URL(outputMAFURL);

    // determine if we have to call liftover
    boolean cleanOncotatorInputFile = false;
    File oncotatorInputFile = new File(inputMAF.getFile());
    org.apache.commons.io.LineIterator it =
        org.apache.commons.io.FileUtils.lineIterator(oncotatorInputFile);
    it.nextLine(); // skip header
    String[] parts = it.nextLine().split("\t");
    it.close(); // close the iterator on both paths, not only for hg18 input
    if (parts[3].contains("36") || parts[3].equals("hg18")) {
      File liftoverInputFile =
          org.apache.commons.io.FileUtils.getFile(
              org.apache.commons.io.FileUtils.getTempDirectory(), "liftoverInputFile");
      org.apache.commons.io.FileUtils.copyFile(oncotatorInputFile, liftoverInputFile);
      // Hg18ToHg19 writes its hg19 output back over the original input path.
      // call lift over
      if (LOG.isInfoEnabled()) {
        LOG.info("oncotateMAF(), calling Hg18ToHg19...");
      }
      Hg18ToHg19.driver(
          liftoverInputFile.getCanonicalPath(),
          oncotatorInputFile.getCanonicalPath(),
          getLiftOverBinary(),
          getLiftOverChain());
      org.apache.commons.io.FileUtils.forceDelete(liftoverInputFile);
      cleanOncotatorInputFile = true;
    }

    // create a temp output file from the oncotator
    File oncotatorOutputFile =
        org.apache.commons.io.FileUtils.getFile(
            org.apache.commons.io.FileUtils.getTempDirectory(), "oncotatorOutputFile");
    // call oncotator
    if (LOG.isInfoEnabled()) {
      LOG.info("oncotateMAF(), calling OncotateTool...");
    }
    OncotateTool.driver(
        oncotatorInputFile.getCanonicalPath(),
        oncotatorOutputFile.getCanonicalPath(),
        true,
        true,
        true);
    // we call OMA here -
    // we use output from oncotator as input file
    if (LOG.isInfoEnabled()) {
      LOG.info("oncotateMAF(), calling MutationAssessorTool...");
    }
    File outputMAFFile = new File(outputMAF.getFile());
    outputMAFFile.createNewFile();
    MutationAssessorTool.driver(
        oncotatorOutputFile.getCanonicalPath(),
        outputMAFFile.getCanonicalPath(),
        false,
        true,
        true);

    // clean up
    org.apache.commons.io.FileUtils.forceDelete(oncotatorOutputFile);
    if (cleanOncotatorInputFile) {
      org.apache.commons.io.FileUtils.forceDelete(oncotatorInputFile);
    }
  }
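The method above repeatedly stages scratch files under FileUtils.getTempDirectory() and removes them with forceDelete. A minimal sketch of that staging pattern in isolation (file name and contents are placeholders):

  private static void stageAndClean() throws IOException {
    File scratch = org.apache.commons.io.FileUtils.getFile(
        org.apache.commons.io.FileUtils.getTempDirectory(), "scratchFile");
    org.apache.commons.io.FileUtils.writeStringToFile(scratch, "intermediate data", "UTF-8");
    try {
      // ... process the scratch file here ...
    } finally {
      // forceDelete throws on failure, so broken cleanup is surfaced rather than ignored
      org.apache.commons.io.FileUtils.forceDelete(scratch);
    }
  }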
Example #22
public class PlanMakerTest {
  private static final Logger LOGGER = LoggerFactory.getLogger(PlanMakerTest.class);

  private static final String LARGE_AVRO_DATA = "data/simpleData2000001.avro";
  private static final String SMALL_AVRO_DATA = "data/simpleData200001.avro";
  private static final File INDEX_DIR =
      new File(FileUtils.getTempDirectory(), "TestPlanMaker");
  private static final File INDEXES_DIR =
      new File(FileUtils.getTempDirectory(), "TestPlanMakerList");

  private static final int COUNT_AGGREGATION_INDEX = 0;
  private static final int SUM_AGGREGATION_INDEX = 1;
  private static final int MAX_AGGREGATION_INDEX = 2;
  private static final int MIN_AGGREGATION_INDEX = 3;
  private static final int AVG_AGGREGATION_INDEX = 4;
  private static final int DISTINCT_DIM0_AGGREGATION_INDEX = 5;
  private static final int DISTINCT_DIM1_AGGREGATION_INDEX = 6;

  private BrokerRequest _brokerRequest;
  private IndexSegment _indexSegment;
  private List<IndexSegment> _indexSegmentList = new ArrayList<>();

  @BeforeClass
  public void setup() throws Exception {
    // Let setup failures fail the run instead of being swallowed by a catch block.
    _brokerRequest = getAggregationNoFilterBrokerRequest();
    setupSegment();
    setupSegmentList(20);
  }

  @AfterClass
  public void tearDown() {
    if (_indexSegment != null) {
      _indexSegment.destroy();
    }
    for (IndexSegment segment : _indexSegmentList) {
      segment.destroy();
    }
    _indexSegmentList.clear();
    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
  }

  private void setupSegment() throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(
            PlanMakerTest.class.getClassLoader().getResource(LARGE_AVRO_DATA));

    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }

    final SegmentGeneratorConfig config =
        SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "dim1", TimeUnit.DAYS, "test");

    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();

    LOGGER.debug("built at: {}", INDEX_DIR.getAbsolutePath());
    final File indexSegmentDir = new File(INDEX_DIR, driver.getSegmentName());
    _indexSegment = ColumnarSegmentLoader.load(indexSegmentDir, ReadMode.mmap);
  }

  private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(SMALL_AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
      final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

      final SegmentGeneratorConfig config =
          SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
              new File(filePath), segmentDir, "dim1", TimeUnit.DAYS, "test");

      final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
      driver.init(config);
      driver.build();

      LOGGER.debug("built at: {}", segmentDir.getAbsolutePath());
      _indexSegmentList.add(
          ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.mmap));
    }
  }

  @Test
  public void testInnerSegmentPlanMakerForAggregationNoFilter() {
    BrokerRequest brokerRequest = getAggregationNoFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    IntermediateResultsBlock resultBlock =
        (IntermediateResultsBlock) rootPlanNode.run().nextBlock();
    for (int i = 0; i < 7; i++) {
      LOGGER.debug(resultBlock.getAggregationResult().get(i).toString());
    }
    assertEquals(((Number) resultBlock.getAggregationResult().get(0)).longValue(), 2000001L);
    assertEquals(resultBlock.getAggregationResult().get(1), 2000001000000.0);
    assertEquals(resultBlock.getAggregationResult().get(2), 2000000.0);
    assertEquals(resultBlock.getAggregationResult().get(3), 0.0);
    assertEquals(
        Double.parseDouble(resultBlock.getAggregationResult().get(4).toString()), 1000000.0);
    assertEquals(((IntOpenHashSet) resultBlock.getAggregationResult().get(5)).size(), 10);
    assertEquals(((IntOpenHashSet) resultBlock.getAggregationResult().get(6)).size(), 100);
  }

  @Test
  public void testInnerSegmentPlanMakerForAggregationWithFilter() {
    BrokerRequest brokerRequest = getAggregationWithFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
  }

  @Test
  public void testInnerSegmentPlanMakerForSelectionNoFilter() {
    BrokerRequest brokerRequest = getSelectionNoFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    IntermediateResultsBlock resultBlock =
        (IntermediateResultsBlock) rootPlanNode.run().nextBlock();
    PriorityQueue<Serializable[]> retPriorityQueue =
        (PriorityQueue<Serializable[]>) resultBlock.getSelectionResult();
    while (!retPriorityQueue.isEmpty()) {
      Serializable[] row = retPriorityQueue.poll();
      LOGGER.debug(Arrays.toString(row));
      assertEquals(row[0], 9);
      assertEquals(row[1], 99);
    }
  }

  @Test
  public void testInnerSegmentPlanMakerForSelectionNoFilterNoOrdering() {
    BrokerRequest brokerRequest = getSelectionNoFilterBrokerRequest();
    brokerRequest.getSelections().setSelectionSortSequence(null);
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    MSelectionOnlyOperator operator = (MSelectionOnlyOperator) rootPlanNode.run();
    IntermediateResultsBlock resultBlock = (IntermediateResultsBlock) operator.nextBlock();
    ArrayList<Serializable[]> retList =
        (ArrayList<Serializable[]>) resultBlock.getSelectionResult();
    int i = 0;
    for (Serializable[] row : retList) {
      LOGGER.debug(Arrays.toString(row));
      assertEquals(row[0], (i % 10));
      assertEquals(row[1], i);
      assertEquals(row[2], i);
      i++;
    }
  }

  @Test
  public void testInnerSegmentPlanMakerForSelectionWithFilter() {
    BrokerRequest brokerRequest = getSelectionWithFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
  }

  @Test
  public void testInnerSegmentPlanMakerForAggregationGroupByNoFilter() {
    BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    IntermediateResultsBlock resultBlock =
        (IntermediateResultsBlock) rootPlanNode.run().nextBlock();
    LOGGER.debug("RunningTime: {}", resultBlock.getTimeUsedMs());
    LOGGER.debug("NumDocsScanned: {}", resultBlock.getNumDocsScanned());
    LOGGER.debug("TotalDocs: {}", resultBlock.getTotalRawDocs());
    List<Map<String, Serializable>> combinedGroupByResult =
        resultBlock.getAggregationGroupByOperatorResult();

    Map<String, Serializable> singleGroupByResult =
        combinedGroupByResult.get(COUNT_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(200001, ((Number) resultList).longValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(200000, ((Number) resultList).longValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(SUM_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedSumValue =
            ((Double.parseDouble(keyString) + 2000000 + Double.parseDouble(keyString)) * 200001)
                / 2;
        assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedSumValue =
            (((Double.parseDouble(keyString) + 2000000) - 10) + Double.parseDouble(keyString))
                * 100000;
        assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(MAX_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(2000000 + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(
            (2000000 - 10) + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(MIN_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(AVG_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);

        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedAvgValue =
            ((Double.parseDouble(keyString) + 2000000 + Double.parseDouble(keyString)) * 200001)
                / 2
                / 200001;
        assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedAvgValue =
            ((((Double.parseDouble(keyString) + 2000000) - 10) + Double.parseDouble(keyString))
                    * 100000)
                / 200000;
        assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
      }
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM0_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      Serializable resultList = singleGroupByResult.get(keyString);
      LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
      assertEquals(1, ((IntOpenHashSet) resultList).size());
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM1_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      Serializable resultList = singleGroupByResult.get(keyString);
      LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
      assertEquals(10, ((IntOpenHashSet) resultList).size());
    }
  }
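
  // Sanity restatement of the expectations above (assuming met == docId and
  // dim0 == docId % 10 over the 2000001-row segment): group "0" holds docs
  // 0, 10, ..., 2000000 (200001 of them); every other group k holds 200000
  // docs ending at 2000000 - 10 + k. Each group sum is then a plain
  // arithmetic series, (first + last) * n / 2. Hypothetical helper:
  private static double expectedGroupSum(int key) {
    final long count = (key == 0) ? 200001 : 200000;
    final long last = (key == 0) ? 2000000 : (2000000 - 10 + key);
    return (key + last) * (double) count / 2;
  }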

  @Test
  public void testInnerSegmentPlanMakerForAggregationGroupByWithFilter() {
    BrokerRequest brokerRequest = getAggregationGroupByWithFilterBrokerRequest();
    brokerRequest.getGroupBy().getColumns().clear();
    brokerRequest.getGroupBy().getColumns().add("dim0");
    brokerRequest.getGroupBy().getColumns().add("dim1");
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    IntermediateResultsBlock resultBlock =
        (IntermediateResultsBlock) rootPlanNode.run().nextBlock();
    LOGGER.debug("RunningTime: {}", resultBlock.getTimeUsedMs());
    LOGGER.debug("NumDocsScanned: {}", resultBlock.getNumDocsScanned());
    LOGGER.debug("TotalDocs: {}", resultBlock.getTotalRawDocs());
    List<Map<String, Serializable>> combinedGroupByResult =
        resultBlock.getAggregationGroupByOperatorResult();
    for (int i = 0; i < combinedGroupByResult.size(); ++i) {
      LOGGER.debug("function: {}", brokerRequest.getAggregationsInfo().get(i));
      for (String keyString : combinedGroupByResult.get(i).keySet()) {
        LOGGER.debug(
            "grouped key: {}, value: {}", keyString, combinedGroupByResult.get(i).get(keyString));
      }
    }
  }

  @Test
  public void testInterSegmentAggregationPlanMaker() {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = _brokerRequest.deepCopy();
    brokerRequest.setSelections(null);
    brokerRequest.setSelectionsIsSet(false);
    ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    brokerRequest = setFilterQuery(brokerRequest);
    globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
  }

  private List<SegmentDataManager> makeSegMgrList(List<IndexSegment> indexSegmentList) {
    List<SegmentDataManager> segMgrList = new ArrayList<>(indexSegmentList.size());
    for (IndexSegment segment : indexSegmentList) {
      segMgrList.add(new OfflineSegmentDataManager(segment));
    }
    return segMgrList;
  }

  @Test
  public void testInterSegmentAggregationPlanMakerAndRun() {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = _brokerRequest.deepCopy();
    ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    DataTable instanceResponse = globalPlan.getInstanceResponse();
    LOGGER.debug(Long.toString(instanceResponse.getLong(0, 0)));
    LOGGER.debug(Double.toString(instanceResponse.getDouble(0, 1)));
    LOGGER.debug(Double.toString(instanceResponse.getDouble(0, 2)));
    LOGGER.debug(Double.toString(instanceResponse.getDouble(0, 3)));
    LOGGER.debug(instanceResponse.getObject(0, 4).toString());
    LOGGER.debug(instanceResponse.getObject(0, 5).toString());
    LOGGER.debug(instanceResponse.getObject(0, 6).toString());
    LOGGER.debug("Query time: {}", instanceResponse.getMetadata().get("timeUsedMs"));
    assertEquals(200001L * _indexSegmentList.size(), instanceResponse.getLong(0, 0));
    assertEquals(20000100000.0 * _indexSegmentList.size(), instanceResponse.getDouble(0, 1));
    assertEquals(200000.0, instanceResponse.getDouble(0, 2));
    assertEquals(0.0, instanceResponse.getDouble(0, 3));
    assertEquals(100000.0, Double.parseDouble(instanceResponse.getObject(0, 4).toString()));
    assertEquals(10, ((IntOpenHashSet) instanceResponse.getObject(0, 5)).size());
    assertEquals(100, ((IntOpenHashSet) instanceResponse.getObject(0, 6)).size());
    DefaultReduceService reduceService = new DefaultReduceService();
    Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse);
    BrokerResponseJSON brokerResponse =
        reduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug(brokerResponse.getAggregationResults().toString());
  }

  @Test
  public void testInterSegmentAggregationGroupByPlanMakerAndRun() {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    DataTable instanceResponse = globalPlan.getInstanceResponse();

    LOGGER.debug(instanceResponse.toString());
    List<DataTable> instanceResponseList = new ArrayList<DataTable>();
    instanceResponseList.add(instanceResponse);

    List<Map<String, Serializable>> combinedGroupByResult =
        AggregationGroupByOperatorService.transformDataTableToGroupByResult(instanceResponse);

    Map<String, Serializable> singleGroupByResult =
        combinedGroupByResult.get(COUNT_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(400020, ((Number) resultList).longValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(400000, ((Number) resultList).longValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(SUM_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedSumValue =
            (((Double.parseDouble(keyString) + 200000 + Double.parseDouble(keyString)) * 20001) / 2)
                * 20;
        assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedSumValue =
            (((Double.parseDouble(keyString) + 200000) - 10) + Double.parseDouble(keyString))
                * 10000
                * 20;

        assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(MAX_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(200000 + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(
            (200000 - 10) + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(MIN_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
      }
    }

    singleGroupByResult = combinedGroupByResult.get(AVG_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      if (keyString.equals("0")) {
        Serializable resultList = singleGroupByResult.get(keyString);

        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedAvgValue =
            ((((Double.parseDouble(keyString) + 200000 + Double.parseDouble(keyString)) * 20001)
                        / 2)
                    * 20)
                / 400020;
        assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
      } else {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        double expectedAvgValue =
            ((((Double.parseDouble(keyString) + 200000) - 10) + Double.parseDouble(keyString))
                    * 10000
                    * 20)
                / 400000;
        assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
      }
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM0_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      Serializable resultList = singleGroupByResult.get(keyString);
      LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
      int expectedDistinctCount = 1;
      assertEquals(expectedDistinctCount, ((IntOpenHashSet) resultList).size());
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM1_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
      Serializable resultList = singleGroupByResult.get(keyString);
      LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
      int expectedDistinctCount = 10;
      assertEquals(expectedDistinctCount, ((IntOpenHashSet) resultList).size());
    }

    DefaultReduceService defaultReduceService = new DefaultReduceService();
    Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    BrokerResponseJSON brokerResponse =
        defaultReduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug(new JSONArray(brokerResponse.getAggregationResults()).toString());
    LOGGER.debug("Time used: {}", brokerResponse.getTimeUsedMs());
  }

  @Test
  public void testInterSegmentSelectionPlanMaker() throws JSONException {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = _brokerRequest.deepCopy();
    brokerRequest.setAggregationsInfo(null);
    brokerRequest.setAggregationsInfoIsSet(false);
    brokerRequest.setSelections(getSelectionQuery());
    brokerRequest.getSelections().setOffset(0);
    brokerRequest.getSelections().setSize(20);
    ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    brokerRequest = setFilterQuery(brokerRequest);
    globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    DataTable instanceResponse = globalPlan.getInstanceResponse();

    DefaultReduceService defaultReduceService = new DefaultReduceService();
    Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse);

    BrokerResponseJSON brokerResponse =
        defaultReduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug(brokerResponse.getSelectionResults().toString());
    LOGGER.debug("Time used: {}", brokerResponse.getTimeUsedMs());
    LOGGER.debug(brokerResponse.toString());
    JSONArray selectionResultsArray = brokerResponse.getSelectionResults().getJSONArray("results");
    for (int j = 0; j < selectionResultsArray.length(); ++j) {
      LOGGER.debug(selectionResultsArray.getJSONArray(j).toString());
      assertEquals(selectionResultsArray.getJSONArray(j).getInt(0), 1);
      assertEquals(selectionResultsArray.getJSONArray(j).getInt(1), 91);
      assertEquals(selectionResultsArray.getJSONArray(j).getInt(2) % 100, 91);
    }
  }

  @Test
  public void testInterSegmentSelectionNoOrderingPlanMaker() throws JSONException {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = _brokerRequest.deepCopy();
    brokerRequest.setAggregationsInfo(null);
    brokerRequest.setAggregationsInfoIsSet(false);
    brokerRequest.setSelections(getSelectionQuery());
    brokerRequest.getSelections().setSelectionSortSequence(null);
    brokerRequest.getSelections().setOffset(0);
    brokerRequest.getSelections().setSize(20);
    ExecutorService executorService =
        Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    brokerRequest = setFilterQuery(brokerRequest);
    globalPlan =
        instancePlanMaker.makeInterSegmentPlan(
            makeSegMgrList(_indexSegmentList), brokerRequest, executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    DataTable instanceResponse = globalPlan.getInstanceResponse();

    DefaultReduceService defaultReduceService = new DefaultReduceService();
    Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse);

    BrokerResponseJSON brokerResponse =
        defaultReduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug(brokerResponse.getSelectionResults().toString());
    LOGGER.debug("TimeUsedMs: {}", brokerResponse.getTimeUsedMs());
    LOGGER.debug(brokerResponse.toString());

    JSONArray selectionResultsArray = brokerResponse.getSelectionResults().getJSONArray("results");
    for (int j = 0; j < selectionResultsArray.length(); ++j) {
      assertEquals(selectionResultsArray.getJSONArray(j).getInt(0), 1);
    }
  }

  private static BrokerRequest getAggregationNoFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountDim0AggregationInfo());
    aggregationsInfo.add(getDistinctCountDim1AggregationInfo());
    brokerRequest.setAggregationsInfo(aggregationsInfo);
    return brokerRequest;
  }

  private static BrokerRequest getAggregationWithFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountDim0AggregationInfo());
    aggregationsInfo.add(getDistinctCountDim1AggregationInfo());
    brokerRequest.setAggregationsInfo(aggregationsInfo);
    brokerRequest = setFilterQuery(brokerRequest);
    return brokerRequest;
  }

  private static BrokerRequest getAggregationGroupByNoFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountDim0AggregationInfo());
    aggregationsInfo.add(getDistinctCountDim1AggregationInfo());
    brokerRequest.setAggregationsInfo(aggregationsInfo);
    brokerRequest.setGroupBy(getGroupBy());
    return brokerRequest;
  }

  private static BrokerRequest getAggregationGroupByWithFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(getCountAggregationInfo());
    aggregationsInfo.add(getSumAggregationInfo());
    aggregationsInfo.add(getMaxAggregationInfo());
    aggregationsInfo.add(getMinAggregationInfo());
    aggregationsInfo.add(getAvgAggregationInfo());
    aggregationsInfo.add(getDistinctCountDim0AggregationInfo());
    aggregationsInfo.add(getDistinctCountDim1AggregationInfo());
    brokerRequest.setAggregationsInfo(aggregationsInfo);
    brokerRequest.setGroupBy(getGroupBy());
    brokerRequest = setFilterQuery(brokerRequest);
    return brokerRequest;
  }

  private static BrokerRequest getSelectionNoFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    brokerRequest.setSelections(getSelectionQuery());
    return brokerRequest;
  }

  private static BrokerRequest getSelectionWithFilterBrokerRequest() {
    BrokerRequest brokerRequest = new BrokerRequest();
    brokerRequest.setSelections(getSelectionQuery());
    brokerRequest = setFilterQuery(brokerRequest);
    return brokerRequest;
  }

  private static BrokerRequest setFilterQuery(BrokerRequest brokerRequest) {
    FilterQueryTree filterQueryTree;
    String filterColumn = "dim0";
    String filterVal = "1";
    if (filterColumn.contains(",")) {
      String[] filterColumns = filterColumn.split(",");
      String[] filterValues = filterVal.split(",");
      List<FilterQueryTree> nested = new ArrayList<FilterQueryTree>();
      for (int i = 0; i < filterColumns.length; i++) {

        List<String> vals = new ArrayList<String>();
        vals.add(filterValues[i]);
        FilterQueryTree d =
            new FilterQueryTree(i + 1, filterColumns[i], vals, FilterOperator.EQUALITY, null);
        nested.add(d);
      }
      filterQueryTree = new FilterQueryTree(0, null, null, FilterOperator.AND, nested);
    } else {
      List<String> vals = new ArrayList<String>();
      vals.add(filterVal);
      filterQueryTree = new FilterQueryTree(0, filterColumn, vals, FilterOperator.EQUALITY, null);
    }
    RequestUtils.generateFilterFromTree(filterQueryTree, brokerRequest);
    return brokerRequest;
  }

  private static Selection getSelectionQuery() {
    Selection selection = new Selection();
    selection.setOffset(10);
    selection.setSize(10);
    List<String> selectionColumns = new ArrayList<String>();
    selectionColumns.add("dim0");
    selectionColumns.add("dim1");
    selectionColumns.add("met");
    selection.setSelectionColumns(selectionColumns);

    List<SelectionSort> selectionSortSequence = new ArrayList<SelectionSort>();
    SelectionSort selectionSort = new SelectionSort();
    selectionSort.setColumn("dim0");
    selectionSort.setIsAsc(false);
    selectionSortSequence.add(selectionSort);
    selectionSort = new SelectionSort();
    selectionSort.setColumn("dim1");
    selectionSort.setIsAsc(false);
    selectionSortSequence.add(selectionSort);

    selection.setSelectionSortSequence(selectionSortSequence);

    return selection;
  }

  private static AggregationInfo getCountAggregationInfo() {
    String type = "count";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getSumAggregationInfo() {
    String type = "sum";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getMaxAggregationInfo() {
    String type = "max";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getMinAggregationInfo() {
    String type = "min";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getAvgAggregationInfo() {
    String type = "avg";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getDistinctCountDim0AggregationInfo() {
    String type = "distinctCount";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "dim0");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static AggregationInfo getDistinctCountDim1AggregationInfo() {
    String type = "distinctCount";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "dim1");
    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private static GroupBy getGroupBy() {
    GroupBy groupBy = new GroupBy();
    List<String> columns = new ArrayList<String>();
    columns.add("dim0");
    groupBy.setColumns(columns);
    groupBy.setTopN(15);
    return groupBy;
  }

  public static void main(String[] args) throws FileNotFoundException {
    PrintWriter jsonFilePW = new PrintWriter("/tmp/simpleData200001.json");
    for (int i = 0; i < 200001; ++i) {
      String s = "{\"dim0\":" + (i % 10) + ",";
      s += "\"dim1\":" + (i % 100) + ",";
      s += "\"met\":" + i + "}";
      jsonFilePW.println(s);
    }
    jsonFilePW.close();
  }
}
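The main() generator above pins down the data shape these tests depend on: dim0 = i % 10, dim1 = i % 100, met = i. Assuming the 2000001-row Avro file follows the same pattern, the headline aggregates asserted in testInnerSegmentPlanMakerForAggregationNoFilter can be recomputed directly (a sketch, not project code):

  static void printExpectedAggregates() {
    final long n = 2000001L;              // rows; met runs 0 .. n - 1
    final double sum = (n - 1) * n / 2.0; // 2000001000000.0
    System.out.println("count=" + n + " sum=" + sum + " max=" + (n - 1)
        + " min=0 avg=" + (sum / n)       // avg = 1000000.0
        + " distinct(dim0)=10 distinct(dim1)=100");
  }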
Example #23
  @Test
  public void testJavaClassCondition()
      throws IOException, InstantiationException, IllegalAccessException {
    try (GraphContext context = factory.create(getDefaultPath())) {
      final String inputDir = "src/test/resources/org/jboss/windup/rules/java";

      final Path outputPath =
          Paths.get(
              FileUtils.getTempDirectory().toString(),
              "windup_" + RandomStringUtils.randomAlphanumeric(6));
      FileUtils.deleteDirectory(outputPath.toFile());
      Files.createDirectories(outputPath);

      // Fill the graph with test data.
      ProjectModel pm = context.getFramed().addVertex(null, ProjectModel.class);
      pm.setName("Main Project");

      // Create FileModel for $inputDir
      FileModel inputPathFrame = context.getFramed().addVertex(null, FileModel.class);
      inputPathFrame.setFilePath(inputDir);
      inputPathFrame.setProjectModel(pm);
      pm.addFileModel(inputPathFrame);

      // Set project.rootFileModel to inputPath
      pm.setRootFileModel(inputPathFrame);

      // Create FileModel for $inputDir/JavaHintsClassificationsTest.java
      FileModel fileModel = context.getFramed().addVertex(null, FileModel.class);
      fileModel.setFilePath(inputDir + "/JavaHintsClassificationsTest.java");
      fileModel.setProjectModel(pm);
      pm.addFileModel(fileModel);

      // Create FileModel for $inputDir/JavaClassTest.java
      fileModel = context.getFramed().addVertex(null, FileModel.class);
      fileModel.setFilePath(inputDir + "/JavaClassTest.java");
      fileModel.setProjectModel(pm);
      pm.addFileModel(fileModel);

      context.getGraph().getBaseGraph().commit();

      final WindupConfiguration processorConfig =
          new WindupConfiguration().setOutputDirectory(outputPath);
      processorConfig
          .setGraphContext(context)
          .setRuleProviderFilter(
              new RuleProviderWithDependenciesPredicate(TestJavaClassTestRuleProvider.class));
      processorConfig.setInputPath(Paths.get(inputDir));
      processorConfig.setOptionValue(ScanPackagesOption.NAME, Collections.singletonList(""));

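      // Run Windup, restricted to TestJavaClassTestRuleProvider and the rule
      // providers it depends on.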
      processor.execute(processorConfig);

      GraphService<JavaTypeReferenceModel> typeRefService =
          new GraphService<>(context, JavaTypeReferenceModel.class);
      Iterable<JavaTypeReferenceModel> typeReferences = typeRefService.findAll();
      Assert.assertTrue(typeReferences.iterator().hasNext());

      Assert.assertEquals(3, provider.getFirstRuleMatchCount());
      Assert.assertEquals(1, provider.getSecondRuleMatchCount());
    }
  }
Example #24
public class QueryExecutorTest {

  private static final String SMALL_AVRO_DATA = "data/simpleData200001.avro";
  private static final File INDEXES_DIR =
      new File(FileUtils.getTempDirectory(), "TestQueryExecutorList");

  private List<IndexSegment> _indexSegmentList = new ArrayList<IndexSegment>();

  private static ServerQueryExecutorV1Impl _queryExecutor;

  private static final Logger LOGGER = LoggerFactory.getLogger(QueryExecutorTest.class);
  public static final String PINOT_PROPERTIES = "pinot.properties";

  @BeforeClass
  public void setup() throws Exception {
    TableDataManagerProvider.setServerMetrics(new ServerMetrics(new MetricsRegistry()));

    File confDir = new File(QueryExecutorTest.class.getClassLoader().getResource("conf").toURI());
    setupSegmentList(2);
    // ServerBuilder serverBuilder = new ServerBuilder(confDir.getAbsolutePath());
    String configFilePath = confDir.getAbsolutePath();

    // build _serverConf
    PropertiesConfiguration serverConf = new PropertiesConfiguration();
    serverConf.setDelimiterParsingDisabled(false);
    serverConf.load(new File(configFilePath, PINOT_PROPERTIES));

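    // Initialize and start the file-based instance data manager from the
    // "pinot.server.instance" section of the server configuration.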
    FileBasedInstanceDataManager instanceDataManager =
        FileBasedInstanceDataManager.getInstanceDataManager();
    instanceDataManager.init(
        new FileBasedInstanceDataManagerConfig(serverConf.subset("pinot.server.instance")));
    instanceDataManager.start();

    for (int i = 0; i < 2; ++i) {
      instanceDataManager.getTableDataManager("midas").addSegment(_indexSegmentList.get(i));
    }
    _queryExecutor = new ServerQueryExecutorV1Impl();
    _queryExecutor.init(
        serverConf.subset("pinot.server.query.executor"),
        instanceDataManager,
        new ServerMetrics(new MetricsRegistry()));
  }

  @AfterClass
  public void tearDown() {
    // Destroy the mmap'd segments before removing their backing files.
    for (IndexSegment segment : _indexSegmentList) {
      segment.destroy();
    }
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
  }

  private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource(SMALL_AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
      FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

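    // Build each segment from the Avro file and load it memory-mapped.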
    for (int i = 0; i < numberOfSegments; ++i) {
      final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

      final SegmentGeneratorConfig config =
          SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
              new File(filePath), segmentDir, "dim" + i, TimeUnit.DAYS, "midas");
      config.setSegmentNamePostfix(String.valueOf(i));
      final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
      driver.init(config);
      driver.build();

      // segmentDir already points at INDEXES_DIR/segment_<i>; the single
      // subdirectory inside it is the generated segment.
      String segmentName = segmentDir.list()[0];
      _indexSegmentList.add(
          ColumnarSegmentLoader.load(new File(segmentDir, segmentName), ReadMode.mmap));

      LOGGER.info("Built segment at: " + segmentDir.getAbsolutePath());
    }
  }

  @Test
  public void testCountQuery() {

    BrokerRequest brokerRequest = getCountQuery();

    QuerySource querySource = new QuerySource();
    querySource.setTableName("midas");
    brokerRequest.setQuerySource(querySource);
    InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    for (IndexSegment segment : _indexSegmentList) {
      instanceRequest.getSearchSegments().add(segment.getSegmentName());
    }

    try {
      DataTable instanceResponse = _queryExecutor.processQuery(instanceRequest);
      LOGGER.info("InstanceResponse is " + instanceResponse.getLong(0, 0));
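      // Two segments x 200001 rows each => expected count of 400002.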
      Assert.assertEquals(instanceResponse.getLong(0, 0), 400002L);
      LOGGER.info(
          "Time used for instanceResponse is " + instanceResponse.getMetadata().get("timeUsedMs"));
    } catch (Exception e) {
      // Should never happen; fail the test with the underlying cause.
      LOGGER.error("Query execution failed", e);
      Assert.fail("Query execution should not throw: " + e.getMessage());
    }
  }

  @Test
  public void testSumQuery() {
    BrokerRequest brokerRequest = getSumQuery();

    QuerySource querySource = new QuerySource();
    querySource.setTableName("midas");
    brokerRequest.setQuerySource(querySource);
    InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    for (IndexSegment segment : _indexSegmentList) {
      instanceRequest.getSearchSegments().add(segment.getSegmentName());
    }
    try {
      DataTable instanceResponse = _queryExecutor.processQuery(instanceRequest);
      LOGGER.info("InstanceResponse is " + instanceResponse.getDouble(0, 0));
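      // Each segment holds met = 0..200000, summing to 200000 * 200001 / 2 =
      // 20000100000; two segments double that to 40000200000.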
      Assert.assertEquals(instanceResponse.getDouble(0, 0), 40000200000.0);
      LOGGER.info(
          "Time used for instanceResponse is " + instanceResponse.getMetadata().get("timeUsedMs"));
    } catch (Exception e) {
      // Should never happen; fail the test with the underlying cause.
      LOGGER.error("Query execution failed", e);
      Assert.fail("Query execution should not throw: " + e.getMessage());
    }
  }

  @Test
  public void testMaxQuery() {

    BrokerRequest brokerRequest = getMaxQuery();

    QuerySource querySource = new QuerySource();
    querySource.setTableName("midas");
    brokerRequest.setQuerySource(querySource);
    InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    for (IndexSegment segment : _indexSegmentList) {
      instanceRequest.getSearchSegments().add(segment.getSegmentName());
    }
    try {
      DataTable instanceResponse = _queryExecutor.processQuery(instanceRequest);
      LOGGER.info("InstanceResponse is " + instanceResponse.getDouble(0, 0));
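      // met ranges over 0..200000 in each segment, so the maximum is 200000.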
      Assert.assertEquals(instanceResponse.getDouble(0, 0), 200000.0);

      LOGGER.info(
          "Time used for instanceResponse is " + instanceResponse.getMetadata().get("timeUsedMs"));
    } catch (Exception e) {
      // Should never happen; fail the test with the underlying cause.
      LOGGER.error("Query execution failed", e);
      Assert.fail("Query execution should not throw: " + e.getMessage());
    }
  }

  @Test
  public void testMinQuery() {
    BrokerRequest brokerRequest = getMinQuery();

    QuerySource querySource = new QuerySource();
    querySource.setTableName("midas");
    brokerRequest.setQuerySource(querySource);
    InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    for (IndexSegment segment : _indexSegmentList) {
      instanceRequest.getSearchSegments().add(segment.getSegmentName());
    }
    try {
      DataTable instanceResponse = _queryExecutor.processQuery(instanceRequest);
      LOGGER.info("InstanceResponse is " + instanceResponse.getDouble(0, 0));
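      // met starts at 0, so the minimum is 0.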
      Assert.assertEquals(instanceResponse.getDouble(0, 0), 0.0);
      LOGGER.info(
          "Time used for instanceResponse is " + instanceResponse.getMetadata().get("timeUsedMs"));
    } catch (Exception e) {
      // Should never happen; fail the test with the underlying cause.
      LOGGER.error("Query execution failed", e);
      Assert.fail("Query execution should not throw: " + e.getMessage());
    }
  }
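
  // The four tests above assemble an InstanceRequest identically. A helper
  // like this (hypothetical; not part of the original class) could remove
  // the duplication:
  private InstanceRequest buildInstanceRequest(BrokerRequest brokerRequest) {
    QuerySource querySource = new QuerySource();
    querySource.setTableName("midas");
    brokerRequest.setQuerySource(querySource);
    InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    for (IndexSegment segment : _indexSegmentList) {
      instanceRequest.getSearchSegments().add(segment.getSegmentName());
    }
    return instanceRequest;
  }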

  private BrokerRequest getCountQuery() {
    BrokerRequest query = new BrokerRequest();
    AggregationInfo aggregationInfo = getCountAggregationInfo();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(aggregationInfo);
    query.setAggregationsInfo(aggregationsInfo);
    FilterQuery filterQuery = getFilterQuery();
    query.setFilterQuery(filterQuery);
    return query;
  }

  private BrokerRequest getSumQuery() {
    BrokerRequest query = new BrokerRequest();
    AggregationInfo aggregationInfo = getSumAggregationInfo();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(aggregationInfo);
    query.setAggregationsInfo(aggregationsInfo);
    FilterQuery filterQuery = getFilterQuery();
    query.setFilterQuery(filterQuery);
    return query;
  }

  private BrokerRequest getMaxQuery() {
    BrokerRequest query = new BrokerRequest();
    AggregationInfo aggregationInfo = getMaxAggregationInfo();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(aggregationInfo);
    query.setAggregationsInfo(aggregationsInfo);
    FilterQuery filterQuery = getFilterQuery();
    query.setFilterQuery(filterQuery);
    return query;
  }

  private BrokerRequest getMinQuery() {
    BrokerRequest query = new BrokerRequest();
    AggregationInfo aggregationInfo = getMinAggregationInfo();
    List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
    aggregationsInfo.add(aggregationInfo);
    query.setAggregationsInfo(aggregationsInfo);
    FilterQuery filterQuery = getFilterQuery();
    query.setFilterQuery(filterQuery);
    return query;
  }

  private FilterQuery getFilterQuery() {
    // These tests run unfiltered; returning null leaves the BrokerRequest
    // without a filter clause.
    return null;
  }

  private AggregationInfo getCountAggregationInfo() {
    String type = "count";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");

    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private AggregationInfo getSumAggregationInfo() {
    String type = "sum";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");

    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private AggregationInfo getMaxAggregationInfo() {
    String type = "max";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");

    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }

  private AggregationInfo getMinAggregationInfo() {
    String type = "min";
    Map<String, String> params = new HashMap<String, String>();
    params.put("column", "met");

    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(type);
    aggregationInfo.setAggregationParams(params);
    return aggregationInfo;
  }
}