  /**
   * @brief Tests an ordinary Spark SQL query: creates the airline table, loads
   *     the sample CSV, and builds a DDF from a select over it.
   * @throws DDFException
   */
  @Test
  public void testLoading() throws DDFException {
    SQLDataSourceDescriptor sqlDataSourceDescriptor =
        new SQLDataSourceDescriptor(null, "SparkSQL", null, null, null);
    manager.sql("drop table if exists airline", sqlDataSourceDescriptor);

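    // Recreate the airline table with the full flight-record schema.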
    manager.sql(
        "create table airline (Year int,Month int,DayofMonth int,"
            + "DayOfWeek int,DepTime int,CRSDepTime int,ArrTime int,"
            + "CRSArrTime int,UniqueCarrier string, FlightNum int, "
            + "TailNum string, ActualElapsedTime int, CRSElapsedTime int, "
            + "AirTime int, ArrDelay int, DepDelay int, Origin string, "
            + "Dest string, Distance int, TaxiIn int, TaxiOut int, Cancelled int, "
            + "CancellationCode string, Diverted string, CarrierDelay int, "
            + "WeatherDelay int, NASDelay int, SecurityDelay int, LateAircraftDelay int ) "
            + "ROW FORMAT DELIMITED FIELDS TERMINATED BY ','",
        sqlDataSourceDescriptor);

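    // Load the sample airline CSV into the table.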
    manager.sql(
        "load data local inpath '../resources/test/airline.csv' into table airline",
        sqlDataSourceDescriptor);

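    // Materialize a DDF from a projection over the airline table.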
    DDF ddf =
        manager.sql2ddf(
            "select year, month, dayofweek, deptime, arrtime,origin, distance, arrdelay, "
                + "depdelay, carrierdelay, weatherdelay, nasdelay, securitydelay, lateaircraftdelay from airline",
            sqlDataSourceDescriptor);
    manager.setDDFName(ddf, "airlineDDF");
    // DDF sql2ddfRet = manager.sql2ddf("select * from ddf://adatao/airlineDDF");
  }
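
  /**
   * @brief Sets up the shared Spark DDFManager, registers two test DDFs, and
   *     initializes the SQL parser used by the tests.
   * @throws Exception
   */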
  @BeforeClass
  public static void startServer() throws Exception {
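    // Brief pause before bringing up the engine, presumably to let any
    // previously running instance finish shutting down.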
    Thread.sleep(1000);
    // LOG = LoggerFactory.getLogger(BaseTest.class);
    /*
    manager = DDFManager.get("jdbc", new JDBCDataSourceDescriptor(
        new DataSourceURI("jdbc:mysql://localhost/testdb"),
        new JDBCDataSourceDescriptor.JDBCDataSourceCredentials("pauser", "papwd"),
        null));
    DataSourceDescriptor ds = manager.getDataSourceDescriptor();
    if (ds instanceof JDBCDataSourceDescriptor) {
        System.out.println("hello");
    }
    DDF ret = manager.sql2ddf("select * from testtable", "jdbc");
    */
    // Add 2 test ddfs.
    manager = DDFManager.get("spark");
    manager.setEngineName("spark");
    Schema schema = new Schema("tablename1", "d  d,d  d");
    DDF ddf =
        manager.newDDF(manager, new Class<?>[] {DDFManager.class}, "spark", "adatao", "a", schema);
    Schema schema2 = new Schema("tablename2", "d  d,d  d");
    DDF ddf2 =
        manager.newDDF(manager, new Class<?>[] {DDFManager.class}, "spark", "adatao", "b", schema2);

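    // JSqlParser manager shared by the tests for parsing SQL statements.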
    parser = new CCJSqlParserManager();
  }
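
  /**
   * @brief Computes the residuals of this model's predictions by mapping each
   *     prediction row through MetricsMapperResiduals and wrapping the result
   *     in a new single-column DDF.
   * @throws DDFException
   */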
  @Override
  public DDF residuals() throws DDFException {
    SparkDDF predictionDDF = (SparkDDF) this.getDDF();
    JavaRDD<double[]> predictionRDD = predictionDDF.getJavaRDD(double[].class);

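    // Map each prediction row to its residual.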
    JavaRDD<double[]> result = predictionRDD.map(new MetricsMapperResiduals());

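    // Defensive diagnostics: log if any intermediate object is unexpectedly null.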
    if (result == null) mLog.error(">> javaRDD result of MetricMapper residuals is null");
    if (predictionDDF.getManager() == null) mLog.error(">> predictionDDF.getManager() is null");
    if (result.rdd() == null) mLog.error(">> result.rdd() is null");
    if (predictionDDF.getSchema() == null) mLog.error(">> predictionDDF.getSchema() is null");
    if (predictionDDF.getName() == null) mLog.error(">> predictionDDF.getName() is null");

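    // Wrap the mapped RDD in a new single-column DDF of residuals.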
    Schema schema = new Schema("residuals double");
    DDFManager manager = this.getDDF().getManager();
    DDF residualDDF =
        manager.newDDF(
            manager, result.rdd(), new Class<?>[] {RDD.class, double[].class}, null, schema);

    if (residualDDF == null) mLog.error(">> residualDDF is null");

    return residualDDF;
  }
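
  /**
   * @brief Shuts down the shared DDFManager after all tests have run.
   * @throws Exception
   */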
  @AfterClass
  public static void stopServer() throws Exception {
    manager.shutdown();
  }