private List<Tuple> getAllPartitions(Schema outSchema) {
    List<TablePartitionProto> partitionList = masterContext.getCatalog().getAllPartitions();
    List<Tuple> tuples = new ArrayList<Tuple>(partitionList.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (TablePartitionProto partition : partitionList) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);

        if ("partition_id".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(partition.getPartitionId()));
        } else if ("tid".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(partition.getTid()));
        } else if ("partition_name".equalsIgnoreCase(column.getSimpleName())) {
          if (partition.hasPartitionName()) {
            aTuple.put(fieldId, DatumFactory.createText(partition.getPartitionName()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        } else if ("path".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(partition.getPath()));
        }
      }

      tuples.add(aTuple);
    }

    return tuples;
  }
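  // Note: getAllPartitions() and the other catalog scanner methods in this collection
  // (getTablespaces, getTables, getAllTableOptions, getDatabases, getColumns, getIndexes) follow
  // the same pattern: walk the output schema's columns by ordinal, fill each recognized column
  // from the catalog proto, and fall back to a NULL datum when the proto does not carry the value.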
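  // tuple1 and tuple2 are initialized below with identical values covering most datum types;
  // presumably the surrounding test compares them (e.g. equality or tuple comparison), though the
  // assertions are not shown in this excerpt.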
  @Before
  public void setUp() throws Exception {
    tuple1 =
        new VTuple(
            new Datum[] {
              DatumFactory.createBool(true),
              DatumFactory.createBit((byte) 0x99),
              DatumFactory.createChar('9'),
              DatumFactory.createInt2((short) 17),
              DatumFactory.createInt4(59),
              DatumFactory.createInt8(23L),
              DatumFactory.createFloat4(77.9f),
              DatumFactory.createFloat8(271.9d),
              DatumFactory.createText("hyunsik"),
              DatumFactory.createBlob("hyunsik".getBytes()),
              DatumFactory.createInet4("192.168.0.1")
            });

    tuple2 =
        new VTuple(
            new Datum[] {
              DatumFactory.createBool(true),
              DatumFactory.createBit((byte) 0x99),
              DatumFactory.createChar('9'),
              DatumFactory.createInt2((short) 17),
              DatumFactory.createInt4(59),
              DatumFactory.createInt8(23L),
              DatumFactory.createFloat4(77.9f),
              DatumFactory.createFloat8(271.9d),
              DatumFactory.createText("hyunsik"),
              DatumFactory.createBlob("hyunsik".getBytes()),
              DatumFactory.createInet4("192.168.0.1")
            });
  }
  private List<Tuple> getTablespaces(Schema outSchema) {
    List<TablespaceProto> tablespaces = masterContext.getCatalog().getAllTablespaces();
    List<Tuple> tuples = new ArrayList<Tuple>(tablespaces.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (TablespaceProto tablespace : tablespaces) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);
        if ("space_id".equalsIgnoreCase(column.getSimpleName())) {
          if (tablespace.hasId()) {
            aTuple.put(fieldId, DatumFactory.createInt4(tablespace.getId()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        } else if ("space_name".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(tablespace.getSpaceName()));
        } else if ("space_handler".equalsIgnoreCase(column.getSimpleName())) {
          if (tablespace.hasHandler()) {
            aTuple.put(fieldId, DatumFactory.createText(tablespace.getHandler()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        } else if ("space_uri".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(tablespace.getUri()));
        }
      }
      tuples.add(aTuple);
    }

    return tuples;
  }
  private List<Tuple> getTables(Schema outSchema) {
    List<TableDescriptorProto> tables = masterContext.getCatalog().getAllTables();
    List<Tuple> tuples = new ArrayList<Tuple>(tables.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (TableDescriptorProto table : tables) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);
        if ("tid".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(table.getTid()));
        } else if ("db_id".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(table.getDbId()));
        } else if ("table_name".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(table.getName()));
        } else if ("table_type".equalsIgnoreCase(column.getSimpleName())) {
          if (table.hasTableType()) {
            aTuple.put(fieldId, DatumFactory.createText(table.getTableType()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        } else if ("path".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(table.getPath()));
        } else if ("store_type".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(table.getStoreType()));
        }
      }

      tuples.add(aTuple);
    }

    return tuples;
  }
  private List<Tuple> getAllTableOptions(Schema outSchema) {
    List<TableOptionProto> optionList = masterContext.getCatalog().getAllTableOptions();
    List<Tuple> tuples = new ArrayList<Tuple>(optionList.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (TableOptionProto option : optionList) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);

        if ("tid".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(option.getTid()));
        } else if ("key_".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(option.getKeyval().getKey()));
        } else if ("value_".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(option.getKeyval().getValue()));
        }
      }

      tuples.add(aTuple);
    }

    return tuples;
  }
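  // eval() of a left-pad style text function: lpad(text, len [, fill]). If the input is already at
  // least `len` characters it is truncated to `len`; otherwise it is left-padded with the fill
  // string (a single space when no fill argument was given).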
  @Override
  public Datum eval(Tuple params) {

    if (params.isBlankOrNull(0) || params.isBlankOrNull(1)) {
      return NullDatum.get();
    }

    String fillText;
    if (hasFillCharacters) {
      fillText = params.getText(2);
    } else {
      fillText = " ";
    }

    String input = params.getText(0);
    int expected = params.getInt4(1);

    int templen = expected - input.length();

    if (templen <= 0) {
      // The input already meets or exceeds the requested length, so truncate it instead
      // (guarding against a negative target length).
      return DatumFactory.createText(input.substring(0, Math.max(expected, 0)));
    } else {
      return DatumFactory.createText(StringUtils.leftPad(input, expected, fillText));
    }
  }
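  // Builds a single row describing a worker for the cluster system table. Resource columns
  // (total_cpu, used_mem, ...) are only populated while the worker state is RUNNING; for any other
  // state those fields are set to NULL.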
  private Tuple getWorkerTuple(Schema outSchema, Worker aWorker) {
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple = new VTuple(outSchema.size());
    WorkerResource aResource = aWorker.getResource();

    for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
      Column column = columns.get(fieldId);

      if ("host".equalsIgnoreCase(column.getSimpleName())) {
        if (aWorker.getConnectionInfo() != null && aWorker.getConnectionInfo().getHost() != null) {
          aTuple.put(fieldId, DatumFactory.createText(aWorker.getConnectionInfo().getHost()));
        } else {
          aTuple.put(fieldId, DatumFactory.createNullDatum());
        }
      } else if ("port".equalsIgnoreCase(column.getSimpleName())) {
        if (aWorker.getConnectionInfo() != null) {
          aTuple.put(
              fieldId, DatumFactory.createInt4(aWorker.getConnectionInfo().getPeerRpcPort()));
        } else {
          aTuple.put(fieldId, DatumFactory.createNullDatum());
        }
      } else if ("type".equalsIgnoreCase(column.getSimpleName())) {
        aTuple.put(fieldId, DatumFactory.createText("Worker"));
      } else if ("status".equalsIgnoreCase(column.getSimpleName())) {
        aTuple.put(fieldId, DatumFactory.createText(aWorker.getState().toString()));
      } else if ("RUNNING".equalsIgnoreCase(aWorker.getState().toString())) {
        if ("total_cpu".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(aResource.getCpuCoreSlots()));
        } else if ("used_mem".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt8(aResource.getUsedMemoryMB() * 1048576L));
        } else if ("total_mem".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt8(aResource.getMemoryMB() * 1048576L));
        } else if ("free_heap".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt8(aResource.getFreeHeap()));
        } else if ("max_heap".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt8(aResource.getMaxHeap()));
        } else if ("used_diskslots".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createFloat4(aResource.getUsedDiskSlots()));
        } else if ("total_diskslots".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createFloat4(aResource.getDiskSlots()));
        } else if ("running_tasks".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(aResource.getNumRunningTasks()));
        } else if ("last_heartbeat_ts".equalsIgnoreCase(column.getSimpleName())) {
          if (aWorker.getLastHeartbeatTime() > 0) {
            aTuple.put(
                fieldId,
                DatumFactory.createTimestmpDatumWithJavaMillis(aWorker.getLastHeartbeatTime()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        }
      } else {
        aTuple.put(fieldId, DatumFactory.createNullDatum());
      }
    }

    return aTuple;
  }
 public static Datum cast(Datum operandDatum, DataType target, @Nullable TimeZone tz) {
   switch (target.getType()) {
     case BOOLEAN:
       return DatumFactory.createBool(operandDatum.asBool());
     case CHAR:
       return DatumFactory.createChar(operandDatum.asChar());
     case INT1:
     case INT2:
       return DatumFactory.createInt2(operandDatum.asInt2());
     case INT4:
       return DatumFactory.createInt4(operandDatum.asInt4());
     case INT8:
       return DatumFactory.createInt8(operandDatum.asInt8());
     case FLOAT4:
       return DatumFactory.createFloat4(operandDatum.asFloat4());
     case FLOAT8:
       return DatumFactory.createFloat8(operandDatum.asFloat8());
     case VARCHAR:
     case TEXT:
       switch (operandDatum.type()) {
         case TIMESTAMP:
           {
             TimestampDatum timestampDatum = (TimestampDatum) operandDatum;
             if (tz != null) {
               return DatumFactory.createText(
                   TimestampDatum.asChars(operandDatum.asTimeMeta(), tz, false));
             } else {
               return DatumFactory.createText(timestampDatum.asChars());
             }
           }
         default:
           return DatumFactory.createText(operandDatum.asTextBytes());
       }
     case DATE:
       return DatumFactory.createDate(operandDatum);
     case TIME:
       return DatumFactory.createTime(operandDatum);
     case TIMESTAMP:
       return DatumFactory.createTimestamp(operandDatum, tz);
     case BLOB:
       return DatumFactory.createBlob(operandDatum.asByteArray());
     case INET4:
       return DatumFactory.createInet4(operandDatum.asByteArray());
     case ANY:
       return DatumFactory.createAny(operandDatum);
     default:
       throw new TajoRuntimeException(
           new InvalidValueForCastException(operandDatum.type(), target.getType()));
   }
 }
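 // Usage sketch (illustrative, not from the original source): casting an INT8 datum to TEXT,
 // assuming CatalogUtil.newSimpleDataType(Type.TEXT) is available to build the target DataType and
 // no session time zone is required for the conversion:
 //   Datum casted = cast(DatumFactory.createInt8(42L), CatalogUtil.newSimpleDataType(Type.TEXT), null);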
  static {
    schema.addColumn("col1", Type.BOOLEAN);
    schema.addColumn("col2", Type.CHAR, 7);
    schema.addColumn("col3", Type.INT2);
    schema.addColumn("col4", Type.INT4);
    schema.addColumn("col5", Type.INT8);
    schema.addColumn("col6", Type.FLOAT4);
    schema.addColumn("col7", Type.FLOAT8);
    schema.addColumn("col8", Type.TEXT);
    schema.addColumn("col9", Type.BLOB);
    schema.addColumn("col10", Type.INET4);

    baseTuple =
        new VTuple(
            new Datum[] {
              DatumFactory.createBool(true), // 0
              DatumFactory.createChar("hyunsik"), // 1
              DatumFactory.createInt2((short) 17), // 2
              DatumFactory.createInt4(59), // 3
              DatumFactory.createInt8(23L), // 4
              DatumFactory.createFloat4(77.9f), // 5
              DatumFactory.createFloat8(271.9d), // 6
              DatumFactory.createText("hyunsik"), // 7
              DatumFactory.createBlob("hyunsik".getBytes()), // 8
              DatumFactory.createInet4("192.168.0.1"), // 9
            });
  }
  private List<Tuple> getDatabases(Schema outSchema) {
    List<DatabaseProto> databases = masterContext.getCatalog().getAllDatabases();
    List<Tuple> tuples = new ArrayList<Tuple>(databases.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (DatabaseProto database : databases) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);
        if ("db_id".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(database.getId()));
        } else if ("db_name".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(database.getName()));
        } else if ("space_id".equalsIgnoreCase(column.getSimpleName())) {
          if (database.hasSpaceId()) {
            aTuple.put(fieldId, DatumFactory.createInt4(database.getSpaceId()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        }
      }

      tuples.add(aTuple);
    }

    return tuples;
  }
  private List<Tuple> getColumns(Schema outSchema) {
    List<ColumnProto> columnsList = masterContext.getCatalog().getAllColumns();
    List<Tuple> tuples = new ArrayList<Tuple>(columnsList.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;
    int columnId = 1, prevtid = -1, tid = 0;

    for (ColumnProto column : columnsList) {
      aTuple = new VTuple(outSchema.size());

      tid = column.getTid();
      if (prevtid != tid) {
        columnId = 1;
        prevtid = tid;
      }

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column colObj = columns.get(fieldId);

        if ("tid".equalsIgnoreCase(colObj.getSimpleName())) {
          if (column.hasTid()) {
            aTuple.put(fieldId, DatumFactory.createInt4(tid));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        } else if ("column_name".equalsIgnoreCase(colObj.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(column.getName()));
        } else if ("ordinal_position".equalsIgnoreCase(colObj.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(columnId));
        } else if ("data_type".equalsIgnoreCase(colObj.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(column.getDataType().getType().toString()));
        } else if ("type_length".equalsIgnoreCase(colObj.getSimpleName())) {
          DataType dataType = column.getDataType();
          if (dataType.hasLength()) {
            aTuple.put(fieldId, DatumFactory.createInt4(dataType.getLength()));
          } else {
            aTuple.put(fieldId, DatumFactory.createNullDatum());
          }
        }
      }

      columnId++;
      tuples.add(aTuple);
    }

    return tuples;
  }
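  // Handles EXPLAIN [GLOBAL] <query>: renders either the distributed (global) plan or the logical
  // plan as text, encodes each line of that text as a one-column TEXT tuple, and returns the rows
  // to the client as an enclosed (serialized) result set.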
  public void execExplain(
      Session session,
      String query,
      LogicalPlan plan,
      QueryContext queryContext,
      boolean isGlobal,
      SubmitQueryResponse.Builder response)
      throws Exception {

    String explainStr;
    boolean isTest = queryContext.getBool(SessionVars.TEST_PLAN_SHAPE_FIX_ENABLED);
    if (isTest) {
      ExplainPlanPreprocessorForTest preprocessorForTest = new ExplainPlanPreprocessorForTest();
      preprocessorForTest.prepareTest(plan);
    }

    if (isGlobal) {
      GlobalPlanner planner = new GlobalPlanner(context.getConf(), context.getCatalog());
      MasterPlan masterPlan = compileMasterPlan(plan, queryContext, planner);
      if (isTest) {
        ExplainGlobalPlanPreprocessorForTest globalPlanPreprocessorForTest =
            new ExplainGlobalPlanPreprocessorForTest();
        globalPlanPreprocessorForTest.prepareTest(masterPlan);
      }
      explainStr = masterPlan.toString();
    } else {
      explainStr = PlannerUtil.buildExplainString(plan.getRootBlock().getRoot());
    }

    Schema schema = new Schema();
    schema.addColumn("explain", TajoDataTypes.Type.TEXT);
    RowStoreUtil.RowStoreEncoder encoder = RowStoreUtil.createEncoder(schema);

    SerializedResultSet.Builder serializedResBuilder = SerializedResultSet.newBuilder();

    VTuple tuple = new VTuple(1);
    String[] lines = explainStr.split("\n");
    int bytesNum = 0;
    for (String line : lines) {
      tuple.put(0, DatumFactory.createText(line));
      byte[] encodedData = encoder.toBytes(tuple);
      bytesNum += encodedData.length;
      serializedResBuilder.addSerializedTuples(ByteString.copyFrom(encodedData));
    }
    serializedResBuilder.setSchema(schema.getProto());
    serializedResBuilder.setBytesNum(bytesNum);

    QueryInfo queryInfo =
        context
            .getQueryJobManager()
            .createNewSimpleQuery(
                queryContext, session, query, (LogicalRootNode) plan.getRootBlock().getRoot());

    response.setState(OK);
    response.setQueryId(queryInfo.getQueryId().getProto());
    response.setResultType(ResultType.ENCLOSED);
    response.setResultSet(serializedResBuilder.build());
    response.setMaxRowNum(lines.length);
  }
  private List<Tuple> getSessionInfo(Schema outSchema) {
    List<Tuple> outputs = Lists.newArrayList();
    Tuple eachVariable;

    try {
      for (Map.Entry<String, String> var :
          masterContext.getSessionManager().getAllVariables(sessionId).entrySet()) {
        eachVariable = new VTuple(outSchema.size());
        eachVariable.put(0, DatumFactory.createText(var.getKey()));
        eachVariable.put(1, DatumFactory.createText(var.getValue()));

        outputs.add(eachVariable);
      }
    } catch (InvalidSessionException e) {
      LOG.error(e);
    }

    return outputs;
  }
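  // eval() of a chr()-style function: maps an INT4 code point to a one-character TEXT value,
  // returning NULL for NULL input or for code points outside the valid UTF-16 range.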
  @Override
  public Datum eval(Tuple params) {
    if (params.isBlankOrNull(0)) {
      return NullDatum.get();
    }

    int value = params.getInt4(0);
    if (value <= 0 || value > 65535) { // valid UTF-16 code unit range is 1..65535
      return NullDatum.get();
    } else {
      return DatumFactory.createText(String.valueOf((char) value));
    }
  }
  private List<Tuple> getIndexes(Schema outSchema) {
    List<IndexProto> indexList = masterContext.getCatalog().getAllIndexes();
    List<Tuple> tuples = new ArrayList<Tuple>(indexList.size());
    List<Column> columns = outSchema.getRootColumns();
    Tuple aTuple;

    for (IndexProto index : indexList) {
      aTuple = new VTuple(outSchema.size());

      for (int fieldId = 0; fieldId < columns.size(); fieldId++) {
        Column column = columns.get(fieldId);

        if ("db_id".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(index.getDbId()));
        } else if ("tid".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createInt4(index.getTId()));
        } else if ("index_name".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(index.getIndexName()));
        } else if ("column_name".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(index.getColumnName()));
        } else if ("data_type".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(index.getDataType()));
        } else if ("index_type".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createText(index.getIndexType()));
        } else if ("is_unique".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createBool(index.getIsUnique()));
        } else if ("is_clustered".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createBool(index.getIsClustered()));
        } else if ("is_ascending".equalsIgnoreCase(column.getSimpleName())) {
          aTuple.put(fieldId, DatumFactory.createBool(index.getIsAscending()));
        }
      }

      tuples.add(aTuple);
    }

    return tuples;
  }
  @Before
  public void setUp() throws Exception {
    this.conf = new TajoConf();
    util = new TajoTestingCluster();
    catalog = util.startCatalogCluster().getCatalog();
    testDir = CommonTestingUtil.getTestDir(TEST_PATH);
    conf.setVar(TajoConf.ConfVars.WORKER_TEMPORAL_DIR, testDir.toString());
    sm = StorageManagerFactory.getStorageManager(conf, testDir);

    Schema schema = new Schema();
    schema.addColumn("managerId", Type.INT4);
    schema.addColumn("empId", Type.INT4);
    schema.addColumn("deptName", Type.TEXT);

    TableMeta employeeMeta = CatalogUtil.newTableMeta(StoreType.CSV);
    Path employeePath = new Path(testDir, "employee.csv");
    Appender appender =
        StorageManagerFactory.getStorageManager(conf)
            .getAppender(employeeMeta, schema, employeePath);
    appender.enableStats();
    appender.init();
    Tuple tuple = new VTuple(schema.getColumnNum());
    for (int i = 0; i < numTuple; i++) {
      tuple.put(
          new Datum[] {
            DatumFactory.createInt4(rnd.nextInt(50)),
            DatumFactory.createInt4(rnd.nextInt(100)),
            DatumFactory.createText("dept_" + i),
          });
      appender.addTuple(tuple);
    }
    appender.flush();
    appender.close();

    System.out.println(
        appender.getStats().getNumRows()
            + " rows ("
            + (appender.getStats().getNumBytes() / 1048576)
            + " MB)");

    employee = new TableDesc("employee", schema, employeeMeta, employeePath);
    catalog.addTable(employee);
    analyzer = new SQLAnalyzer();
    planner = new LogicalPlanner(catalog);
  }
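 // Converts the i-th key slot back into a Datum, dispatching on the recorded key type; a null slot
 // or a NULL_TYPE key yields NullDatum.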
 public Datum toDatum(int i) {
   if (keys[i] == null) {
     return NullDatum.get();
   }
   switch (keyTypes[i]) {
     case NULL_TYPE:
       return NullDatum.get();
     case BOOLEAN:
       return DatumFactory.createBool((Boolean) keys[i]);
     case BIT:
       return DatumFactory.createBit((Byte) keys[i]);
     case INT1:
     case INT2:
       return DatumFactory.createInt2((Short) keys[i]);
     case INT4:
       return DatumFactory.createInt4((Integer) keys[i]);
     case DATE:
       return DatumFactory.createDate((Integer) keys[i]);
     case INT8:
       return DatumFactory.createInt8((Long) keys[i]);
     case TIME:
       return DatumFactory.createTime((Long) keys[i]);
     case TIMESTAMP:
       return DatumFactory.createTimestamp((Long) keys[i]);
     case FLOAT4:
       return DatumFactory.createFloat4((Float) keys[i]);
     case FLOAT8:
       return DatumFactory.createFloat8((Double) keys[i]);
     case TEXT:
       return DatumFactory.createText((byte[]) keys[i]);
     case CHAR:
       return DatumFactory.createChar((byte[]) keys[i]);
     case BLOB:
       return DatumFactory.createBlob((byte[]) keys[i]);
     case DATUM:
       return (Datum) keys[i];
     default:
       throw new IllegalArgumentException();
   }
 }
  @Before
  public void setUp() throws Exception {
    util = new TajoTestingCluster();
    util.initTestDir();
    util.startCatalogCluster();
    catalog = util.getCatalogService();
    testDir = CommonTestingUtil.getTestDir(TEST_PATH);
    catalog.createTablespace(DEFAULT_TABLESPACE_NAME, testDir.toUri().toString());
    catalog.createDatabase(DEFAULT_DATABASE_NAME, DEFAULT_TABLESPACE_NAME);
    conf = util.getConfiguration();

    // ----------------- dep3 ------------------------------
    // dep_id | dep_name  | loc_id
    // --------------------------------
    //  0     | dep_0     | 1000
    //  1     | dep_1     | 1001
    //  2     | dep_2     | 1002
    //  3     | dep_3     | 1003
    //  4     | dep_4     | 1004
    //  5     | dep_5     | 1005
    //  6     | dep_6     | 1006
    //  7     | dep_7     | 1007
    //  8     | dep_8     | 1008
    //  9     | dep_9     | 1009
    Schema dep3Schema = new Schema();
    dep3Schema.addColumn("dep_id", Type.INT4);
    dep3Schema.addColumn("dep_name", Type.TEXT);
    dep3Schema.addColumn("loc_id", Type.INT4);

    TableMeta dep3Meta = CatalogUtil.newTableMeta("TEXT");
    Path dep3Path = new Path(testDir, "dep3.csv");
    Appender appender1 =
        ((FileTablespace) TablespaceManager.getLocalFs())
            .getAppender(dep3Meta, dep3Schema, dep3Path);
    appender1.init();
    VTuple tuple = new VTuple(dep3Schema.size());
    for (int i = 0; i < 10; i++) {
      tuple.put(
          new Datum[] {
            DatumFactory.createInt4(i),
            DatumFactory.createText("dept_" + i),
            DatumFactory.createInt4(1000 + i)
          });
      appender1.addTuple(tuple);
    }

    appender1.flush();
    appender1.close();
    dep3 = CatalogUtil.newTableDesc(DEP3_NAME, dep3Schema, dep3Meta, dep3Path);
    catalog.createTable(dep3);

    // ----------------- dep4 ------------------------------
    // dep_id | dep_name  | loc_id
    // --------------------------------
    //  0     | dep_0     | 1000
    //  1     | dep_1     | 1001
    //  2     | dep_2     | 1002
    //  3     | dep_3     | 1003
    //  4     | dep_4     | 1004
    //  5     | dep_5     | 1005
    //  6     | dep_6     | 1006
    //  7     | dep_7     | 1007
    //  8     | dep_8     | 1008
    //  9     | dep_9     | 1009
    // 10     | dep_10    | 1010
    Schema dep4Schema = new Schema();
    dep4Schema.addColumn("dep_id", Type.INT4);
    dep4Schema.addColumn("dep_name", Type.TEXT);
    dep4Schema.addColumn("loc_id", Type.INT4);

    TableMeta dep4Meta = CatalogUtil.newTableMeta("TEXT");
    Path dep4Path = new Path(testDir, "dep4.csv");
    Appender appender4 =
        ((FileTablespace) TablespaceManager.getLocalFs())
            .getAppender(dep4Meta, dep4Schema, dep4Path);
    appender4.init();
    VTuple tuple4 = new VTuple(dep4Schema.size());
    for (int i = 0; i < 11; i++) {
      tuple4.put(
          new Datum[] {
            DatumFactory.createInt4(i),
            DatumFactory.createText("dept_" + i),
            DatumFactory.createInt4(1000 + i)
          });
      appender4.addTuple(tuple4);
    }

    appender4.flush();
    appender4.close();
    dep4 = CatalogUtil.newTableDesc(DEP4_NAME, dep4Schema, dep4Meta, dep4Path);
    catalog.createTable(dep4);

    // ----------------- job3 ------------------------------
    //  job_id  | job_title
    // ----------------------
    //   101    |  job_101
    //   102    |  job_102
    //   103    |  job_103

    Schema job3Schema = new Schema();
    job3Schema.addColumn("job_id", Type.INT4);
    job3Schema.addColumn("job_title", Type.TEXT);

    TableMeta job3Meta = CatalogUtil.newTableMeta("TEXT");
    Path job3Path = new Path(testDir, "job3.csv");
    Appender appender2 =
        ((FileTablespace) TablespaceManager.getLocalFs())
            .getAppender(job3Meta, job3Schema, job3Path);
    appender2.init();
    VTuple tuple2 = new VTuple(job3Schema.size());
    for (int i = 1; i < 4; i++) {
      int x = 100 + i;
      tuple2.put(
          new Datum[] {DatumFactory.createInt4(100 + i), DatumFactory.createText("job_" + x)});
      appender2.addTuple(tuple2);
    }

    appender2.flush();
    appender2.close();
    job3 = CatalogUtil.newTableDesc(JOB3_NAME, job3Schema, job3Meta, job3Path);
    catalog.createTable(job3);

    // ---------------------emp3 --------------------
    // emp_id  | first_name | last_name | dep_id | salary | job_id
    // ------------------------------------------------------------
    //  11     |  fn_11     |  ln_11    |  1     | 123    | 101
    //  13     |  fn_13     |  ln_13    |  3     | 369    | 103
    //  15     |  fn_15     |  ln_15    |  5     | 615    | null
    //  17     |  fn_17     |  ln_17    |  7     | 861    | null
    //  19     |  fn_19     |  ln_19    |  9     | 1107   | null
    //  21     |  fn_21     |  ln_21    |  1     | 123    | 101
    //  23     |  fn_23     |  ln_23    |  3     | 369    | 103

    Schema emp3Schema = new Schema();
    emp3Schema.addColumn("emp_id", Type.INT4);
    emp3Schema.addColumn("first_name", Type.TEXT);
    emp3Schema.addColumn("last_name", Type.TEXT);
    emp3Schema.addColumn("dep_id", Type.INT4);
    emp3Schema.addColumn("salary", Type.FLOAT4);
    emp3Schema.addColumn("job_id", Type.INT4);

    TableMeta emp3Meta = CatalogUtil.newTableMeta("TEXT");
    Path emp3Path = new Path(testDir, "emp3.csv");
    Appender appender3 =
        ((FileTablespace) TablespaceManager.getLocalFs())
            .getAppender(emp3Meta, emp3Schema, emp3Path);
    appender3.init();
    VTuple tuple3 = new VTuple(emp3Schema.size());

    for (int i = 1; i < 4; i += 2) {
      int x = 10 + i;
      tuple3.put(
          new Datum[] {
            DatumFactory.createInt4(10 + i),
            DatumFactory.createText("firstname_" + x),
            DatumFactory.createText("lastname_" + x),
            DatumFactory.createInt4(i),
            DatumFactory.createFloat4(123 * i),
            DatumFactory.createInt4(100 + i)
          });
      appender3.addTuple(tuple3);

      int y = 20 + i;
      tuple3.put(
          new Datum[] {
            DatumFactory.createInt4(20 + i),
            DatumFactory.createText("firstname_" + y),
            DatumFactory.createText("lastname_" + y),
            DatumFactory.createInt4(i),
            DatumFactory.createFloat4(123 * i),
            DatumFactory.createInt4(100 + i)
          });
      appender3.addTuple(tuple3);
    }

    for (int i = 5; i < 10; i += 2) {
      int x = 10 + i;
      tuple3.put(
          new Datum[] {
            DatumFactory.createInt4(10 + i),
            DatumFactory.createText("firstname_" + x),
            DatumFactory.createText("lastname_" + x),
            DatumFactory.createInt4(i),
            DatumFactory.createFloat4(123 * i),
            DatumFactory.createNullDatum()
          });
      appender3.addTuple(tuple3);
    }

    appender3.flush();
    appender3.close();
    emp3 = CatalogUtil.newTableDesc(EMP3_NAME, emp3Schema, emp3Meta, emp3Path);
    catalog.createTable(emp3);

    // ---------------------phone3 --------------------
    // emp_id  | phone_number
    // -----------------------------------------------
    // this table is empty, no rows

    Schema phone3Schema = new Schema();
    phone3Schema.addColumn("emp_id", Type.INT4);
    phone3Schema.addColumn("phone_number", Type.TEXT);

    TableMeta phone3Meta = CatalogUtil.newTableMeta("TEXT");
    Path phone3Path = new Path(testDir, "phone3.csv");
    Appender appender5 =
        ((FileTablespace) TablespaceManager.getLocalFs())
            .getAppender(phone3Meta, phone3Schema, phone3Path);
    appender5.init();

    appender5.flush();
    appender5.close();
    phone3 = CatalogUtil.newTableDesc(PHONE3_NAME, phone3Schema, phone3Meta, phone3Path);
    catalog.createTable(phone3);

    analyzer = new SQLAnalyzer();
    planner = new LogicalPlanner(catalog, TablespaceManager.getInstance());

    defaultContext = LocalTajoTestingUtility.createDummyContext(conf);
  }
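 // Converter callback, presumably from a Parquet column reader given the Binary parameter type:
 // the value's bytes are appended to the parent converter as a TEXT datum.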
 @Override
 public final void addBinary(Binary value) {
   parent.add(DatumFactory.createText(value.getBytes()));
 }
  @Before
  public void setup() throws Exception {
    this.randomValues = new HashMap<Integer, Integer>();
    this.conf = new TajoConf();
    util = new TajoTestingCluster();
    util.startCatalogCluster();
    catalog = util.getMiniCatalogCluster().getCatalog();

    Path workDir = CommonTestingUtil.getTestDir();
    catalog.createTablespace(DEFAULT_TABLESPACE_NAME, workDir.toUri().toString());
    catalog.createDatabase(TajoConstants.DEFAULT_DATABASE_NAME, DEFAULT_TABLESPACE_NAME);
    sm = StorageManagerFactory.getStorageManager(conf, workDir);

    idxPath = new Path(workDir, "test.idx");

    Schema schema = new Schema();
    schema.addColumn("managerid", Type.INT4);
    schema.addColumn("empid", Type.INT4);
    schema.addColumn("deptname", Type.TEXT);

    this.idxSchema = new Schema();
    idxSchema.addColumn("managerid", Type.INT4);
    SortSpec[] sortKeys = new SortSpec[1];
    sortKeys[0] = new SortSpec(idxSchema.getColumn("managerid"), true, false);
    this.comp = new TupleComparator(idxSchema, sortKeys);

    this.writer =
        new BSTIndex(conf)
            .getIndexWriter(idxPath, BSTIndex.TWO_LEVEL_INDEX, this.idxSchema, this.comp);
    writer.setLoadNum(100);
    writer.open();
    long offset;

    meta = CatalogUtil.newTableMeta(StoreType.CSV);
    tablePath = StorageUtil.concatPath(workDir, "employee", "table.csv");
    fs = tablePath.getFileSystem(conf);
    fs.mkdirs(tablePath.getParent());

    FileAppender appender =
        (FileAppender)
            StorageManagerFactory.getStorageManager(conf).getAppender(meta, schema, tablePath);
    appender.init();
    Tuple tuple = new VTuple(schema.size());
    for (int i = 0; i < 10000; i++) {

      Tuple key = new VTuple(this.idxSchema.size());
      int rndKey = rnd.nextInt(250);
      if (this.randomValues.containsKey(rndKey)) {
        int t = this.randomValues.remove(rndKey) + 1;
        this.randomValues.put(rndKey, t);
      } else {
        this.randomValues.put(rndKey, 1);
      }

      key.put(new Datum[] {DatumFactory.createInt4(rndKey)});
      tuple.put(
          new Datum[] {
            DatumFactory.createInt4(rndKey),
            DatumFactory.createInt4(rnd.nextInt(10)),
            DatumFactory.createText("dept_" + rnd.nextInt(10))
          });
      offset = appender.getOffset();
      appender.addTuple(tuple);
      writer.write(key, offset);
    }
    appender.flush();
    appender.close();
    writer.close();

    TableDesc desc =
        new TableDesc(
            CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "employee"),
            schema,
            meta,
            sm.getTablePath("employee"));
    catalog.createTable(desc);

    analyzer = new SQLAnalyzer();
    planner = new LogicalPlanner(catalog);
    optimizer = new LogicalOptimizer(conf);
  }