Example #1
  /**
   * Builds the projection for the scan
   *
   * @param context query context kept between compilation of different query clauses
   * @param statement the SELECT statement whose projection is being compiled
   * @param groupBy compiled GROUP BY clause
   * @param targetColumns list of columns, parallel to aliasedNodes, that are being set for an
   *     UPSERT SELECT statement. Used to coerce expression types to the expected target type.
   * @return projector used to access row values during scan
   * @throws SQLException if an expression in the projection cannot be compiled or coerced to its
   *     target type
   */
  public static RowProjector compile(
      StatementContext context,
      SelectStatement statement,
      GroupBy groupBy,
      List<? extends PDatum> targetColumns)
      throws SQLException {
    List<AliasedNode> aliasedNodes = statement.getSelect();
    // Setup projected columns in Scan
    SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy);
    List<ExpressionProjector> projectedColumns = new ArrayList<ExpressionProjector>();
    TableRef tableRef = context.getResolver().getTables().get(0);
    PTable table = tableRef.getTable();
    boolean isWildcard = false;
    Scan scan = context.getScan();
    int index = 0;
    List<Expression> projectedExpressions = Lists.newArrayListWithExpectedSize(aliasedNodes.size());
    List<byte[]> projectedFamilies = Lists.newArrayListWithExpectedSize(aliasedNodes.size());
    for (AliasedNode aliasedNode : aliasedNodes) {
      ParseNode node = aliasedNode.getNode();
      // TODO: visitor?
      if (node instanceof WildcardParseNode) {
        if (statement.isAggregate()) {
          ExpressionCompiler.throwNonAggExpressionInAggException(node.toString());
        }
        isWildcard = true;
        if (tableRef.getTable().getType() == PTableType.INDEX
            && ((WildcardParseNode) node).isRewrite()) {
          projectAllIndexColumns(context, tableRef, projectedExpressions, projectedColumns);
        } else {
          projectAllTableColumns(context, tableRef, projectedExpressions, projectedColumns);
        }
      } else if (node instanceof FamilyWildcardParseNode) {
        // Project everything for SELECT cf.*
        // TODO: support cf.* expressions for multiple tables the same way with *.
        String cfName = ((FamilyWildcardParseNode) node).getName();
        // Delay projecting to the scan: if any other column in the column family were
        // added to the scan now, it would override the request to project the entire
        // column family. Instead, we do the projection at the end.
        // TODO: consider having a ScanUtil.addColumn and ScanUtil.addFamily to work
        // around this, as this code depends on this function being the last place where
        // columns are projected (which is currently true, but could change).
        projectedFamilies.add(Bytes.toBytes(cfName));
        if (tableRef.getTable().getType() == PTableType.INDEX
            && ((FamilyWildcardParseNode) node).isRewrite()) {
          projectIndexColumnFamily(
              context, cfName, tableRef, projectedExpressions, projectedColumns);
        } else {
          projectTableColumnFamily(
              context, cfName, tableRef, projectedExpressions, projectedColumns);
        }
      } else {
        Expression expression = node.accept(selectVisitor);
        projectedExpressions.add(expression);
        if (index < targetColumns.size()) {
          PDatum targetColumn = targetColumns.get(index);
          if (targetColumn.getDataType() != expression.getDataType()) {
            PDataType targetType = targetColumn.getDataType();
            // Check if coercion is allowed using the more relaxed isCastableTo check, since we
            // promote INTEGER to LONG during expression evaluation and then convert back to
            // INTEGER on UPSERT SELECT (and we don't have an actual value we can specifically
            // check against).
            if (expression.getDataType() != null
                && !expression.getDataType().isCastableTo(targetType)) {
              throw new ArgumentTypeMismatchException(
                  targetType, expression.getDataType(), "column: " + targetColumn);
            }
            expression = CoerceExpression.create(expression, targetType);
          }
        }
        if (node instanceof BindParseNode) {
          context.getBindManager().addParamMetaData((BindParseNode) node, expression);
        }
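        // A select item that is neither a constant (stateless) nor an aggregate is not
        // allowed in an aggregate query.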
        if (!node.isStateless()) {
          if (!selectVisitor.isAggregate() && statement.isAggregate()) {
            ExpressionCompiler.throwNonAggExpressionInAggException(expression.toString());
          }
        }
        String columnAlias =
            aliasedNode.getAlias() != null
                ? aliasedNode.getAlias()
                : SchemaUtil.normalizeIdentifier(aliasedNode.getNode().getAlias());
        boolean isCaseSensitive =
            (columnAlias != null
                    && (aliasedNode.isCaseSensitve() || SchemaUtil.isCaseSensitive(columnAlias)))
                || selectVisitor.isCaseSensitive;
        String name = columnAlias == null ? expression.toString() : columnAlias;
        projectedColumns.add(
            new ExpressionProjector(
                name, table.getName().getString(), expression, isCaseSensitive));
      }
      selectVisitor.reset();
      index++;
    }

    table = context.getCurrentTable().getTable(); // switch to current table for scan projection
    // TODO make estimatedByteSize more accurate by counting the joined columns.
    int estimatedKeySize = table.getRowKeySchema().getEstimatedValueLength();
    int estimatedByteSize = 0;
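    // Estimate the per-row byte size: each projected KeyValue contributes the fixed
    // KeyValue overhead plus the estimated row key length plus the column's declared
    // byte size (or a variable-length estimate when none is declared).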
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
      PColumnFamily family = table.getColumnFamily(entry.getKey());
      if (entry.getValue() == null) {
        for (PColumn column : family.getColumns()) {
          Integer byteSize = column.getByteSize();
          estimatedByteSize +=
              SizedUtil.KEY_VALUE_SIZE
                  + estimatedKeySize
                  + (byteSize == null ? RowKeySchema.ESTIMATED_VARIABLE_LENGTH_SIZE : byteSize);
        }
      } else {
        for (byte[] cq : entry.getValue()) {
          PColumn column = family.getColumn(cq);
          Integer byteSize = column.getByteSize();
          estimatedByteSize +=
              SizedUtil.KEY_VALUE_SIZE
                  + estimatedKeySize
                  + (byteSize == null ? RowKeySchema.ESTIMATED_VARIABLE_LENGTH_SIZE : byteSize);
        }
      }
    }

    selectVisitor.compile();
    // Since we don't have the empty key value in read-only tables,
    // we must project everything.
    boolean isProjectEmptyKeyValue =
        table.getType() != PTableType.VIEW && table.getViewType() != ViewType.MAPPED && !isWildcard;
    if (isProjectEmptyKeyValue) {
      for (byte[] family : projectedFamilies) {
        projectColumnFamily(table, scan, family);
      }
    } else {
      /*
       * TODO: this could be optimized by detecting:
       * - if a column is projected that's not in the where clause
       * - if a column is grouped by that's not in the where clause
       * - if we're not using IS NULL or CASE WHEN expressions
       */
      projectAllColumnFamilies(table, scan);
    }
    return new RowProjector(projectedColumns, estimatedByteSize, isProjectEmptyKeyValue);
  }
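
  // Hedged usage sketch (not part of the original source): illustrates how a caller might
  // inspect the RowProjector returned by compile(). The accessor names used below
  // (getColumnCount, getColumnProjector, getName, getExpression) are assumed.
  private static void logProjection(RowProjector projector) {
    for (int i = 0; i < projector.getColumnCount(); i++) {
      ColumnProjector column = projector.getColumnProjector(i);
      System.out.println(column.getName() + " -> " + column.getExpression());
    }
  }
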
/**
 * Tests using native HBase types (i.e. Bytes.toBytes methods) for integers and longs. Phoenix can
 * support this if the numbers are positive.
 *
 * @author jtaylor
 * @since 0.1
 */
public class NativeHBaseTypesTest extends BaseClientManagedTimeTest {
  private static final byte[] HBASE_NATIVE_BYTES =
      SchemaUtil.getTableNameAsBytes(HBASE_NATIVE_SCHEMA_NAME, HBASE_NATIVE);
  private static final byte[] FAMILY_NAME = Bytes.toBytes(SchemaUtil.normalizeIdentifier("1"));
  private static final byte[][] SPLITS = new byte[][] {Bytes.toBytes(20), Bytes.toBytes(30)};
  private static final long ts = nextTimestamp();
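
  // Hedged illustration (not from the source): the Phoenix table created later by
  // ensureTableCreated() is expected to map the raw Bytes.toBytes encodings used below onto
  // Phoenix unsigned types, roughly along these lines (the exact DDL lives in the test
  // framework and may differ):
  //   CREATE TABLE hbase_native (
  //     uint_key UNSIGNED_INT NOT NULL,
  //     ulong_key UNSIGNED_LONG NOT NULL,
  //     string_key VARCHAR NOT NULL,
  //     "1".uint_col UNSIGNED_INT,
  //     "1".ulong_col UNSIGNED_LONG
  //     CONSTRAINT pk PRIMARY KEY (uint_key, ulong_key, string_key))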

  @BeforeClass
  public static void doBeforeTestSetup() throws Exception {
    HBaseAdmin admin = driver.getConnectionQueryServices(getUrl(), TEST_PROPERTIES).getAdmin();
    try {
      try {
        admin.disableTable(HBASE_NATIVE_BYTES);
        admin.deleteTable(HBASE_NATIVE_BYTES);
      } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
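        // Ignore: the table simply does not exist yet, so there is nothing to clean up.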
      }
      HTableDescriptor descriptor = new HTableDescriptor(HBASE_NATIVE_BYTES);
      HColumnDescriptor columnDescriptor = new HColumnDescriptor(FAMILY_NAME);
      columnDescriptor.setKeepDeletedCells(true);
      descriptor.addFamily(columnDescriptor);
      admin.createTable(descriptor, SPLITS);
      initTableValues();
    } finally {
      admin.close();
    }
  }

  private static void initTableValues() throws Exception {
    ConnectionQueryServices services = driver.getConnectionQueryServices(getUrl(), TEST_PROPERTIES);
    HTableInterface hTable =
        services.getTable(SchemaUtil.getTableNameAsBytes(HBASE_NATIVE_SCHEMA_NAME, HBASE_NATIVE));
    try {
      // Insert rows using standard HBase mechanism with standard HBase "types"
      List<Row> mutations = new ArrayList<Row>();
      byte[] family = Bytes.toBytes("1");
      byte[] uintCol = Bytes.toBytes("UINT_COL");
      byte[] ulongCol = Bytes.toBytes("ULONG_COL");
      byte[] key, bKey;
      Put put;

      key = ByteUtil.concat(Bytes.toBytes(10), Bytes.toBytes(100L), Bytes.toBytes("a"));
      put = new Put(key);
      put.add(family, uintCol, ts - 2, Bytes.toBytes(5));
      put.add(family, ulongCol, ts - 2, Bytes.toBytes(50L));
      mutations.add(put);
      put = new Put(key);
      put.add(family, uintCol, ts, Bytes.toBytes(10));
      put.add(family, ulongCol, ts, Bytes.toBytes(100L));
      mutations.add(put);

      bKey = key = ByteUtil.concat(Bytes.toBytes(20), Bytes.toBytes(200L), Bytes.toBytes("b"));
      put = new Put(key);
      put.add(family, uintCol, ts - 4, Bytes.toBytes(5000));
      put.add(family, ulongCol, ts - 4, Bytes.toBytes(50000L));
      mutations.add(put);
      // FIXME: Remove the @SuppressWarnings when the unintentionally deprecated method is
      // fixed (HBASE-7870).
      // FIXME: the version of the Delete constructor without the lock args was introduced
      // in 0.94.4, thus if we try to use it here we can no longer use the 0.94.2 version
      // of the client.
      @SuppressWarnings("deprecation")
      Delete del = new Delete(key, ts - 2, null);
      mutations.add(del);
      put = new Put(key);
      put.add(family, uintCol, ts, Bytes.toBytes(2000));
      put.add(family, ulongCol, ts, Bytes.toBytes(20000L));
      mutations.add(put);

      key = ByteUtil.concat(Bytes.toBytes(30), Bytes.toBytes(300L), Bytes.toBytes("c"));
      put = new Put(key);
      put.add(family, uintCol, ts, Bytes.toBytes(3000));
      put.add(family, ulongCol, ts, Bytes.toBytes(30000L));
      mutations.add(put);

      key = ByteUtil.concat(Bytes.toBytes(40), Bytes.toBytes(400L), Bytes.toBytes("d"));
      put = new Put(key);
      put.add(family, uintCol, ts, Bytes.toBytes(4000));
      put.add(family, ulongCol, ts, Bytes.toBytes(40000L));
      mutations.add(put);

      hTable.batch(mutations);

      Result r = hTable.get(new Get(bKey));
      assertFalse(r.isEmpty());
    } finally {
      hTable.close();
    }
    // Create Phoenix table after HBase table was created through the native APIs
    // The timestamp of the table creation must be later than the timestamp of the data
    ensureTableCreated(getUrl(), HBASE_NATIVE, null, ts + 1);
  }

  @Test
  public void testRangeQuery1() throws Exception {
    String query =
        "SELECT uint_key, ulong_key, string_key FROM HBASE_NATIVE WHERE uint_key > 20 and ulong_key >= 400";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement statement = conn.prepareStatement(query);
      ResultSet rs = statement.executeQuery();
      assertTrue(rs.next());
      assertEquals(40, rs.getInt(1));
      assertEquals(400L, rs.getLong(2));
      assertEquals("d", rs.getString(3));
      assertFalse(rs.next());
    } finally {
      conn.close();
    }
  }

  @Test
  public void testRangeQuery2() throws Exception {
    String query =
        "SELECT uint_key, ulong_key, string_key FROM HBASE_NATIVE WHERE uint_key > 20 and uint_key < 40";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement statement = conn.prepareStatement(query);
      ResultSet rs = statement.executeQuery();
      assertTrue(rs.next());
      assertEquals(30, rs.getInt(1));
      assertEquals(300L, rs.getLong(2));
      assertEquals("c", rs.getString(3));
      assertFalse(rs.next());
    } finally {
      conn.close();
    }
  }

  @Test
  public void testRangeQuery3() throws Exception {
    String query =
        "SELECT uint_key, ulong_key, string_key FROM HBASE_NATIVE WHERE ulong_key > 200 and ulong_key < 400";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement statement = conn.prepareStatement(query);
      ResultSet rs = statement.executeQuery();
      assertTrue(rs.next());
      assertEquals(30, rs.getInt(1));
      assertEquals(300L, rs.getLong(2));
      assertEquals("c", rs.getString(3));
      assertFalse(rs.next());
    } finally {
      conn.close();
    }
  }

  @Test
  public void testNegativeAgainstUnsignedNone() throws Exception {
    String query = "SELECT uint_key, ulong_key, string_key FROM HBASE_NATIVE WHERE ulong_key < -1";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement statement = conn.prepareStatement(query);
      ResultSet rs = statement.executeQuery();
      assertFalse(rs.next());
    } finally {
      conn.close();
    }
  }

  @Test
  public void testNegativeAgainstUnsignedAll() throws Exception {
    String query = "SELECT string_key FROM HBASE_NATIVE WHERE ulong_key > -100";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement statement = conn.prepareStatement(query);
      ResultSet rs = statement.executeQuery();
      assertTrue(rs.next());
      assertEquals("a", rs.getString(1));
      assertTrue(rs.next());
      assertEquals("b", rs.getString(1));
      assertTrue(rs.next());
      assertEquals("c", rs.getString(1));
      assertTrue(rs.next());
      assertEquals("d", rs.getString(1));
      assertFalse(rs.next());
    } finally {
      conn.close();
    }
  }

  @Test
  public void testNegativeAddNegativeValue() throws Exception {
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 5); // Run query at timestamp ts + 5
    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(url, props);
    try {
      PreparedStatement stmt =
          conn.prepareStatement(
              "UPSERT INTO HBASE_NATIVE(uint_key,ulong_key,string_key, uint_col) VALUES(?,?,?,?)");
      stmt.setInt(1, -1);
      stmt.setLong(2, 2L);
      stmt.setString(3, "foo");
      stmt.setInt(4, 3);
      stmt.execute();
      fail();
    } catch (SQLException e) {
      assertTrue(e.getMessage().contains("Type mismatch"));
    }
  }

  @Test
  public void testNegativeCompareNegativeValue() throws Exception {
    String query = "SELECT string_key FROM HBASE_NATIVE WHERE uint_key > 100000";
    String url =
        PHOENIX_JDBC_URL
            + ";"
            + PhoenixRuntime.CURRENT_SCN_ATTRIB
            + "="
            + (ts + 7); // Run query at timestamp ts + 7
    Properties props = new Properties(TEST_PROPERTIES);
    PhoenixConnection conn =
        DriverManager.getConnection(url, props).unwrap(PhoenixConnection.class);
    HTableInterface hTable =
        conn.getQueryServices()
            .getTable(SchemaUtil.getTableNameAsBytes(HBASE_NATIVE_SCHEMA_NAME, HBASE_NATIVE));

    List<Row> mutations = new ArrayList<Row>();
    byte[] family = Bytes.toBytes("1");
    byte[] uintCol = Bytes.toBytes("UINT_COL");
    byte[] ulongCol = Bytes.toBytes("ULONG_COL");
    byte[] key;
    Put put;

    // Need to use native APIs because the Phoenix APIs wouldn't let you insert a
    // negative number for an unsigned type
    key = ByteUtil.concat(Bytes.toBytes(-10), Bytes.toBytes(100L), Bytes.toBytes("e"));
    put = new Put(key);
    // Insert at later timestamp than other queries in this test are using, so that
    // we don't affect them
    put.add(family, uintCol, ts + 6, Bytes.toBytes(10));
    put.add(family, ulongCol, ts + 6, Bytes.toBytes(100L));
    put.add(family, QueryConstants.EMPTY_COLUMN_BYTES, ts + 6, ByteUtil.EMPTY_BYTE_ARRAY);
    mutations.add(put);
    hTable.batch(mutations);

    // Demonstrates weakness of HBase Bytes serialization. Negative numbers
    // show up as bigger than positive numbers
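    // (e.g. Bytes.toBytes(-10) is 0xFFFFFFF6, which compares greater than
    // Bytes.toBytes(100000) = 0x000186A0 under lexicographic unsigned-byte comparison,
    // so the row inserted above satisfies uint_key > 100000).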
    PreparedStatement statement = conn.prepareStatement(query);
    ResultSet rs = statement.executeQuery();
    assertTrue(rs.next());
    assertEquals("e", rs.getString(1));
    assertFalse(rs.next());
  }
}
  private void assertColumnMetaData(ResultSet rs, String schema, String table, String column)
      throws SQLException {
    assertEquals(schema, rs.getString("TABLE_SCHEM"));
    assertEquals(table, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier(column), rs.getString("COLUMN_NAME"));
  }
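
  // Hedged usage sketch (not part of the original source): the helper above is typically
  // driven by a DatabaseMetaData.getColumns() result set. The schema, table, and column
  // names here are hypothetical; only the standard JDBC metadata API, the JUnit asserts,
  // and the getUrl()/TEST_PROPERTIES helpers already used in this file are assumed.
  @Test
  public void testColumnMetaDataSketch() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES);
    try {
      ResultSet rs = conn.getMetaData().getColumns(null, "MY_SCHEMA", "MY_TABLE", null);
      assertTrue(rs.next());
      assertColumnMetaData(rs, "MY_SCHEMA", "MY_TABLE", "ID");
    } finally {
      conn.close();
    }
  }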