/**
 * Returns a list of the columns making up a table. The argument is a serialized
 * TDescribeTableParams object. The return type is a serialized
 * TDescribeTableResult object.
 *
 * @see Frontend#describeTable
 */
public byte[] describeTable(byte[] thriftDescribeTableParams) throws ImpalaException {
  TDescribeTableParams params = new TDescribeTableParams();
  deserializeThrift(params, thriftDescribeTableParams);
  TDescribeTableResult result = new TDescribeTableResult();
  result.setColumns(frontend.describeTable(params.getDb(), params.getTable_name()));
  TSerializer serializer = new TSerializer(protocolFactory);
  try {
    return serializer.serialize(result);
  } catch (TException e) {
    throw new InternalException(e.getMessage());
  }
}
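// A minimal caller-side sketch of the byte[]-in/byte[]-out contract above. It relies
// on assumed surroundings that are not part of the method itself: a JniFrontend-style
// instance named `jniFrontend`, the Thrift binary protocol on both ends, the generated
// setDb()/setTable_name() setters on TDescribeTableParams, and imports of
// org.apache.thrift.{TSerializer, TDeserializer, TException} and
// org.apache.thrift.protocol.TBinaryProtocol.
private TDescribeTableResult callDescribeTable(String db, String tableName)
    throws ImpalaException, TException {
  TDescribeTableParams params = new TDescribeTableParams();
  params.setDb(db);
  params.setTable_name(tableName);
  // Serialize the request, cross the JNI boundary, then deserialize the response.
  TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
  byte[] response = jniFrontend.describeTable(serializer.serialize(params));
  TDescribeTableResult result = new TDescribeTableResult();
  new TDeserializer(new TBinaryProtocol.Factory()).deserialize(result, response);
  return result;
}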
/*
 * Builds results for a DESCRIBE <table> command. This consists of the column
 * definition for each column in the table.
 */
private static TDescribeTableResult describeTableMinimal(Table table) {
  TDescribeTableResult descResult = new TDescribeTableResult();
  descResult.results = Lists.newArrayList();
  // Get description of all the table's columns (includes partition columns).
  for (Column column : table.getColumnsInHiveOrder()) {
    TColumnValue colNameCol = new TColumnValue();
    colNameCol.setString_val(column.getName());
    TColumnValue dataTypeCol = new TColumnValue();
    dataTypeCol.setString_val(column.getType().toString().toLowerCase());
    TColumnValue commentCol = new TColumnValue();
    commentCol.setString_val(column.getComment() != null ? column.getComment() : "");
    descResult.results.add(
        new TResultRow(Lists.newArrayList(colNameCol, dataTypeCol, commentCol)));
  }
  return descResult;
}
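// An illustrative consumer of the minimal result above, assuming the Thrift-generated
// getColVals()/getString_val() accessors, a java.util.List import, and a catalog Table
// instance that has already been loaded; these assumptions are not part of the method.
private static void printMinimalDescribe(Table table) {
  TDescribeTableResult desc = describeTableMinimal(table);
  for (TResultRow row : desc.results) {
    List<TColumnValue> cols = row.getColVals();
    // Column values are in the order built above: name, type, comment.
    System.out.println(String.format("%-30s %-20s %s",
        cols.get(0).getString_val(),
        cols.get(1).getString_val(),
        cols.get(2).getString_val()));
  }
}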
/*
 * Builds a TDescribeTableResult that contains the result of a DESCRIBE FORMATTED
 * <table> command. For the formatted describe output the goal is to be exactly the
 * same as what Hive (via HiveServer2) outputs, for compatibility reasons. To do this,
 * Hive's MetadataFormatUtils class is used to build the results.
 */
private static TDescribeTableResult describeTableFormatted(Table table) {
  TDescribeTableResult descResult = new TDescribeTableResult();
  descResult.results = Lists.newArrayList();

  org.apache.hadoop.hive.metastore.api.Table msTable =
      table.getMetaStoreTable().deepCopy();
  // Fixup the metastore table so the output of DESCRIBE FORMATTED matches Hive's.
  // This is to distinguish between empty comments and no comments (value is null).
  for (FieldSchema fs : msTable.getSd().getCols()) {
    fs.setComment(table.getColumn(fs.getName()).getComment());
  }
  for (FieldSchema fs : msTable.getPartitionKeys()) {
    fs.setComment(table.getColumn(fs.getName()).getComment());
  }

  // To avoid initializing any of the SerDe classes in the metastore table Thrift
  // struct, create the ql.metadata.Table object by calling the empty c'tor and
  // then calling setTTable().
  org.apache.hadoop.hive.ql.metadata.Table hiveTable =
      new org.apache.hadoop.hive.ql.metadata.Table();
  hiveTable.setTTable(msTable);

  StringBuilder sb = new StringBuilder();
  // First add all the columns (includes partition columns).
  sb.append(MetaDataFormatUtils.getAllColumnsInformation(
      msTable.getSd().getCols(), msTable.getPartitionKeys()));
  // Add the extended table metadata information.
  sb.append(MetaDataFormatUtils.getTableInformation(hiveTable));

  for (String line : sb.toString().split("\n")) {
    // To match Hive's HiveServer2 output, split each line into multiple column
    // values based on the field delimiter.
    String[] columns = line.split(MetaDataFormatUtils.FIELD_DELIM);
    TResultRow resultRow = new TResultRow();
    for (int i = 0; i < NUM_DESC_FORMATTED_RESULT_COLS; ++i) {
      TColumnValue colVal = new TColumnValue();
      colVal.setString_val(null);
      if (columns.length > i) {
        // Add the column value.
        colVal.setString_val(columns[i]);
      }
      resultRow.addToColVals(colVal);
    }
    descResult.results.add(resultRow);
  }
  return descResult;
}
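// A hedged sketch of how the two builders might be wired together behind a single
// entry point, keyed off the requested output style. TDescribeTableOutputStyle and
// this dispatch method are assumptions about the surrounding Thrift definitions,
// not code taken from the methods above.
private static TDescribeTableResult describeTable(
    Table table, TDescribeTableOutputStyle outputStyle) {
  switch (outputStyle) {
    case MINIMAL:
      return describeTableMinimal(table);
    case FORMATTED:
      return describeTableFormatted(table);
    default:
      throw new IllegalArgumentException(
          "Unsupported describe output style: " + outputStyle);
  }
}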