/**
 * Converts the connector-side Table representation into the Thrift
 * metastore API type, carrying over the given privilege set.
 */
public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(
        Table table,
        PrincipalPrivilegeSet privileges)
{
    org.apache.hadoop.hive.metastore.api.Table result = new org.apache.hadoop.hive.metastore.api.Table();
    result.setDbName(table.getDatabaseName());
    result.setTableName(table.getTableName());
    result.setOwner(table.getOwner());
    result.setTableType(table.getTableType());
    result.setParameters(table.getParameters());
    result.setPartitionKeys(table.getPartitionColumns().stream()
            .map(MetastoreUtil::toMetastoreApiFieldSchema)
            .collect(toList()));
    result.setSd(makeStorageDescriptor(table.getTableName(), table.getDataColumns(), table.getStorage()));
    result.setPrivileges(privileges);
    // View text is only present for views; plain tables store null here.
    result.setViewOriginalText(table.getViewOriginalText().orElse(null));
    result.setViewExpandedText(table.getViewExpandedText().orElse(null));
    return result;
}
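// Usage sketch (hedged): converting a connector-side table for a direct Thrift
// client call. `table` is assumed to come from the connector's own metastore
// model and `client` to be a HiveMetaStoreClient; the no-arg
// PrincipalPrivilegeSet() constructor yields an empty privilege set.
org.apache.hadoop.hive.metastore.api.Table apiTable =
        toMetastoreApiTable(table, new PrincipalPrivilegeSet());
client.createTable(apiTable);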
@Override
public void createView(ConnectorSession session, SchemaTableName viewName, String viewData, boolean replace)
{
    // When replacing, drop any existing view first; a missing view is not an error here.
    if (replace) {
        try {
            dropView(session, viewName);
        }
        catch (ViewNotFoundException ignored) {
        }
    }

    // Flag the table as a Presto view so readers can recognize (or skip) it.
    Map<String, String> properties = ImmutableMap.<String, String>builder()
            .put("comment", "Presto View")
            .put(PRESTO_VIEW_FLAG, "true")
            .build();

    // The metastore requires at least one column, so register a placeholder.
    FieldSchema dummyColumn = new FieldSchema("dummy", STRING_TYPE_NAME, null);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(ImmutableList.of(dummyColumn));
    sd.setSerdeInfo(new SerDeInfo());

    Table table = new Table();
    table.setDbName(viewName.getSchemaName());
    table.setTableName(viewName.getTableName());
    table.setOwner(session.getUser());
    table.setTableType(TableType.VIRTUAL_VIEW.name());
    table.setParameters(properties);
    table.setViewOriginalText(encodeViewData(viewData));
    table.setViewExpandedText("/* Presto View */");
    table.setSd(sd);

    try {
        metastore.createTable(table);
    }
    catch (TableAlreadyExistsException e) {
        throw new ViewAlreadyExistsException(e.getTableName());
    }
}
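// Read-side sketch (hedged): how a caller might recognize and decode a view
// written this way. Assumes a raw Thrift client (HiveMetaStoreClient) and a
// decodeViewData counterpart to the encodeViewData call used above.
Table view = client.getTable(viewName.getSchemaName(), viewName.getTableName());
if ("true".equals(view.getParameters().get(PRESTO_VIEW_FLAG))) {
    String viewData = decodeViewData(view.getViewOriginalText());
    // ... parse and use the view definition
}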
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case TABLE_NAME:
    if (value == null) {
      unsetTableName();
    } else {
      setTableName((String) value);
    }
    break;

  case DB_NAME:
    if (value == null) {
      unsetDbName();
    } else {
      setDbName((String) value);
    }
    break;

  case OWNER:
    if (value == null) {
      unsetOwner();
    } else {
      setOwner((String) value);
    }
    break;

  case CREATE_TIME:
    if (value == null) {
      unsetCreateTime();
    } else {
      setCreateTime((Integer) value);
    }
    break;

  case LAST_ACCESS_TIME:
    if (value == null) {
      unsetLastAccessTime();
    } else {
      setLastAccessTime((Integer) value);
    }
    break;

  case RETENTION:
    if (value == null) {
      unsetRetention();
    } else {
      setRetention((Integer) value);
    }
    break;

  case SD:
    if (value == null) {
      unsetSd();
    } else {
      setSd((StorageDescriptor) value);
    }
    break;

  case PARTITION_KEYS:
    if (value == null) {
      unsetPartitionKeys();
    } else {
      setPartitionKeys((List<FieldSchema>) value);
    }
    break;

  case PARAMETERS:
    if (value == null) {
      unsetParameters();
    } else {
      setParameters((Map<String, String>) value);
    }
    break;

  case VIEW_ORIGINAL_TEXT:
    if (value == null) {
      unsetViewOriginalText();
    } else {
      setViewOriginalText((String) value);
    }
    break;

  case VIEW_EXPANDED_TEXT:
    if (value == null) {
      unsetViewExpandedText();
    } else {
      setViewExpandedText((String) value);
    }
    break;

  case TABLE_TYPE:
    if (value == null) {
      unsetTableType();
    } else {
      setTableType((String) value);
    }
    break;

  case PRIVILEGES:
    if (value == null) {
      unsetPrivileges();
    } else {
      setPrivileges((PrincipalPrivilegeSet) value);
    }
    break;
  }
}
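// Usage sketch (hedged): the generated _Fields enum plus this generic setter
// permits field-driven copying between Table instances. Assumes `source` is an
// existing populated Table; note Thrift also generates a copy constructor for
// deep copies, so this is only useful when fields are selected dynamically.
Table copy = new Table();
for (Table._Fields f : Table._Fields.values()) {
    Object v = source.getFieldValue(f);
    if (v != null) {
        copy.setFieldValue(f, v);
    }
}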
@Test
public void testHiveLocalMetaStore() throws Exception {
    // Create a table and read it back; exceptions propagate to JUnit and fail the test.
    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveLocalMetaStore.getHiveConf());
    hiveClient.dropTable(
        propertyParser.getProperty(ConfigVars.HIVE_TEST_DATABASE_NAME_KEY),
        propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY),
        true,   // deleteData
        true);  // ignoreUnknownTab

    // Define the columns
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("id", serdeConstants.INT_TYPE_NAME, ""));
    cols.add(new FieldSchema("msg", serdeConstants.STRING_TYPE_NAME, ""));

    // Values for the StorageDescriptor
    String location =
        new File(propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY)).getAbsolutePath();
    String inputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
    String outputFormat = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
    int numBuckets = 16;
    Map<String, String> orcProps = new HashMap<String, String>();
    orcProps.put("orc.compress", "NONE");
    SerDeInfo serDeInfo =
        new SerDeInfo(OrcSerde.class.getSimpleName(), OrcSerde.class.getName(), orcProps);
    List<String> bucketCols = new ArrayList<String>();
    bucketCols.add("id");

    // Build the StorageDescriptor
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(cols);
    sd.setLocation(location);
    sd.setInputFormat(inputFormat);
    sd.setOutputFormat(outputFormat);
    sd.setNumBuckets(numBuckets);
    sd.setSerdeInfo(serDeInfo);
    sd.setBucketCols(bucketCols);
    sd.setSortCols(new ArrayList<Order>());
    sd.setParameters(new HashMap<String, String>());

    // Define the table
    Table tbl = new Table();
    tbl.setDbName(propertyParser.getProperty(ConfigVars.HIVE_TEST_DATABASE_NAME_KEY));
    tbl.setTableName(propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY));
    tbl.setSd(sd);
    tbl.setOwner(System.getProperty("user.name"));
    tbl.setParameters(new HashMap<String, String>());
    tbl.setViewOriginalText("");
    tbl.setViewExpandedText("");
    tbl.setTableType(TableType.EXTERNAL_TABLE.name());
    List<FieldSchema> partitions = new ArrayList<FieldSchema>();
    partitions.add(new FieldSchema("dt", serdeConstants.STRING_TYPE_NAME, ""));
    tbl.setPartitionKeys(partitions);

    // Create the table
    hiveClient.createTable(tbl);

    // Describe the table and verify it round-tripped
    Table createdTable = hiveClient.getTable(
        propertyParser.getProperty(ConfigVars.HIVE_TEST_DATABASE_NAME_KEY),
        propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY));
    LOG.info("HIVE: Created Table: {}", createdTable);
    assertThat(
        createdTable.toString(),
        containsString(propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY)));
}
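// Cleanup sketch (hedged): dropping the table in an @After hook keeps repeated
// runs independent. Assumes the same shared hiveLocalMetaStore and
// propertyParser fields, and the standard four-argument
// HiveMetaStoreClient.dropTable(db, table, deleteData, ignoreUnknownTab) overload.
@After
public void tearDown() throws Exception {
    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveLocalMetaStore.getHiveConf());
    hiveClient.dropTable(
        propertyParser.getProperty(ConfigVars.HIVE_TEST_DATABASE_NAME_KEY),
        propertyParser.getProperty(ConfigVars.HIVE_TEST_TABLE_NAME_KEY),
        true,   // deleteData: remove the underlying files as well
        true);  // ignoreUnknownTab: no error if the table was never created
}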