@BeforeClass
public static void beforeClass() throws InterruptedException {
  RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
  RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
  bigquery = bigqueryHelper.options().service();
  storage = gcsHelper.options().service();
  storage.create(BucketInfo.of(BUCKET));
  storage.create(
      BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(),
      CSV_CONTENT.getBytes(StandardCharsets.UTF_8));
  storage.create(
      BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(),
      JSON_CONTENT.getBytes(StandardCharsets.UTF_8));
  DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build();
  bigquery.create(info);
  LoadJobConfiguration configuration =
      LoadJobConfiguration.builder(
              TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json())
          .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
          .schema(TABLE_SCHEMA)
          .build();
  Job job = bigquery.create(JobInfo.of(configuration));
  while (!job.isDone()) {
    Thread.sleep(1000);
  }
  assertNull(job.status().error());
}
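// The setup above creates a bucket, two blobs, a dataset, and a load job, but nothing in this
// section releases those resources. Below is a minimal teardown sketch, assuming the same
// testing helpers used in beforeClass also expose forceDelete methods (RemoteBigQueryHelper
// and RemoteGcsHelper); it is illustrative only and would additionally require imports for
// org.junit.AfterClass, java.util.concurrent.TimeUnit, and java.util.concurrent.ExecutionException.
@AfterClass
public static void afterClass() throws ExecutionException, InterruptedException {
  if (bigquery != null) {
    // Remove the dataset together with any tables the tests created in it.
    RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
  }
  if (storage != null) {
    // Best-effort removal of the bucket and its blobs, waiting up to 10 seconds.
    RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS);
  }
}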
@Test
public void testCreateExternalTable() throws InterruptedException {
  String tableName = "test_create_external_table";
  TableId tableId = TableId.of(DATASET, tableName);
  ExternalTableDefinition externalTableDefinition =
      ExternalTableDefinition.of(
          "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json());
  TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition);
  Table createdTable = bigquery.create(tableInfo);
  assertNotNull(createdTable);
  assertEquals(DATASET, createdTable.tableId().dataset());
  assertEquals(tableName, createdTable.tableId().table());
  Table remoteTable = bigquery.getTable(DATASET, tableName);
  assertNotNull(remoteTable);
  assertTrue(remoteTable.definition() instanceof ExternalTableDefinition);
  assertEquals(createdTable.tableId(), remoteTable.tableId());
  assertEquals(TABLE_SCHEMA, remoteTable.definition().schema());
  QueryRequest request =
      QueryRequest.builder(
              "SELECT TimestampField, StringField, IntegerField, BooleanField FROM "
                  + DATASET
                  + "."
                  + tableName)
          .defaultDataset(DatasetId.of(DATASET))
          .maxWaitTime(60000L)
          .maxResults(1000L)
          .build();
  QueryResponse response = bigquery.query(request);
  while (!response.jobCompleted()) {
    response = bigquery.getQueryResults(response.jobId());
    Thread.sleep(1000);
  }
  long integerValue = 0;
  int rowCount = 0;
  for (List<FieldValue> row : response.result().values()) {
    FieldValue timestampCell = row.get(0);
    FieldValue stringCell = row.get(1);
    FieldValue integerCell = row.get(2);
    FieldValue booleanCell = row.get(3);
    assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
    assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
    assertEquals(FieldValue.Attribute.PRIMITIVE, integerCell.attribute());
    assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
    assertEquals(1408452095220000L, timestampCell.timestampValue());
    assertEquals("stringValue", stringCell.stringValue());
    assertEquals(integerValue, integerCell.longValue());
    assertEquals(false, booleanCell.booleanValue());
    integerValue = ~integerValue & 0x1;
    rowCount++;
  }
  assertEquals(4, rowCount);
  assertTrue(remoteTable.delete());
}
@Test
public void testInsertFromFile() throws InterruptedException {
  String destinationTableName = "test_insert_from_file_table";
  TableId tableId = TableId.of(DATASET, destinationTableName);
  WriteChannelConfiguration configuration =
      WriteChannelConfiguration.builder(tableId)
          .formatOptions(FormatOptions.json())
          .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
          .schema(TABLE_SCHEMA)
          .build();
  try (WriteChannel channel = bigquery.writer(configuration)) {
    channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8)));
  } catch (IOException e) {
    fail("IOException was not expected");
  }
  // Wait until the new table is created. If the table is never created, the test will time out.
  while (bigquery.getTable(tableId) == null) {
    Thread.sleep(1000L);
  }
  Page<List<FieldValue>> rows = bigquery.listTableData(tableId);
  int rowCount = 0;
  for (List<FieldValue> row : rows.values()) {
    FieldValue timestampCell = row.get(0);
    FieldValue stringCell = row.get(1);
    FieldValue integerCell = row.get(2);
    FieldValue booleanCell = row.get(3);
    FieldValue recordCell = row.get(4);
    assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
    assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
    assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute());
    assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
    assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute());
    assertEquals(1408452095220000L, timestampCell.timestampValue());
    assertEquals("stringValue", stringCell.stringValue());
    assertEquals(0, integerCell.repeatedValue().get(0).longValue());
    assertEquals(1, integerCell.repeatedValue().get(1).longValue());
    assertEquals(false, booleanCell.booleanValue());
    assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue());
    assertTrue(recordCell.recordValue().get(1).isNull());
    assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue());
    assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue());
    assertEquals(true, recordCell.recordValue().get(3).booleanValue());
    rowCount++;
  }
  assertEquals(2, rowCount);
  assertTrue(bigquery.delete(DATASET, destinationTableName));
}