@Override public TableReference apply(String from) { return parseTableSpec(from); } }
/** Returns the destination parsed from {@code tableSpec} as a {@link TableReference}. */
public TableReference getTableReference() {
  // Parse lazily on each call; tableSpec is the canonical stored form.
  TableReference parsed = BigQueryHelpers.parseTableSpec(tableSpec);
  return parsed;
}
/** Well-formed table specs must parse without throwing. */
@Test
public void testTableParsing_validPatterns() {
  // Covers: hyphenated project id, plain project id, and bare "dataset.table".
  String[] validSpecs = {"a123-456:foo_bar.d", "a12345:b.c", "b12345.c"};
  for (String spec : validSpecs) {
    BigQueryHelpers.parseTableSpec(spec);
  }
}
/**
 * Writes the accumulated rows into BigQuery with streaming API.
 *
 * <p>Flushes every buffered table's rows via {@code flushRows}, clears the per-bundle buffers,
 * and finally emits any failed inserts on {@code failedOutputTag}, preserving each element's
 * original timestamp and window.
 */
@FinishBundle
public void finishBundle(FinishBundleContext context) throws Exception {
  // Accumulates rows the streaming-insert service reports as failed.
  List<ValueInSingleWindow<ErrorT>> failedInserts = Lists.newArrayList();
  BigQueryOptions options = context.getPipelineOptions().as(BigQueryOptions.class);
  for (Map.Entry<String, List<ValueInSingleWindow<TableRow>>> entry : tableRows.entrySet()) {
    // Map key is the table spec string; uniqueIdsForTableRows is a parallel map keyed the same
    // way, so both lookups below use entry.getKey().
    TableReference tableReference = BigQueryHelpers.parseTableSpec(entry.getKey());
    flushRows(
        tableReference,
        entry.getValue(),
        uniqueIdsForTableRows.get(entry.getKey()),
        options,
        failedInserts);
  }
  // Reset per-bundle state so buffered rows do not leak into the next bundle.
  tableRows.clear();
  uniqueIdsForTableRows.clear();
  for (ValueInSingleWindow<ErrorT> row : failedInserts) {
    context.output(failedOutputTag, row.getValue(), row.getTimestamp(), row.getWindow());
  }
}
/** A spec whose dataset component is empty ("project:.table") must be rejected. */
@Test
public void testTableParsingError_2() {
  thrown.expect(IllegalArgumentException.class);
  String missingDataset = "myproject:.bar";
  BigQueryHelpers.parseTableSpec(missingDataset);
}
/**
 * Decodes a {@link BigQueryInsertError} from {@code inStream}.
 *
 * <p>Reads three components in a fixed order that must mirror the encoder: the insert errors as
 * a UTF-8 JSON string, the failed {@link TableRow}, and the destination table spec string.
 */
@Override
public BigQueryInsertError decode(InputStream inStream) throws IOException {
  // Component 1: InsertErrors serialized as a JSON string.
  TableDataInsertAllResponse.InsertErrors err = MAPPER.readValue( StringUtf8Coder.of().decode(inStream), TableDataInsertAllResponse.InsertErrors.class);
  // Component 2: the row that failed to insert.
  TableRow row = TableRowJsonCoder.of().decode(inStream);
  // Component 3: table spec string, e.g. "project:dataset.table".
  TableReference ref = BigQueryHelpers.parseTableSpec(StringUtf8Coder.of().decode(inStream));
  return new BigQueryInsertError(row, err, ref);
}
/** A project id that starts with a digit must be rejected. */
@Test
public void testTableParsingError() {
  thrown.expect(IllegalArgumentException.class);
  String digitLeadingProject = "0123456:foo.bar";
  BigQueryHelpers.parseTableSpec(digitLeadingProject);
}
/** A spec whose project component is empty (":dataset.table") must be rejected. */
@Test
public void testTableParsingError_3() {
  thrown.expect(IllegalArgumentException.class);
  String emptyProject = ":a.b";
  BigQueryHelpers.parseTableSpec(emptyProject);
}
/** A backslash inside the project component must be rejected. */
@Test
public void testTableParsingError_slash() {
  thrown.expect(IllegalArgumentException.class);
  String backslashInProject = "a\\b12345:c.d";
  BigQueryHelpers.parseTableSpec(backslashInProject);
}
@Test public void testRemoveTemporaryTables() throws Exception { FakeDatasetService datasetService = new FakeDatasetService(); String projectId = "project"; String datasetId = "dataset"; datasetService.createDataset(projectId, datasetId, "", "", null); List<TableReference> tableRefs = Lists.newArrayList( BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table1")), BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table2")), BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table3"))); for (TableReference tableRef : tableRefs) { datasetService.createTable(new Table().setTableReference(tableRef)); } // Add one more table to delete that does not actually exist. tableRefs.add( BigQueryHelpers.parseTableSpec(String.format("%s:%s.%s", projectId, datasetId, "table4"))); WriteRename.removeTemporaryTables(datasetService, tableRefs); for (TableReference ref : tableRefs) { loggedWriteRename.verifyDebug("Deleting table " + toJsonString(ref)); checkState(datasetService.getTable(ref) == null, "Table " + ref + " was not deleted!"); } }
onInsertAll(errorsIndices); TableReference ref = BigQueryHelpers.parseTableSpec("project:dataset.table"); DatasetServiceImpl datasetService = new DatasetServiceImpl(mockClient, options, 5);
/** A bare "dataset.table" spec parses with a null project id. */
@Test
public void testTableParsing_noProjectId() {
  TableReference parsed = BigQueryHelpers.parseTableSpec("data_set.table_name");
  // No project component in the spec, so projectId must stay unset.
  assertEquals(null, parsed.getProjectId());
  assertEquals("data_set", parsed.getDatasetId());
  assertEquals("table_name", parsed.getTableId());
}
/** A fully-qualified "project:dataset.table" spec parses into all three components. */
@Test
public void testTableParsing() {
  TableReference parsed = BigQueryHelpers.parseTableSpec("my-project:data_set.table_name");
  assertEquals("my-project", parsed.getProjectId());
  assertEquals("data_set", parsed.getDatasetId());
  assertEquals("table_name", parsed.getTableId());
}
/** Writing an empty PCollection with CREATE_IF_NEEDED must still create the target table. */
@Test
public void testWriteEmptyPCollection() throws Exception {
  // Single spec string reused for both the write and the post-run lookup.
  String tableSpec = "project-id:dataset-id.table-id";
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(new TableFieldSchema().setName("number").setType("INTEGER")));

  p.apply(Create.empty(TableRowJsonCoder.of()))
      .apply(
          BigQueryIO.writeTableRows()
              .to(tableSpec)
              .withTestServices(fakeBqServices)
              .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
              .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
              .withSchema(schema)
              .withoutValidation());
  p.run();

  // The table must exist even though zero rows were written.
  checkNotNull(fakeDatasetService.getTable(BigQueryHelpers.parseTableSpec(tableSpec)));
}
.containsInAnyOrder( new BigQueryInsertError( row2, persistentError, BigQueryHelpers.parseTableSpec(tableSpec))); p.run();
new TableRow().set("name", "f").set("number", 6L)); TableReference table = BigQueryHelpers.parseTableSpec("project:data_set.table_name"); fakeDatasetService.createDataset("project", "data_set", "", "", null); fakeDatasetService.createTable(
new TableRow().set("name", "f").set("number", 6L)); TableReference table = BigQueryHelpers.parseTableSpec("project:data_set.table_name"); fakeDatasetService.createDataset("project", "data_set", "", "", null); fakeDatasetService.createTable(
/** Writes two rows with the given method and checks schema and time partitioning round-trip. */
public void testTimePartitioning(BigQueryIO.Write.Method insertMethod) throws Exception {
  // Single spec string reused for both the write and the post-run lookup.
  String tableSpec = "project-id:dataset-id.table-id";
  TableRow row1 = new TableRow().set("name", "a").set("number", "1");
  TableRow row2 = new TableRow().set("name", "b").set("number", "2");
  TimePartitioning timePartitioning = new TimePartitioning().setType("DAY").setExpirationMs(1000L);
  TableSchema schema =
      new TableSchema()
          .setFields(
              ImmutableList.of(new TableFieldSchema().setName("number").setType("INTEGER")));

  p.apply(Create.of(row1, row2))
      .apply(
          BigQueryIO.writeTableRows()
              .to(tableSpec)
              .withTestServices(fakeBqServices)
              .withMethod(insertMethod)
              .withSchema(schema)
              .withTimePartitioning(timePartitioning)
              .withoutValidation());
  p.run();

  // Both the schema and the partitioning settings must survive table creation.
  Table table = fakeDatasetService.getTable(BigQueryHelpers.parseTableSpec(tableSpec));
  assertEquals(schema, table.getSchema());
  assertEquals(timePartitioning, table.getTimePartitioning());
}
new TableRow().set("name", "f").set("number", 6L)); TableReference table = BigQueryHelpers.parseTableSpec("project:data_set.table_name"); fakeDatasetService.createDataset("project", "data_set", "", "", null); fakeDatasetService.createTable(