/**
 * Builds a {@link TableDestination} for the given table spec, with a generated
 * human-readable description of the form {@code "Table <spec>"}.
 */
public TableDestination getTable(String tableSpec) {
  String description = "Table " + tableSpec;
  return new TableDestination(tableSpec, description);
}
/**
 * Returns a new {@link TableDestination} that points at {@code tableReference} while
 * keeping this instance's description and time-partitioning settings.
 */
public TableDestination withTableReference(TableReference tableReference) {
  return new TableDestination(
      tableReference, this.tableDescription, this.jsonTimePartitioning);
}
/**
 * Resolves the configured table spec and wraps it (with the fixed description) in a
 * {@link TableDestination}. Fails fast if the spec resolves to null.
 */
@Override
public TableDestination getDestination(ValueInSingleWindow<T> element) {
  String resolvedSpec = this.tableSpec.get();
  checkArgument(resolvedSpec != null, "tableSpec can not be null");
  return new TableDestination(resolvedSpec, tableDescription);
}
/**
 * Maps a destination string straight to a {@link TableDestination}; the string serves
 * as both the table spec and its description.
 */
@Override
public TableDestination getTable(String destination) {
  TableDestination table = new TableDestination(destination, destination);
  return table;
}
/**
 * Decodes a {@link TableDestination} from the stream: first the table spec, then the
 * description (the order must mirror the corresponding encode).
 *
 * @throws IOException if either component cannot be read from the stream
 */
@Override
public TableDestination decode(InputStream inStream) throws IOException {
  final String spec = tableSpecCoder.decode(inStream);
  final String description = tableDescriptionCoder.decode(inStream);
  return new TableDestination(spec, description);
}
/**
 * Extends the base destination with the configured JSON time-partitioning, failing
 * fast if the partitioning resolves to null.
 */
@Override
public TableDestination getDestination(ValueInSingleWindow<T> element) {
  TableDestination base = super.getDestination(element);
  String timePartitioning = this.jsonTimePartitioning.get();
  checkArgument(timePartitioning != null, "jsonTimePartitioning can not be null");
  return new TableDestination(
      base.getTableSpec(), base.getTableDescription(), timePartitioning);
}
@Override public TableDestination getTable(Integer userId) { verifySideInputs(); // Each user in it's own table. return new TableDestination( "dataset-id.userid-" + userId + "$" + partitionDecorator, "table for userid " + userId); }
/**
 * Builds a per-destination table reference by suffixing the configured table name
 * with the destination value (presumably a year, per the description).
 */
@Override
public TableDestination getTable(Long destination) {
  TableReference ref =
      new TableReference()
          .setProjectId(writeProject)
          .setDatasetId(writeDataset)
          .setTableId(writeTable + "_" + destination);
  return new TableDestination(ref, "Table for year " + destination);
}
/**
 * Decodes a {@link TableDestination} including its JSON time-partitioning: the base
 * destination is read via {@code TableDestinationCoder}, then the partitioning string
 * (the order must mirror the corresponding encode).
 *
 * @throws IOException if either component cannot be read from the stream
 */
@Override
public TableDestination decode(InputStream inStream) throws IOException {
  TableDestination base = TableDestinationCoder.of().decode(inStream);
  String partitioning = timePartitioningCoder.decode(inStream);
  return new TableDestination(
      base.getTableSpec(), base.getTableDescription(), partitioning);
}
// Build the expected (destination, shard) keys this test should produce.
// NOTE(review): fragment — the enclosing method and closing braces are outside this view.
List<ShardedKey<TableDestination>> expectedPartitions = Lists.newArrayList();
if (isSingleton) {
  // Singleton case: one "SINGLETON" destination with a single shard.
  expectedPartitions.add(ShardedKey.of(new TableDestination("SINGLETON", ""), 1));
} else {
  for (int i = 0; i < numTables; ++i) {
    // Shard index j is 1-based here, unlike the 0-based table index i.
    for (int j = 1; j <= expectedNumPartitionsPerTable; ++j) {
      // NOTE(review): tableName depends only on i and could be hoisted out of the
      // inner loop — harmless in a test, left as-is.
      String tableName = String.format("project-id:dataset-id.tables%05d", i);
      expectedPartitions.add(ShardedKey.of(new TableDestination(tableName, ""), j));
      files.add(
          new WriteBundlesToFiles.Result<>(
              fileName, fileSize, new TableDestination(tableName, "")));
// For each table, derive a TableDestination (the spec doubles as the description)
// and generate one temp-table job id per partition.
// NOTE(review): fragment — loop bodies continue beyond this view.
for (int i = 0; i < numTables; ++i) {
  String tableName = String.format("project-id:dataset-id.table%05d", i);
  TableDestination tableDestination = new TableDestination(tableName, tableName);
  for (int j = 0; j < numPartitions; ++j) {
    String tempTableId = BigQueryHelpers.createJobId(jobIdToken, tableDestination, j, 0);
// For each final table, build its destination and then iterate over its temp tables.
// FIX: the inner loop previously read "for (int j = 0; i < numTempTablesPerFinalTable; ++i)",
// testing and incrementing the OUTER index i instead of j — this corrupted the outer
// iteration and left j permanently 0. It now advances j as the sibling loop on the
// analogous block does (cf. "for (int j = 0; j < numPartitions; ++j)").
// NOTE(review): fragment — loop bodies continue beyond this view.
for (int i = 0; i < numFinalTables; ++i) {
  String tableName = "project-id:dataset-id.table_" + i;
  TableDestination tableDestination = new TableDestination(tableName, "table_" + i + "_desc");
  for (int j = 0; j < numTempTablesPerFinalTable; ++j) {
    TableReference tempTable =
// Map each of five ids to its own destination table (empty description).
// NOTE(review): fragment — the loop body continues beyond this view.
for (int i = 0; i < 5; i++) {
  TableDestination destination =
      new TableDestination("project-id:dataset-id" + ".table-id-" + i, "");
  targetTables.put(i, destination);