/** Example of listing table rows with schema. */ // [TARGET listTableData(TableId, Schema, TableDataListOption...)] public FieldValueList listTableDataSchemaId() { // [START ] Schema schema = Schema.of( Field.of("word", LegacySQLTypeName.STRING), Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING), Field.of("corpus_date", LegacySQLTypeName.STRING)); TableResult tableData = bigquery.listTableData( TableId.of("bigquery-public-data", "samples", "shakespeare"), schema); FieldValueList row = tableData.getValues().iterator().next(); System.out.println(row.get("word").getStringValue()); // [END ] return row; }
throw new IllegalArgumentException("Unrecognized field type '" + typeString + "'."); schemaFields.add(Field.of(fieldName, fieldType));
/**
 * Verifies that a {@link LegacySQLTypeName} round-tripped through Java serialization can still
 * be used as the type of a RECORD field that carries subfields.
 *
 * @throws Exception if serialization or deserialization fails
 */
@Test
public void testSubFieldWithClonedType() throws Exception {
  LegacySQLTypeName record = LegacySQLTypeName.RECORD;
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  // try-with-resources guarantees the streams are closed even if (de)serialization throws;
  // the original code leaked them on the exception path.
  try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
    oos.writeObject(record);
  }
  LegacySQLTypeName clonedRecord;
  try (ObjectInputStream ois =
      new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
    clonedRecord = (LegacySQLTypeName) ois.readObject();
  }
  // Must not throw: the deserialized type should be usable exactly like the original RECORD.
  Field.of("field", clonedRecord, Field.of("subfield", LegacySQLTypeName.BOOLEAN));
}
/**
 * Sample flow: ensures a single-INTEGER-column table exists, then loads CSV data into it from
 * Cloud Storage and reports whether the load job succeeded.
 *
 * @throws InterruptedException if the current thread is interrupted while waiting for the job
 * @throws TimeoutException if the job does not complete in time
 */
public static void main(String... args) throws InterruptedException, TimeoutException {
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  TableId tableId = TableId.of("dataset", "table");
  Table table = bigquery.getTable(tableId);
  if (table == null) {
    System.out.println("Creating table " + tableId);
    Field integerField = Field.of("fieldName", LegacySQLTypeName.INTEGER);
    Schema schema = Schema.of(integerField);
    table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
  }
  System.out.println("Loading data into table " + tableId);
  Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
  loadJob = loadJob.waitFor();
  if (loadJob == null) {
    // waitFor() returns null when the job no longer exists on the server; the original
    // code dereferenced it unconditionally and could throw NullPointerException here.
    System.out.println("Job no longer exists");
  } else if (loadJob.getStatus().getError() != null) {
    System.out.println("Job completed with errors");
  } else {
    System.out.println("Job succeeded");
  }
}
}
/** Example of creating a table. */ // [TARGET create(TableInfo, TableOption...)] // [VARIABLE "my_dataset_name"] // [VARIABLE "my_table_name"] // [VARIABLE "string_field"] public Table createTable(String datasetName, String tableName, String fieldName) { // [START bigquery_create_table] TableId tableId = TableId.of(datasetName, tableName); // Table field definition Field field = Field.of(fieldName, LegacySQLTypeName.STRING); // Table schema definition Schema schema = Schema.of(field); TableDefinition tableDefinition = StandardTableDefinition.of(schema); TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); Table table = bigquery.create(tableInfo); // [END bigquery_create_table] return table; }
/** Example of creating a table in the dataset with schema and time partitioning. */ // [TARGET create(String, TableDefinition, TableOption...)] // [VARIABLE “my_table”] // [VARIABLE “my_field”] public Table createTable(String tableName, String fieldName) { // [START ] Schema schema = Schema.of(Field.of(fieldName, LegacySQLTypeName.STRING)); StandardTableDefinition definition = StandardTableDefinition.newBuilder() .setSchema(schema) .setTimePartitioning(TimePartitioning.of(TimePartitioning.Type.DAY)) .build(); Table table = dataset.create(tableName, definition); // [END ] return table; } }
Field stringField = Field.of("StringField", LegacySQLTypeName.STRING);
.setCompleted(true) .setTotalRows(1) // Lies to force call of listTableData(). .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) .setErrors(ImmutableList.<BigQueryError>of()) .build();
try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); TableDefinition tableDef = StandardTableDefinition.of(schema); Table table = bigquery.create(TableInfo.newBuilder(tableId, tableDef).build());
/**
 * Creates a new table with the given schema when it does not already exist.
 *
 * @param tableName name of the desired table
 * @param schema schema of consequent table fields (name, type pairs).
 */
public void createTableIfNotExists(String tableName, Map<String, String> schema) {
  TableId tableId = TableId.of(projectId, dataset, tableName);
  if (client.getTable(tableId, FIELD_OPTIONS) != null) {
    // Table already exists; nothing to do.
    return;
  }
  List<Field> schemaFields = new ArrayList<>(schema.size());
  for (Map.Entry<String, String> column : schema.entrySet()) {
    schemaFields.add(
        Field.of(column.getKey(), LegacySQLTypeName.valueOf(column.getValue())));
  }
  createTable(tableId, Schema.of(schemaFields));
}
super.convertSchema(kafkaConnectSchema); Field topicField = Field.of(KAFKA_DATA_TOPIC_FIELD_NAME, LegacySQLTypeName.STRING); Field partitionField = Field.of(KAFKA_DATA_PARTITION_FIELD_NAME, LegacySQLTypeName.INTEGER); Field offsetField = Field.of(KAFKA_DATA_OFFSET_FIELD_NAME, LegacySQLTypeName.INTEGER); Field.Builder insertTimeBuilder = Field.newBuilder(KAFKA_DATA_INSERT_TIME_FIELD_NAME, LegacySQLTypeName.TIMESTAMP)
/**
 * Builds the nullable "kafkaData" RECORD field with "topic", "partition", "offset", and a
 * nullable "insertTime" TIMESTAMP subfield.
 */
private Field getKafkaDataField() {
  Field insertTimeField =
      Field.newBuilder("insertTime", LegacySQLTypeName.TIMESTAMP)
          .setMode(Field.Mode.NULLABLE)
          .build();
  return Field.newBuilder(
          "kafkaData",
          LegacySQLTypeName.RECORD,
          Field.of("topic", LegacySQLTypeName.STRING),
          Field.of("partition", LegacySQLTypeName.INTEGER),
          Field.of("offset", LegacySQLTypeName.INTEGER),
          insertTimeField)
      .setMode(Field.Mode.NULLABLE)
      .build();
}
}
com.google.cloud.bigquery.Schema.of(Field.of("mock field", LegacySQLTypeName.STRING));