/**
 * Adds a nested row (struct) field with the given name and schema.
 *
 * @param name the field name
 * @param fieldSchema the schema of the nested row
 * @return this builder, for chaining
 */
public Builder addRowField(String name, Schema fieldSchema) {
  FieldType rowType = FieldType.row(fieldSchema);
  fields.add(Field.of(name, rowType));
  return this;
}
// Maps an Avro schema type onto the corresponding Beam FieldType; a RECORD
// becomes a row whose schema is derived recursively via toSchema.
// NOTE(review): fragment — the switch is not closed within this view, so the
// remaining cases are handled elsewhere.
switch (avroSchema.getType()) { case RECORD: return Schema.FieldType.row(toSchema(avroSchema));
// Builds a schema with an INT32 field and an array-of-rows field whose
// element rows use elementSchema.
// NOTE(review): standalone excerpt — the result is not assigned here, and
// elementSchema is presumably defined in surrounding context; confirm.
Schema.builder() .addInt32Field("f_int") .addArrayField("f_arrayOfRows", Schema.FieldType.row(elementSchema)) .build();
@Test
public void testEquivalent() {
  // Two schemas containing the same fields declared in different orders must
  // not be equal, yet must still compare as equivalent.
  final Schema nestedA = Schema.builder().addStringField("yard1").addInt64Field("yard2").build();
  final Schema schemaA =
      Schema.builder()
          .addStringField("field1")
          .addInt64Field("field2")
          .addRowField("field3", nestedA)
          .addArrayField("field4", FieldType.row(nestedA))
          .addMapField("field5", FieldType.STRING, FieldType.row(nestedA))
          .build();

  // Same logical content, declared in reverse order (nested schema included).
  final Schema nestedB = Schema.builder().addInt64Field("yard2").addStringField("yard1").build();
  final Schema schemaB =
      Schema.builder()
          .addMapField("field5", FieldType.STRING, FieldType.row(nestedB))
          .addArrayField("field4", FieldType.row(nestedB))
          .addRowField("field3", nestedB)
          .addInt64Field("field2")
          .addStringField("field1")
          .build();

  assertNotEquals(schemaA, schemaB);
  assertTrue(schemaA.equivalent(schemaB));
}
/**
 * Computes the schema produced by projecting {@code inputSchema} through the given
 * {@link FieldAccessDescriptor}. If all fields are accessed, the input schema is returned
 * unchanged; otherwise the result contains the directly accessed fields followed by the
 * recursively pruned nested-row fields.
 */
static Schema getOutputSchema(Schema inputSchema, FieldAccessDescriptor fieldAccessDescriptor) {
  if (fieldAccessDescriptor.allFields()) {
    return inputSchema;
  }

  Schema.Builder output = new Schema.Builder();

  // Top-level fields that are accessed in full are copied over verbatim.
  for (int id : fieldAccessDescriptor.fieldIdsAccessed()) {
    output.addField(inputSchema.getField(id));
  }

  // Partially accessed row fields recurse to prune their nested schemas,
  // preserving each field's nullability in the output.
  for (Map.Entry<Integer, FieldAccessDescriptor> entry :
      fieldAccessDescriptor.nestedFields().entrySet()) {
    Field original = inputSchema.getField(entry.getKey());
    Schema prunedNested = getOutputSchema(original.getType().getRowSchema(), entry.getValue());
    FieldType prunedType = FieldType.row(prunedNested);
    if (original.getNullable()) {
      output.addNullableField(original.getName(), prunedType);
    } else {
      output.addField(original.getName(), prunedType);
    }
  }
  return output.build();
}
@Test
public void testCreatesRowArray() {
  // A row with an array-of-rows field should return the element rows intact.
  Schema nestedType = Stream.of(Schema.Field.of("f1_str", FieldType.STRING)).collect(toSchema());

  List<Row> data =
      Lists.newArrayList(
          Row.withSchema(nestedType).addValues("one").build(),
          Row.withSchema(nestedType).addValues("two").build(),
          Row.withSchema(nestedType).addValues("three").build());

  Schema type =
      Stream.of(Schema.Field.of("array", FieldType.array(FieldType.row(nestedType))))
          .collect(toSchema());
  Row row = Row.withSchema(type).addArray(data).build();

  assertEquals(data, row.getArray("array"));
}
@Test(expected = NonDeterministicException.class)
public void testVerifyDeterministicNestedRow() throws NonDeterministicException {
  // A nested row containing a DOUBLE field makes the coder non-deterministic,
  // so verifyDeterministic() must throw.
  Schema nested =
      Schema.builder()
          .addField("a1", FieldType.DOUBLE)
          .addField("a2", FieldType.INT64)
          .build();
  Schema schema = Schema.builder().addField("f1", FieldType.row(nested)).build();

  RowCoder coder = RowCoder.of(schema);
  coder.verifyDeterministic();
}
@Test
public void testArrayOfRow() throws Exception {
  // Round-trips a row whose single field is an array of nested rows through
  // RowCoder and checks decode(encode(row)) equals the original.
  Schema nestedSchema = Schema.builder().addInt32Field("f1_int").addStringField("f1_str").build();
  FieldType elementType = FieldType.row(nestedSchema);
  Schema schema = Schema.builder().addArrayField("f_array", elementType).build();

  Row one = Row.withSchema(nestedSchema).addValues(1, "one").build();
  Row two = Row.withSchema(nestedSchema).addValues(2, "two").build();
  Row three = Row.withSchema(nestedSchema).addValues(3, "three").build();
  Row row = Row.withSchema(schema).addArray(one, two, three).build();

  CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
@Test
public void testCreateMapWithRowValue() {
  // A row with a map field whose values are nested rows should return the map intact.
  Schema nestedType = Stream.of(Schema.Field.of("f1_str", FieldType.STRING)).collect(toSchema());

  Map<Integer, Row> data =
      ImmutableMap.<Integer, Row>builder()
          .put(1, Row.withSchema(nestedType).addValues("one").build())
          .put(2, Row.withSchema(nestedType).addValues("two").build())
          .build();

  Schema type =
      Stream.of(
              Schema.Field.of("map", FieldType.map(FieldType.INT32, FieldType.row(nestedType))))
          .collect(toSchema());
  Row row = Row.withSchema(type).addValue(data).build();

  assertEquals(data, row.getMap("map"));
}
@Test
public void testCreatesNestedRow() {
  // Builds a row containing an INT32 field and a nested row field, then reads
  // both back out.
  Schema nestedType =
      Stream.of(Schema.Field.of("f1_str", Schema.FieldType.STRING)).collect(toSchema());
  Schema type =
      Stream.of(
              Schema.Field.of("f_int", Schema.FieldType.INT32),
              Schema.Field.of("nested", Schema.FieldType.row(nestedType)))
          .collect(toSchema());

  Row nestedRow = Row.withSchema(nestedType).addValues("foobar").build();
  Row row = Row.withSchema(type).addValues(42, nestedRow).build();

  // Casts force the Object-overload of assertEquals (getInt32 returns a boxed Integer).
  assertEquals((int) 42, (Object) row.getInt32("f_int"));
  assertEquals("foobar", row.getRow("nested").getString("f1_str"));
}
/**
 * Converts a Calcite {@link RelDataType} into a Beam {@link FieldType}.
 *
 * <p>ARRAY and MULTISET both map to Beam arrays of the component type, MAP maps key and value
 * types independently, and ROW recurses through {@code toSchema}.
 */
public static FieldType toFieldType(RelDataType calciteType) {
  switch (calciteType.getSqlTypeName()) {
    case ARRAY:
    case MULTISET:
      // Both Calcite collection types become Beam arrays.
      return FieldType.array(toFieldType(calciteType.getComponentType()));
    case MAP:
      return FieldType.map(
          toFieldType(calciteType.getKeyType()), toFieldType(calciteType.getValueType()));
    case ROW:
      return FieldType.row(toSchema(calciteType));
    default:
      // Scalars: delegate to the SqlTypeName-based overload of toFieldType
      // (different parameter type — not a self-recursive call).
      return toFieldType(calciteType.getSqlTypeName());
  }
}
@Test
public void testRowTypeToJavaType() {
  // An array-of-rows field type should map to the Java type List<Row>.
  FieldType arrayOfEmptyRows = FieldType.array(FieldType.row(Schema.builder().build()));
  assertEquals(
      TypeDescriptors.lists(TypeDescriptors.rows()),
      FieldTypeDescriptors.javaTypeForFieldType(arrayOfEmptyRows));
}
@Test
public void testNestedMapsNotEquivalent() {
  // Map fields whose value-row schemas differ (INT64 vs STRING inner field)
  // make the outer schemas neither equal nor equivalent.
  Schema valueRowWithLong = Schema.builder().addInt64Field("foo").build();
  Schema valueRowWithString = Schema.builder().addStringField("foo").build();

  Schema first =
      Schema.builder()
          .addMapField("foo", FieldType.STRING, FieldType.row(valueRowWithLong))
          .build();
  Schema second =
      Schema.builder()
          .addMapField("foo", FieldType.STRING, FieldType.row(valueRowWithString))
          .build();

  assertNotEquals(first, second);
  assertFalse(first.equivalent(second));
}
}
@Override public FieldType generateFieldType(SourceOfRandomness random, GenerationStatus status) { // stop at 10 levels of nesting to avoid stack overflows FieldTypeGenerator rowFieldTypesGenerator = (nestingLevel(status) >= 10) ? PRIMITIVE_TYPES : ANY_TYPE; return FieldType.row(generateSchema(rowFieldTypesGenerator, random, status)); }
@Override public Row deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException { // Parse and convert the root object to Row as if it's a nested field with name 'root' return (Row) extractJsonNodeValue( FieldValue.of("root", FieldType.row(schema), jsonParser.readValueAsTree())); }
@Test
public void testArrayOfRowSchema() {
  // A schema field declared as array-of-rows should report its name and
  // exact array field type back.
  Schema nestedSchema = Schema.of(Field.of("f1_str", FieldType.STRING));
  FieldType arrayType = FieldType.array(FieldType.row(nestedSchema));
  Schema schema = Schema.of(Field.of("f_array", arrayType));

  Field field = schema.getField("f_array");
  assertEquals("f_array", field.getName());
  assertEquals(arrayType, field.getType());
}
@Test
public void testNestedSchema() {
  // The schema of a nested row field should be reachable through
  // getType().getRowSchema() and expose the inner field unchanged.
  Schema nestedSchema = Schema.of(Field.of("f1_str", FieldType.STRING));
  Schema schema = Schema.of(Field.of("nested", FieldType.row(nestedSchema)));

  Field inner = schema.getField("nested").getType().getRowSchema().getField("f1_str");
  assertEquals("f1_str", inner.getName());
  assertEquals(FieldType.STRING, inner.getType());
}
@Test
public void testNestedArraysNotEquivalent() {
  // Array fields whose element-row schemas differ (INT64 vs STRING inner
  // field) make the outer schemas neither equal nor equivalent.
  Schema elementRowWithLong = Schema.builder().addInt64Field("foo").build();
  Schema elementRowWithString = Schema.builder().addStringField("foo").build();

  Schema first = Schema.builder().addArrayField("foo", FieldType.row(elementRowWithLong)).build();
  Schema second =
      Schema.builder().addArrayField("foo", FieldType.row(elementRowWithString)).build();

  assertNotEquals(first, second);
  assertFalse(first.equivalent(second));
}