keySchemasEqual && valueSchemasEqual && Objects.equals(schema1.parameters(), schema2.parameters());
/**
 * Copies all schema metadata except the name from {@code source} into {@code builder}:
 * version, doc, parameters, optionality and — when {@code copyFields} is set — the fields.
 *
 * @param source     schema to copy from
 * @param builder    target builder, mutated in place
 * @param copyFields whether to also copy the source's fields onto the builder
 * @return the same {@code builder}, for chaining
 */
private SchemaBuilder copySchemaExcludingName(Schema source, SchemaBuilder builder, boolean copyFields) {
    builder.version(source.version());
    builder.doc(source.doc());
    final Map<String, String> sourceParams = source.parameters();
    if (sourceParams != null) {
        builder.parameters(sourceParams);
    }
    // Mirror the source's optionality explicitly rather than relying on builder defaults.
    if (source.isOptional()) {
        builder.optional();
    } else {
        builder.required();
    }
    if (copyFields) {
        source.fields().forEach(f -> builder.field(f.name(), f.schema()));
    }
    return builder;
}
}
/** * Asserts that the two given schemas are equal. * * @param fieldName * name of the field owning that schema, if it's not a top-level schema * @param actual * the actual schema * @param expected * the expected schema */ public static void assertConnectSchemasAreEqual(String fieldName, Schema actual, Schema expected) { if (!areConnectSchemasEqual(actual, expected)) { // first try failing with an assertion message that shows the actual difference assertThat(SchemaUtil.asString(actual)).describedAs("field name: " + fieldName).isEqualTo(SchemaUtil.asString(expected)); // compare schema parameters assertThat(actual.parameters()).describedAs("field '" + fieldName + "' parameters").isEqualTo(expected.parameters()); // fall-back just in case (e.g. differences of element schemas of arrays) fail("field '" + fieldName + "': " + SchemaUtil.asString(actual) + " was not equal to " + SchemaUtil.asString(expected)); } }
.field("id") .schema() .parameters(); .field("c1") .schema() .parameters(); .field("c2") .schema() .parameters(); .field("c3a") .schema() .parameters(); .field("c3b") .schema() .parameters();
/**
 * Asserts that {@code field} has the given name and that its schema matches
 * {@code expectedSchema} on name, doc, parameters, version and the given optionality.
 * Struct schemas are compared member-by-member, recursively.
 *
 * @param field          the actual field under test
 * @param fieldName      the expected field name
 * @param expectedSchema the schema the field's schema must match
 * @param optional       whether the field's schema must be optional
 */
protected void assertField(Field field, String fieldName, Schema expectedSchema, boolean optional) {
    assertThat(field.name()).isEqualTo(fieldName);
    final Schema actualSchema = field.schema();
    assertThat(actualSchema.name()).isEqualTo(expectedSchema.name());
    assertThat(actualSchema.doc()).isEqualTo(expectedSchema.doc());
    assertThat(actualSchema.parameters()).isEqualTo(expectedSchema.parameters());
    assertThat(actualSchema.version()).isEqualTo(expectedSchema.version());
    assertThat(actualSchema.isOptional()).isEqualTo(optional);
    if (expectedSchema.type() == Schema.Type.STRUCT) {
        // Recurse into struct members; each child's expected optionality comes from its own schema
        for (Field expectedField : expectedSchema.fields()) {
            assertField(actualSchema.field(expectedField.name()),
                    expectedField.name(),
                    expectedField.schema(),
                    expectedField.schema().isOptional());
        }
    }
}
.field("rating1") .schema() .parameters(); .field("rating2") .schema() .parameters(); .field("rating3") .schema() .parameters(); .field("rating4") .schema() .parameters();
.field("rating1") .schema() .parameters(); .field("rating2") .schema() .parameters(); .field("rating3") .schema() .parameters(); .field("rating4") .schema() .parameters();
/**
 * Returns the parameters of the underlying schema, or {@code null} if it has none.
 *
 * @return the schema's parameter map (may be {@code null})
 */
public Map<String, String> schemaParameters() {
    return this.schema.parameters();
}
/**
 * Converts {@code value} into the logical decimal representation described by {@code schema}.
 * Raw byte arrays are decoded via {@link Decimal#toLogical}; {@link BigDecimal} values are
 * rescaled to the schema's scale when necessary; other {@link Number}s are interpreted as the
 * unscaled value at the schema's scale; anything else is returned unchanged.
 *
 * @param schema the Decimal logical-type schema carrying the scale parameter
 * @param value  the value to convert
 * @return the converted value, or {@code value} itself if its type is not recognized
 */
static Object decimal(Schema schema, Object value) {
    if (value instanceof byte[]) {
        // Raw encoded form: delegate to Connect's logical-type conversion
        return Decimal.toLogical(schema, (byte[]) value);
    }
    if (value instanceof BigDecimal) {
        final BigDecimal decimal = (BigDecimal) value;
        final int scale = decimalScale(schema);
        // setScale without a rounding mode intentionally throws if rescaling would lose digits
        return scale == decimal.scale() ? decimal : decimal.setScale(scale);
    }
    if (value instanceof Number) {
        // Interpret the number's long value as the unscaled value at the schema's scale
        return BigDecimal.valueOf(((Number) value).longValue(), decimalScale(schema));
    }
    // Unknown representation: pass through unchanged
    return value;
}

/**
 * Parses the scale parameter of a Decimal schema; previously duplicated in both the
 * BigDecimal and Number branches above.
 * NOTE(review): assumes schema.parameters() is non-null and contains SCALE_FIELD —
 * a malformed schema would NPE / NumberFormatException here; confirm callers guarantee this.
 */
private static int decimalScale(Schema schema) {
    return Integer.parseInt(schema.parameters().get(Decimal.SCALE_FIELD));
}
/**
 * Converts {@code value} into the logical decimal representation described by {@code schema}.
 * Raw byte arrays are decoded via {@link Decimal#toLogical}; {@link BigDecimal} values are
 * rescaled to the schema's scale when necessary; anything else is returned unchanged.
 *
 * @param schema the Decimal logical-type schema carrying the scale parameter
 * @param value  the value to convert
 * @return the converted value, or {@code value} itself if its type is not recognized
 */
static Object decimal(Schema schema, Object value) {
    if (value instanceof byte[]) {
        // Raw encoded form: delegate to Connect's logical-type conversion
        return Decimal.toLogical(schema, (byte[]) value);
    }
    if (!(value instanceof BigDecimal)) {
        // Unrecognized representation: hand it back as-is
        return value;
    }
    final BigDecimal decimal = (BigDecimal) value;
    final int targetScale = Integer.parseInt(schema.parameters().get(Decimal.SCALE_FIELD));
    // setScale without a rounding mode intentionally throws if rescaling would lose digits
    return targetScale == decimal.scale() ? decimal : decimal.setScale(targetScale);
}
/**
 * Extracts the scale of a Decimal schema from its "scale" parameter.
 *
 * @param schema the Decimal logical-type schema
 * @return the parsed scale
 * @throws DataException if the parameter is absent or not a valid integer
 */
private static int scale(Schema schema) {
    // parameters() maps String -> String, so the old (String) cast was redundant
    // (this block read like decompiler output: redundant cast, "var3", else-after-throw).
    // NOTE(review): schema.parameters() itself may be null for malformed schemas and
    // would NPE here rather than raise DataException — confirm callers guarantee it.
    String scaleString = schema.parameters().get("scale");
    if (scaleString == null) {
        throw new DataException("Invalid Decimal schema: scale parameter not found.");
    }
    try {
        return Integer.parseInt(scaleString);
    } catch (NumberFormatException e) {
        throw new DataException("Invalid scale parameter found in Decimal schema: ", e);
    }
}
/**
 * Extracts the scale of a Decimal schema from its {@code SCALE_FIELD} parameter.
 *
 * @param schema the Decimal logical-type schema
 * @return the parsed scale
 * @throws DataException if the parameter is missing or cannot be parsed as an int
 */
private static int scale(Schema schema) {
    final String scaleString = schema.parameters().get(SCALE_FIELD);
    if (scaleString == null) {
        throw new DataException("Invalid Decimal schema: scale parameter not found.");
    }
    try {
        return Integer.parseInt(scaleString);
    } catch (NumberFormatException e) {
        throw new DataException("Invalid scale parameter found in Decimal schema: ", e);
    }
}
}
/**
 * Reads and parses the scale parameter of a Decimal schema.
 *
 * @param schema the Decimal logical-type schema
 * @return the parsed scale
 * @throws DataException if the schema has no parameters, the scale parameter is absent,
 *                       or the scale cannot be parsed as an integer
 */
private static int scaleInternal(Schema schema) {
    // A schema without any parameters is reported the same way as a missing scale
    if (schema.parameters() == null) {
        throw new DataException(NOT_FOUND_MESSAGE);
    }
    final String scaleString = schema.parameters().get(Decimal.SCALE_FIELD);
    if (scaleString == null) {
        throw new DataException(NOT_FOUND_MESSAGE);
    }
    try {
        return Integer.parseInt(scaleString);
    } catch (NumberFormatException e) {
        throw new DataException(NOT_PARSABLE_MESSAGE, e);
    }
}
/**
 * Verifies that {@code source} could be projected onto {@code target}: the types must be
 * equal or promotable, and the schema names and parameters must match exactly.
 *
 * @param source the schema being projected
 * @param target the schema being projected onto
 * @throws SchemaProjectorException if any of the compatibility checks fails
 */
private static void checkMaybeCompatible(Schema source, Schema target) {
    // Each guard throws, so the checks can stand as independent statements
    if (source.type() != target.type() && !isPromotable(source.type(), target.type())) {
        throw new SchemaProjectorException("Schema type mismatch. source type: " + source.type()
                + " and target type: " + target.type());
    }
    if (!Objects.equals(source.name(), target.name())) {
        throw new SchemaProjectorException("Schema name mismatch. source name: " + source.name()
                + " and target name: " + target.name());
    }
    if (!Objects.equals(source.parameters(), target.parameters())) {
        throw new SchemaProjectorException("Schema parameters not equal. source parameters: "
                + source.parameters() + " and target parameters: " + target.parameters());
    }
}
/**
 * Appends one builder field per column value and records the derived field names.
 * Each column's schema must be non-null, must carry parameters, and those parameters
 * must identify the source column.
 *
 * @param columnValues the column values to add fields for
 * @param fieldNames   output list; receives each derived field name, in order
 * @param builder      schema builder the fields are added to
 */
void addFields(List<Change.ColumnValue> columnValues, List<String> fieldNames, SchemaBuilder builder) {
    for (Change.ColumnValue column : columnValues) {
        Preconditions.checkNotNull(column.schema(), "schema() for %s cannot be null", column.columnName());
        Preconditions.checkNotNull(column.schema().parameters(), "schema().parameters() for %s cannot be null", column.columnName());
        Preconditions.checkState(
            column.schema().parameters().containsKey(Change.ColumnValue.COLUMN_NAME),
            "The schema.parameters() for field(%s) does not contain a value for %s.",
            column.columnName(),
            Change.ColumnValue.COLUMN_NAME
        );
        final String fieldName = fieldName(column);
        fieldNames.add(fieldName);
        builder.field(fieldName, column.schema());
    }
}
/**
 * For every column value: validates that its schema and schema parameters are present and
 * name the source column, then registers a field on {@code builder} and records the
 * generated field name in {@code fieldNames}.
 *
 * @param columnValues the column values to add fields for
 * @param fieldNames   output list of derived field names, one per column, in order
 * @param builder      schema builder receiving the fields
 */
void addFields(List<Change.ColumnValue> columnValues, List<String> fieldNames, SchemaBuilder builder) {
    columnValues.forEach(columnValue -> {
        Preconditions.checkNotNull(
            columnValue.schema(),
            "schema() for %s cannot be null",
            columnValue.columnName());
        Preconditions.checkNotNull(
            columnValue.schema().parameters(),
            "schema().parameters() for %s cannot be null",
            columnValue.columnName());
        Preconditions.checkState(
            columnValue.schema().parameters().containsKey(Change.ColumnValue.COLUMN_NAME),
            "The schema.parameters() for field(%s) does not contain a value for %s.",
            columnValue.columnName(),
            Change.ColumnValue.COLUMN_NAME);
        final String fieldName = fieldName(columnValue);
        fieldNames.add(fieldName);
        builder.field(fieldName, columnValue.schema());
    });
}
/**
 * Copies the basic metadata — name, version, doc and (when present) parameters — from
 * {@code source} onto {@code builder}. Fields, optionality and default value are
 * intentionally left for the caller to handle.
 *
 * @param source  schema to read the basics from
 * @param builder target builder, mutated in place
 * @return the same {@code builder}, for chaining
 */
public static SchemaBuilder copySchemaBasics(Schema source, SchemaBuilder builder) {
    builder.name(source.name())
           .version(source.version())
           .doc(source.doc());
    final Map<String, String> sourceParams = source.parameters();
    if (sourceParams != null) {
        builder.parameters(sourceParams);
    }
    return builder;
}
/**
 * Snapshots the metadata of {@code schema}: name, doc, type, default value, version,
 * parameters and optionality. For maps the key and value schemas are captured; for arrays
 * only the value schema (key is {@code null}); for structs the field schemas are captured
 * in declaration order.
 *
 * @param schema the schema to snapshot
 */
Storage(Schema schema) {
    this.name = schema.name();
    this.doc = schema.doc();
    this.type = schema.type();
    this.defaultValue = schema.defaultValue();
    this.version = schema.version();
    this.parameters = schema.parameters();
    this.isOptional = schema.isOptional();
    if (this.type == Schema.Type.MAP) {
        this.keySchema = schema.keySchema();
        this.valueSchema = schema.valueSchema();
    } else if (this.type == Schema.Type.ARRAY) {
        // Arrays carry no key schema
        this.keySchema = null;
        this.valueSchema = schema.valueSchema();
    } else if (this.type == Schema.Type.STRUCT) {
        // LinkedHashMap keeps the struct's field declaration order
        this.fieldSchemas = new LinkedHashMap<>();
        schema.fields().forEach(f -> this.fieldSchemas.put(f.name(), f.schema()));
    }
}
/**
 * Transfers every schema attribute except the name from {@code source} to {@code builder}:
 * version, doc, parameters, optionality, and optionally the fields.
 *
 * @param source     schema to read from
 * @param builder    target builder, mutated in place
 * @param copyFields whether the source's fields should be copied as well
 * @return the same {@code builder}, for chaining
 */
private SchemaBuilder copySchemaExcludingName(Schema source, SchemaBuilder builder, boolean copyFields) {
    builder.version(source.version());
    builder.doc(source.doc());
    final Map<String, String> params = source.parameters();
    if (params != null) {
        builder.parameters(params);
    }
    // Carry the source's optionality over explicitly
    if (source.isOptional()) {
        builder.optional();
    } else {
        builder.required();
    }
    if (copyFields) {
        for (org.apache.kafka.connect.data.Field sourceField : source.fields()) {
            builder.field(sourceField.name(), sourceField.schema());
        }
    }
    return builder;
}
}
/**
 * Applies the configured schema-metadata overrides (name and/or version, when set) to the
 * record's schema, leaving every other schema attribute untouched, and returns a record
 * carrying the rebuilt schema.
 *
 * @param record the record whose operating schema is updated
 * @return a record with the updated schema
 */
@Override
public R apply(R record) {
    final Schema schema = operatingSchema(record);
    requireSchema(schema, "updating schema metadata");
    final Schema.Type type = schema.type();
    final boolean isMap = type == Schema.Type.MAP;
    // Both maps and arrays carry a value schema; only maps carry a key schema
    final boolean hasValueSchema = isMap || type == Schema.Type.ARRAY;
    final Schema updatedSchema = new ConnectSchema(
            type,
            schema.isOptional(),
            schema.defaultValue(),
            schemaName != null ? schemaName : schema.name(),
            schemaVersion != null ? schemaVersion : schema.version(),
            schema.doc(),
            schema.parameters(),
            schema.fields(),
            isMap ? schema.keySchema() : null,
            hasValueSchema ? schema.valueSchema() : null
    );
    return newRecord(record, updatedSchema);
}