/**
 * Parses a raw Avro schema string into a Jackson {@link JsonNode} tree.
 *
 * @param rawSchema the Avro schema as a JSON string
 * @return the parsed JSON tree
 * @throws ValidationException if {@code rawSchema} is not parseable JSON
 */
private static JsonNode rawSchemaAsJsonNode(String rawSchema) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(rawSchema, JsonNode.class);
  } catch (IOException e) {
    throw new ValidationException(
        "Could not parse the avro record as JSON.", e);
  }
}
}
/**
 * Converts the raw Avro schema text into a Jackson JSON tree.
 *
 * @param rawSchema JSON text of an Avro schema
 * @return the root {@link JsonNode} of the parsed schema
 * @throws ValidationException when the text cannot be parsed as JSON
 */
private static JsonNode rawSchemaAsJsonNode(String rawSchema) {
  ObjectMapper jsonMapper = new ObjectMapper();
  JsonNode parsed;
  try {
    parsed = jsonMapper.readValue(rawSchema, JsonNode.class);
  } catch (IOException e) {
    throw new ValidationException(
        "Could not parse the avro record as JSON.", e);
  }
  return parsed;
}
}
/**
 * Maps a supported value class to its string type name.
 *
 * <p>Supported classes are {@code String}, {@code Integer}, and
 * {@code Long} (and their subclasses, via isAssignableFrom).
 *
 * @param type the value class to translate
 * @return the matching type-name constant
 * @throws ValidationException if the class is not a supported type
 */
public static String valuesString(Class<? extends Comparable> type) {
  // Guard-clause chain: first match wins.
  if (String.class.isAssignableFrom(type)) {
    return STRING_TYPE;
  }
  if (Integer.class.isAssignableFrom(type)) {
    return INT_TYPE;
  }
  if (Long.class.isAssignableFrom(type)) {
    return LONG_TYPE;
  }
  throw new ValidationException("Not a valid provided type: " + type);
}
}
/**
 * Parses a JSON file into an instance of {@code returnType}.
 *
 * @param file the JSON file to read
 * @param returnType the class to deserialize into
 * @return the deserialized instance
 * @throws ValidationException if the content is not valid JSON or cannot be
 *           mapped to {@code returnType}
 * @throws DatasetIOException if the file cannot be read
 */
public static <T> T parse(File file, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(file, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // Both parse and mapping failures indicate bad user-supplied JSON;
    // multi-catch replaces two identical handler bodies.
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
/**
 * Parses a JSON stream into an instance of {@code returnType}.
 *
 * <p>The stream is not closed by this method.
 *
 * @param in the input stream of JSON to read
 * @param returnType the class to deserialize into
 * @return the deserialized instance
 * @throws ValidationException if the content is not valid JSON or cannot be
 *           mapped to {@code returnType}
 * @throws DatasetIOException if the stream cannot be read
 */
public static <T> T parse(InputStream in, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(in, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // Both failures mean bad JSON input; collapse duplicated handlers.
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
/**
 * Resolves a type-name string to its value class.
 *
 * <p>A {@code null} type name defaults to {@code String}.
 *
 * @param type the type name, or {@code null} for the default
 * @return the matching value class
 * @throws ValidationException if the name is not a supported type
 */
public static Class<? extends Comparable> valuesType(@Nullable String type) {
  // null and STRING_TYPE both resolve to String, so fold them together.
  if (type == null || STRING_TYPE.equals(type)) {
    return String.class;
  }
  if (INT_TYPE.equals(type)) {
    return Integer.class;
  }
  if (LONG_TYPE.equals(type)) {
    return Long.class;
  }
  throw new ValidationException("Not a valid provided type: " + type);
}
/**
 * Parses a JSON string into an instance of {@code returnType}.
 *
 * @param json the JSON text to read
 * @param returnType the class to deserialize into
 * @return the deserialized instance
 * @throws ValidationException if the content is not valid JSON or cannot be
 *           mapped to {@code returnType}
 * @throws DatasetIOException if the underlying parser fails with I/O
 */
public static <T> T parse(String json, Class<T> returnType) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.readValue(json, returnType);
  } catch (JsonParseException | JsonMappingException e) {
    // Parse and mapping errors both mean invalid user JSON.
    throw new ValidationException("Invalid JSON", e);
  } catch (IOException e) {
    throw new DatasetIOException("Cannot initialize JSON parser", e);
  }
}
/**
 * Extracts the {@code "name"} attribute from a JSON schema string.
 *
 * @param schema the schema as a JSON string
 * @return the value of the top-level {@code "name"} field, or {@code null}
 *         if the field is absent
 * @throws ValidationException if the string cannot be parsed as JSON
 */
private String getEntityNameFromSchemaString(String schema) {
  try {
    JsonParser jp = factory.createJsonParser(schema);
    JsonNode node = mapper.readTree(jp);
    if (node.get("name") == null) {
      return null;
    }
    return node.get("name").textValue();
  } catch (IOException e) {
    // JsonParseException extends IOException and received identical
    // handling, so one catch covers both cases.
    throw new ValidationException(e);
  }
}
/**
 * Reads the top-level {@code "name"} field out of a JSON schema string.
 *
 * @param schema schema text in JSON form
 * @return the entity name, or {@code null} when no {@code "name"} field exists
 * @throws ValidationException on any JSON parse or I/O failure
 */
private String getEntityNameFromSchemaString(String schema) {
  try {
    JsonParser jp = factory.createJsonParser(schema);
    JsonNode node = mapper.readTree(jp);
    JsonNode name = node.get("name");
    // Absent name is a valid state, not an error.
    return (name == null) ? null : name.textValue();
  } catch (IOException e) {
    // JsonParseException is an IOException with identical handling;
    // the redundant second catch clause is collapsed.
    throw new ValidationException(e);
  }
}
/**
 * Precondition-style validation that throws a {@link ValidationException}.
 *
 * <p>Each argument is pre-converted with {@link String#valueOf(Object)}, so
 * {@code null} arguments format safely; as a consequence only {@code %s}
 * conversions are meaningful in {@code message}.
 *
 * @param isValid
 *          {@code true} if valid, {@code false} if an exception should be
 *          thrown
 * @param message
 *          A String message for the exception.
 * @param args
 *          values substituted into the message via {@link String#format}
 */
public static void check(boolean isValid, String message, Object... args) {
  if (isValid) {
    return;
  }
  // Stringify every argument up front so null values render as "null".
  Object[] safeArgs = new Object[args.length];
  for (int i = 0; i < args.length; i++) {
    safeArgs[i] = String.valueOf(args[i]);
  }
  // String.valueOf(message) also guards against a null format string.
  throw new ValidationException(
      String.format(String.valueOf(message), safeArgs));
}
}
/**
 * Determines the single format shared by all entries of a directory.
 *
 * @param fs the file system (unused here; part of the overridden contract)
 * @param path the directory path, used for error reporting
 * @param formats the formats detected for the directory's contents
 * @return the common format, or {@code null} when {@code formats} is empty
 * @throws ValidationException if more than one distinct format is present
 * @throws IOException declared by the overridden method
 */
@Override
Format directory(FileSystem fs, Path path, List<Format> formats)
    throws IOException {
  Format result = null;
  for (Format candidate : formats) {
    if (result == null) {
      // First entry establishes the expected format.
      result = candidate;
      continue;
    }
    if (!result.equals(candidate)) {
      throw new ValidationException(String.format(
          "Path contains multiple formats (%s, %s): %s",
          result, candidate, path));
    }
  }
  return result;
}
/**
 * Extracts the list of table names from a JSON schema string.
 *
 * @param schema the schema as a JSON string
 * @return the string values of the top-level {@code "tables"} array, or an
 *         empty list when the field is absent
 * @throws ValidationException if the string cannot be parsed as JSON
 */
private List<String> getTablesFromSchemaString(String schema) {
  try {
    JsonParser jp = factory.createJsonParser(schema);
    JsonNode node = mapper.readTree(jp);
    // Look up "tables" once instead of three separate node.get() calls.
    JsonNode tables = node.get("tables");
    if (tables == null) {
      return new ArrayList<String>();
    }
    List<String> result = new ArrayList<String>(tables.size());
    for (Iterator<JsonNode> it = tables.elements(); it.hasNext();) {
      result.add(it.next().textValue());
    }
    return result;
  } catch (IOException e) {
    // JsonParseException extends IOException and had identical handling,
    // so a single catch suffices.
    throw new ValidationException(e);
  }
}
/**
 * Reads the {@code "tables"} array of a JSON schema string into a list.
 *
 * @param schema schema text in JSON form
 * @return the table names, or an empty list when {@code "tables"} is missing
 * @throws ValidationException on any JSON parse or I/O failure
 */
private List<String> getTablesFromSchemaString(String schema) {
  try {
    JsonParser jp = factory.createJsonParser(schema);
    JsonNode node = mapper.readTree(jp);
    // Hoist the repeated node.get("tables") lookups into one variable.
    JsonNode tables = node.get("tables");
    if (tables == null) {
      return new ArrayList<String>();
    }
    List<String> result = new ArrayList<String>(tables.size());
    Iterator<JsonNode> it = tables.elements();
    while (it.hasNext()) {
      result.add(it.next().textValue());
    }
    return result;
  } catch (IOException e) {
    // JsonParseException is an IOException with the same handling;
    // the duplicate catch clause is removed.
    throw new ValidationException(e);
  }
}
/**
 * Deserializes a single column value for the named field from its byte
 * representation using the field's Avro datum reader.
 *
 * @param fieldName the entity field to deserialize
 * @param bytes the serialized column value
 * @return the deserialized Avro value
 * @throws ValidationException if the field is not in the schema or no
 *           datum reader is registered for it
 */
@Override
public Object deserializeColumnValueFromBytes(String fieldName, byte[] bytes) {
  // Validate the field exists before looking up its reader.
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  DatumReader<Object> datumReader = fieldDatumReaders.get(fieldName);
  if (datumReader == null) {
    throw new ValidationException("No datum reader for field name: "
        + fieldName);
  }
  ByteArrayInputStream byteIn = new ByteArrayInputStream(bytes);
  Decoder decoder = getColumnDecoder(field.schema(), byteIn);
  return AvroUtils.readAvroEntity(decoder, datumReader);
}
/**
 * Serializes a single column value for the named field to bytes using the
 * field's Avro datum writer.
 *
 * @param fieldName the entity field to serialize
 * @param columnValue the value to serialize
 * @return the serialized bytes
 * @throws ValidationException if the field is not in the schema or no
 *           datum writer is registered for it
 */
@Override
public byte[] serializeColumnValueToBytes(String fieldName, Object columnValue) {
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  DatumWriter<Object> datumWriter = fieldDatumWriters.get(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  if (datumWriter == null) {
    throw new ValidationException("No datum writer for field name: "
        + fieldName);
  }
  ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
  Encoder encoder = getColumnEncoder(field.schema(), byteOut);
  // Use the already-fetched, null-checked writer instead of re-fetching
  // it from the map (the original looked it up a second time).
  AvroUtils.writeAvroEntity(columnValue, encoder, datumWriter);
  return byteOut.toByteArray();
}
/**
 * Serializes the named field's value to its column byte representation.
 *
 * @param fieldName the entity field to serialize
 * @param columnValue the value to write
 * @return the encoded bytes for the column
 * @throws ValidationException if the field is unknown to the schema or has
 *           no registered datum writer
 */
@Override
public byte[] serializeColumnValueToBytes(String fieldName, Object columnValue) {
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  DatumWriter<Object> datumWriter = fieldDatumWriters.get(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  if (datumWriter == null) {
    throw new ValidationException("No datum writer for field name: "
        + fieldName);
  }
  ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
  Encoder encoder = getColumnEncoder(field.schema(), byteOut);
  // Pass the null-checked local writer; the original redundantly called
  // fieldDatumWriters.get(fieldName) a second time here.
  AvroUtils.writeAvroEntity(columnValue, encoder, datumWriter);
  return byteOut.toByteArray();
}
/**
 * Decodes the bytes of one column back into the named field's Avro value.
 *
 * @param fieldName the entity field being read
 * @param bytes the raw column bytes
 * @return the decoded value
 * @throws ValidationException when the field is unknown to the schema or
 *           has no registered datum reader
 */
@Override
public Object deserializeColumnValueFromBytes(String fieldName, byte[] bytes) {
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  DatumReader<Object> reader = fieldDatumReaders.get(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  if (reader == null) {
    throw new ValidationException("No datum reader for field name: "
        + fieldName);
  }
  // Decode straight out of an in-memory stream over the column bytes.
  Decoder decoder =
      getColumnDecoder(field.schema(), new ByteArrayInputStream(bytes));
  return AvroUtils.readAvroEntity(decoder, reader);
}
/**
 * Converts a keyAsColumn column-key byte array back into the key type the
 * field's schema expects ({@code String} or Avro {@link Utf8}).
 *
 * @param fieldName the keyAsColumn-mapped field
 * @param columnKeyBytes the raw column qualifier bytes (after the prefix)
 * @return the key as a {@code String} or {@code Utf8}
 * @throws ValidationException if the field is unknown or its schema type is
 *           neither MAP nor RECORD
 */
@Override
public CharSequence deserializeKeyAsColumnKeyFromBytes(String fieldName,
    byte[] columnKeyBytes) {
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  Schema.Type schemaType = field.schema().getType();
  switch (schemaType) {
    case MAP:
      // Honor avro.java.string=String; otherwise keep Avro's Utf8 type.
      // NOTE(review): new String(bytes) uses the platform default charset
      // while Utf8 assumes UTF-8 — confirm the serializer matches.
      String stringProp = field.schema().getProp("avro.java.string");
      if (stringProp != null && stringProp.equals("String")) {
        return new String(columnKeyBytes);
      }
      return new Utf8(columnKeyBytes);
    case RECORD:
      return new String(columnKeyBytes);
    default:
      throw new ValidationException("Unsupported type for keyAsColumn: "
          + schemaType);
  }
}
/**
 * Rebuilds a keyAsColumn key from its qualifier bytes, choosing the key
 * representation the field's Avro schema calls for.
 *
 * @param fieldName the keyAsColumn-mapped field name
 * @param columnKeyBytes the serialized key bytes
 * @return a {@code String} (records, or maps with avro.java.string=String)
 *         or a {@code Utf8} (other maps)
 * @throws ValidationException for unknown fields or unsupported schema types
 */
@Override
public CharSequence deserializeKeyAsColumnKeyFromBytes(String fieldName,
    byte[] columnKeyBytes) {
  Field field = avroSchema.getAvroSchema().getField(fieldName);
  if (field == null) {
    throw new ValidationException("Invalid field name " + fieldName
        + " for schema " + avroSchema.toString());
  }
  Schema.Type schemaType = field.schema().getType();
  if (schemaType == Schema.Type.RECORD) {
    return new String(columnKeyBytes);
  }
  if (schemaType == Schema.Type.MAP) {
    // NOTE(review): new String(bytes) decodes with the platform default
    // charset, whereas Utf8 is UTF-8 — verify against the serializer.
    String stringProp = field.schema().getProp("avro.java.string");
    boolean wantsJavaString = stringProp != null && stringProp.equals("String");
    return wantsJavaString ? new String(columnKeyBytes)
        : new Utf8(columnKeyBytes);
  }
  throw new ValidationException("Unsupported type for keyAsColumn: "
      + schemaType);
}
/**
 * Deserialize an entity field from the HBase Result.
 *
 * @param fieldMapping
 *          The FieldMapping that specifies this field's mapping type and
 *          field name.
 * @param result
 *          The HBase Result that represents a row in HBase.
 * @return The field Object we deserialized from the Result.
 * @throws ValidationException if the mapping type is not recognized
 */
public Object deserialize(FieldMapping fieldMapping, Result result) {
  String fieldName = fieldMapping.getFieldName();
  MappingType mappingType = fieldMapping.getMappingType();
  // Dispatch on mapping type; COLUMN and COUNTER share one code path.
  switch (mappingType) {
    case COLUMN:
    case COUNTER:
      return deserializeColumn(fieldMapping.getFieldName(),
          fieldMapping.getFamily(), fieldMapping.getQualifier(), result);
    case KEY_AS_COLUMN:
      return deserializeKeyAsColumn(fieldMapping.getFieldName(),
          fieldMapping.getFamily(), fieldMapping.getPrefix(), result);
    case OCC_VERSION:
      return deserializeOCCColumn(result);
    default:
      throw new ValidationException(
          "Invalid field mapping for field with name: " + fieldName);
  }
}