@Override
public boolean canHandle(final Class clazz) {
  // java.lang.Class itself is explicitly excluded from reflective handling.
  if ("java.lang.Class".equals(clazz.getName())) {
    return false;
  }
  // Probe: this handler applies iff Avro reflection can derive a schema for the type.
  try {
    ReflectData.get().getSchema(clazz);
    return true;
  } catch (final RuntimeException ignored) {
    // Schema derivation failed — the type is not reflect-serializable.
    return false;
  }
}
@Override
protected boolean isRecord(Object datum) {
  // Null is never a record.
  if (datum == null) {
    return false;
  }
  // Honor the base class's decision first.
  if (super.isRecord(datum)) {
    return true;
  }
  // Containers and fixed types have their own Avro mappings — not records.
  if (datum instanceof Collection || datum instanceof Map || datum instanceof GenericFixed) {
    return false;
  }
  // Otherwise consult the reflect-derived schema: record iff its type is RECORD.
  return Schema.Type.RECORD == getSchema(datum.getClass()).getType();
}
@Override
protected boolean isRecord(Object datum) {
  if (datum == null) {
    // Null can never be treated as a record.
    return false;
  }
  if (super.isRecord(datum)) {
    // The base implementation already recognized it.
    return true;
  }
  // Collections, maps and fixed values map to other Avro types, never RECORD.
  final boolean nonRecordKind =
      datum instanceof Collection || datum instanceof Map || datum instanceof GenericFixed;
  if (nonRecordKind) {
    return false;
  }
  // Fall back to the reflectively derived schema of the runtime class.
  return getSchema(datum.getClass()).getType() == Schema.Type.RECORD;
}
@InterfaceAudience.Private
@Override
public Schema getSchema(Object t) {
  // Derive the schema reflectively from the datum's runtime class.
  final Class<?> datumClass = t.getClass();
  return ReflectData.get().getSchema(datumClass);
}
/**
 * Creates a {@code ParquetWriterFactory} for the given type. The Parquet writers will use Avro
 * to reflectively create a schema for the type and use that schema to write the columnar data.
 *
 * @param type The class of the type to write.
 */
public static <T> ParquetWriterFactory<T> forReflectRecord(Class<T> type) {
  // Resolve the Avro schema once up front; the builder lambda only captures its string form,
  // keeping the factory serializable.
  final String schemaString = ReflectData.get().getSchema(type).toString();
  final ParquetBuilder<T> builder =
      out -> createAvroParquetWriter(schemaString, ReflectData.get(), out);
  return new ParquetWriterFactory<>(builder);
}
@Override
public Schema retrieveReaderSchema(Object source) {
  // Load the Employee fixture class by name and derive its schema reflectively.
  final Class<?> employeeClass;
  try {
    employeeClass = Class.forName("org.apache.hadoop.hive.hbase.avro.Employee");
  } catch (ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
  return ReflectData.get().getSchema(employeeClass);
}
@Override
public Schema retrieveWriterSchema(Object source) {
  // Same fixture class as the reader schema: resolve by name, then reflect.
  final Class<?> employeeClass;
  try {
    employeeClass = Class.forName("org.apache.hadoop.hive.hbase.avro.Employee");
  } catch (ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
  return ReflectData.get().getSchema(employeeClass);
}
@SuppressWarnings("OptionalUsedAsFieldOrParameterType") private static <T> AvroFactory<T> fromReflective(Class<T> type, ClassLoader cl, Optional<Schema> previousSchema) { ReflectData reflectData = new ReflectData(cl); Schema newSchema = reflectData.getSchema(type); return new AvroFactory<>( reflectData, newSchema, new ReflectDatumReader<>(previousSchema.orElse(newSchema), newSchema, reflectData), new ReflectDatumWriter<>(newSchema, reflectData) ); }
private static Schema tryExtractAvroSchema(ClassLoader cl, Class<?> runtimeType) {
  // Generic records carry their schema per instance — nothing can be extracted statically.
  if (isGenericRecord(runtimeType)) {
    return null;
  }
  // Specific records use generated-code metadata; anything else falls back to reflection.
  return isSpecificRecord(runtimeType)
      ? new SpecificData(cl).getSchema(runtimeType)
      : new ReflectData(cl).getSchema(runtimeType);
}
private void testPrimitiveArray(Class<?> c, boolean blocking) throws Exception {
  final ReflectData data = new ReflectData();
  final Random random = new Random();
  final int size = 200;
  // Build a primitive array of the requested component type and derive its schema.
  final Object array = Array.newInstance(c, size);
  final Schema schema = data.getSchema(array.getClass());
  // Fill every slot with a random value of the component type.
  for (int i = 0; i < size; i++) {
    Array.set(array, i, randomFor(c, random));
  }
  checkBinary(data, schema, array, false, blocking);
}
/**
 * Decodes JSON-encoded Avro bytes into a {@code GenericRecord}, using a schema derived
 * reflectively (with nullable fields) from the entity object's class.
 *
 * @param testType  label for the test case (not used during decoding; kept for callers)
 * @param bytes     the JSON-encoded Avro payload
 * @param entityObj sample entity whose class defines the expected schema
 * @return the decoded record
 * @throws IOException if the payload cannot be decoded against the schema
 */
private <T> GenericRecord testJsonDecoder(String testType, byte[] bytes, T entityObj) throws IOException {
  ReflectData rdata = ReflectData.AllowNull.get();
  Schema schema = rdata.getSchema(entityObj.getClass());
  GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);
  // Fix: decode the bytes with an explicit charset. The original used the platform
  // default, which is wrong on non-UTF-8 systems; JSON payloads are UTF-8.
  Decoder decoder = DecoderFactory.get()
      .jsonDecoder(schema, new String(bytes, java.nio.charset.StandardCharsets.UTF_8));
  GenericRecord r = datumReader.read(null, decoder);
  return r;
}
@SuppressWarnings({ "unchecked", "rawtypes" }) public void checkStringable(Class c, String value) throws Exception { ReflectData data = new ReflectData(); Schema schema = data.getSchema(c); assertEquals ("{\"type\":\"string\",\"java-class\":\""+c.getName()+"\"}", schema.toString()); checkBinary(schema, c.getConstructor(String.class).newInstance(value)); }
/** Verifies that a {@code @AvroEncode}-annotated field induces the CustomEncoding schema prop. */
@Test
public void testAvroEncodeInducing() throws IOException {
  Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
  // Fix: the original passed the actual value first. JUnit's contract is
  // assertEquals(expected, actual); reversed arguments yield misleading failure messages.
  assertEquals(
      "{\"type\":\"record\",\"name\":\"AvroEncRecord\",\"namespace"
          + "\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[{\"name\":\"date\","
          + "\"type\":{\"type\":\"long\",\"CustomEncoding\":\"DateAsLongEncoding\"}}]}",
      schm.toString());
}
/** Wraps the reflect-derived schema for {@code type} in a union with null. */
private Schema nullableSchema(Class<?> type) {
  final Schema reflected = ReflectData.get().getSchema(type);
  return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), reflected));
}
}
@Test
public void testNoPackage() throws Exception {
  // A class in the default (unnamed) package must still round-trip its name
  // through the reflect-derived schema.
  final Class<?> noPackage = Class.forName("NoPackage");
  final Schema schema = ReflectData.get().getSchema(noPackage);
  assertEquals(noPackage.getName(), ReflectData.getClassName(schema));
}
/** Verifies the reflect-derived schema of R12: an int field and a nullable-string array. */
@Test
public void testR12() throws Exception {
  Schema s = ReflectData.get().getSchema(R12.class);
  assertEquals(Schema.Type.INT, s.getField("x").schema().getType());
  // Fix: Schema.parse(String) is deprecated; Schema.Parser is the supported API.
  Schema expectedStrings =
      new Schema.Parser().parse("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}");
  assertEquals(expectedStrings, s.getField("strings").schema());
}
@Test
public void testR10() throws Exception {
  final Schema schema = ReflectData.get().getSchema(R10.class);
  // A stringable class maps to an Avro string carrying the java-class property.
  assertEquals(Schema.Type.STRING, schema.getType());
  assertEquals(R10.class.getName(), schema.getProp("java-class"));
  // And it must survive a binary read/write round trip.
  checkReadWrite(new R10("foo"), schema);
}
/**
 * Writes every line in {@code WordCountUtil.LINES} as a Text record to
 * {@code dir}/lines.avro, creating the directory if needed.
 *
 * @param dir output directory for the Avro file
 * @throws IOException if the file cannot be created or written
 */
private void writeLinesFile(File dir) throws IOException {
  File linesFile = new File(dir, "lines.avro");
  dir.mkdirs();
  DatumWriter<Text> writer = new ReflectDatumWriter<>();
  // Fix: try-with-resources — the original leaked the DataFileWriter whenever
  // create() or append() threw, leaving the file handle open.
  try (DataFileWriter<Text> out = new DataFileWriter<>(writer)) {
    out.create(ReflectData.get().getSchema(Text.class), linesFile);
    for (String line : WordCountUtil.LINES) {
      out.append(new Text(line));
    }
  }
}
@Test
public void testReflectMatchStructure() throws SchemaValidationException {
  // Structural compatibility: the reflect-derived Circle schema must be readable
  // by the same structure under different names, using the canBeRead strategy.
  final Schema reflected = ReflectData.get().getSchema(Circle.class);
  testValidatorPasses(
      builder.canBeReadStrategy().validateAll(), circleSchemaDifferentNames, reflected);
}
@Test
public void test() throws Exception {
  // Round-trip an X instance through Avro binary and compare md5 digests of the payload.
  final Schema schema = ReflectData.get().getSchema(X.class);
  final ByteArrayOutputStream bout = new ByteArrayOutputStream();
  writeOneXAsAvro(schema, bout);
  final X record = readOneXFromAvro(schema, bout);
  final String expected = getmd5(content);
  final String actual = getmd5(record.content);
  assertEquals("md5 for result differed from input", expected, actual);
}