@Override
protected WritableTypeInfo<?>[] getTestData() {
    return new WritableTypeInfo<?>[] {
        new WritableTypeInfo<>(TestClass.class),
        new WritableTypeInfo<>(AlternateClass.class)
    };
}
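// TestClass and AlternateClass are not part of this excerpt. A minimal sketch
// of what the test data above assumes: two distinct Writable implementations.
// The names come from the snippet; the empty serialization bodies are
// illustrative only.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

class TestClass implements Writable {
    @Override public void write(DataOutput out) throws IOException {}
    @Override public void readFields(DataInput in) throws IOException {}
}

class AlternateClass implements Writable {
    @Override public void write(DataOutput out) throws IOException {}
    @Override public void readFields(DataInput in) throws IOException {}
}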
@PublicEvolving
static <T extends Writable> TypeInformation<T> getWritableTypeInfo(Class<T> typeClass) {
    if (Writable.class.isAssignableFrom(typeClass) && !typeClass.equals(Writable.class)) {
        return new WritableTypeInfo<>(typeClass);
    } else {
        throw new InvalidTypesException("The given class is no subclass of " + Writable.class.getName());
    }
}
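// Usage sketch for the factory above. This assumes the package-private method
// is visible from the call site (e.g. same package as WritableTypeInfo); the
// enclosing class name and printed output are illustrative only.
import org.apache.flink.api.common.functions.InvalidTypesException;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;

public class WritableTypeInfoUsage {
    public static void main(String[] args) {
        // A concrete Writable subclass yields a WritableTypeInfo.
        TypeInformation<LongWritable> info = WritableTypeInfo.getWritableTypeInfo(LongWritable.class);
        System.out.println(info);

        // The Writable interface itself fails the guard and is rejected.
        try {
            WritableTypeInfo.getWritableTypeInfo(Writable.class);
        } catch (InvalidTypesException e) {
            System.out.println("rejected as expected: " + e.getMessage());
        }
    }
}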
/**
 * Creates a HCatInputFormat for the given database, table, and
 * {@link org.apache.hadoop.conf.Configuration}.
 * By default, the InputFormat returns {@link org.apache.hive.hcatalog.data.HCatRecord}.
 * The return type of the InputFormat can be changed to Flink-native tuples by calling
 * {@link HCatInputFormatBase#asFlinkTuples()}.
 *
 * @param database The name of the database to read from.
 * @param table The name of the table to read.
 * @param config The Configuration for the InputFormat.
 * @throws java.io.IOException Thrown if the input could not be set or the table schema could not be read.
 */
public HCatInputFormatBase(String database, String table, Configuration config) throws IOException {
    super();
    this.configuration = config;
    HadoopUtils.mergeHadoopConf(this.configuration);

    this.hCatInputFormat = org.apache.hive.hcatalog.mapreduce.HCatInputFormat
            .setInput(this.configuration, database, table);
    this.outputSchema = org.apache.hive.hcatalog.mapreduce.HCatInputFormat
            .getTableSchema(this.configuration);

    // configure output schema of HCatFormat
    configuration.set("mapreduce.lib.hcat.output.schema", HCatUtil.serialize(outputSchema));
    // set type information (defaults to HCatRecord until asFlinkTuples() is called)
    this.resultType = new WritableTypeInfo(DefaultHCatRecord.class);
}
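// Hypothetical wiring sketch: how a concrete subclass of this base class
// could feed a batch job. The subclass name (org.apache.flink.hcatalog.java.
// HCatInputFormat) and the database/table names are assumptions here. Note
// that the constructor above resolves the table schema eagerly, so a bad
// database or table name fails at construction time, not at job runtime.
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.data.HCatRecord;

public class HCatReadJob {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        HCatInputFormat<HCatRecord> hCatInput =
                new HCatInputFormat<>("mydb", "mytable", new Configuration());

        // By default each record arrives as an HCatRecord, matching the
        // WritableTypeInfo(DefaultHCatRecord.class) result type set above.
        DataSet<HCatRecord> records = env.createInput(hCatInput);
        records.first(10).print();
    }
}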
@Test
public void testValidateTypeInfo() {
    // validate unrelated type info
    TypeExtractor.validateIfWritable(BasicTypeInfo.STRING_TYPE_INFO, String.class);

    // validate writable type info correctly
    TypeExtractor.validateIfWritable(new WritableTypeInfo<>(DirectWritable.class), DirectWritable.class);
    TypeExtractor.validateIfWritable(new WritableTypeInfo<>(ViaInterfaceExtension.class), ViaInterfaceExtension.class);
    TypeExtractor.validateIfWritable(new WritableTypeInfo<>(ViaAbstractClassExtension.class), ViaAbstractClassExtension.class);

    // incorrect case: not writable at all
    try {
        TypeExtractor.validateIfWritable(new WritableTypeInfo<>(DirectWritable.class), String.class);
        fail("should have failed with an exception");
    } catch (InvalidTypesException e) {
        // expected
    }

    // incorrect case: wrong writable
    try {
        TypeExtractor.validateIfWritable(new WritableTypeInfo<>(ViaInterfaceExtension.class), DirectWritable.class);
        fail("should have failed with an exception");
    } catch (InvalidTypesException e) {
        // expected
    }
}
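// The three helper types exercised above are not part of this excerpt. A
// plausible reconstruction, assuming each obtains Writable at a different
// level of the type hierarchy (empty bodies, as in the sketch after the
// test data above):
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// implements Writable directly
class DirectWritable implements Writable {
    @Override public void write(DataOutput out) throws IOException {}
    @Override public void readFields(DataInput in) throws IOException {}
}

// inherits Writable through an extending interface
interface ExtendedWritable extends Writable {}

class ViaInterfaceExtension implements ExtendedWritable {
    @Override public void write(DataOutput out) throws IOException {}
    @Override public void readFields(DataInput in) throws IOException {}
}

// inherits Writable through an abstract superclass
abstract class AbstractWritable implements Writable {}

class ViaAbstractClassExtension extends AbstractWritable {
    @Override public void write(DataOutput out) throws IOException {}
    @Override public void readFields(DataInput in) throws IOException {}
}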
@Test
public void testInputValidationError() {
    RichMapFunction<Writable, String> function = new RichMapFunction<Writable, String>() {
        @Override
        public String map(Writable value) throws Exception {
            return null;
        }
    };

    @SuppressWarnings("unchecked")
    TypeInformation<Writable> inType =
            (TypeInformation<Writable>) (TypeInformation<?>) new WritableTypeInfo<>(DirectWritable.class);

    try {
        TypeExtractor.getMapReturnTypes(function, inType);
        fail("exception expected");
    } catch (InvalidTypesException e) {
        // expected: the declared input type does not match the function signature
    }
}
@Test
public void testExtractFromFunction() {
    RichMapFunction<DirectWritable, DirectWritable> function = new RichMapFunction<DirectWritable, DirectWritable>() {
        @Override
        public DirectWritable map(DirectWritable value) throws Exception {
            return null;
        }
    };

    TypeInformation<DirectWritable> outType =
            TypeExtractor.getMapReturnTypes(function, new WritableTypeInfo<>(DirectWritable.class));

    assertTrue(outType instanceof WritableTypeInfo);
    assertEquals(DirectWritable.class, outType.getTypeClass());
}
@Test
public void testExtractAsPartOfPojo() {
    PojoTypeInfo<PojoWithWritable> pojoInfo =
            (PojoTypeInfo<PojoWithWritable>) TypeExtractor.getForClass(PojoWithWritable.class);

    boolean foundWritable = false;
    for (int i = 0; i < pojoInfo.getArity(); i++) {
        PojoField field = pojoInfo.getPojoFieldAt(i);
        String name = field.getField().getName();

        if (name.equals("hadoopCitizen")) {
            if (foundWritable) {
                fail("already seen");
            }
            foundWritable = true;
            assertEquals(new WritableTypeInfo<>(DirectWritable.class), field.getTypeInformation());
            assertEquals(DirectWritable.class, field.getTypeInformation().getTypeClass());
        }
    }

    assertTrue("missed the writable type", foundWritable);
}
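// Assumed shape of the POJO under test. Only the "hadoopCitizen" field name
// and its DirectWritable type are implied by the assertions above; any other
// fields are illustrative.
public class PojoWithWritable {
    public String someString;
    public DirectWritable hadoopCitizen;
}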
private TypeInformation<?> getTypeInfoForClass(Class<?> typeClass) {
    // try to create a BasicTypeInfo first
    TypeInformation<?> typeInfo = BasicTypeInfo.getInfoFor(typeClass);

    // fall back to a WritableTypeInfo for Hadoop Writable types
    if (typeInfo == null && Writable.class.isAssignableFrom(typeClass)) {
        typeInfo = new WritableTypeInfo<>(typeClass.asSubclass(Writable.class));
    }

    // may still be null; callers must handle unknown types
    return typeInfo;
}
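// Illustrative expectations for the helper above, written as a hypothetical
// call site inside the same class (the results follow from the two-step
// lookup: basic types first, then Writable subclasses, otherwise null):
void demonstrateLookup() {
    TypeInformation<?> a = getTypeInfoForClass(String.class);       // BasicTypeInfo.STRING_TYPE_INFO
    TypeInformation<?> b = getTypeInfoForClass(LongWritable.class); // a WritableTypeInfo for LongWritable
    TypeInformation<?> c = getTypeInfoForClass(Thread.class);       // null: neither basic nor a Writable
}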