/** Return the value of a metadata property. */
public long getMetaLong(String key) {
  return Long.parseLong(getMetaString(key));
}
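For context, a minimal round trip through this accessor might look like the sketch below; the file name and the "row.count" key are illustrative assumptions, not values taken from the snippets in this section. DataFileWriter.setMeta(String, long) stores the value as its decimal string form, which is exactly what Long.parseLong reads back.

import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class MetaLongRoundTrip {
  public static void main(String[] args) throws IOException {
    File f = new File("meta-demo.avro"); // hypothetical file name
    try (DataFileWriter<GenericRecord> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      w.setMeta("row.count", 42L); // stored as the string "42"
      w.create(Schema.create(Schema.Type.NULL), f);
    }
    try (DataFileReader<GenericRecord> r = new DataFileReader<>(f, new GenericDatumReader<>())) {
      System.out.println(r.getMetaLong("row.count")); // prints 42
    }
  }
}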
try (final DataFileStream<GenericRecord> reader = new DataFileStream<>(in, new GenericDatumReader<GenericRecord>())) {
    final AtomicReference<String> codec = new AtomicReference<>(reader.getMetaString(DataFileConstants.CODEC));
    if (codec.get() == null) {
        codec.set(DataFileConstants.NULL_CODEC);
    }
    final Map<String, String> metadata = new HashMap<>();
    for (String metaKey : reader.getMetaKeys()) {
        metadata.put(metaKey, reader.getMetaString(metaKey));
    }
}
Codec resolveCodec() {
  String codecStr = getMetaString(DataFileConstants.CODEC);
  if (codecStr != null) {
    return CodecFactory.fromString(codecStr).createInstance();
  } else {
    return CodecFactory.nullCodec().createInstance();
  }
}
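On the write side, the avro.codec entry that resolveCodec() reads back is populated by DataFileWriter.setCodec. A minimal sketch, with an assumed file name and a record-less null schema for brevity:

import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class WriteWithCodec {
  public static void main(String[] args) throws IOException {
    try (DataFileWriter<GenericRecord> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      w.setCodec(CodecFactory.deflateCodec(6)); // records "deflate" under avro.codec
      w.create(Schema.create(Schema.Type.NULL), new File("deflate-demo.avro")); // hypothetical path
    }
  }
}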
avroMetadata.put(key, reader.getMetaString(key));
private CodecFactory getCodec(File output) throws Exception {
  try (DataFileStream<GenericRecord> reader = new DataFileStream<>(
      new FileInputStream(output), new GenericDatumReader<>())) {
    String codec = reader.getMetaString(DataFileConstants.CODEC);
    return codec == null ? CodecFactory.nullCodec() : CodecFactory.fromString(codec);
  }
}
header.schema = Schema.parse(getMetaString(DataFileConstants.SCHEMA), false);
this.codec = resolveCodec();
reader.setSchema(header.schema);
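Schema.parse(String, boolean) is Avro's internal entry point and is deprecated in recent releases; application code reading the same avro.schema metadata would typically go through Schema.Parser, as in this sketch (the helper name and path parameter are assumptions). In practice, DataFileStream.getSchema() already returns the parsed writer schema, so hand-parsing the metadata string is rarely necessary.

import java.io.FileInputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;

public class SchemaFromMeta {
  // Hypothetical helper: parses the writer schema out of the avro.schema entry.
  static Schema readSchema(String path) throws IOException {
    try (DataFileStream<GenericRecord> reader =
        new DataFileStream<>(new FileInputStream(path), new GenericDatumReader<>())) {
      return new Schema.Parser().parse(reader.getMetaString(DataFileConstants.SCHEMA));
    }
  }
}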
final String codec = reader.getMetaString(DataFileConstants.CODEC) == null
    ? DataFileConstants.NULL_CODEC
    : reader.getMetaString(DataFileConstants.CODEC);
inputCodec = reader.getMetaString(DataFileConstants.CODEC);
if (inputCodec == null) {
  inputCodec = DataFileConstants.NULL_CODEC;
}
String thisCodec = reader.getMetaString(DataFileConstants.CODEC);
if (thisCodec == null) {
  thisCodec = DataFileConstants.NULL_CODEC;
}
@Test
public void testUseMeta() throws IOException {
  DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>());
  File f = new File(DIR.getRoot().getPath(), "testDataFileMeta.avro");
  w.setMeta("hello", "bar");
  w.create(Schema.create(Type.NULL), f);
  w.close();
  try (DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>())) {
    assertTrue(r.getMetaKeys().contains("hello"));
    assertEquals("bar", r.getMetaString("hello"));
  }
}
public static void checkMeta(DataFileStream<?> in) throws Exception {
  assertEquals(STRING_META_VALUE, in.getMetaString(STRING_KEY));
  assertEquals(LONG_META_VALUE, in.getMetaLong(LONG_KEY));
  assertTrue(Arrays.equals(BYTES_META_VALUE, in.getMeta(BYTES_KEY)));
}
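The writer-side setup that these assertions check might look like the sketch below; the key names and values are placeholder assumptions, not the suite's real fixtures. All three setMeta overloads must be called before create().

import org.apache.avro.file.DataFileWriter;

public class MetaFixtures {
  // Placeholder fixtures (assumptions, not the suite's real values).
  static final String STRING_KEY = "user.string";
  static final String STRING_META_VALUE = "hello";
  static final String LONG_KEY = "user.long";
  static final long LONG_META_VALUE = 12345L;
  static final String BYTES_KEY = "user.bytes";
  static final byte[] BYTES_META_VALUE = {1, 2, 3};

  // Must run before writer.create(); metadata lives in the file header.
  static void setMeta(DataFileWriter<?> writer) {
    writer.setMeta(STRING_KEY, STRING_META_VALUE); // stored as UTF-8 bytes
    writer.setMeta(LONG_KEY, LONG_META_VALUE);     // stored as a decimal string
    writer.setMeta(BYTES_KEY, BYTES_META_VALUE);   // stored verbatim
  }
}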
@Test
public void testCompression() throws SQLException, CompressorException, IOException {
  // remove previous test database, if any
  final File dbLocation = new File(DB_LOCATION);
  dbLocation.delete();

  // load test data to database
  final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
  Statement stmt = con.createStatement();
  try {
    stmt.execute("drop table TEST_NULL_INT");
  } catch (final SQLException sqle) {
  }
  stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");
  stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (0, NULL, 1)");
  stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, 1, 1)");

  runner.setIncomingConnection(false);
  runner.setProperty(ExecuteSQL.COMPRESSION_FORMAT, AvroUtil.CodecType.BZIP2.name());
  runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, "SELECT * FROM TEST_NULL_INT");
  runner.run();

  runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1);
  MockFlowFile flowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0);
  try (DataFileStream<GenericRecord> dfs = new DataFileStream<>(new ByteArrayInputStream(flowFile.toByteArray()), new GenericDatumReader<GenericRecord>())) {
    assertEquals(AvroUtil.CodecType.BZIP2.name().toLowerCase(), dfs.getMetaString(DataFileConstants.CODEC).toLowerCase());
  }
}
String codecName = reader.getMetaString(DataFileConstants.CODEC);
CodecFactory codec = (codecName == null)
    ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
    : CodecFactory.fromString(codecName);