// Spark round trip: write 100 random generic records to a local Avro file,
// read them back as Spark InternalRows, and compare row by row.
protected void writeAndValidate(Schema schema) throws IOException {
  List<Record> expected = RandomData.generateList(schema, 100, 0L);

  File testFile = temp.newFile();
  Assert.assertTrue("Delete should succeed", testFile.delete());

  try (FileAppender<Record> writer = Avro.write(Files.localOutput(testFile))
      .schema(schema)
      .named("test")
      .build()) {
    for (Record rec : expected) {
      writer.add(rec);
    }
  }

  List<InternalRow> rows;
  try (AvroIterable<InternalRow> reader = Avro.read(Files.localInput(testFile))
      .createReaderFunc(SparkAvroReader::new)
      .project(schema)
      .build()) {
    rows = Lists.newArrayList(reader);
  }

  for (int i = 0; i < expected.size(); i += 1) {
    assertEqualsUnsafe(schema.asStruct(), expected.get(i), rows.get(i));
  }
}
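A minimal sketch of how this helper is typically driven from a JUnit test; the test method name, field ids, and column names below are illustrative, not from the source.

@Test
public void testSimpleStructRoundTrip() throws IOException {
  // hypothetical schema; ids and names are made up for the example
  Schema schema = new Schema(
      Types.NestedField.required(1, "id", Types.LongType.get()),
      Types.NestedField.optional(2, "data", Types.StringType.get()));
  writeAndValidate(schema);
}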
// Generic round trip: write and read with Iceberg's generic DataWriter and
// DataReader, then compare records with DataTestHelpers.
protected void writeAndValidate(Schema schema) throws IOException {
  List<Record> expected = RandomGenericData.generate(schema, 100, 0L);

  File testFile = temp.newFile();
  Assert.assertTrue("Delete should succeed", testFile.delete());

  try (FileAppender<Record> writer = Avro.write(Files.localOutput(testFile))
      .schema(schema)
      .createWriterFunc(DataWriter::create)
      .named("test")
      .build()) {
    for (Record rec : expected) {
      writer.add(rec);
    }
  }

  List<Record> rows;
  try (AvroIterable<Record> reader = Avro.read(Files.localInput(testFile))
      .project(schema)
      .createReaderFunc(DataReader::create)
      .build()) {
    rows = Lists.newArrayList(reader);
  }

  for (int i = 0; i < expected.size(); i += 1) {
    DataTestHelpers.assertEquals(schema.asStruct(), expected.get(i), rows.get(i));
  }
}
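The same generic reader can also project a subset of the written schema, since Iceberg resolves columns by field id rather than position. A hedged sketch, assuming testFile was written as above and that field id 1 is a long "id" column:

// illustrative projection; reads only the assumed "id" column back
Schema idOnly = new Schema(Types.NestedField.required(1, "id", Types.LongType.get()));
try (AvroIterable<Record> reader = Avro.read(Files.localInput(testFile))
    .project(idOnly)
    .createReaderFunc(DataReader::create)
    .build()) {
  for (Record rec : reader) {
    // only the projected field is populated in each record
  }
}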
private CloseableIterable<Record> open(FileScanTask task) {
  InputFile input = ops.io().newInputFile(task.file().path().toString());

  // TODO: join to partition data from the manifest file
  switch (task.file().format()) {
    case AVRO:
      Avro.ReadBuilder avro = Avro.read(input)
          .project(projection)
          .createReaderFunc(DataReader::create)
          .split(task.start(), task.length());

      if (reuseContainers) {
        avro.reuseContainers();
      }

      return avro.build();

    case PARQUET:
      Parquet.ReadBuilder parquet = Parquet.read(input)
          .project(projection)
          .createReaderFunc(fileSchema -> buildReader(projection, fileSchema))
          .split(task.start(), task.length());

      if (reuseContainers) {
        parquet.reuseContainers();
      }

      return parquet.build();

    default:
      throw new UnsupportedOperationException(String.format("Cannot read %s file: %s",
          task.file().format().name(), task.file().path()));
  }
}
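A hedged sketch of how open(...) might be driven from a table scan; the table variable and the process(...) consumer are placeholders not present in the source.

try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
  for (FileScanTask task : tasks) {
    try (CloseableIterable<Record> records = open(task)) {
      for (Record record : records) {
        process(record); // placeholder consumer
      }
    }
  }
}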
// Write one record with writeSchema, then read it back projected to readSchema.
protected Record writeAndRead(String desc, Schema writeSchema, Schema readSchema, Record record)
    throws IOException {
  File file = temp.newFile(desc + ".avro");
  Assert.assertTrue("Delete should succeed", file.delete());

  try (FileAppender<Record> appender = Avro.write(Files.localOutput(file))
      .schema(writeSchema)
      .createWriterFunc(DataWriter::create)
      .build()) {
    appender.add(record);
  }

  Iterable<Record> records = Avro.read(Files.localInput(file))
      .project(readSchema)
      .createReaderFunc(DataReader::create)
      .build();

  return Iterables.getOnlyElement(records);
}
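A sketch of the kind of projection check this helper supports, assuming record was built against writeSchema; the field ids and the "data" column are illustrative.

Schema writeSchema = new Schema(
    Types.NestedField.required(0, "id", Types.LongType.get()));
Schema readSchema = new Schema(
    Types.NestedField.required(0, "id", Types.LongType.get()),
    Types.NestedField.optional(1, "data", Types.StringType.get()));

Record projected = writeAndRead("add_column", writeSchema, readSchema, record);
// a column added only on the read side should come back as null
Assert.assertNull("Added column should read as null", projected.getField("data"));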
// Build a container-reusing Avro reader over the task's split, producing Spark InternalRows.
private CloseableIterable<InternalRow> newAvroIterable(
    InputFile location, FileScanTask task, Schema readSchema) {
  return Avro.read(location)
      .reuseContainers()
      .project(readSchema)
      .split(task.start(), task.length())
      .createReaderFunc(SparkAvroReader::new)
      .build();
}
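A hedged sketch of wiring this builder into per-task reading; io, task, and readSchema are assumed to be in scope and are not defined in the source.

InputFile location = io.newInputFile(task.file().path().toString());
try (CloseableIterable<InternalRow> rows = newAvroIterable(location, task, readSchema)) {
  for (InternalRow row : rows) {
    // note: with reuseContainers(), the same InternalRow instance may be
    // reused across iterations, so copy it if it escapes this loop
  }
}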