/**
 * Creates a native ORC writer over {@code sink} with one column per entry in
 * {@code types}. Column names are synthesized positionally ("0", "1", ...).
 * Uses ORC format with LZ4 compression and write validation enabled.
 */
private static OrcWriter createOrcFileWriter(OrcDataSink sink, List<Type> types)
{
    // One synthetic name per column, matching its position in the type list.
    List<String> columnNames = IntStream.range(0, types.size())
            .mapToObj(String::valueOf)
            .collect(toImmutableList());

    // Limit of 0 bytes disables string min/max statistics entirely.
    OrcWriterOptions writerOptions = new OrcWriterOptions()
            .withMaxStringStatisticsLimit(new DataSize(0, BYTE))
            .withStripeMinSize(new DataSize(64, MEGABYTE))
            .withDictionaryMaxMemory(new DataSize(1, MEGABYTE));

    return new OrcWriter(
            sink,
            columnNames,
            types,
            ORC,
            LZ4,
            writerOptions,
            ImmutableMap.of(),
            UTC,
            false,
            OrcWriteValidationMode.BOTH,
            new OrcWriterStats());
}
}
/**
 * Opens {@code targetFile} and configures a native ORC writer that produces
 * DWRF-format output with the given column names/types, storage time zone,
 * and compression codec. Write validation is enabled ({@code BOTH}).
 *
 * @throws IOException if the target file cannot be opened for writing
 */
public PrestoDwrfFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
        throws IOException
{
    // If writer construction fails part-way, close the stream ourselves;
    // otherwise the file descriptor leaks (the writer only owns the stream
    // once its constructor completes).
    FileOutputStream output = new FileOutputStream(targetFile);
    try {
        writer = new OrcWriter(
                new OutputStreamOrcDataSink(output),
                columnNames,
                types,
                DWRF,
                compressionCodec.getOrcCompressionKind(),
                new OrcWriterOptions(),
                ImmutableMap.of(),
                hiveStorageTimeZone,
                false,
                BOTH,
                new OrcWriterStats());
    }
    catch (Throwable t) {
        try {
            output.close();
        }
        catch (IOException e) {
            t.addSuppressed(e);
        }
        // Precise rethrow: the try block can only throw IOException or
        // unchecked exceptions, so the declared throws clause still holds.
        throw t;
    }
}
/**
 * Opens {@code targetFile} and configures a native ORC writer that produces
 * ORC-format output with the given column names/types, storage time zone,
 * and compression codec. Write validation is enabled ({@code BOTH}).
 *
 * @throws IOException if the target file cannot be opened for writing
 */
public PrestoOrcFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
        throws IOException
{
    // If writer construction fails part-way, close the stream ourselves;
    // otherwise the file descriptor leaks (the writer only owns the stream
    // once its constructor completes).
    FileOutputStream output = new FileOutputStream(targetFile);
    try {
        writer = new OrcWriter(
                new OutputStreamOrcDataSink(output),
                columnNames,
                types,
                ORC,
                compressionCodec.getOrcCompressionKind(),
                new OrcWriterOptions(),
                ImmutableMap.of(),
                hiveStorageTimeZone,
                false,
                BOTH,
                new OrcWriterStats());
    }
    catch (Throwable t) {
        try {
            output.close();
        }
        catch (IOException e) {
            t.addSuppressed(e);
        }
        // Precise rethrow: the try block can only throw IOException or
        // unchecked exceptions, so the declared throws clause still holds.
        throw t;
    }
}
/**
 * Opens {@code targetFile} and configures a native ORC writer that produces
 * ORC-format output with the given column names/types, storage time zone,
 * and compression codec. Write validation is enabled ({@code BOTH}).
 *
 * @throws IOException if the target file cannot be opened for writing
 */
public PrestoOrcFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
        throws IOException
{
    // If writer construction fails part-way, close the stream ourselves;
    // otherwise the file descriptor leaks (the writer only owns the stream
    // once its constructor completes).
    FileOutputStream output = new FileOutputStream(targetFile);
    try {
        writer = new OrcWriter(
                new OutputStreamOrcDataSink(output),
                columnNames,
                types,
                ORC,
                compressionCodec.getOrcCompressionKind(),
                new OrcWriterOptions(),
                ImmutableMap.of(),
                hiveStorageTimeZone,
                false,
                BOTH,
                new OrcWriterStats());
    }
    catch (Throwable t) {
        try {
            output.close();
        }
        catch (IOException e) {
            t.addSuppressed(e);
        }
        // Precise rethrow: the try block can only throw IOException or
        // unchecked exceptions, so the declared throws clause still holds.
        throw t;
    }
}
/**
 * Opens {@code targetFile} and configures a native ORC writer that produces
 * DWRF-format output with the given column names/types, storage time zone,
 * and compression codec. Write validation is enabled ({@code BOTH}).
 *
 * @throws IOException if the target file cannot be opened for writing
 */
public PrestoDwrfFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
        throws IOException
{
    // If writer construction fails part-way, close the stream ourselves;
    // otherwise the file descriptor leaks (the writer only owns the stream
    // once its constructor completes).
    FileOutputStream output = new FileOutputStream(targetFile);
    try {
        writer = new OrcWriter(
                new OutputStreamOrcDataSink(output),
                columnNames,
                types,
                DWRF,
                compressionCodec.getOrcCompressionKind(),
                new OrcWriterOptions(),
                ImmutableMap.of(),
                hiveStorageTimeZone,
                false,
                BOTH,
                new OrcWriterStats());
    }
    catch (Throwable t) {
        try {
            output.close();
        }
        catch (IOException e) {
            t.addSuppressed(e);
        }
        // Precise rethrow: the try block can only throw IOException or
        // unchecked exceptions, so the declared throws clause still holds.
        throw t;
    }
}
true, BOTH, new OrcWriterStats());
true, BOTH, new OrcWriterStats());
private void assertRoundTrip(Type writeType, Type readType, List<?> writeValues, List<?> readValues, boolean verifyWithHiveReader) throws Exception OrcWriterStats stats = new OrcWriterStats(); for (Format format : formats) { if (!format.supportsType(readType)) {
private void assertRoundTrip(Type writeType, Type readType, List<?> writeValues, List<?> readValues, boolean verifyWithHiveReader) throws Exception OrcWriterStats stats = new OrcWriterStats(); for (Format format : formats) { if (!format.supportsType(readType)) {
true, validationMode, new OrcWriterStats());
true, validationMode, new OrcWriterStats());