OutputStreamOrcDataSink

How to use OutputStreamOrcDataSink in io.prestosql.orc

Best Java code snippets using io.prestosql.orc.OutputStreamOrcDataSink (showing the top results out of 315)
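The snippets below all follow one pattern: wrap a java.io.OutputStream in an OutputStreamOrcDataSink, hand it to an OrcWriter along with column names, types, encoding, and compression, write one or more Pages, then close the writer. The condensed sketch below shows that end-to-end pattern for a single VARCHAR column; the column name "name", the NONE compression kind, the UTC storage time zone, and the absence of user metadata are illustrative choices rather than values taken from any snippet, and the same static imports as the originals (ORC, BOTH, VARCHAR, plus CompressionKind.NONE) are assumed.

// Minimal sketch: write a list of strings as one VARCHAR column to an ORC file.
// All parameter values below are example choices, not taken from the indexed snippets.
private static void writeSingleVarcharColumn(File targetFile, List<String> values)
    throws IOException
{
  OrcWriter writer = new OrcWriter(
      new OutputStreamOrcDataSink(new FileOutputStream(targetFile)),
      ImmutableList.of("name"),        // example column name
      ImmutableList.of(VARCHAR),
      ORC,                             // OrcEncoding
      NONE,                            // CompressionKind, for illustration
      new OrcWriterOptions(),
      ImmutableMap.of(),               // no user metadata
      DateTimeZone.UTC,                // example storage time zone
      false,                           // write validation disabled
      BOTH,                            // validation mode (unused when validation is off)
      new OrcWriterStats());

  BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, values.size());
  for (String value : values) {
    VARCHAR.writeSlice(blockBuilder, Slices.utf8Slice(value));
  }
  writer.write(new Page(blockBuilder.build()));
  writer.close();                      // flushes the ORC footer and closes the underlying stream
}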

origin: prestosql/presto

/**
 * Allow subclass to replace data sink implementation.
 */
protected OrcDataSink createOrcDataSink(ConnectorSession session, FileSystem fileSystem, Path path)
    throws IOException
{
  return new OutputStreamOrcDataSink(fileSystem.create(path));
}
origin: io.prestosql/presto-hive, prestosql/presto

public PrestoDwrfFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
    throws IOException
{
  writer = new OrcWriter(
      new OutputStreamOrcDataSink(new FileOutputStream(targetFile)),
      columnNames,
      types,
      DWRF,
      compressionCodec.getOrcCompressionKind(),
      new OrcWriterOptions(),
      ImmutableMap.of(),
      hiveStorageTimeZone,
      false,
      BOTH,
      new OrcWriterStats());
}
origin: prestosql/presto, io.prestosql/presto-hive

public PrestoOrcFormatWriter(File targetFile, List<String> columnNames, List<Type> types, DateTimeZone hiveStorageTimeZone, HiveCompressionCodec compressionCodec)
    throws IOException
{
  writer = new OrcWriter(
      new OutputStreamOrcDataSink(new FileOutputStream(targetFile)),
      columnNames,
      types,
      ORC,
      compressionCodec.getOrcCompressionKind(),
      new OrcWriterOptions(),
      ImmutableMap.of(),
      hiveStorageTimeZone,
      false,
      BOTH,
      new OrcWriterStats());
}
origin: io.prestosql/presto-orc, prestosql/presto

private static void writeOrcColumnPresto(File outputFile, Format format, CompressionKind compression, Type type, Iterator<?> values, OrcWriterStats stats)
    throws Exception
{
  ImmutableMap.Builder<String, String> metadata = ImmutableMap.builder();
  metadata.put("columns", "test");
  metadata.put("columns.types", createSettableStructObjectInspector("test", type).getTypeName());
  OrcWriter writer;
  writer = new OrcWriter(
      new OutputStreamOrcDataSink(new FileOutputStream(outputFile)),
      ImmutableList.of("test"),
      ImmutableList.of(type),
      format.getOrcEncoding(),
      compression,
      new OrcWriterOptions(),
      ImmutableMap.of(),
      HIVE_STORAGE_TIME_ZONE,
      true,
      BOTH,
      stats);
  BlockBuilder blockBuilder = type.createBlockBuilder(null, 1024);
  while (values.hasNext()) {
    Object value = values.next();
    writeValue(type, blockBuilder, value);
  }
  writer.write(new Page(blockBuilder.build()));
  writer.close();
  writer.validate(new FileOrcDataSource(outputFile, new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), true));
}
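Judging from this snippet, the boolean true passed to the OrcWriter constructor appears to enable write validation, and the final writer.validate(...) call then re-reads the finished file through a FileOrcDataSource to cross-check it against what was written; the other snippets simply pass false and skip that call when validation is not needed.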
origin: prestosql/presto, io.prestosql/presto-orc

new OutputStreamOrcDataSink(new FileOutputStream(tempFile.getFile())),
ImmutableList.of(STRUCT_COL_NAME),
ImmutableList.of(writerType),
origin: io.prestosql/presto-orc, prestosql/presto

TempFile tempFile = new TempFile();
OrcWriter writer = new OrcWriter(
    new OutputStreamOrcDataSink(new FileOutputStream(tempFile.getFile())),
    ImmutableList.of("test1", "test2", "test3", "test4", "test5"),
    ImmutableList.of(VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR),
io.prestosql.orc.OutputStreamOrcDataSink

Most used methods

  • <init>
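The only member the index records is the constructor. Judging from the snippets, it takes a plain java.io.OutputStream, so the sink is not tied to files; a minimal sketch, assuming an in-memory ByteArrayOutputStream is an acceptable target (illustrative, not taken from the snippets):

// The sink wraps any OutputStream; here the ORC bytes end up in an in-memory buffer.
static OrcDataSink newInMemorySink()
    throws IOException
{
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  return new OutputStreamOrcDataSink(buffer);
}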

Popular in Java

  • Reading from database using SQL prepared statement
  • runOnUiThread (Activity)
  • setContentView (Activity)
  • getSystemService (Context)
  • BorderLayout (java.awt)
    A border layout lays out a container, arranging and resizing its components to fit in five regions:
  • ConnectException (java.net)
    A ConnectException is thrown if a connection cannot be established to a remote host on a specific po
  • Time (java.sql)
    Java representation of an SQL TIME value. Provides utilities to format and parse the time's represen
  • ResourceBundle (java.util)
    ResourceBundle is an abstract class which is the superclass of classes which provide Locale-specifi
  • Handler (java.util.logging)
    A Handler object accepts a logging request and exports the desired messages to a target, for example
  • Loader (org.hibernate.loader)
    Abstract superclass of object loading (and querying) strategies. This class implements useful common
  • Top Sublime Text plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now