/**
 * Static factory: opens a {@link CompressedSequentialWriter} over the given data file path.
 *
 * @param dataFilePath             path of the data file to write
 * @param offsetsPath              path of the companion chunk-offsets file
 * @param parameters               compression parameters for the writer
 * @param sstableMetadataCollector collector that accumulates sstable metadata while writing
 * @return a new writer for {@code dataFilePath}
 */
public static CompressedSequentialWriter open(String dataFilePath, String offsetsPath, CompressionParameters parameters, MetadataCollector sstableMetadataCollector)
{
    File dataFile = new File(dataFilePath);
    return new CompressedSequentialWriter(dataFile, offsetsPath, parameters, sstableMetadataCollector);
}
dataFile = new CompressedSequentialWriter(new File(getFilename()), descriptor.filenameFor(Component.COMPRESSION_INFO), new File(descriptor.filenameFor(descriptor.digestComponent)),
dataFile = new CompressedSequentialWriter(new File(getFilename()), descriptor.filenameFor(Component.COMPRESSION_INFO), new File(descriptor.filenameFor(descriptor.digestComponent)),
dataFile = new CompressedSequentialWriter(new File(getFilename()), descriptor.filenameFor(Component.COMPRESSION_INFO), new File(descriptor.filenameFor(descriptor.digestComponent)),
/**
 * Builds a writer for a big-format sstable: sets up the data file writer
 * (compressed or checksummed), the file-handle builder, the index writer and
 * the column index writer.
 *
 * @param descriptor        identifies the sstable's on-disk components
 * @param keyCount          expected number of partition keys
 * @param repairedAt        repair timestamp forwarded to the superclass
 * @param metadata          table metadata (supplies the compression params)
 * @param metadataCollector collector that accumulates sstable metadata while writing
 * @param header            serialization header forwarded to the superclass
 */
public BigTableWriter(Descriptor descriptor,
                      long keyCount,
                      long repairedAt,
                      CFMetaData metadata,
                      MetadataCollector metadataCollector,
                      SerializationHeader header)
{
    super(descriptor, keyCount, repairedAt, metadata, metadataCollector, header);
    //txn.trackNew(this); // must track before any files are created

    // Pick the data-file writer: a compressed table writes a COMPRESSION_INFO
    // companion file, an uncompressed one a CRC file; both write a digest file.
    dataFile = compression
             ? new CompressedSequentialWriter(getFilename(),
                                              descriptor.filenameFor(Component.COMPRESSION_INFO),
                                              descriptor.filenameFor(descriptor.digestComponent),
                                              writerOption,
                                              metadata.params.compression,
                                              metadataCollector)
             : new ChecksummedSequentialWriter(getFilename(),
                                               descriptor.filenameFor(Component.CRC),
                                               descriptor.filenameFor(descriptor.digestComponent),
                                               writerOption);

    // Builder for read handles on the data component; must exist before the
    // chunk cache is attached to it.
    dbuilder = new FileHandle.Builder(descriptor.filenameFor(Component.DATA)).compressed(compression);
    chunkCache.ifPresent(dbuilder::withChunkCache);

    iwriter = new IndexWriter(keyCount);

    // Column index writer wraps the already-constructed data file writer.
    columnIndexWriter = new ColumnIndex(this.header,
                                        dataFile,
                                        descriptor.version,
                                        this.observers,
                                        getRowIndexEntrySerializer().indexInfoSerializer());
}