/** * Read column schema from binary * * @param schemaArray * @throws IOException */ public static List<ColumnSchema> readColumnSchema(byte[] schemaArray) throws IOException { // uncompress it. schemaArray = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor().unCompressByte( schemaArray); ByteArrayInputStream schemaStream = new ByteArrayInputStream(schemaArray); DataInput schemaInput = new DataInputStream(schemaStream); List<ColumnSchema> columnSchemas = new ArrayList<>(); int size = schemaInput.readShort(); for (int i = 0; i < size; i++) { ColumnSchema columnSchema = new ColumnSchema(); columnSchema.readFields(schemaInput); columnSchemas.add(columnSchema); } return columnSchemas; }
/** * Convert schema to binary */ public static byte[] convertSchemaToBinary(List<ColumnSchema> columnSchemas) throws IOException { ByteArrayOutputStream stream = new ByteArrayOutputStream(); DataOutput dataOutput = new DataOutputStream(stream); dataOutput.writeShort(columnSchemas.size()); for (ColumnSchema columnSchema : columnSchemas) { if (columnSchema.getColumnReferenceId() == null) { columnSchema.setColumnReferenceId(columnSchema.getColumnUniqueId()); } columnSchema.write(dataOutput); } byte[] byteArray = stream.toByteArray(); // Compress to reduce the size of schema return CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor().compressByte( byteArray); }
/**
 * Private constructor: registers every natively supported compressor in the
 * lookup map, keyed by its name.
 */
private CompressorFactory() {
  NativeSupportedCompressor[] supported = NativeSupportedCompressor.values();
  for (NativeSupportedCompressor entry : supported) {
    allSupportedCompressors.put(entry.getName(), entry.getCompressor());
  }
}
/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo blocklet info
 * @param eachColumnValueSize size of the each column value
 * @param filePath file from which data will be read
 */
public CompressedDimensionChunkFileBasedReaderV1(final BlockletInfo blockletInfo,
    final int[] eachColumnValueSize, final String filePath) {
  super(eachColumnValueSize, filePath, blockletInfo.getNumberOfRows());
  // V1 format files always use snappy, so the compressor is fixed here.
  this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
  this.dimensionColumnChunk = blockletInfo.getDimensionColumnChunk();
}
/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo BlockletInfo metadata for the blocklet whose measure
 *        chunks will be read
 * @param filePath file from which data will be read
 */
public CompressedMeasureChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
    final String filePath) {
  super(blockletInfo, filePath);
  // for v2 store, the compressor is always snappy
  this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
}
/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo blocklet metadata for the data to be read
 * @param eachColumnValueSize size of each column value
 * @param filePath file from which data will be read
 */
public CompressedDimensionChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
    final int[] eachColumnValueSize, final String filePath) {
  super(blockletInfo, eachColumnValueSize, filePath);
  // for v2 store, the compressor is snappy
  this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
}