/**
 * Runs inside the task to deserialize column family to compression algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
  Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
      COMPRESSION_FAMILIES_CONF_KEY);
  Map<byte[], Algorithm> compressionMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}
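// Editor's sketch (not part of the original source) of driving
// createFamilyCompressionMap end to end. It assumes the conf value is
// URL-encoded "family=algorithm" pairs joined by '&', the format produced by
// the serialization side in HFileOutputFormat2; the family names and
// algorithms below are made up. Imports as in the surrounding snippets.
Configuration conf = HBaseConfiguration.create();
conf.set(COMPRESSION_FAMILIES_CONF_KEY, "cf1=gz&cf2=none");
Map<byte[], Algorithm> m = createFamilyCompressionMap(conf);
// m now maps "cf1" -> Algorithm.GZ and "cf2" -> Algorithm.NONE,
// ordered by Bytes.BYTES_COMPARATOR.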
    Compression.Algorithm.NONE.getName());
final Algorithm defaultCompression = HFileWriterImpl
    .compressionByName(defaultCompressionStr);
final boolean compactionExclude = conf.getBoolean(
    "hbase.mapreduce.hfileoutputformat.compaction.exclude", false);
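// For context: the fragment above starts mid-statement. Based on
// HFileOutputFormat2, the line it most likely continues is the following
// (the "hfile.compression" key is an assumption drawn from that class):
// String defaultCompressionStr = conf.get("hfile.compression",
//     Compression.Algorithm.NONE.getName());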
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFileContext context = new HFileContextBuilder()
      .withCompression(HFileWriterImpl.compressionByName(codec)).build();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withFileContext(context)
      .create();
  // Write any-old Cell...
  final byte[] rowKey = Bytes.toBytes("compressiontestkey");
  Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
  writer.append(c);
  writer.appendFileInfo(Bytes.toBytes("compressioninfokey"),
      Bytes.toBytes("compressioninfoval"));
  writer.close();
  Cell cc = null;
  HFile.Reader reader = HFile.createReader(fs, path, CacheConfig.DISABLED, true, conf);
  try {
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, true);
    scanner.seekTo(); // position to the start of file
    // Scanner does not do Cells yet. Do below for now till fixed.
    cc = scanner.getCell();
    if (CellComparator.getInstance().compareRows(c, cc) != 0) {
      throw new Exception("Read back incorrect result: " + c.toString()
          + " vs " + cc.toString());
    }
  } finally {
    reader.close();
  }
}
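// Hypothetical invocation of the smoke test above; the path is made up, and
// "gz" is one of the lower-case algorithm names resolved by
// HFileWriterImpl.compressionByName.
Configuration conf = HBaseConfiguration.create();
doSmokeTest(FileSystem.get(conf), new Path("/tmp/compressiontest.hfile"), "gz");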
private void metablocks(final String compress) throws Exception {
  if (cacheConf == null) {
    cacheConf = new CacheConfig(conf);
  }
  Path mFile = new Path(ROOT_DIR, "meta.hfile");
  FSDataOutputStream fout = createFSOutput(mFile);
  HFileContext meta = new HFileContextBuilder()
      .withCompression(HFileWriterImpl.compressionByName(compress))
      .withBlockSize(minBlockSize).build();
  Writer writer = HFile.getWriterFactory(conf, cacheConf)
      .withOutputStream(fout)
      .withFileContext(meta)
      .create();
  someTestingWithMetaBlock(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(mFile);
  Reader reader = HFile.createReaderFromStream(mFile, fs.open(mFile),
      this.fs.getFileStatus(mFile).getLen(), cacheConf, conf);
  reader.loadFileInfo();
  // No data -- this should return false.
  assertFalse(reader.getScanner(false, false).seekTo());
  someReadingWithMetaBlock(reader);
  fs.delete(mFile, true);
  reader.close();
  fin.close();
}
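// Usage is one call per codec under test, e.g. (codec names illustrative,
// matching the lower-case algorithm naming used throughout this section):
metablocks("none");
metablocks("gz");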
@Override
void setUp() throws Exception {
  HFileContextBuilder builder = new HFileContextBuilder()
      .withCompression(HFileWriterImpl.compressionByName(codec))
      .withBlockSize(RFILE_BLOCKSIZE);
  // Compare with equals(), not ==: reference identity on strings is a bug.
  if ("aes".equals(cipher)) {
    byte[] cipherKey = new byte[AES.KEY_LENGTH];
    new SecureRandom().nextBytes(cipherKey);
    builder.withEncryptionContext(Encryption.newContext(conf)
        .setCipher(Encryption.getCipher(conf, cipher))
        .setKey(cipherKey));
  } else if (!"none".equals(cipher)) {
    throw new IOException("Cipher " + cipher + " not supported.");
  }
  HFileContext hFileContext = builder.build();
  writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, mf)
      .withFileContext(hFileContext)
      .withComparator(CellComparator.getInstance())
      .create();
}
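// Standalone equivalent of the context built in setUp above, with literal
// values in place of the codec and RFILE_BLOCKSIZE fields supplied by the
// benchmark harness (both values are illustrative):
HFileContext ctx = new HFileContextBuilder()
    .withCompression(HFileWriterImpl.compressionByName("gz"))
    .withBlockSize(64 * 1024)
    .build();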
HFileContext context = new HFileContextBuilder()
    .withBlockSize(options.minBlockSize)
    .withCompression(HFileWriterImpl.compressionByName(options.compress))
    .build();
Writer writer = HFile.getWriterFactoryNoCache(conf)
HFileContext meta = new HFileContextBuilder()
    .withBlockSize(minBlockSize)
    .withCompression(HFileWriterImpl.compressionByName(codec))
    .build();
Writer writer = HFile.getWriterFactory(conf, cacheConf)
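// Note the two factory entry points used in these fragments:
// getWriterFactoryNoCache(conf) writes with block caching disabled, while
// getWriterFactory(conf, cacheConf) honors the supplied CacheConfig.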
/**
 * Runs inside the task to deserialize column family to compression algorithm
 * map from the configuration.
 *
 * @param conf to read the serialized values from
 * @param tableName the table whose serialized family values are read
 * @return a map from column family to the configured compression algorithm
 */
@VisibleForTesting
static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf,
    final String tableName) {
  Map<byte[], Algorithm> compressionMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  Map<String, String> tableConfigs = getTableConfigurations(conf, tableName);
  if (tableConfigs == null) {
    return compressionMap;
  }
  Map<byte[], String> stringMap = createFamilyConfValueMap(tableConfigs,
      COMPRESSION_FAMILIES_CONF_KEY);
  for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
    Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());
    compressionMap.put(e.getKey(), algorithm);
  }
  return compressionMap;
}
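// Hypothetical call site for the multi-table variant above; the table name is
// made up. Note that an unknown table (getTableConfigurations returning null)
// simply yields an empty map rather than an error.
Map<byte[], Algorithm> perFamily =
    createFamilyCompressionMap(conf, "ns:usertable");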
private HFileContext getContext(Configuration conf) {
  HFileContextBuilder contextBuilder = new HFileContextBuilder();
  String compressionType = conf.get(HCOLUMN_DESCRIPTOR_COMPRESSION_TYPE_KEY);
  if (compressionType != null) {
    contextBuilder.withCompression(HFileWriterImpl.compressionByName(compressionType));
  }
  String dataBlockEncoding = conf.get(HCOLUMN_DESCRIPTOR_DATA_BLOCK_ENCODING_KEY);
  if (dataBlockEncoding != null) {
    contextBuilder.withDataBlockEncoding(DataBlockEncoding.valueOf(dataBlockEncoding));
  }
  return contextBuilder.build();
}
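// Sketch of exercising getContext through the two conf keys it reads; the
// literal values are illustrative ("FAST_DIFF" must match a DataBlockEncoding
// enum constant exactly, since valueOf is used):
conf.set(HCOLUMN_DESCRIPTOR_COMPRESSION_TYPE_KEY, "gz");
conf.set(HCOLUMN_DESCRIPTOR_DATA_BLOCK_ENCODING_KEY, "FAST_DIFF");
HFileContext built = getContext(conf);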