/**
 * Returns the compression algorithm configured for this family's writes, falling back to
 * {@code DEFAULT_COMPRESSION} when no explicit value is stored.
 */
@Override
public Compression.Algorithm getCompressionType() {
  // Locale.ROOT: enum-constant lookup must not depend on the default locale
  // (e.g. Turkish locales map 'i' -> 'İ', breaking valueOf for names containing 'i').
  return getStringOrDefault(COMPRESSION_BYTES,
      n -> Compression.Algorithm.valueOf(n.toUpperCase(java.util.Locale.ROOT)),
      DEFAULT_COMPRESSION);
}
/**
 * Returns the compression algorithm used for compactions, falling back to the general
 * compression type ({@link #getCompressionType()}) when no compaction-specific value is stored.
 */
@Override
public Compression.Algorithm getCompactionCompressionType() {
  // Locale.ROOT avoids locale-sensitive case mapping when resolving the enum name.
  return getStringOrDefault(COMPRESSION_COMPACT_BYTES,
      n -> Compression.Algorithm.valueOf(n.toUpperCase(java.util.Locale.ROOT)),
      getCompressionType());
}
opts.compression = Compression.Algorithm.valueOf(cmd.substring(compress.length())); continue;
/**
 * @return compression type being used for the column family, or
 *         {@link Compression.Algorithm#NONE} when no compression is configured
 */
public Compression.Algorithm getCompression() {
  String n = getValue(COMPRESSION);
  if (n == null) {
    return Compression.Algorithm.NONE;
  }
  // Locale.ROOT: enum-constant lookup must not depend on the default locale
  // (e.g. Turkish locales break upper-casing of 'i').
  return Compression.Algorithm.valueOf(n.toUpperCase(java.util.Locale.ROOT));
}
/**
 * Sets the compression algorithm on the given column family descriptor.
 * The algorithm name is accepted case-insensitively, matching the case-insensitive
 * lookup the compression getters elsewhere in this codebase perform; previously a
 * lower-case name threw {@link IllegalArgumentException} (backward-compatible widening).
 */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  cd.setCompressionType(Compression.Algorithm.valueOf(algo.toUpperCase(java.util.Locale.ROOT)));
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
/**
 * Converts a {@code ColumnFamilyDescriptor} into the equivalent {@code HColumnDescriptor},
 * carrying over max versions, compression, bloom filter type and all free-form properties.
 */
private static HColumnDescriptor getHColumnDesciptor(ColumnFamilyDescriptor descriptor) {
  HColumnDescriptor converted = new HColumnDescriptor(descriptor.getName());
  converted.setMaxVersions(descriptor.getMaxVersions());
  // Bridge between the two enum types by round-tripping the constant name.
  converted.setCompressionType(
      Compression.Algorithm.valueOf(descriptor.getCompressionType().name()));
  converted.setBloomFilterType(
      org.apache.hadoop.hbase.regionserver.BloomType.valueOf(descriptor.getBloomType().name()));
  descriptor.getProperties().forEach(converted::setValue);
  return converted;
}
Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompactionCompressionType()); Assert.assertEquals(Integer.parseInt("12345678"), columnDescriptor.getTimeToLive()); Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompressionType()); Assert.assertEquals(Integer.parseInt("6"), columnDescriptor.getMaxVersions()); Assert.assertEquals(Integer.parseInt("3"), columnDescriptor.getMinVersions());
Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompactionCompressionType()); Assert.assertEquals(Integer.parseInt("12345678"), columnDescriptor.getTimeToLive()); Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompressionType()); Assert.assertEquals(Integer.parseInt("6"), columnDescriptor.getMaxVersions()); Assert.assertEquals(Integer.parseInt("3"), columnDescriptor.getMinVersions());
compr = Compression.Algorithm.valueOf( cmdLine.getOptionValue(COMPRESSION_OPTION));
this.compression = Compression.Algorithm.valueOf(cmd.substring(compress.length())); continue;
/**
 * Reads the column-family options (data block encoding, compression, bloom filter type
 * and its optional parameter, in-memory flag, encryption cipher) off the parsed command
 * line and stores them in the corresponding fields.
 */
private void parseColumnFamilyOptions(CommandLine cmd) {
  String encodingName = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = (encodingName == null) ? null : DataBlockEncoding.valueOf(encodingName);

  String compressionName = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = (compressionName == null)
      ? Compression.Algorithm.NONE
      : Compression.Algorithm.valueOf(compressionName);

  String bloomName = cmd.getOptionValue(OPT_BLOOM);
  bloomType = (bloomName == null) ? BloomType.ROW : BloomType.valueOf(bloomName);

  // Both ROWPREFIX variants require an extra parameter; they differ only in the
  // configuration key it is stored under.
  if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH
      || bloomType == BloomType.ROWPREFIX_DELIMITED) {
    if (!cmd.hasOption(OPT_BLOOM_PARAM)) {
      LOG.error("the parameter of bloom filter {} is not specified", bloomType.name());
    } else {
      String key = (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH)
          ? BloomFilterUtil.PREFIX_LENGTH_KEY
          : BloomFilterUtil.DELIMITER_KEY;
      conf.set(key, cmd.getOptionValue(OPT_BLOOM_PARAM));
    }
  }

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }
}
/** Applies the named compression algorithm to the given column family descriptor. */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  final Compression.Algorithm algorithm = Compression.Algorithm.valueOf(algo);
  cd.setCompressionType(algorithm);
}
/** Applies the named compression algorithm to the given column family descriptor. */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  final Compression.Algorithm algorithm = Compression.Algorithm.valueOf(algo);
  cd.setCompressionType(algorithm);
}
/** Applies the named compression algorithm to the given column family descriptor. */
@Override
public void setCompression(HColumnDescriptor cd, String algo) {
  final Compression.Algorithm algorithm = Compression.Algorithm.valueOf(algo);
  cd.setCompressionType(algorithm);
}