/**
 * Creates an HFile-backed sorted-oplog writer plus a row-level general bloom
 * filter sized for the expected key count.
 *
 * <p>Block size comes from the {@code HoplogConfig.HFILE_BLOCK_SIZE_CONF}
 * system property (default 64&nbsp;KiB); the compression algorithm comes from
 * {@code HoplogConfig.COMPRESSION} (default {@code COMPRESSION_DEFAULT}).
 *
 * @param keys expected number of keys, used to size the bloom filter
 * @throws IOException if the underlying HFile writer cannot be created
 */
public HFileSortedOplogWriter(int keys) throws IOException {
  try {
    // Default block size is 64 KiB unless overridden via system property.
    int hfileBlockSize = Integer.getInteger(
        HoplogConfig.HFILE_BLOCK_SIZE_CONF, (1 << 16));
    // valueOf throws IllegalArgumentException on an unrecognized codec name;
    // that propagates to the caller as a programming/config error.
    Algorithm compress = Algorithm.valueOf(
        System.getProperty(HoplogConfig.COMPRESSION, HoplogConfig.COMPRESSION_DEFAULT));
    writer = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fsProvider.getFS(), path)
        .withBlockSize(hfileBlockSize)
        .withCompression(compress)
        .create();
    // Row-granularity bloom filter, written alongside the HFile.
    bfw = BloomFilterFactory.createGeneralBloomAtWrite(conf, cacheConf, BloomType.ROW, keys,
        writer);
    if (logger.isDebugEnabled()) {
      // Parameterized logging: no eager string concatenation.
      logger.debug("{}Created hoplog writer with compression {}", logPrefix, compress);
    }
  } catch (IOException e) {
    if (logger.isDebugEnabled()) {
      logger.debug("{}IO Error while creating writer", logPrefix);
    }
    // Rethrow with original cause intact; creation failed, nothing to clean up
    // beyond what the factory already handles.
    throw e;
  }
}
/**
 * Writes a single key/value pair to an HFile at {@code path} using the given
 * compression codec, reads it back, and verifies the round trip.
 *
 * @param fs    filesystem to write the test file on
 * @param path  location of the temporary HFile
 * @param codec compression codec name to exercise (e.g. "gz", "lzo")
 * @throws Exception if the file cannot be written/read, or if the value read
 *                   back does not match what was written
 */
public static void doSmokeTest(FileSystem fs, Path path, String codec) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withCompression(codec)
      .create();
  try {
    writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
    writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
  } finally {
    // Close even if append fails, so the smoke test never leaks a stream.
    writer.close();
  }

  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
  byte[] key;
  try {
    reader.loadFileInfo();
    key = reader.getFirstKey();
  } finally {
    // Always release the reader, even when loadFileInfo/getFirstKey throws.
    reader.close();
  }

  if (!Bytes.toString(key).equals("testkey")) {
    throw new Exception("Read back incorrect result: " + Bytes.toStringBinary(key));
  }
}
.withPath(fs, path) .withBlockSize(blocksize) .withCompression(compress) .withDataBlockEncoder(dataBlockEncoder) .withComparator(comparator.getRawComparator())