KeyValueCodec

How to use KeyValueCodec in org.apache.hadoop.hbase.codec

Best Java code snippets using org.apache.hadoop.hbase.codec.KeyValueCodec

origin: apache/hbase

@Test
public void testEmptyWorks() throws IOException {
 ByteArrayOutputStream baos = new ByteArrayOutputStream();
 CountingOutputStream cos = new CountingOutputStream(baos);
 DataOutputStream dos = new DataOutputStream(cos);
 KeyValueCodec kvc = new KeyValueCodec();
 Codec.Encoder encoder = kvc.getEncoder(dos);
 encoder.flush();
 dos.close();
 long offset = cos.getCount();
 assertEquals(0, offset);
 CountingInputStream cis =
  new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
 DataInputStream dis = new DataInputStream(cis);
 Codec.Decoder decoder = kvc.getDecoder(dis);
 assertFalse(decoder.advance());
 dis.close();
 assertEquals(0, cis.getCount());
}
origin: apache/hbase

@Test
public void testBuildCellBlock() throws IOException {
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), null);
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new DefaultCodec());
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new GzipCodec());
}
origin: apache/hbase

 /**
  * For running a few tests of methods herein.
  * @param args
  * @throws IOException
  */
 public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
   if (arg.startsWith(COUNT)) {
    count = Integer.parseInt(arg.replace(COUNT, ""));
   } else if (arg.startsWith(SIZE)) {
    size = Integer.parseInt(arg.replace(SIZE, ""));
   } else {
    usage(1);
   }
  }
  CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
  timerTests(builder, count, size, new KeyValueCodec(), null);
  timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
 }
}
origin: apache/hbase

@Test
public void testOne() throws IOException {
 ByteArrayOutputStream baos = new ByteArrayOutputStream();
 CountingOutputStream cos = new CountingOutputStream(baos);
 DataOutputStream dos = new DataOutputStream(cos);
 KeyValueCodec kvc = new KeyValueCodec();
 Codec.Encoder encoder = kvc.getEncoder(dos);
 final KeyValue kv =
  new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
 final int length = kv.getLength() + Bytes.SIZEOF_INT;
 encoder.write(kv);
 encoder.flush();
 dos.close();
 long offset = cos.getCount();
 assertEquals(length, offset);
 CountingInputStream cis =
  new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
 DataInputStream dis = new DataInputStream(cis);
 Codec.Decoder decoder = kvc.getDecoder(dis);
 assertTrue(decoder.advance()); // First read should pull in the KV
 // Second read should trip over the end-of-stream marker and return false
 assertFalse(decoder.advance());
 dis.close();
 assertEquals(length, cis.getCount());
}
origin: apache/hbase

 public static void main(String[] args) throws IOException {
  // How many Cells to encode/decode on each cycle.
  final int count = 100000;
  // How many times to do an operation; repeat gives hotspot chance to warm up.
  final int cycles = 30;

  Cell [] cells = getCells(count);
  int size = getRoughSize(cells);
  int initialBufferSize = 2 * size; // Multiply by 2 to ensure we don't have to grow buffer

  // Test KeyValue codec.
  doCodec(new KeyValueCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new CellCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new MessageCodec(), cells, cycles, count, initialBufferSize);
 }
}
origin: apache/hbase

CountingOutputStream cos = new CountingOutputStream(baos);
DataOutputStream dos = new DataOutputStream(cos);
KeyValueCodec kvc = new KeyValueCodec();
Codec.Encoder encoder = kvc.getEncoder(dos);
final KeyValue kv1 =
 new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
// ... (encoder writes and stream close elided in this snippet)
CountingInputStream cis =
 new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
DataInputStream dis = new DataInputStream(cis);
Codec.Decoder decoder = kvc.getDecoder(dis);
assertTrue(decoder.advance());
KeyValue kv = (KeyValue)decoder.current();
org.apache.hadoop.hbase.codec.KeyValueCodec

Javadoc

Codec that does KeyValue version 1 serialization.

Encodes a Cell as a serialized KeyValue with a total-length prefix. This is how KVs were serialized in Puts, Deletes and Results pre-0.96; it is what would happen if you called the Writable#write KeyValue implementation. This encoder will fail if the passed Cell is not an old-school pre-0.96 KeyValue. It does not copy bytes when writing; it writes them directly to the passed stream.

If you wrote two KeyValues to this encoder, it would look like this in the stream:

 
length-of-KeyValue1  // a Java int holding the length of KeyValue1's backing array
KeyValue1 backing array, filled with a KeyValue serialized in its particular format
length-of-KeyValue2
KeyValue2 backing array
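
To make that layout concrete, here is a minimal sketch, modeled on the snippets on this page, that writes two KeyValues through the codec and then walks the raw stream by hand with a plain DataInputStream. The class name KeyValueCodecWireFormatDemo is made up for this example; the rest is the same HBase API the snippets use plus standard JDK streams.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueCodecWireFormatDemo {
 public static void main(String[] args) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  Codec.Encoder encoder = new KeyValueCodec().getEncoder(dos);

  KeyValue kv1 =
   new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
  KeyValue kv2 =
   new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
  encoder.write(kv1);
  encoder.write(kv2);
  encoder.flush();
  dos.close();

  // Walk the raw bytes: each cell is a 4-byte int length followed by that
  // many bytes of KeyValue serialization, exactly as described above.
  DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
  int len1 = dis.readInt();  // length-of-KeyValue1
  dis.skipBytes(len1);       // KeyValue1 backing array
  int len2 = dis.readInt();  // length-of-KeyValue2
  dis.skipBytes(len2);       // KeyValue2 backing array
  System.out.println("kv1 bytes=" + len1 + ", kv2 bytes=" + len2);
  dis.close();
 }
}

Each printed length should equal the corresponding kv.getLength(), which is why testOne above expects the total stream size to be kv.getLength() + Bytes.SIZEOF_INT.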

Most used methods

  • <init>
  • getDecoder
  • getEncoder
