Tabnine Logo
KeyValueCodec.<init>
Code IndexAdd Tabnine to your IDE (free)

How to use
org.apache.hadoop.hbase.codec.KeyValueCodec
constructor

Best Java code snippets using org.apache.hadoop.hbase.codec.KeyValueCodec.<init> (Showing top 15 results out of 315)

origin: apache/hbase

/**
 * Builds a cell block with the KeyValue codec and undoes it again: first with
 * no compressor, then compressed with {@code DefaultCodec} and {@code GzipCodec}.
 * @throws IOException if building or undoing the cell block fails
 */
@Test
public void testBuildCellBlock() throws IOException {
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), null);
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new DefaultCodec());
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new GzipCodec());
}
origin: apache/hbase

 /**
  * Command-line entry point for timing cell block building.
  * <p>
  * Arguments of the form {@code <COUNT-prefix><n>} or {@code <SIZE-prefix><n>}
  * override the defaults (1024 and 10240 respectively); any other argument
  * prints usage and exits.
  * @param args optional count/size overrides
  * @throws IOException if a timed cell block operation fails
  */
 public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
   if (arg.startsWith(COUNT)) {
    // substring(), not replace(): strip only the leading prefix so a value
    // that happens to contain the prefix text elsewhere is not mangled.
    count = Integer.parseInt(arg.substring(COUNT.length()));
   } else if (arg.startsWith(SIZE)) {
    size = Integer.parseInt(arg.substring(SIZE.length()));
   } else {
    usage(1); // unrecognized argument
   }
  }
  CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
  // Time the KeyValue codec alone, then stacked with each compressor.
  timerTests(builder, count, size, new KeyValueCodec(), null);
  timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
 }
}
origin: apache/hbase

/**
 * An empty cell block must serialize to zero bytes, and decoding it must
 * immediately report end-of-stream without consuming anything.
 */
@Test
public void testEmptyWorks() throws IOException {
 // Encode nothing.
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 encoder.flush();
 out.close();
 assertEquals(0, countingOut.getCount());
 // Decode it back; advance() must return false on the first call.
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 assertFalse(decoder.advance());
 in.close();
 assertEquals(0, countingIn.getCount());
}
origin: apache/hbase

/**
 * Encodes a single KeyValue and verifies the byte count written, then decodes
 * it and verifies one successful advance followed by end-of-stream, with the
 * same byte count read.
 */
@Test
public void testOne() throws IOException {
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 final KeyValue kv =
  new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
 // Expected serialized size: the KeyValue bytes plus an int-sized prefix.
 final int expectedLength = kv.getLength() + Bytes.SIZEOF_INT;
 encoder.write(kv);
 encoder.flush();
 out.close();
 assertEquals(expectedLength, countingOut.getCount());
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 // First advance() pulls in the KeyValue; the second hits the end-of-stream
 // marker and returns false.
 assertTrue(decoder.advance());
 assertFalse(decoder.advance());
 in.close();
 assertEquals(expectedLength, countingIn.getCount());
}
origin: apache/hbase

CountingOutputStream cos = new CountingOutputStream(baos);
DataOutputStream dos = new DataOutputStream(cos);
KeyValueCodec kvc = new KeyValueCodec();
Codec.Encoder encoder = kvc.getEncoder(dos);
final KeyValue kv1 =
origin: apache/hbase

 /**
  * Micro-benchmark entry point: times encode/decode of the same cells through
  * the KeyValue, Cell, and Message codecs.
  * @param args unused
  * @throws IOException if a codec round-trip fails
  */
 public static void main(String[] args) throws IOException {
  // Number of Cells encoded/decoded on each cycle.
  final int count = 100000;
  // Repetitions, so HotSpot has a chance to warm up the hot paths.
  final int cycles = 30;

  Cell [] cells = getCells(count);
  // Double the estimated payload so the buffer never has to grow mid-run.
  int initialBufferSize = 2 * getRoughSize(cells);

  // Benchmark each codec over the identical cell set.
  doCodec(new KeyValueCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new CellCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new MessageCodec(), cells, cycles, count, initialBufferSize);
 }
}
origin: org.apache.hbase/hbase-client

/**
 * Builds a cell block with the KeyValue codec and undoes it again: first with
 * no compressor, then compressed with {@code DefaultCodec} and {@code GzipCodec}.
 * @throws IOException if building or undoing the cell block fails
 */
@Test
public void testBuildCellBlock() throws IOException {
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), null);
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new DefaultCodec());
 doBuildCellBlockUndoCellBlock(this.builder, new KeyValueCodec(), new GzipCodec());
}
origin: org.apache.hbase/hbase-client

 /**
  * Command-line entry point for timing cell block building.
  * <p>
  * Arguments of the form {@code <COUNT-prefix><n>} or {@code <SIZE-prefix><n>}
  * override the defaults (1024 and 10240 respectively); any other argument
  * prints usage and exits.
  * @param args optional count/size overrides
  * @throws IOException if a timed cell block operation fails
  */
 public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
   if (arg.startsWith(COUNT)) {
    // substring(), not replace(): strip only the leading prefix so a value
    // that happens to contain the prefix text elsewhere is not mangled.
    count = Integer.parseInt(arg.substring(COUNT.length()));
   } else if (arg.startsWith(SIZE)) {
    size = Integer.parseInt(arg.substring(SIZE.length()));
   } else {
    usage(1); // unrecognized argument
   }
  }
  CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
  // Time the KeyValue codec alone, then stacked with each compressor.
  timerTests(builder, count, size, new KeyValueCodec(), null);
  timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
 }
}
origin: org.apache.hbase/hbase-common

/**
 * An empty cell block must serialize to zero bytes, and decoding it must
 * immediately report end-of-stream without consuming anything.
 */
@Test
public void testEmptyWorks() throws IOException {
 // Encode nothing.
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 encoder.flush();
 out.close();
 assertEquals(0, countingOut.getCount());
 // Decode it back; advance() must return false on the first call.
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 assertFalse(decoder.advance());
 in.close();
 assertEquals(0, countingIn.getCount());
}
origin: com.aliyun.hbase/alihbase-common

/**
 * An empty cell block must serialize to zero bytes, and decoding it must
 * immediately report end-of-stream without consuming anything.
 */
@Test
public void testEmptyWorks() throws IOException {
 // Encode nothing.
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 encoder.flush();
 out.close();
 assertEquals(0, countingOut.getCount());
 // Decode it back; advance() must return false on the first call.
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 assertFalse(decoder.advance());
 in.close();
 assertEquals(0, countingIn.getCount());
}
origin: com.aliyun.hbase/alihbase-common

/**
 * Encodes a single KeyValue and verifies the byte count written, then decodes
 * it and verifies one successful advance followed by end-of-stream, with the
 * same byte count read.
 */
@Test
public void testOne() throws IOException {
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 final KeyValue kv =
  new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
 // Expected serialized size: the KeyValue bytes plus an int-sized prefix.
 final int expectedLength = kv.getLength() + Bytes.SIZEOF_INT;
 encoder.write(kv);
 encoder.flush();
 out.close();
 assertEquals(expectedLength, countingOut.getCount());
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 // First advance() pulls in the KeyValue; the second hits the end-of-stream
 // marker and returns false.
 assertTrue(decoder.advance());
 assertFalse(decoder.advance());
 in.close();
 assertEquals(expectedLength, countingIn.getCount());
}
origin: org.apache.hbase/hbase-common

/**
 * Encodes a single KeyValue and verifies the byte count written, then decodes
 * it and verifies one successful advance followed by end-of-stream, with the
 * same byte count read.
 */
@Test
public void testOne() throws IOException {
 ByteArrayOutputStream byteSink = new ByteArrayOutputStream();
 CountingOutputStream countingOut = new CountingOutputStream(byteSink);
 DataOutputStream out = new DataOutputStream(countingOut);
 KeyValueCodec codec = new KeyValueCodec();
 Codec.Encoder encoder = codec.getEncoder(out);
 final KeyValue kv =
  new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
 // Expected serialized size: the KeyValue bytes plus an int-sized prefix.
 final int expectedLength = kv.getLength() + Bytes.SIZEOF_INT;
 encoder.write(kv);
 encoder.flush();
 out.close();
 assertEquals(expectedLength, countingOut.getCount());
 CountingInputStream countingIn =
  new CountingInputStream(new ByteArrayInputStream(byteSink.toByteArray()));
 DataInputStream in = new DataInputStream(countingIn);
 Codec.Decoder decoder = codec.getDecoder(in);
 // First advance() pulls in the KeyValue; the second hits the end-of-stream
 // marker and returns false.
 assertTrue(decoder.advance());
 assertFalse(decoder.advance());
 in.close();
 assertEquals(expectedLength, countingIn.getCount());
}
origin: com.aliyun.hbase/alihbase-common

CountingOutputStream cos = new CountingOutputStream(baos);
DataOutputStream dos = new DataOutputStream(cos);
KeyValueCodec kvc = new KeyValueCodec();
Codec.Encoder encoder = kvc.getEncoder(dos);
final KeyValue kv1 =
origin: org.apache.hbase/hbase-common

CountingOutputStream cos = new CountingOutputStream(baos);
DataOutputStream dos = new DataOutputStream(cos);
KeyValueCodec kvc = new KeyValueCodec();
Codec.Encoder encoder = kvc.getEncoder(dos);
final KeyValue kv1 =
origin: org.apache.hbase/hbase-server

 /**
  * Micro-benchmark entry point: times encode/decode of the same cells through
  * the KeyValue, Cell, and Message codecs.
  * @param args unused
  * @throws IOException if a codec round-trip fails
  */
 public static void main(String[] args) throws IOException {
  // Number of Cells encoded/decoded on each cycle.
  final int count = 100000;
  // Repetitions, so HotSpot has a chance to warm up the hot paths.
  final int cycles = 30;

  Cell [] cells = getCells(count);
  // Double the estimated payload so the buffer never has to grow mid-run.
  int initialBufferSize = 2 * getRoughSize(cells);

  // Benchmark each codec over the identical cell set.
  doCodec(new KeyValueCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new CellCodec(), cells, cycles, count, initialBufferSize);
  doCodec(new MessageCodec(), cells, cycles, count, initialBufferSize);
 }
}
org.apache.hadoop.hbase.codecKeyValueCodec<init>

Popular methods of KeyValueCodec

  • getDecoder
  • getEncoder

Popular in Java

  • Reactive rest calls using spring rest template
  • requestLocationUpdates (LocationManager)
  • setScale (BigDecimal)
  • findViewById (Activity)
  • Table (com.google.common.collect)
    A collection that associates an ordered pair of keys, called a row key and a column key, with a single value.
  • Kernel (java.awt.image)
  • Hashtable (java.util)
    A plug-in replacement for JDK1.5 java.util.Hashtable. This version is based on org.cliffc.high_scale
  • LinkedList (java.util)
    Doubly-linked list implementation of the List and Deque interfaces. Implements all optional list operations.
  • DataSource (javax.sql)
    An interface for the creation of Connection objects which represent a connection to a database. This
  • JFrame (javax.swing)
  • From CI to AI: The AI layer in your organization
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now