Tabnine Logo
CodedInputStream
Code IndexAdd Tabnine to your IDE (free)

How to use
CodedInputStream
in
org.apache.hbase.thirdparty.com.google.protobuf

Best Java code snippets using org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream (Showing top 20 results out of 315)

origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   namespaceName_ = input.readBytes();
   break;
   int rawValue = input.readEnum();
   org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
   if (value == null) {
   int length = input.readRawVarint32();
   int oldLimit = input.pushLimit(length);
   while(input.getBytesUntilLimit() > 0) {
    int rawValue = input.readEnum();
    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
    if (value == null) {
   input.popLimit(oldLimit);
   break;
origin: apache/hbase

/**
 * Parses the serialized message in {@code b} into {@code builder}, lifting protobuf's
 * hard-coded 64MB decode limit so arbitrarily large byte arrays can be read.
 * @param builder current message builder
 * @param b byte array holding a serialized message
 * @throws IOException if the bytes do not form a valid message
 */
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
 final CodedInputStream in = CodedInputStream.newInstance(b);
 // Raise the size limit to exactly the array length (default cap is 64MB).
 in.setSizeLimit(b.length);
 builder.mergeFrom(in);
 // A well-formed top-level message must end on tag 0.
 in.checkLastTagWas(0);
}
origin: apache/hbase

// Parses the next `length` bytes of codedInput into builder, temporarily raising the
// stream's size limit so messages larger than the default cap can be decoded.
// NOTE(review): statement order matters — the size counter is reset before the new
// limit is installed, presumably so bytes already consumed on this stream do not
// count against the temporary limit; confirm against CodedInputStream semantics.
public static void mergeFrom(Message.Builder builder, CodedInputStream codedInput, int length)
  throws IOException {
 codedInput.resetSizeCounter();
 // Save the caller's limit so it can be restored after the merge.
 int prevLimit = codedInput.setSizeLimit(length);
 // Fence off exactly `length` bytes so the parser stops at the message boundary.
 int limit = codedInput.pushLimit(length);
 builder.mergeFrom(codedInput);
 codedInput.popLimit(limit);
 // A well-formed top-level message ends on tag 0.
 codedInput.checkLastTagWas(0);
 // Restore the caller's previous size limit.
 codedInput.setSizeLimit(prevLimit);
}
origin: apache/hbase

/**
 * Replacement for protobuf's mergeDelimitedFrom that is not subject to the
 * hard-coded 64MB decode limit.
 * @param builder current message builder
 * @param in InputStream with delimited protobuf data
 * @throws IOException if the stream cannot be read or the payload is malformed
 */
public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
 throws IOException {
 // This used to be builder.mergeDelimitedFrom(in);
 // but is replaced to allow us to bump the protobuf size limit.
 final int firstByte = in.read();
 if (firstByte == -1) {
  return; // EOF before the length prefix: nothing to merge.
 }
 // Length prefix: a raw varint whose first byte was already consumed above.
 final int size = CodedInputStream.readRawVarint32(firstByte, in);
 // Bound the stream at `size` bytes so the parser cannot read past this message.
 final InputStream bounded = ByteStreams.limit(in, size);
 final CodedInputStream stream = CodedInputStream.newInstance(bounded);
 stream.setSizeLimit(size);
 builder.mergeFrom(stream);
 stream.checkLastTagWas(0);
}
origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x00000001;
   className_ = bs;
   parentId_ = input.readUInt64();
   break;
   procId_ = input.readUInt64();
   break;
   submittedTime_ = input.readUInt64();
   break;
   org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x00000010;
   owner_ = bs;
   int rawValue = input.readEnum();
   org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue);
   if (value == null) {
   stackId_.add(input.readUInt32());
   break;
origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   int rawValue = input.readEnum();
   org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(rawValue);
   if (value == null) {
     input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
   break;
   procId_ = input.readUInt64();
   break;
   childId_.add(input.readUInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) {
    childId_ = new java.util.ArrayList<java.lang.Long>();
    mutable_bitField0_ |= 0x00000008;
   while (input.getBytesUntilLimit() > 0) {
    childId_.add(input.readUInt64());
   input.popLimit(limit);
origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
    mutable_bitField0_ |= 0x00000001;
   timestamps_.add(input.readInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
    timestamps_ = new java.util.ArrayList<java.lang.Long>();
    mutable_bitField0_ |= 0x00000001;
   while (input.getBytesUntilLimit() > 0) {
    timestamps_.add(input.readInt64());
   input.popLimit(limit);
   break;
   canHint_ = input.readBool();
   break;
origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
    mutable_bitField0_ |= 0x00000001;
   state_.add(input.readUInt32());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
    state_ = new java.util.ArrayList<java.lang.Integer>();
    mutable_bitField0_ |= 0x00000001;
   while (input.getBytesUntilLimit() > 0) {
    state_.add(input.readUInt32());
   input.popLimit(limit);
   break;
origin: apache/hbase

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   startId_ = input.readUInt64();
   break;
   updated_.add(input.readUInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) {
    updated_ = new java.util.ArrayList<java.lang.Long>();
    mutable_bitField0_ |= 0x00000002;
   while (input.getBytesUntilLimit() > 0) {
    updated_.add(input.readUInt64());
   input.popLimit(limit);
   break;
   deleted_.add(input.readUInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
origin: apache/hbase

   .unsafeWrap(new ByteBuffByteInput(buf, 0, buf.limit()), 0, buf.limit()).newCodedInput();
cis.enableAliasing(true);
int headerSize = cis.readRawVarint32();
offset = cis.getTotalBytesRead();
Message.Builder builder = RequestHeader.newBuilder();
ProtobufUtil.mergeFrom(builder, cis, headerSize);
   throw new UnsupportedOperationException(header.getMethodName());
  builder = this.service.getRequestPrototype(md).newBuilderForType();
  cis.resetSizeCounter();
  int paramSize = cis.readRawVarint32();
  offset += cis.getTotalBytesRead();
  if (builder != null) {
   ProtobufUtil.mergeFrom(builder, cis, paramSize);
origin: apache/hbase

/**
 * Parses the serialized message in {@code bs} into {@code builder}, lifting protobuf's
 * hard-coded 64MB decode limit so large ByteStrings can be read.
 * @param builder current message builder
 * @param bs ByteString containing a serialized message
 * @throws IOException if the bytes do not form a valid message
 */
public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
 final CodedInputStream stream = bs.newCodedInput();
 // Allow the stream to consume the entire ByteString, whatever its size.
 stream.setSizeLimit(bs.size());
 builder.mergeFrom(stream);
 // A well-formed top-level message must end on tag 0.
 stream.checkLastTagWas(0);
}
origin: apache/hbase

/**
 * Reads the snapshot data manifest from the working directory.
 * @return the parsed manifest, or {@code null} if the manifest file does not exist
 * @throws CorruptedSnapshotException if the manifest is present but cannot be parsed
 * @throws IOException on other read failures
 */
private SnapshotDataManifest readDataManifest() throws IOException {
 // try-with-resources replaces the manual null-check/close in a finally block and
 // guarantees the stream is closed on every path.
 try (FSDataInputStream in = workingDirFs.open(new Path(workingDir, DATA_MANIFEST_NAME))) {
  CodedInputStream cin = CodedInputStream.newInstance(in);
  // Raise the protobuf decode limit to the configured manifest size limit.
  cin.setSizeLimit(manifestSizeLimit);
  return SnapshotDataManifest.parseFrom(cin);
 } catch (FileNotFoundException e) {
  // A missing manifest file is treated as "no manifest", not an error.
  return null;
 } catch (InvalidProtocolBufferException e) {
  throw new CorruptedSnapshotException("unable to parse data manifest " + e.getMessage(), e);
 }
}
origin: apache/hbase

CodedInputStream cis = CodedInputStream.newInstance(is);
int headerSize = cis.readRawVarint32();
Message.Builder builder = RequestHeader.newBuilder();
ProtobufUtil.mergeFrom(builder, cis, headerSize);
origin: apache/hbase

 throw new EOFException("First byte is negative at offset " + originalPosition);
size = CodedInputStream.readRawVarint32(firstByte, this.inputStream);
origin: apache/hbase

/**
 * Scans the namespace family of the meta table and loads every namespace
 * descriptor found there into the in-memory cache.
 * @throws IOException if the meta table cannot be read
 */
private void loadNamespaceIntoCache() throws IOException {
 try (Table table = masterServices.getConnection().getTable(TableName.META_TABLE_NAME);
  ResultScanner scanner = table.getScanner(HConstants.NAMESPACE_FAMILY)) {
  Result result;
  // Drain the scanner until it signals end-of-results with null.
  while ((result = scanner.next()) != null) {
   Cell cell = result.getColumnLatestCell(HConstants.NAMESPACE_FAMILY,
    HConstants.NAMESPACE_COL_DESC_QUALIFIER);
   // Decode the descriptor directly from the cell's backing array slice.
   NamespaceDescriptor ns = ProtobufUtil
    .toNamespaceDescriptor(HBaseProtos.NamespaceDescriptor.parseFrom(CodedInputStream
     .newInstance(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())));
   cache.put(ns.getName(), ns);
  }
 }
}
origin: org.apache.hbase/hbase-protocol-shaded

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x00000001;
   className_ = bs;
   parentId_ = input.readUInt64();
   break;
   procId_ = input.readUInt64();
   break;
   submittedTime_ = input.readUInt64();
   break;
   org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes();
   bitField0_ |= 0x00000010;
   owner_ = bs;
   int rawValue = input.readEnum();
   org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue);
   if (value == null) {
   stackId_.add(input.readUInt32());
   break;
origin: org.apache.hbase/hbase-protocol-shaded

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
   int rawValue = input.readEnum();
   org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(rawValue);
   if (value == null) {
     input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
   break;
   procId_ = input.readUInt64();
   break;
   childId_.add(input.readUInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) {
    childId_ = new java.util.ArrayList<java.lang.Long>();
    mutable_bitField0_ |= 0x00000008;
   while (input.getBytesUntilLimit() > 0) {
    childId_.add(input.readUInt64());
   input.popLimit(limit);
origin: org.apache.hbase/hbase-client

/**
 * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for
 * decoding buffers.
 * @param builder current message builder
 * @param in InputStream with delimited protobuf data (varint length prefix + payload)
 * @throws IOException if the stream cannot be read or the payload is malformed
 */
public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
 throws IOException {
 // This used to be builder.mergeDelimitedFrom(in);
 // but is replaced to allow us to bump the protobuf size limit.
 final int firstByte = in.read();
 // -1 means EOF before any length prefix: silently merge nothing.
 if (firstByte != -1) {
  // Length prefix: a raw varint whose first byte was already consumed above.
  final int size = CodedInputStream.readRawVarint32(firstByte, in);
  // Bound the stream at `size` bytes so the parser cannot read past this message.
  final InputStream limitedInput = ByteStreams.limit(in, size);
  final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
  codedInput.setSizeLimit(size);
  builder.mergeFrom(codedInput);
  // A well-formed top-level message must end on tag 0.
  codedInput.checkLastTagWas(0);
 }
}
origin: org.apache.hbase/hbase-protocol-shaded

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
    mutable_bitField0_ |= 0x00000001;
   timestamps_.add(input.readInt64());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
    timestamps_ = new java.util.ArrayList<java.lang.Long>();
    mutable_bitField0_ |= 0x00000001;
   while (input.getBytesUntilLimit() > 0) {
    timestamps_.add(input.readInt64());
   input.popLimit(limit);
   break;
   canHint_ = input.readBool();
   break;
origin: org.apache.hbase/hbase-protocol-shaded

boolean done = false;
while (!done) {
 int tag = input.readTag();
 switch (tag) {
  case 0:
    mutable_bitField0_ |= 0x00000001;
   state_.add(input.readUInt32());
   break;
   int length = input.readRawVarint32();
   int limit = input.pushLimit(length);
   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
    state_ = new java.util.ArrayList<java.lang.Integer>();
    mutable_bitField0_ |= 0x00000001;
   while (input.getBytesUntilLimit() > 0) {
    state_.add(input.readUInt32());
   input.popLimit(limit);
   break;
org.apache.hbase.thirdparty.com.google.protobufCodedInputStream

Most used methods

  • readRawVarint32
  • newInstance
  • popLimit
  • pushLimit
  • resetSizeCounter
  • setSizeLimit
  • checkLastTagWas
  • enableAliasing
  • getBytesUntilLimit
  • getTotalBytesRead
  • readBool
  • readBytes
  • readBool,
  • readBytes,
  • readDouble,
  • readEnum,
  • readFloat,
  • readInt32,
  • readInt64,
  • readMessage,
  • readTag,
  • readUInt32

Popular in Java

  • Start an intent from android
  • startActivity (Activity)
  • setScale (BigDecimal)
  • getContentResolver (Context)
  • NoSuchElementException (java.util)
    Thrown when trying to retrieve an element past the end of an Enumeration or Iterator.
  • BlockingQueue (java.util.concurrent)
    A java.util.Queue that additionally supports operations that wait for the queue to become non-empty
  • AtomicInteger (java.util.concurrent.atomic)
    An int value that may be updated atomically. See the java.util.concurrent.atomic package specification for details.
  • Manifest (java.util.jar)
    The Manifest class is used to obtain attribute information for a JarFile and its entries.
  • XPath (javax.xml.xpath)
    XPath provides access to the XPath evaluation environment and expressions. Evaluation of XPath expressions is supported through this interface.
  • Scheduler (org.quartz)
    This is the main interface of a Quartz Scheduler. A Scheduler maintains a registry of org.quartz.Job
  • Best plugins for Eclipse
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now