boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: namespaceName_ = input.readBytes(); break; int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); if (value == null) { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue); if (value == null) { input.popLimit(oldLimit); break;
/**
 * Merges the protobuf-encoded contents of a byte array into {@code builder}, raising the
 * decoder's size limit to the array length so the hard-coded 64MB protobuf cap does not apply.
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @throws IOException if the bytes cannot be parsed as the builder's message type
 */
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
  final CodedInputStream in = CodedInputStream.newInstance(b);
  // The whole array is the message, so its length is a safe upper bound for the size limit.
  in.setSizeLimit(b.length);
  builder.mergeFrom(in);
  // A last tag of 0 means the stream was fully and cleanly consumed.
  in.checkLastTagWas(0);
}
/**
 * Merges exactly {@code length} bytes from {@code codedInput} into {@code builder}, raising
 * the stream's size limit so messages larger than protobuf's default cap can be decoded.
 * The previous size limit is restored on successful return.
 * @param builder current message builder
 * @param codedInput stream positioned at the start of the serialized message
 * @param length number of bytes the message occupies in the stream
 * @throws IOException if the bytes cannot be parsed as the builder's message type
 */
public static void mergeFrom(Message.Builder builder, CodedInputStream codedInput, int length)
    throws IOException {
  // Restart the consumed-byte counter so the new size limit applies to this message only.
  codedInput.resetSizeCounter();
  int prevLimit = codedInput.setSizeLimit(length);
  // Push a read limit so the parse cannot run past this message's bytes.
  int limit = codedInput.pushLimit(length);
  builder.mergeFrom(codedInput);
  codedInput.popLimit(limit);
  // A last tag of 0 means the message was fully and cleanly consumed.
  codedInput.checkLastTagWas(0);
  codedInput.setSizeLimit(prevLimit);
}
/**
 * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
 * buffers
 * @param builder current message builder
 * @param in InputStream with delimited protobuf data
 * @throws IOException
 */
public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
    throws IOException {
  // This used to be builder.mergeDelimitedFrom(in);
  // but is replaced to allow us to bump the protobuf size limit.
  final int firstByte = in.read();
  // -1 means EOF before the varint length prefix: no message present, merge nothing.
  if (firstByte != -1) {
    final int size = CodedInputStream.readRawVarint32(firstByte, in);
    // Cap reads at the declared message size so we never consume bytes past this message.
    final InputStream limitedInput = ByteStreams.limit(in, size);
    final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
    // Raise the decode size limit to the message's own size (may exceed the 64MB default).
    codedInput.setSizeLimit(size);
    builder.mergeFrom(codedInput);
    // A last tag of 0 means the delimited message was fully and cleanly consumed.
    codedInput.checkLastTagWas(0);
  }
}
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; className_ = bs; parentId_ = input.readUInt64(); break; procId_ = input.readUInt64(); break; submittedTime_ = input.readUInt64(); break; org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; owner_ = bs; int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue); if (value == null) { stackId_.add(input.readUInt32()); break;
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(rawValue); if (value == null) { input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); break; procId_ = input.readUInt64(); break; childId_.add(input.readUInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) { childId_ = new java.util.ArrayList<java.lang.Long>(); mutable_bitField0_ |= 0x00000008; while (input.getBytesUntilLimit() > 0) { childId_.add(input.readUInt64()); input.popLimit(limit);
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: mutable_bitField0_ |= 0x00000001; timestamps_.add(input.readInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { timestamps_ = new java.util.ArrayList<java.lang.Long>(); mutable_bitField0_ |= 0x00000001; while (input.getBytesUntilLimit() > 0) { timestamps_.add(input.readInt64()); input.popLimit(limit); break; canHint_ = input.readBool(); break;
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: mutable_bitField0_ |= 0x00000001; state_.add(input.readUInt32()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { state_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; while (input.getBytesUntilLimit() > 0) { state_.add(input.readUInt32()); input.popLimit(limit); break;
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: startId_ = input.readUInt64(); break; updated_.add(input.readUInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { updated_ = new java.util.ArrayList<java.lang.Long>(); mutable_bitField0_ |= 0x00000002; while (input.getBytesUntilLimit() > 0) { updated_.add(input.readUInt64()); input.popLimit(limit); break; deleted_.add(input.readUInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length);
.unsafeWrap(new ByteBuffByteInput(buf, 0, buf.limit()), 0, buf.limit()).newCodedInput(); cis.enableAliasing(true); int headerSize = cis.readRawVarint32(); offset = cis.getTotalBytesRead(); Message.Builder builder = RequestHeader.newBuilder(); ProtobufUtil.mergeFrom(builder, cis, headerSize); throw new UnsupportedOperationException(header.getMethodName()); builder = this.service.getRequestPrototype(md).newBuilderForType(); cis.resetSizeCounter(); int paramSize = cis.readRawVarint32(); offset += cis.getTotalBytesRead(); if (builder != null) { ProtobufUtil.mergeFrom(builder, cis, paramSize);
/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with ByteStrings
 * @param builder current message builder
 * @param bs ByteString containing the serialized message to merge
 * @throws IOException
 */
public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
  final CodedInputStream codedInput = bs.newCodedInput();
  // The ByteString is the whole message, so its size is a safe upper bound for the limit.
  codedInput.setSizeLimit(bs.size());
  builder.mergeFrom(codedInput);
  // A last tag of 0 means the stream was fully and cleanly consumed.
  codedInput.checkLastTagWas(0);
}
/**
 * Reads and parses the snapshot data manifest from the working directory.
 * @return the parsed manifest, or {@code null} if the manifest file does not exist
 * @throws CorruptedSnapshotException if the manifest exists but cannot be parsed
 * @throws IOException if the manifest cannot be read from the filesystem
 */
private SnapshotDataManifest readDataManifest() throws IOException {
  // try-with-resources replaces the manual null-check/close that used to live in a finally block.
  try (FSDataInputStream in = workingDirFs.open(new Path(workingDir, DATA_MANIFEST_NAME))) {
    CodedInputStream cin = CodedInputStream.newInstance(in);
    // Raise the decode size limit past protobuf's default so large manifests still parse.
    cin.setSizeLimit(manifestSizeLimit);
    return SnapshotDataManifest.parseFrom(cin);
  } catch (FileNotFoundException e) {
    // No manifest file present: callers treat null as "no data manifest".
    return null;
  } catch (InvalidProtocolBufferException e) {
    throw new CorruptedSnapshotException("unable to parse data manifest " + e.getMessage(), e);
  }
}
CodedInputStream cis = CodedInputStream.newInstance(is); int headerSize = cis.readRawVarint32(); Message.Builder builder = RequestHeader.newBuilder(); ProtobufUtil.mergeFrom(builder, cis, headerSize);
throw new EOFException("First byte is negative at offset " + originalPosition); size = CodedInputStream.readRawVarint32(firstByte, this.inputStream);
/**
 * Scans the namespace column family of hbase:meta and puts every namespace descriptor
 * found there into the local cache, keyed by namespace name.
 * @throws IOException if the meta table cannot be scanned or a descriptor fails to parse
 */
private void loadNamespaceIntoCache() throws IOException {
  try (Table table = masterServices.getConnection().getTable(TableName.META_TABLE_NAME);
    ResultScanner scanner = table.getScanner(HConstants.NAMESPACE_FAMILY)) {
    Result result;
    // scanner.next() returns null when the scan is exhausted.
    while ((result = scanner.next()) != null) {
      Cell cell = result.getColumnLatestCell(HConstants.NAMESPACE_FAMILY,
        HConstants.NAMESPACE_COL_DESC_QUALIFIER);
      // Parse the descriptor straight out of the cell's backing array, no copy.
      CodedInputStream stream = CodedInputStream.newInstance(cell.getValueArray(),
        cell.getValueOffset(), cell.getValueLength());
      NamespaceDescriptor ns =
        ProtobufUtil.toNamespaceDescriptor(HBaseProtos.NamespaceDescriptor.parseFrom(stream));
      cache.put(ns.getName(), ns);
    }
  }
}
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; className_ = bs; parentId_ = input.readUInt64(); break; procId_ = input.readUInt64(); break; submittedTime_ = input.readUInt64(); break; org.apache.hbase.thirdparty.com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000010; owner_ = bs; int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue); if (value == null) { stackId_.add(input.readUInt32()); break;
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: int rawValue = input.readEnum(); org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry.Type.valueOf(rawValue); if (value == null) { input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry)); break; procId_ = input.readUInt64(); break; childId_.add(input.readUInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) { childId_ = new java.util.ArrayList<java.lang.Long>(); mutable_bitField0_ |= 0x00000008; while (input.getBytesUntilLimit() > 0) { childId_.add(input.readUInt64()); input.popLimit(limit);
/**
 * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
 * buffers
 * @param builder current message builder
 * @param in InputStream with delimited protobuf data
 * @throws IOException
 */
public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
    throws IOException {
  // This used to be builder.mergeDelimitedFrom(in);
  // but is replaced to allow us to bump the protobuf size limit.
  final int firstByte = in.read();
  // -1 means EOF before the varint length prefix: no message present, merge nothing.
  if (firstByte != -1) {
    final int size = CodedInputStream.readRawVarint32(firstByte, in);
    // Cap reads at the declared message size so we never consume bytes past this message.
    final InputStream limitedInput = ByteStreams.limit(in, size);
    final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
    // Raise the decode size limit to the message's own size (may exceed the 64MB default).
    codedInput.setSizeLimit(size);
    builder.mergeFrom(codedInput);
    // A last tag of 0 means the delimited message was fully and cleanly consumed.
    codedInput.checkLastTagWas(0);
  }
}
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: mutable_bitField0_ |= 0x00000001; timestamps_.add(input.readInt64()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { timestamps_ = new java.util.ArrayList<java.lang.Long>(); mutable_bitField0_ |= 0x00000001; while (input.getBytesUntilLimit() > 0) { timestamps_.add(input.readInt64()); input.popLimit(limit); break; canHint_ = input.readBool(); break;
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: mutable_bitField0_ |= 0x00000001; state_.add(input.readUInt32()); break; int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { state_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000001; while (input.getBytesUntilLimit() > 0) { state_.add(input.readUInt32()); input.popLimit(limit); break;