AggregateProtos$AggregateRequest

How to use AggregateProtos$AggregateRequest in org.apache.hadoop.hbase.protobuf.generated

Best Java code snippets using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos$AggregateRequest (Showing top 20 results out of 315)

origin: apache/hbase

public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest buildPartial() {
 org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
  to_bitField0_ |= 0x00000001;
 }
 result.interpreterClassName_ = interpreterClassName_;
 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
  to_bitField0_ |= 0x00000002;
 }
 if (scanBuilder_ == null) {
  result.scan_ = scan_;
 } else {
  result.scan_ = scanBuilder_.build();
 }
 if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
  to_bitField0_ |= 0x00000004;
 }
 result.interpreterSpecificBytes_ = interpreterSpecificBytes_;
 result.bitField0_ = to_bitField0_;
 onBuilt();
 return result;
}
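These generated builders are how callers assemble a request. A minimal client-side sketch, assuming the classic (pre-shaded) HBase protobuf classes; the column family and qualifier are hypothetical placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;
import org.apache.hadoop.hbase.util.Bytes;

static AggregateProtos.AggregateRequest buildRequest() throws IOException {
 Scan scan = new Scan();
 scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q")); // hypothetical family/qualifier
 return AggregateProtos.AggregateRequest.newBuilder()
  .setInterpreterClassName(LongColumnInterpreter.class.getCanonicalName())
  .setScan(ProtobufUtil.toScan(scan)) // converts the client Scan to its protobuf form
  .build(); // build() delegates to buildPartial() and then verifies required fields
}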
origin: apache/hbase

public final com.google.protobuf.Message
  getRequestPrototype(
  com.google.protobuf.Descriptors.MethodDescriptor method) {
 if (method.getService() != getDescriptor()) {
  throw new java.lang.IllegalArgumentException(
   "Service.getRequestPrototype() given method " +
   "descriptor for wrong service type.");
 }
 switch(method.getIndex()) {
  case 0:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 1:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 2:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 3:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 4:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 5:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  case 6:
   return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
  default:
   throw new java.lang.AssertionError("Can't get here.");
 }
}
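All seven cases return the same default instance because every RPC in AggregateService (GetMax, GetMin, GetSum, GetRowNum, GetAvg, GetStd, GetMedian) takes an AggregateRequest. That can be confirmed from the descriptors alone; a small sketch:

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos;

for (Descriptors.MethodDescriptor method :
  AggregateProtos.AggregateService.getDescriptor().getMethods()) {
 // each line prints "... <- hbase.pb.AggregateRequest"
 System.out.println(method.getName() + " <- " + method.getInputType().getFullName());
}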
origin: apache/hbase

@java.lang.Override
public int hashCode() {
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int hash = 41;
 hash = (19 * hash) + getDescriptorForType().hashCode();
 if (hasInterpreterClassName()) {
  hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getInterpreterClassName().hashCode();
 }
 if (hasScan()) {
  hash = (37 * hash) + SCAN_FIELD_NUMBER;
  hash = (53 * hash) + getScan().hashCode();
 }
 if (hasInterpreterSpecificBytes()) {
  hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
  hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
 }
 hash = (29 * hash) + getUnknownFields().hashCode();
 memoizedHashCode = hash;
 return hash;
}
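The memoized hash is consistent with the generated equals(), so two requests carrying the same field values compare equal and hash identically (e.g., as HashMap keys). A quick sketch, where scanProto stands in for a previously built protobuf Scan:

AggregateProtos.AggregateRequest a = AggregateProtos.AggregateRequest.newBuilder()
 .setInterpreterClassName("org.example.MyInterpreter") // hypothetical class name
 .setScan(scanProto)
 .build();
AggregateProtos.AggregateRequest b = a.toBuilder().build(); // identical field values
assert a.equals(b) && a.hashCode() == b.hashCode();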
origin: harbby/presto-connectors

S sumVal = null, sumSqVal = null, tempVal = null;
long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
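This and the remaining endpoint snippets are variations of one server-side pattern: rebuild the Scan from the request, open a region scanner, fold cell values into the aggregate, and close the scanner when done. A condensed sketch of that loop, with the ColumnInterpreter-specific accumulation elided:

InternalScanner scanner = null;
try {
 Scan scan = ProtobufUtil.toScan(request.getScan());
 byte[] colFamily = scan.getFamilies()[0];
 scanner = env.getRegion().getScanner(scan);
 List<Cell> results = new ArrayList<>();
 boolean hasMore;
 do {
  hasMore = scanner.next(results);
  // fold each Cell in results into the running aggregate via the ColumnInterpreter
  results.clear();
 } while (hasMore);
} finally {
 if (scanner != null) {
  scanner.close(); // the real endpoints swallow or log close failures
 }
}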
origin: harbby/presto-connectors

S sumVal = null;
Long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: harbby/presto-connectors

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: org.apache.hbase/hbase-endpoint

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: com.aliyun.hbase/alihbase-endpoint

S sumVal = null, sumSqVal = null, tempVal = null;
long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: org.apache.hbase/hbase-endpoint

S sumVal = null, sumSqVal = null, tempVal = null;
long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: com.aliyun.hbase/alihbase-endpoint

S sumVal = null;
Long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: com.aliyun.hbase/alihbase-endpoint

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: harbby/presto-connectors

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
List<Cell> results = new ArrayList<Cell>();
origin: com.aliyun.hbase/alihbase-endpoint

S sumVal = null;
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: harbby/presto-connectors

InternalScanner scanner = null;
try {
 Scan scan = ProtobufUtil.toScan(request.getScan());
 byte[][] colFamilies = scan.getFamilies();
 byte[] colFamily = colFamilies != null ? colFamilies[0] : null;
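Note the null guard here: unlike the other snippets, which index scan.getFamilies()[0] directly, this variant tolerates a Scan that names no column family, in which case getFamilies() returns null and the unguarded form would throw a NullPointerException.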
origin: com.aliyun.hbase/alihbase-endpoint

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
List<Cell> results = new ArrayList<>();
origin: com.aliyun.hbase/alihbase-endpoint

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
List<Cell> results = new ArrayList<>();
origin: org.apache.hbase/hbase-endpoint

ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
List<Cell> results = new ArrayList<>();
origin: org.apache.hbase/hbase-endpoint

S sumVal = null;
Long rowCountVal = 0L;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: org.apache.hbase/hbase-endpoint

S sumVal = null;
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
byte[] colFamily = scan.getFamilies()[0];
origin: org.apache.hbase/hbase-endpoint

InternalScanner scanner = null;
try {
 Scan scan = ProtobufUtil.toScan(request.getScan());
 byte[][] colFamilies = scan.getFamilies();
 byte[] colFamily = colFamilies != null ? colFamilies[0] : null;
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos$AggregateRequest

Javadoc

Protobuf type hbase.pb.AggregateRequest
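Being an ordinary protobuf message, the request round-trips through bytes, which is how it travels to the coprocessor endpoint. A minimal sketch:

byte[] wire = request.toByteArray();
AggregateProtos.AggregateRequest parsed =
 AggregateProtos.AggregateRequest.parseFrom(wire); // throws InvalidProtocolBufferException on bad input
assert parsed.equals(request);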

Most used methods

  • <init>
  • getDefaultInstance
  • getDescriptorForType
  • getInterpreterClassName
    required string interpreter_class_name = 1; The request passed to the AggregateService consists of three parts: (1) the (canonical) classname of the ColumnInterpreter implementation, (2) the Scan query, and (3) any bytes required to construct the ColumnInterpreter object properly.
  • getInterpreterClassNameBytes
    required string interpreter_class_name = 1; The request passed to the AggregateService consists of three parts: (1) the (canonical) classname of the ColumnInterpreter implementation, (2) the Scan query, and (3) any bytes required to construct the ColumnInterpreter object properly.
  • getInterpreterSpecificBytes
    optional bytes interpreter_specific_bytes = 3;
  • getScan
    required .hbase.pb.Scan scan = 2;
  • getSerializedSize
  • getUnknownFields
  • hasInterpreterClassName
    required string interpreter_class_name = 1; The request passed to the AggregateService consists of three parts: (1) the (canonical) classname of the ColumnInterpreter implementation, (2) the Scan query, and (3) any bytes required to construct the ColumnInterpreter object properly.
  • hasInterpreterSpecificBytes
    optional bytes interpreter_specific_bytes = 3;
  • hasScan
    required .hbase.pb.Scan scan = 2;
  • initFields
  • isInitialized
  • makeExtensionsImmutable
  • newBuilder
  • parseUnknownField
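On the receiving side, the has-prefixed accessors above guard the optional field, and isInitialized() confirms both required fields arrived. A brief sketch of defensive reading; wiring the bytes into a ColumnInterpreter is the endpoint's concern and elided here:

if (!request.isInitialized()) {
 throw new IllegalArgumentException(
  "incomplete AggregateRequest: " + request.getInitializationErrorString());
}
String interpreterClass = request.getInterpreterClassName();
com.google.protobuf.ByteString extra = request.hasInterpreterSpecificBytes()
 ? request.getInterpreterSpecificBytes() // optional bytes, field 3
 : null;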
