Tabnine Logo
AggregateProtos$AggregateRequest.getInterpreterClassName
Code IndexAdd Tabnine to your IDE (free)

How to use
getInterpreterClassName
method
in
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos$AggregateRequest

Best Java code snippets using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos$AggregateRequest.getInterpreterClassName (Showing top 16 results out of 315)

origin: apache/hbase

@SuppressWarnings("unchecked")
// Used server-side too by Aggregation Coprocessor Endpoint. Undo this interdependence. TODO.
ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
  AggregateRequest request) throws IOException {
 // The request names the interpreter implementation; instantiate it reflectively.
 // NOTE(review): Class.forName on a request-supplied name loads arbitrary classes —
 // presumably acceptable inside the cluster trust boundary; confirm.
 final String interpreterClassName = request.getInterpreterClassName();
 try {
  Class<?> interpreterClass = Class.forName(interpreterClassName);
  ColumnInterpreter<T, S, P, Q, R> interpreter =
    (ColumnInterpreter<T, S, P, Q, R>) interpreterClass.getDeclaredConstructor().newInstance();
  // Optional field 3 carries serialized initialization state for the interpreter.
  if (request.hasInterpreterSpecificBytes()) {
   ByteString serialized = request.getInterpreterSpecificBytes();
   P initMsg = getParsedGenericInstance(interpreter.getClass(), 2, serialized);
   interpreter.initialize(initMsg);
  }
  return interpreter;
 } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
   NoSuchMethodException | InvocationTargetException e) {
  // Wrap every reflective failure in IOException, preserving the cause.
  throw new IOException(e);
 }
}
origin: apache/hbase

@java.lang.Override
public int hashCode() {
 // Hash codes are memoized; a stored value of 0 means "not computed yet".
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int h = 41;
 h = (19 * h) + getDescriptorForType().hashCode();
 // Only fields that are actually present contribute, consistent with equals().
 if (hasInterpreterClassName()) {
  h = (37 * h) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
  h = (53 * h) + getInterpreterClassName().hashCode();
 }
 if (hasScan()) {
  h = (37 * h) + SCAN_FIELD_NUMBER;
  h = (53 * h) + getScan().hashCode();
 }
 if (hasInterpreterSpecificBytes()) {
  h = (37 * h) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
  h = (53 * h) + getInterpreterSpecificBytes().hashCode();
 }
 h = (29 * h) + getUnknownFields().hashCode();
 memoizedHashCode = h;
 return h;
}
origin: apache/hbase

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
 // Reflexive: an instance is always equal to itself.
 if (obj == this) {
  return true;
 }
 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
  return super.equals(obj);
 }
 org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest that =
   (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
 // For each field: presence bits must match, and present values must compare equal.
 if (hasInterpreterClassName() != that.hasInterpreterClassName()) {
  return false;
 }
 if (hasInterpreterClassName()
   && !getInterpreterClassName().equals(that.getInterpreterClassName())) {
  return false;
 }
 if (hasScan() != that.hasScan()) {
  return false;
 }
 if (hasScan() && !getScan().equals(that.getScan())) {
  return false;
 }
 if (hasInterpreterSpecificBytes() != that.hasInterpreterSpecificBytes()) {
  return false;
 }
 if (hasInterpreterSpecificBytes()
   && !getInterpreterSpecificBytes().equals(that.getInterpreterSpecificBytes())) {
  return false;
 }
 return getUnknownFields().equals(that.getUnknownFields());
}
origin: harbby/presto-connectors

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
 // Reflexive: an instance is always equal to itself.
 if (obj == this) {
  return true;
 }
 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
  return super.equals(obj);
 }
 org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest that =
   (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
 // For each field: presence bits must match, and present values must compare equal.
 if (hasInterpreterClassName() != that.hasInterpreterClassName()) {
  return false;
 }
 if (hasInterpreterClassName()
   && !getInterpreterClassName().equals(that.getInterpreterClassName())) {
  return false;
 }
 if (hasScan() != that.hasScan()) {
  return false;
 }
 if (hasScan() && !getScan().equals(that.getScan())) {
  return false;
 }
 if (hasInterpreterSpecificBytes() != that.hasInterpreterSpecificBytes()) {
  return false;
 }
 if (hasInterpreterSpecificBytes()
   && !getInterpreterSpecificBytes().equals(that.getInterpreterSpecificBytes())) {
  return false;
 }
 return getUnknownFields().equals(that.getUnknownFields());
}
origin: org.apache.hbase/hbase-endpoint

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
 // Reflexive: an instance is always equal to itself.
 if (obj == this) {
  return true;
 }
 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
  return super.equals(obj);
 }
 org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest that =
   (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
 // For each field: presence bits must match, and present values must compare equal.
 if (hasInterpreterClassName() != that.hasInterpreterClassName()) {
  return false;
 }
 if (hasInterpreterClassName()
   && !getInterpreterClassName().equals(that.getInterpreterClassName())) {
  return false;
 }
 if (hasScan() != that.hasScan()) {
  return false;
 }
 if (hasScan() && !getScan().equals(that.getScan())) {
  return false;
 }
 if (hasInterpreterSpecificBytes() != that.hasInterpreterSpecificBytes()) {
  return false;
 }
 if (hasInterpreterSpecificBytes()
   && !getInterpreterSpecificBytes().equals(that.getInterpreterSpecificBytes())) {
  return false;
 }
 return getUnknownFields().equals(that.getUnknownFields());
}
origin: harbby/presto-connectors

@java.lang.Override
public int hashCode() {
 // Hash codes are memoized; a stored value of 0 means "not computed yet".
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int h = 41;
 h = (19 * h) + getDescriptorForType().hashCode();
 // Only fields that are actually present contribute, consistent with equals().
 if (hasInterpreterClassName()) {
  h = (37 * h) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
  h = (53 * h) + getInterpreterClassName().hashCode();
 }
 if (hasScan()) {
  h = (37 * h) + SCAN_FIELD_NUMBER;
  h = (53 * h) + getScan().hashCode();
 }
 if (hasInterpreterSpecificBytes()) {
  h = (37 * h) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
  h = (53 * h) + getInterpreterSpecificBytes().hashCode();
 }
 h = (29 * h) + getUnknownFields().hashCode();
 memoizedHashCode = h;
 return h;
}
origin: com.aliyun.hbase/alihbase-endpoint

@java.lang.Override
public int hashCode() {
 // Hash codes are memoized; a stored value of 0 means "not computed yet".
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int h = 41;
 h = (19 * h) + getDescriptorForType().hashCode();
 // Only fields that are actually present contribute, consistent with equals().
 if (hasInterpreterClassName()) {
  h = (37 * h) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
  h = (53 * h) + getInterpreterClassName().hashCode();
 }
 if (hasScan()) {
  h = (37 * h) + SCAN_FIELD_NUMBER;
  h = (53 * h) + getScan().hashCode();
 }
 if (hasInterpreterSpecificBytes()) {
  h = (37 * h) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
  h = (53 * h) + getInterpreterSpecificBytes().hashCode();
 }
 h = (29 * h) + getUnknownFields().hashCode();
 memoizedHashCode = h;
 return h;
}
origin: org.apache.hbase/hbase-endpoint

@java.lang.Override
public int hashCode() {
 // Hash codes are memoized; a stored value of 0 means "not computed yet".
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int h = 41;
 h = (19 * h) + getDescriptorForType().hashCode();
 // Only fields that are actually present contribute, consistent with equals().
 if (hasInterpreterClassName()) {
  h = (37 * h) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
  h = (53 * h) + getInterpreterClassName().hashCode();
 }
 if (hasScan()) {
  h = (37 * h) + SCAN_FIELD_NUMBER;
  h = (53 * h) + getScan().hashCode();
 }
 if (hasInterpreterSpecificBytes()) {
  h = (37 * h) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
  h = (53 * h) + getInterpreterSpecificBytes().hashCode();
 }
 h = (29 * h) + getUnknownFields().hashCode();
 memoizedHashCode = h;
 return h;
}
origin: harbby/presto-connectors

@SuppressWarnings("unchecked")
// Builds the ColumnInterpreter named by the request via reflection; used server-side by the
// Aggregation Coprocessor Endpoint.
// Throws IOException wrapping any reflective failure (class missing, not instantiable, etc.).
ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
  AggregateRequest request) throws IOException {
 String className = request.getInterpreterClassName();
 Class<?> cls;
 try {
  // NOTE(review): Class.forName on a request-supplied name loads arbitrary classes —
  // presumably acceptable inside the cluster trust boundary; confirm.
  cls = Class.forName(className);
  // Class.newInstance() is deprecated: it propagates checked exceptions thrown by the
  // constructor unwrapped. getDeclaredConstructor().newInstance() wraps them in
  // InvocationTargetException instead, so all failure modes are caught below.
  ColumnInterpreter<T,S,P,Q,R> ci =
    (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance();
  // Optional field 3 carries serialized initialization state for the interpreter.
  if (request.hasInterpreterSpecificBytes()) {
   ByteString b = request.getInterpreterSpecificBytes();
   P initMsg = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 2, b);
   ci.initialize(initMsg);
  }
  return ci;
 } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
   NoSuchMethodException | InvocationTargetException e) {
  // Preserve the underlying cause for callers that only see IOException.
  throw new IOException(e);
 }
}
origin: com.aliyun.hbase/alihbase-endpoint

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
 // Reflexive: an instance is always equal to itself.
 if (obj == this) {
  return true;
 }
 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
  return super.equals(obj);
 }
 org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest that =
   (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
 // For each field: presence bits must match, and present values must compare equal.
 if (hasInterpreterClassName() != that.hasInterpreterClassName()) {
  return false;
 }
 if (hasInterpreterClassName()
   && !getInterpreterClassName().equals(that.getInterpreterClassName())) {
  return false;
 }
 if (hasScan() != that.hasScan()) {
  return false;
 }
 if (hasScan() && !getScan().equals(that.getScan())) {
  return false;
 }
 if (hasInterpreterSpecificBytes() != that.hasInterpreterSpecificBytes()) {
  return false;
 }
 if (hasInterpreterSpecificBytes()
   && !getInterpreterSpecificBytes().equals(that.getInterpreterSpecificBytes())) {
  return false;
 }
 return getUnknownFields().equals(that.getUnknownFields());
}
origin: org.apache.hbase/hbase-endpoint

@SuppressWarnings("unchecked")
// Used server-side too by Aggregation Coprocessor Endpoint. Undo this interdependence. TODO.
ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
  AggregateRequest request) throws IOException {
 // The request names the interpreter implementation; instantiate it reflectively.
 // NOTE(review): Class.forName on a request-supplied name loads arbitrary classes —
 // presumably acceptable inside the cluster trust boundary; confirm.
 final String interpreterClassName = request.getInterpreterClassName();
 try {
  Class<?> interpreterClass = Class.forName(interpreterClassName);
  ColumnInterpreter<T, S, P, Q, R> interpreter =
    (ColumnInterpreter<T, S, P, Q, R>) interpreterClass.getDeclaredConstructor().newInstance();
  // Optional field 3 carries serialized initialization state for the interpreter.
  if (request.hasInterpreterSpecificBytes()) {
   ByteString serialized = request.getInterpreterSpecificBytes();
   P initMsg = getParsedGenericInstance(interpreter.getClass(), 2, serialized);
   interpreter.initialize(initMsg);
  }
  return interpreter;
 } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
   NoSuchMethodException | InvocationTargetException e) {
  // Wrap every reflective failure in IOException, preserving the cause.
  throw new IOException(e);
 }
}
origin: com.aliyun.hbase/alihbase-endpoint

@SuppressWarnings("unchecked")
// Used server-side too by Aggregation Coprocessor Endpoint. Undo this interdependence. TODO.
ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
  AggregateRequest request) throws IOException {
 // The request names the interpreter implementation; instantiate it reflectively.
 // NOTE(review): Class.forName on a request-supplied name loads arbitrary classes —
 // presumably acceptable inside the cluster trust boundary; confirm.
 final String interpreterClassName = request.getInterpreterClassName();
 try {
  Class<?> interpreterClass = Class.forName(interpreterClassName);
  ColumnInterpreter<T, S, P, Q, R> interpreter =
    (ColumnInterpreter<T, S, P, Q, R>) interpreterClass.getDeclaredConstructor().newInstance();
  // Optional field 3 carries serialized initialization state for the interpreter.
  if (request.hasInterpreterSpecificBytes()) {
   ByteString serialized = request.getInterpreterSpecificBytes();
   P initMsg = getParsedGenericInstance(interpreter.getClass(), 2, serialized);
   interpreter.initialize(initMsg);
  }
  return interpreter;
 } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
   NoSuchMethodException | InvocationTargetException e) {
  // Wrap every reflective failure in IOException, preserving the cause.
  throw new IOException(e);
 }
}
origin: harbby/presto-connectors

/**
 * Resets field 1 to its default and clears its has-bit.
 *
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object
 *      properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
 // Drop the lowest presence bit (field 1) and restore the default class name.
 bitField0_ &= ~0x00000001;
 interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
 onChanged();
 return this;
}
/**
origin: com.aliyun.hbase/alihbase-endpoint

/**
 * Resets field 1 to its default and clears its has-bit.
 *
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object
 *      properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
 // Drop the lowest presence bit (field 1) and restore the default class name.
 bitField0_ &= ~0x00000001;
 interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
 onChanged();
 return this;
}
/**
origin: org.apache.hbase/hbase-endpoint

/**
 * Resets field 1 to its default and clears its has-bit.
 *
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object
 *      properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
 // Drop the lowest presence bit (field 1) and restore the default class name.
 bitField0_ &= ~0x00000001;
 interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
 onChanged();
 return this;
}
/**
origin: apache/hbase

/**
 * Resets field 1 to its default and clears its has-bit.
 *
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object
 *      properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
 // Drop the lowest presence bit (field 1) and restore the default class name.
 bitField0_ &= ~0x00000001;
 interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
 onChanged();
 return this;
}
/**
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos$AggregateRequest.getInterpreterClassName

Javadoc

required string interpreter_class_name = 1;
 
The request passed to the AggregateService consists of three parts 
(1) the (canonical) classname of the ColumnInterpreter implementation 
(2) the Scan query 
(3) any bytes required to construct the ColumnInterpreter object 
properly 

Popular methods of AggregateProtos$AggregateRequest

  • <init>
  • getDefaultInstance
  • getDescriptorForType
  • getInterpreterClassNameBytes
    required string interpreter_class_name = 1; The request passed to the AggregateService consists of
  • getInterpreterSpecificBytes
    optional bytes interpreter_specific_bytes = 3;
  • getScan
    required .hbase.pb.Scan scan = 2;
  • getSerializedSize
  • getUnknownFields
  • hasInterpreterClassName
    required string interpreter_class_name = 1; The request passed to the AggregateService consists of
  • hasInterpreterSpecificBytes
    optional bytes interpreter_specific_bytes = 3;
  • hasScan
    required .hbase.pb.Scan scan = 2;
  • initFields
  • hasScan,
  • initFields,
  • isInitialized,
  • makeExtensionsImmutable,
  • newBuilder,
  • parseUnknownField

Popular in Java

  • Making http post requests using okhttp
  • getSupportFragmentManager (FragmentActivity)
  • putExtra (Intent)
  • getSystemService (Context)
  • OutputStream (java.io)
    A writable sink for bytes.Most clients will use output streams that write data to the file system (
  • BigDecimal (java.math)
    An immutable arbitrary-precision signed decimal.A value is represented by an arbitrary-precision "un
  • ByteBuffer (java.nio)
    A buffer for bytes. A byte buffer can be created in either one of the following ways: * #allocate
  • Charset (java.nio.charset)
    A charset is a named mapping between Unicode characters and byte sequences. Every Charset can decode
  • Time (java.sql)
    Java representation of an SQL TIME value. Provides utilities to format and parse the time's represen
  • Get (org.apache.hadoop.hbase.client)
    Used to perform Get operations on a single row. To get everything for a row, instantiate a Get objec
  • CodeWhisperer alternatives
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now