@SuppressWarnings("unchecked") // Used server-side too by Aggregation Coprocesor Endpoint. Undo this interdependence. TODO. ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest( AggregateRequest request) throws IOException { String className = request.getInterpreterClassName(); try { ColumnInterpreter<T,S,P,Q,R> ci; Class<?> cls = Class.forName(className); ci = (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance(); if (request.hasInterpreterSpecificBytes()) { ByteString b = request.getInterpreterSpecificBytes(); P initMsg = getParsedGenericInstance(ci.getClass(), 2, b); ci.initialize(initMsg); } return ci; } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { throw new IOException(e); } }
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not computed yet" (generated-code convention).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = 19 * hash + getDescriptorForType().hashCode();
  // Only fields that are present contribute: field number first, then value.
  if (hasInterpreterClassName()) {
    hash = 37 * hash + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterClassName().hashCode();
  }
  if (hasScan()) {
    hash = 37 * hash + SCAN_FIELD_NUMBER;
    hash = 53 * hash + getScan().hashCode();
  }
  if (hasInterpreterSpecificBytes()) {
    hash = 37 * hash + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterSpecificBytes().hashCode();
  }
  hash = 29 * hash + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: identical reference.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other =
      (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
  // For each field: presence bits must agree, and present values must be equal.
  if (hasInterpreterClassName() != other.hasInterpreterClassName()) {
    return false;
  }
  if (hasInterpreterClassName()
      && !getInterpreterClassName().equals(other.getInterpreterClassName())) {
    return false;
  }
  if (hasScan() != other.hasScan()) {
    return false;
  }
  if (hasScan() && !getScan().equals(other.getScan())) {
    return false;
  }
  if (hasInterpreterSpecificBytes() != other.hasInterpreterSpecificBytes()) {
    return false;
  }
  if (hasInterpreterSpecificBytes()
      && !getInterpreterSpecificBytes().equals(other.getInterpreterSpecificBytes())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: identical reference.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other =
      (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
  // For each field: presence bits must agree, and present values must be equal.
  if (hasInterpreterClassName() != other.hasInterpreterClassName()) {
    return false;
  }
  if (hasInterpreterClassName()
      && !getInterpreterClassName().equals(other.getInterpreterClassName())) {
    return false;
  }
  if (hasScan() != other.hasScan()) {
    return false;
  }
  if (hasScan() && !getScan().equals(other.getScan())) {
    return false;
  }
  if (hasInterpreterSpecificBytes() != other.hasInterpreterSpecificBytes()) {
    return false;
  }
  if (hasInterpreterSpecificBytes()
      && !getInterpreterSpecificBytes().equals(other.getInterpreterSpecificBytes())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: identical reference.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other =
      (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
  // For each field: presence bits must agree, and present values must be equal.
  if (hasInterpreterClassName() != other.hasInterpreterClassName()) {
    return false;
  }
  if (hasInterpreterClassName()
      && !getInterpreterClassName().equals(other.getInterpreterClassName())) {
    return false;
  }
  if (hasScan() != other.hasScan()) {
    return false;
  }
  if (hasScan() && !getScan().equals(other.getScan())) {
    return false;
  }
  if (hasInterpreterSpecificBytes() != other.hasInterpreterSpecificBytes()) {
    return false;
  }
  if (hasInterpreterSpecificBytes()
      && !getInterpreterSpecificBytes().equals(other.getInterpreterSpecificBytes())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not computed yet" (generated-code convention).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = 19 * hash + getDescriptorForType().hashCode();
  // Only fields that are present contribute: field number first, then value.
  if (hasInterpreterClassName()) {
    hash = 37 * hash + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterClassName().hashCode();
  }
  if (hasScan()) {
    hash = 37 * hash + SCAN_FIELD_NUMBER;
    hash = 53 * hash + getScan().hashCode();
  }
  if (hasInterpreterSpecificBytes()) {
    hash = 37 * hash + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterSpecificBytes().hashCode();
  }
  hash = 29 * hash + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not computed yet" (generated-code convention).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = 19 * hash + getDescriptorForType().hashCode();
  // Only fields that are present contribute: field number first, then value.
  if (hasInterpreterClassName()) {
    hash = 37 * hash + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterClassName().hashCode();
  }
  if (hasScan()) {
    hash = 37 * hash + SCAN_FIELD_NUMBER;
    hash = 53 * hash + getScan().hashCode();
  }
  if (hasInterpreterSpecificBytes()) {
    hash = 37 * hash + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterSpecificBytes().hashCode();
  }
  hash = 29 * hash + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not computed yet" (generated-code convention).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = 19 * hash + getDescriptorForType().hashCode();
  // Only fields that are present contribute: field number first, then value.
  if (hasInterpreterClassName()) {
    hash = 37 * hash + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterClassName().hashCode();
  }
  if (hasScan()) {
    hash = 37 * hash + SCAN_FIELD_NUMBER;
    hash = 53 * hash + getScan().hashCode();
  }
  if (hasInterpreterSpecificBytes()) {
    hash = 37 * hash + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
    hash = 53 * hash + getInterpreterSpecificBytes().hashCode();
  }
  hash = 29 * hash + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
/**
 * Constructs the {@code ColumnInterpreter} named in the request and, when the request
 * carries interpreter-specific bytes, initializes it from the parsed init message.
 *
 * NOTE(review): the class name comes straight from the request, so arbitrary classes on the
 * classpath can be instantiated — confirm callers restrict/validate interpreter class names.
 *
 * @param request the aggregation request holding the interpreter class name and optional
 *          initialization bytes
 * @return a freshly constructed (and possibly initialized) column interpreter
 * @throws IOException if the class cannot be loaded, instantiated, or initialized
 */
@SuppressWarnings("unchecked")
ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
    AggregateRequest request) throws IOException {
  String className = request.getInterpreterClassName();
  try {
    Class<?> cls = Class.forName(className);
    // Class.newInstance() is deprecated (it propagates undeclared checked exceptions);
    // invoke the no-arg constructor explicitly instead.
    ColumnInterpreter<T,S,P,Q,R> ci =
        (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance();
    if (request.hasInterpreterSpecificBytes()) {
      ByteString b = request.getInterpreterSpecificBytes();
      // Generic parameter index 2 of the interpreter class is P, the init message type.
      P initMsg = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 2, b);
      ci.initialize(initMsg);
    }
    return ci;
  } catch (ReflectiveOperationException e) {
    // Covers ClassNotFound, NoSuchMethod, Instantiation, IllegalAccess and
    // InvocationTarget; wrap uniformly, preserving the cause.
    throw new IOException(e);
  }
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: identical reference.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other =
      (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
  // For each field: presence bits must agree, and present values must be equal.
  if (hasInterpreterClassName() != other.hasInterpreterClassName()) {
    return false;
  }
  if (hasInterpreterClassName()
      && !getInterpreterClassName().equals(other.getInterpreterClassName())) {
    return false;
  }
  if (hasScan() != other.hasScan()) {
    return false;
  }
  if (hasScan() && !getScan().equals(other.getScan())) {
    return false;
  }
  if (hasInterpreterSpecificBytes() != other.hasInterpreterSpecificBytes()) {
    return false;
  }
  if (hasInterpreterSpecificBytes()
      && !getInterpreterSpecificBytes().equals(other.getInterpreterSpecificBytes())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@SuppressWarnings("unchecked") // Used server-side too by Aggregation Coprocesor Endpoint. Undo this interdependence. TODO. ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest( AggregateRequest request) throws IOException { String className = request.getInterpreterClassName(); try { ColumnInterpreter<T,S,P,Q,R> ci; Class<?> cls = Class.forName(className); ci = (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance(); if (request.hasInterpreterSpecificBytes()) { ByteString b = request.getInterpreterSpecificBytes(); P initMsg = getParsedGenericInstance(ci.getClass(), 2, b); ci.initialize(initMsg); } return ci; } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { throw new IOException(e); } }
@SuppressWarnings("unchecked") // Used server-side too by Aggregation Coprocesor Endpoint. Undo this interdependence. TODO. ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest( AggregateRequest request) throws IOException { String className = request.getInterpreterClassName(); try { ColumnInterpreter<T,S,P,Q,R> ci; Class<?> cls = Class.forName(className); ci = (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance(); if (request.hasInterpreterSpecificBytes()) { ByteString b = request.getInterpreterSpecificBytes(); P initMsg = getParsedGenericInstance(ci.getClass(), 2, b); ci.initialize(initMsg); } return ci; } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { throw new IOException(e); } }
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 * (1) the (canonical) classname of the ColumnInterpreter implementation
 * (2) the Scan query
 * (3) any bytes required to construct the ColumnInterpreter object
 * properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
  // Drop the has-bit for field 1 and restore the proto default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 * (1) the (canonical) classname of the ColumnInterpreter implementation
 * (2) the Scan query
 * (3) any bytes required to construct the ColumnInterpreter object
 * properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
  // Drop the has-bit for field 1 and restore the proto default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 * (1) the (canonical) classname of the ColumnInterpreter implementation
 * (2) the Scan query
 * (3) any bytes required to construct the ColumnInterpreter object
 * properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
  // Drop the has-bit for field 1 and restore the proto default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 * (1) the (canonical) classname of the ColumnInterpreter implementation
 * (2) the Scan query
 * (3) any bytes required to construct the ColumnInterpreter object
 * properly
 * </pre>
 */
public Builder clearInterpreterClassName() {
  // Drop the has-bit for field 1 and restore the proto default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**