@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasPurgedMemoryBytes()) {
    hash = (37 * hash) + PURGED_MEMORY_BYTES_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getPurgedMemoryBytes());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public final com.google.protobuf.Message getResponsePrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
      "Service.getResponsePrototype() given method " +
      "descriptor for wrong service type.");
  }
  switch(method.getIndex()) {
    case 0:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance();
    case 1:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeInt64(1, purgedMemoryBytes_);
  }
  getUnknownFields().writeTo(output);
}
@Override
public LlapDaemonProtocolProtos.PurgeCacheResponseProto purgeCache(final RpcController controller,
    final LlapDaemonProtocolProtos.PurgeCacheRequestProto request) throws ServiceException {
  LlapDaemonProtocolProtos.PurgeCacheResponseProto.Builder responseProtoBuilder =
      LlapDaemonProtocolProtos.PurgeCacheResponseProto.newBuilder();
  LlapIo<?> llapIo = LlapProxy.getIo();
  if (llapIo != null) {
    responseProtoBuilder.setPurgedMemoryBytes(llapIo.purge());
  } else {
    responseProtoBuilder.setPurgedMemoryBytes(0);
  }
  return responseProtoBuilder.build();
}
private PurgeCacheResponseProto(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 8: {
          bitField0_ |= 0x00000001;
          purgedMemoryBytes_ = input.readInt64();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto other =
      (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto) obj;

  boolean result = true;
  result = result && (hasPurgedMemoryBytes() == other.hasPurgedMemoryBytes());
  if (hasPurgedMemoryBytes()) {
    result = result && (getPurgedMemoryBytes() == other.getPurgedMemoryBytes());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
public void purgeCache(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto request,
    com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto> done) {
  channel.callMethod(
    getDescriptor().getMethods().get(1),
    controller,
    request,
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance(),
    com.google.protobuf.RpcUtil.generalizeCallback(
      done,
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.class,
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance()));
}
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto getDefaultInstanceForType() {
  return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance();
}
@Override
public Long call() {
  try {
    LlapManagementProtocolClientImpl client = new LlapManagementProtocolClientImpl(conf,
        instance.getHost(), instance.getManagementPort(), retryPolicy, socketFactory);
    LlapDaemonProtocolProtos.PurgeCacheResponseProto resp = client.purgeCache(null,
        LlapDaemonProtocolProtos.PurgeCacheRequestProto.newBuilder().build());
    return resp.getPurgedMemoryBytes();
  } catch (Exception e) {
    LOG.warn("Exception while purging cache.", e);
    return 0L;
  }
}
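/*
 * The Callable above purges the cache on a single daemon. Below is a minimal,
 * hypothetical sketch of fanning such callables out across all daemons and
 * summing the purged bytes; PurgeCacheFanOut, purgeTasks, and the pool sizing
 * are illustrative assumptions, not code from the snippets above.
 */
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class PurgeCacheFanOut {
  // Submits one purge task per daemon and sums the purged byte counts.
  public static long purgeAll(List<Callable<Long>> purgeTasks) throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(Math.max(1, purgeTasks.size()));
    try {
      List<Future<Long>> futures = new ArrayList<>();
      for (Callable<Long> task : purgeTasks) {
        futures.add(executor.submit(task)); // each task issues one purgeCache RPC
      }
      long totalPurged = 0L;
      for (Future<Long> f : futures) {
        totalPurged += f.get(); // a failed daemon contributes 0L (see the catch block above)
      }
      return totalPurged;
    } finally {
      executor.shutdown();
    }
  }
}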
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto purgeCache(
    com.google.protobuf.RpcController controller,
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheRequestProto request)
    throws com.google.protobuf.ServiceException {
  return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto) channel.callBlockingMethod(
    getDescriptor().getMethods().get(1),
    controller,
    request,
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance());
}
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto buildPartial() {
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto result =
      new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.purgedMemoryBytes_ = purgedMemoryBytes_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt64Size(1, purgedMemoryBytes_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto other) {
  if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto.getDefaultInstance()) return this;
  if (other.hasPurgedMemoryBytes()) {
    setPurgedMemoryBytes(other.getPurgedMemoryBytes());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
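/*
 * Taken together, the builder, writeTo()/getSerializedSize(), and
 * equals()/hashCode() methods above support a simple round trip. The sketch
 * below is illustrative only: it relies on the standard generated
 * toByteArray() and parseFrom(byte[]) entry points, and the 128 MB value is
 * made up.
 */
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.PurgeCacheResponseProto;

public class PurgeCacheResponseRoundTrip {
  public static void main(String[] args) throws Exception {
    PurgeCacheResponseProto original = PurgeCacheResponseProto.newBuilder()
        .setPurgedMemoryBytes(128L * 1024 * 1024) // pretend 128 MB were purged
        .build();

    byte[] wire = original.toByteArray(); // exercises writeTo() and getSerializedSize()
    PurgeCacheResponseProto parsed = PurgeCacheResponseProto.parseFrom(wire);

    System.out.println("hasPurgedMemoryBytes: " + parsed.hasPurgedMemoryBytes());
    System.out.println("purgedMemoryBytes:    " + parsed.getPurgedMemoryBytes());
    System.out.println("value-equal:          "
        + (parsed.equals(original) && parsed.hashCode() == original.hashCode()));
  }
}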