for (int j = 0; j < FAVORED_NODES_NUM; j++) { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder b = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(); b.setHostName(nodes[(i + j) % REGION_SERVERS].getAddress().getHostAddress()); b.setPort(nodes[(i + j) % REGION_SERVERS].getPort());
sn.getHostName(), sn.getPort(), sn.getStartCode()); } catch (InvalidProtocolBufferException e) { throw new DeserializationException("Unable to parse meta region location");
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Reflexive fast path.
  if (obj == this) {
    return true;
  }
  // Different message type: fall back to identity-based Object.equals.
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName that =
      (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) obj;
  // Each field must agree on presence, and on value when present.
  if (hasHostName() != that.hasHostName()) {
    return false;
  }
  if (hasHostName() && !getHostName().equals(that.getHostName())) {
    return false;
  }
  if (hasPort() != that.hasPort()) {
    return false;
  }
  if (hasPort() && getPort() != that.getPort()) {
    return false;
  }
  if (hasStartCode() != that.hasStartCode()) {
    return false;
  }
  if (hasStartCode() && getStartCode() != that.getStartCode()) {
    return false;
  }
  // Unknown (unrecognized) fields participate in equality as well.
  return unknownFields.equals(that.unknownFields);
}
for (int j = 0; j < FAVORED_NODES_NUM; j++) { org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder b = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(); b.setHostName(nodes[(i + j) % REGION_SERVERS].getAddress().getHostAddress()); b.setPort(nodes[(i + j) % REGION_SERVERS].getPort());
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName sn = rss.getMaster(); return ServerName.valueOf(sn.getHostName(), sn.getPort(), sn.getStartCode()); } catch (/*InvalidProtocolBufferException*/IOException e) {
sn.getHostName(), sn.getPort(), sn.getStartCode()); } catch (InvalidProtocolBufferException e) { throw new DeserializationException("Unable to parse meta region location");
/**
 * <pre>
 * The ServerName hosting the meta region currently, or destination server,
 * if meta region is in transition.
 * </pre>
 *
 * <code>required .hbase.pb.ServerName server = 1;</code>
 *
 * Folds {@code value} into the server field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    server_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(server_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000001; // mark the field as set
  return this;
}

/**
/**
 * <pre>
 * The ServerName of the current Master
 * </pre>
 *
 * <code>required .hbase.pb.ServerName master = 1;</code>
 *
 * Folds {@code value} into the master field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (masterBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    masterBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && master_ != null
            && master_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    master_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(master_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000001; // mark the field as set
  return this;
}

/**
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Same reference is trivially equal.
  if (obj == this) {
    return true;
  }
  // Non-ServerName operand: defer to Object.equals (reference identity).
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName o =
      (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName) obj;
  // Compare presence bits first, then values for the fields that are set.
  if (hasHostName() != o.hasHostName()
      || (hasHostName() && !getHostName().equals(o.getHostName()))) {
    return false;
  }
  if (hasPort() != o.hasPort()
      || (hasPort() && getPort() != o.getPort())) {
    return false;
  }
  if (hasStartCode() != o.hasStartCode()
      || (hasStartCode() && getStartCode() != o.getStartCode())) {
    return false;
  }
  // Finally, the unrecognized-field sets must match too.
  return unknownFields.equals(o.unknownFields);
}
/**
 * <pre>
 ** name of the server experiencing the error
 * </pre>
 *
 * <code>required .hbase.pb.ServerName server = 1;</code>
 *
 * Folds {@code value} into the server field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    server_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(server_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000001; // mark the field as set
  return this;
}

/**
/**
 * <pre>
 * Server who opened the region
 * </pre>
 *
 * <code>optional .hbase.pb.ServerName server = 6;</code>
 *
 * Folds {@code value} into the server field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000020) == 0x00000020)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    server_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(server_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000020; // mark the field as set
  return this;
}

/**
/**
 * <pre>
 ** This region server's server name
 * </pre>
 *
 * <code>required .hbase.pb.ServerName server = 1;</code>
 *
 * Folds {@code value} into the server field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    server_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(server_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000001; // mark the field as set
  return this;
}

/**
/**
 * <pre>
 * optional RecoveryMode DEPRECATED_mode = 3 [default = UNKNOWN];
 * </pre>
 *
 * <code>required .hbase.pb.ServerName server_name = 2;</code>
 *
 * Folds {@code value} into the server_name field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverNameBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverNameBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000002) == 0x00000002)
            && serverName_ != null
            && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    serverName_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(serverName_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000002; // mark the field as set
  return this;
}

/**
/**
 * <code>optional .hbase.pb.ServerName master = 7;</code>
 *
 * Folds {@code value} into the master field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeMaster(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (masterBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    masterBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000040) == 0x00000040)
            && master_ != null
            && master_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    master_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(master_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000040; // mark the field as set
  return this;
}

/**
/**
 * <code>optional .hbase.pb.ServerName destination_server = 4;</code>
 *
 * Folds {@code value} into the destination_server field: delegates to the
 * nested builder when one exists, otherwise merges into an
 * already-populated field or adopts {@code value} outright.
 */
public Builder mergeDestinationServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (destinationServerBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    destinationServerBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000008) == 0x00000008)
            && destinationServer_ != null
            && destinationServer_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    destinationServer_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(destinationServer_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000008; // mark the field as set
  return this;
}

/**
@java.lang.Override
public int hashCode() {
  // Serve the memoized value when present; 0 doubles as the "unset" marker.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  // Only set fields contribute, each tagged with its field number.
  if (hasHostName()) {
    h = (37 * h) + HOST_NAME_FIELD_NUMBER;
    h = (53 * h) + getHostName().hashCode();
  }
  if (hasPort()) {
    h = (37 * h) + PORT_FIELD_NUMBER;
    h = (53 * h) + getPort();
  }
  if (hasStartCode()) {
    h = (37 * h) + START_CODE_FIELD_NUMBER;
    h = (53 * h)
        + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(getStartCode());
  }
  // Unknown fields are mixed in so equal messages hash equally.
  h = (29 * h) + unknownFields.hashCode();
  memoizedHashCode = h;
  return h;
}
/**
 * <code>optional .hbase.pb.ServerName server_name = 2;</code>
 *
 * Folds {@code value} into the server_name field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverNameBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverNameBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000002) == 0x00000002)
            && serverName_ != null
            && serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    serverName_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(serverName_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000002; // mark the field as set
  return this;
}

/**
/**
 * <code>required .hbase.pb.ServerName server = 1;</code>
 *
 * Folds {@code value} into the server field: delegates to the nested
 * builder when one exists, otherwise merges into an already-populated
 * field or adopts {@code value} outright.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    // A nested builder owns the field; let it carry out the merge.
    serverBuilder_.mergeFrom(value);
  } else {
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    server_ = mergeIntoExisting
        ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
            .newBuilder(server_).mergeFrom(value).buildPartial()
        : value;
    onChanged();
  }
  bitField0_ |= 0x00000001; // mark the field as set
  return this;
}

/**
/**
 * Convert a ServerName to a protocol buffer ServerName.
 *
 * @param serverName the ServerName to convert
 * @return the converted protocol buffer ServerName, or {@code null} when
 *         {@code serverName} is {@code null}
 * @see #toServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName)
 */
public static HBaseProtos.ServerName toServerName(final ServerName serverName) {
  // Preserve null in, null out.
  if (serverName == null) {
    return null;
  }
  HBaseProtos.ServerName.Builder builder =
      HBaseProtos.ServerName.newBuilder().setHostName(serverName.getHostname());
  // Port and start code are optional in the message; copy only
  // non-negative (i.e. meaningful) values.
  final int port = serverName.getPort();
  if (port >= 0) {
    builder.setPort(port);
  }
  final long startCode = serverName.getStartcode();
  if (startCode >= 0) {
    builder.setStartCode(startCode);
  }
  return builder.build();
}
/**
 * <code>required .hbase.pb.ServerName server = 1;</code>
 *
 * Merges {@code value} into the server field. With a nested builder the
 * merge is delegated; otherwise a populated, non-default field is combined
 * through a fresh sub-builder and an empty field simply takes {@code value}.
 */
public Builder mergeServer(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ != null) {
    serverBuilder_.mergeFrom(value);
  } else {
    boolean fieldPopulated =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && server_ != null
            && server_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    if (fieldPopulated) {
      server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName
          .newBuilder(server_).mergeFrom(value).buildPartial();
    } else {
      server_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001; // record that the field is now set
  return this;
}

/**