// NOTE(review): truncated fragment — the braces are unbalanced (the "is
// restarting" throw is not followed by a closing '}' before the next 'if'),
// so this snippet is cut mid-method; code is left byte-identical.
// Netty handler for a BlockOpResponseProto reply: an OOB restart status, an
// access-token failure, and any other non-SUCCESS status are each surfaced
// as an exception carrying the datanode's status message and logInfo context.
@Override protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp) throws Exception { Status pipelineStatus = resp.getStatus(); if (PipelineAck.isRestartOOBStatus(pipelineStatus)) { throw new IOException("datanode " + dnInfo + " is restarting"); if (resp.getStatus() != Status.SUCCESS) { if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) { throw new InvalidBlockTokenException("Got access token error" + ", status message " + resp.getMessage() + ", " + logInfo); } else { throw new IOException("Got error" + ", status=" + resp.getStatus().name() + ", status message " + resp.getMessage() + ", " + logInfo);
// NOTE(review): fragment — parses the connect-ack from the downstream mirror
// and records its status and first bad link; the enclosing method and the
// body of the 'if (mirrorInStatus != SUCCESS)' branch lie outside this view,
// so the code is left byte-identical.
BlockOpResponseProto connectAck = BlockOpResponseProto.parseFrom(PBHelperClient.vintPrefixed(mirrorIn)); mirrorInStatus = connectAck.getStatus(); firstBadLink = connectAck.getFirstBadLink(); if (mirrorInStatus != SUCCESS) {
/** Receive a reportedBlock copy response from the input stream */ private void receiveResponse(DataInputStream in) throws IOException { long startTime = Time.monotonicNow(); BlockOpResponseProto response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); while (response.getStatus() == Status.IN_PROGRESS) { // read intermediate responses response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); // Stop waiting for slow block moves. Even if it stops waiting, // the actual move may continue. if (stopWaitingForResponse(startTime)) { throw new IOException("Block move timed out"); } } String logInfo = "reportedBlock move is failed"; DataTransferProtoUtil.checkBlockOpStatus(response, logInfo, true); }
// NOTE(review): generated-style hashCode fragment — folds the STATUS field
// number and the status enum value into the running hash; the enclosing
// method is truncated from this view, so the code is left byte-identical.
if (hasStatus()) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStatus());
/**
 * Validates a block-op reply, converting any non-SUCCESS status into an
 * exception.
 *
 * @param response the datanode's reply to a block operation
 * @param logInfo caller-supplied context appended to every error message
 * @param checkBlockPinningErr whether ERROR_BLOCK_PINNED gets its own type
 * @throws InvalidBlockTokenException on ERROR_ACCESS_TOKEN
 * @throws BlockPinningException on ERROR_BLOCK_PINNED when requested
 * @throws IOException for any other non-SUCCESS status
 */
public static void checkBlockOpStatus(BlockOpResponseProto response,
    String logInfo, boolean checkBlockPinningErr) throws IOException {
  final Status status = response.getStatus();
  if (status == Status.SUCCESS) {
    return;
  }
  // Suffix shared by every failure message below.
  final String detail =
      ", status message " + response.getMessage() + ", " + logInfo;
  if (status == Status.ERROR_ACCESS_TOKEN) {
    throw new InvalidBlockTokenException("Got access token error" + detail);
  }
  if (checkBlockPinningErr && status == Status.ERROR_BLOCK_PINNED) {
    throw new BlockPinningException(
        "Got error" + ", status=" + status.name() + detail);
  }
  throw new IOException(
      "Got error" + ", status=" + status.name() + detail);
}
}
// NOTE(review): generated-style hashCode fragment — folds the STATUS field
// number and the status enum value into the running hash; the enclosing
// method is truncated from this view, so the code is left byte-identical.
if (hasStatus()) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStatus());
// Protobuf-generated Builder.mergeFrom for BlockOpResponseProto: for each
// field set on 'other' it copies the value into this builder (string fields
// share the backing object and mark presence in bitField0_), recursively
// merges the two message-typed sub-fields, and carries over unknown fields.
// Generated code — regenerate from the .proto rather than hand-editing.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto.getDefaultInstance()) return this; if (other.hasStatus()) { setStatus(other.getStatus()); } if (other.hasFirstBadLink()) { bitField0_ |= 0x00000002; firstBadLink_ = other.firstBadLink_; onChanged(); } if (other.hasChecksumResponse()) { mergeChecksumResponse(other.getChecksumResponse()); } if (other.hasReadOpChecksumInfo()) { mergeReadOpChecksumInfo(other.getReadOpChecksumInfo()); } if (other.hasMessage()) { bitField0_ |= 0x00000010; message_ = other.message_; onChanged(); } if (other.hasShortCircuitAccessVersion()) { setShortCircuitAccessVersion(other.getShortCircuitAccessVersion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
// NOTE(review): generated-style hashCode fragment — folds the STATUS field
// number and the status enum value into the running hash; the enclosing
// method is truncated from this view, so the code is left byte-identical.
if (hasStatus()) { hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStatus());
// Protobuf-generated Builder.mergeFrom for BlockOpResponseProto (duplicate
// of the variant above): copies every set field from 'other', merging the
// message-typed sub-fields and preserving unknown fields.
// Generated code — regenerate from the .proto rather than hand-editing.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto.getDefaultInstance()) return this; if (other.hasStatus()) { setStatus(other.getStatus()); } if (other.hasFirstBadLink()) { bitField0_ |= 0x00000002; firstBadLink_ = other.firstBadLink_; onChanged(); } if (other.hasChecksumResponse()) { mergeChecksumResponse(other.getChecksumResponse()); } if (other.hasReadOpChecksumInfo()) { mergeReadOpChecksumInfo(other.getReadOpChecksumInfo()); } if (other.hasMessage()) { bitField0_ |= 0x00000010; message_ = other.message_; onChanged(); } if (other.hasShortCircuitAccessVersion()) { setShortCircuitAccessVersion(other.getShortCircuitAccessVersion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
public static boolean replaceBlock(ExtendedBlock block, DatanodeInfo source, DatanodeInfo sourceProxy, DatanodeInfo destination, StorageType targetStorageType, Status opStatus) throws IOException, SocketException { Socket sock = new Socket(); try { sock.connect(NetUtils.createSocketAddr(destination.getXferAddr()), HdfsServerConstants.READ_TIMEOUT); sock.setKeepAlive(true); // sendRequest DataOutputStream out = new DataOutputStream(sock.getOutputStream()); new Sender(out).replaceBlock(block, targetStorageType, BlockTokenSecretManager.DUMMY_TOKEN, source.getDatanodeUuid(), sourceProxy); out.flush(); // receiveResponse DataInputStream reply = new DataInputStream(sock.getInputStream()); BlockOpResponseProto proto = BlockOpResponseProto.parseDelimitedFrom( reply); while (proto.getStatus() == Status.IN_PROGRESS) { proto = BlockOpResponseProto.parseDelimitedFrom(reply); } return proto.getStatus() == opStatus; } finally { sock.close(); } } }
// Protobuf-generated Builder.mergeFrom for BlockOpResponseProto (duplicate
// of the variants above): copies every set field from 'other', merging the
// message-typed sub-fields and preserving unknown fields.
// Generated code — regenerate from the .proto rather than hand-editing.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto.getDefaultInstance()) return this; if (other.hasStatus()) { setStatus(other.getStatus()); } if (other.hasFirstBadLink()) { bitField0_ |= 0x00000002; firstBadLink_ = other.firstBadLink_; onChanged(); } if (other.hasChecksumResponse()) { mergeChecksumResponse(other.getChecksumResponse()); } if (other.hasReadOpChecksumInfo()) { mergeReadOpChecksumInfo(other.getReadOpChecksumInfo()); } if (other.hasMessage()) { bitField0_ |= 0x00000010; message_ = other.message_; onChanged(); } if (other.hasShortCircuitAccessVersion()) { setShortCircuitAccessVersion(other.getShortCircuitAccessVersion()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
/**
 * Validates a block-op reply, converting any non-SUCCESS status into an
 * exception.
 *
 * @param response the datanode's reply to a block operation
 * @param logInfo caller-supplied context appended to the error message
 * @throws InvalidBlockTokenException if the datanode rejected the access token
 * @throws IOException for any other non-SUCCESS status
 */
public static void checkBlockOpStatus(
    BlockOpResponseProto response,
    String logInfo) throws IOException {
  if (response.getStatus() != Status.SUCCESS) {
    if (response.getStatus() == Status.ERROR_ACCESS_TOKEN) {
      throw new InvalidBlockTokenException(
          "Got access token error"
          + ", status message " + response.getMessage()
          + ", " + logInfo
      );
    } else {
      // Include the concrete status name, matching the other failure paths
      // in this codebase (the inline pipeline check and the pinning-aware
      // overload both report ", status=<NAME>"); without it the caller
      // cannot tell which error status the datanode returned.
      throw new IOException(
          "Got error"
          + ", status=" + response.getStatus().name()
          + ", status message " + response.getMessage()
          + ", " + logInfo
      );
    }
  }
}
}
/**
 * Throws if {@code response} carries anything other than SUCCESS.
 *
 * @param response the datanode's reply to a block operation
 * @param logInfo caller-supplied context appended to the error message
 * @throws InvalidBlockTokenException on ERROR_ACCESS_TOKEN
 * @throws IOException for any other non-SUCCESS status
 */
public static void checkBlockOpStatus(
    BlockOpResponseProto response,
    String logInfo) throws IOException {
  final Status st = response.getStatus();
  if (st == Status.SUCCESS) {
    return;
  }
  // Suffix shared by both failure messages.
  final String detail =
      ", status message " + response.getMessage() + ", " + logInfo;
  if (st == Status.ERROR_ACCESS_TOKEN) {
    // Token failures are reported with a dedicated exception type.
    throw new InvalidBlockTokenException("Got access token error" + detail);
  }
  throw new IOException("Got error" + detail);
}
}
void sendTransferBlock(final DatanodeInfo[] targets, final StorageType[] targetStorageTypes, final Token<BlockTokenIdentifier> blockToken) throws IOException { //send the TRANSFER_BLOCK request new Sender(out).transferBlock(block.getCurrentBlock(), blockToken, dfsClient.clientName, targets, targetStorageTypes); out.flush(); //ack BlockOpResponseProto transferResponse = BlockOpResponseProto .parseFrom(PBHelper.vintPrefixed(in)); if (SUCCESS != transferResponse.getStatus()) { throw new IOException("Failed to add a datanode. Response status: " + transferResponse.getStatus()); } }
/** Receive a reportedBlock copy response from the input stream */ private void receiveResponse(DataInputStream in) throws IOException { long startTime = Time.monotonicNow(); BlockOpResponseProto response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); while (response.getStatus() == Status.IN_PROGRESS) { // read intermediate responses response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); // Stop waiting for slow block moves. Even if it stops waiting, // the actual move may continue. if (stopWaitingForResponse(startTime)) { throw new IOException("Block move timed out"); } } String logInfo = "block move is failed"; DataTransferProtoUtil.checkBlockOpStatus(response, logInfo); }
/** Receive a reportedBlock copy response from the input stream */ private void receiveResponse(DataInputStream in) throws IOException { long startTime = Time.monotonicNow(); BlockOpResponseProto response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); while (response.getStatus() == Status.IN_PROGRESS) { // read intermediate responses response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); // Stop waiting for slow block moves. Even if it stops waiting, // the actual move may continue. if (stopWaitingForResponse(startTime)) { throw new IOException("Block move timed out"); } } String logInfo = "block move is failed"; DataTransferProtoUtil.checkBlockOpStatus(response, logInfo); }
void sendTransferBlock(final DatanodeInfo[] targets, final StorageType[] targetStorageTypes, final String[] targetStorageIDs, final Token<BlockTokenIdentifier> blockToken) throws IOException { //send the TRANSFER_BLOCK request new Sender(out).transferBlock(block.getCurrentBlock(), blockToken, dfsClient.clientName, targets, targetStorageTypes, targetStorageIDs); out.flush(); //ack BlockOpResponseProto transferResponse = BlockOpResponseProto .parseFrom(PBHelperClient.vintPrefixed(in)); if (SUCCESS != transferResponse.getStatus()) { throw new IOException("Failed to add a datanode. Response status: " + transferResponse.getStatus()); } }
void sendTransferBlock(final DatanodeInfo[] targets, final StorageType[] targetStorageTypes, final Token<BlockTokenIdentifier> blockToken) throws IOException { //send the TRANSFER_BLOCK request new Sender(out).transferBlock(block.getCurrentBlock(), blockToken, dfsClient.clientName, targets, targetStorageTypes); out.flush(); //ack BlockOpResponseProto transferResponse = BlockOpResponseProto .parseFrom(PBHelper.vintPrefixed(in)); if (SUCCESS != transferResponse.getStatus()) { throw new IOException("Failed to add a datanode. Response status: " + transferResponse.getStatus()); } }
/** Receive a reportedBlock copy response from the input stream. */ private static void receiveResponse(DataInputStream in) throws IOException { BlockOpResponseProto response = BlockOpResponseProto .parseFrom(vintPrefixed(in)); while (response.getStatus() == Status.IN_PROGRESS) { // read intermediate responses response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); } String logInfo = "reportedBlock move is failed"; DataTransferProtoUtil.checkBlockOpStatus(response, logInfo); } }