} else if (attemptNumber.get() + 1 > params.attempts) { result.completeExceptionally(new RetriesExhaustedException(ex)); } else {
/**
 * Retrieves properties (name, length, sealed status) for the given StreamSegment,
 * retrying transient HDFS failures via {@code HDFS_RETRY}.
 *
 * @param streamSegmentName The name of the StreamSegment to query.
 * @return A {@link SegmentProperties} describing the segment.
 * @throws StreamSegmentException If the segment cannot be found or HDFS access fails
 *                               (IOExceptions and exhausted retries are translated via
 *                               {@code HDFSExceptionHelpers.convertException}).
 */
@Override
public SegmentProperties getStreamSegmentInfo(String streamSegmentName) throws StreamSegmentException {
    ensureInitializedAndNotClosed();
    long traceId = LoggerHelpers.traceEnter(log, "getStreamSegmentInfo", streamSegmentName);
    try {
        return HDFS_RETRY.run(() -> {
            FileStatus status = findStatusForSegment(streamSegmentName, true);
            boolean sealed = isSealed(status.getPath());
            StreamSegmentInformation info = StreamSegmentInformation
                    .builder()
                    .name(streamSegmentName)
                    .length(status.getLen())
                    .sealed(sealed)
                    .build();
            LoggerHelpers.traceLeave(log, "getStreamSegmentInfo", traceId, streamSegmentName, info);
            return info;
        });
    } catch (IOException e) {
        throw HDFSExceptionHelpers.convertException(streamSegmentName, e);
    } catch (RetriesExhaustedException e) {
        // Retries are exhausted: translate the underlying cause, not the wrapper.
        throw HDFSExceptionHelpers.convertException(streamSegmentName, e.getCause());
    }
}
/**
 * Reads up to {@code length} bytes from the segment at the given offset into
 * {@code buffer}, starting at {@code bufferOffset}, retrying transient HDFS
 * failures via {@code HDFS_RETRY}. Reports read latency/byte metrics on success.
 *
 * @param handle       Handle identifying the segment to read from.
 * @param offset       Offset within the segment to start reading at (non-negative).
 * @param buffer       Destination buffer.
 * @param bufferOffset Starting index in {@code buffer} (non-negative).
 * @param length       Number of bytes to read (non-negative; must fit in the buffer).
 * @return The number of bytes actually read.
 * @throws ArrayIndexOutOfBoundsException If the offset/bufferOffset/length arguments
 *                                        do not describe a valid range in {@code buffer}.
 * @throws StreamSegmentException If the segment cannot be read (IOExceptions and
 *                                exhausted retries are translated via
 *                                {@code HDFSExceptionHelpers.convertException}).
 */
@Override
public int read(SegmentHandle handle, long offset, byte[] buffer, int bufferOffset, int length) throws StreamSegmentException {
    ensureInitializedAndNotClosed();
    long traceId = LoggerHelpers.traceEnter(log, "read", handle, offset, length);

    // Overflow-safe bounds check. The previous form (buffer.length < bufferOffset + length)
    // could overflow int for large bufferOffset + length and wrongly accept invalid args;
    // "length > buffer.length - bufferOffset" cannot overflow once bufferOffset >= 0.
    if (offset < 0 || bufferOffset < 0 || length < 0 || length > buffer.length - bufferOffset) {
        throw new ArrayIndexOutOfBoundsException(String.format(
                "Offset (%s) must be non-negative, and bufferOffset (%s) and length (%s) must be valid indices into buffer of size %s.",
                offset, bufferOffset, length, buffer.length));
    }

    Timer timer = new Timer();
    try {
        return HDFS_RETRY.run(() -> {
            int totalBytesRead = readInternal(handle, buffer, offset, bufferOffset, length);
            HDFSMetrics.READ_LATENCY.reportSuccessEvent(timer.getElapsed());
            HDFSMetrics.READ_BYTES.add(totalBytesRead);
            LoggerHelpers.traceLeave(log, "read", traceId, handle, offset, totalBytesRead);
            return totalBytesRead;
        });
    } catch (IOException e) {
        throw HDFSExceptionHelpers.convertException(handle.getSegmentName(), e);
    } catch (RetriesExhaustedException e) {
        // Retries are exhausted: translate the underlying cause, not the wrapper.
        throw HDFSExceptionHelpers.convertException(handle.getSegmentName(), e.getCause());
    }
}
throw new RetriesExhaustedException(ex); } else {
@SuppressWarnings("unchecked") public <RetryT extends Exception, ReturnT> ReturnT run(Retryable<ReturnT, RetryT, ThrowsT> r) throws ThrowsT { Preconditions.checkNotNull(r); long delay = params.initialMillis; Exception last = null; for (int attemptNumber = 1; attemptNumber <= params.attempts; attemptNumber++) { try { return r.attempt(); } catch (Exception e) { if (canRetry(e)) { last = e; } else if (e instanceof RuntimeException) { throw (RuntimeException) e; } else { throw (ThrowsT) e; } } if (attemptNumber < params.attempts) { // no need to sleep if it is the last attempt final long sleepFor = delay; Exceptions.handleInterrupted(() -> Thread.sleep(sleepFor)); delay = Math.min(params.maxDelay, params.multiplier * delay); log.debug("Retrying command. Retry #{}, timestamp={}", attemptNumber, Instant.now()); } } throw new RetriesExhaustedException(last); }
if (attemptCount > this.config.getMaxWriteAttempts()) { throw new RetriesExhaustedException(w.getFailureCause());
/**
 * Reads events from the given reader until a read returns neither an event nor a
 * checkpoint, or until {@code limit} valid (non-null) events have been counted.
 * The reader is always closed before this method returns or throws.
 *
 * @param reader The reader to drain; closed unconditionally (see finally block).
 * @param limit  Maximum number of valid events to count before stopping.
 * @return The number of non-null events read.
 * @throws TruncatedDataException    If the stream data was truncated (rethrown with its cause).
 * @throws RetriesExhaustedException If a RuntimeException caused by exhausted retries occurs.
 */
@SneakyThrows
private static <T> int readEvents(EventStreamReader<T> reader, int limit) {
    final int timeout = 1000;
    final int interReadWait = 50;
    EventRead<T> event;
    int validEvents = 0;
    try {
        do {
            event = reader.readNextEvent(timeout);
            // Brief pause between reads to avoid hammering the reader in a tight loop.
            Exceptions.handleInterrupted(() -> Thread.sleep(interReadWait));
            if (event.getEvent() != null) {
                validEvents++;
            }
        } while ((event.getEvent() != null || event.isCheckpoint()) && validEvents < limit);
    } catch (TruncatedDataException e) {
        throw new TruncatedDataException(e.getCause());
    } catch (RuntimeException e) {
        if (e.getCause() instanceof RetriesExhaustedException) {
            throw new RetriesExhaustedException(e.getCause());
        } else {
            throw e;
        }
    } finally {
        // Fix: the original only closed the reader on the success and TruncatedDataException
        // paths, leaking it on every RuntimeException path. Close unconditionally here.
        reader.close();
    }
    return validEvents;
}