/**
 * Checks whether a dataset with the given name exists, retrying lookups per the configured
 * {@code retryStrategy}.
 *
 * @param name the dataset name to look up
 * @return {@code true} if the dataset framework has a specification for the dataset
 * @throws DatasetManagementException if the lookup fails after all retries
 */
@Override
public boolean datasetExists(final String name) throws DatasetManagementException {
  // Lambda instead of an anonymous Retries.Callable, consistent with the other retry call
  // sites in this file. The explicit type witness pins the exception type for inference.
  return Retries.<Boolean, DatasetManagementException>callWithRetries(
    () -> datasetFramework.getDatasetSpec(createInstanceId(name)) != null, retryStrategy);
}
/**
 * Checks whether a dataset with the given name exists, retrying lookups per the configured
 * {@code retryStrategy}.
 *
 * @param name the dataset name to look up
 * @return {@code true} if the dataset framework has a specification for the dataset
 * @throws DatasetManagementException if the lookup fails after all retries
 */
@Override
public boolean datasetExists(final String name) throws DatasetManagementException {
  // Lambda instead of an anonymous Retries.Callable, consistent with the other retry call
  // sites in this file. The explicit type witness pins the exception type for inference.
  return Retries.<Boolean, DatasetManagementException>callWithRetries(
    () -> datasetFramework.getDatasetSpec(createInstanceId(name)) != null, retryStrategy);
}
/**
 * Returns the type name of the dataset with the given name, retrying lookups per the
 * configured {@code retryStrategy}.
 *
 * @param name the dataset name to look up
 * @return the dataset type from its specification
 * @throws InstanceNotFoundException if no specification exists for the dataset
 * @throws DatasetManagementException if the lookup fails after all retries
 */
@Override
public String getDatasetType(final String name) throws DatasetManagementException {
  // Lambda instead of an anonymous Retries.Callable, consistent with the other retry call
  // sites in this file. The explicit type witness pins the exception type for inference.
  return Retries.<String, DatasetManagementException>callWithRetries(() -> {
    DatasetSpecification spec = datasetFramework.getDatasetSpec(createInstanceId(name));
    if (spec == null) {
      throw new InstanceNotFoundException(name);
    }
    return spec.getType();
  }, retryStrategy);
}
/**
 * Returns the original properties of the dataset with the given name, retrying lookups per
 * the configured {@code retryStrategy}.
 *
 * @param name the dataset name to look up
 * @return the dataset's original properties wrapped in a {@link DatasetProperties}
 * @throws InstanceNotFoundException if no specification exists for the dataset
 * @throws DatasetManagementException if the lookup fails after all retries
 */
@Override
public DatasetProperties getDatasetProperties(final String name) throws DatasetManagementException {
  // Lambda instead of an anonymous Retries.Callable, consistent with the other retry call
  // sites in this file. The explicit type witness pins the exception type for inference.
  return Retries.<DatasetProperties, DatasetManagementException>callWithRetries(() -> {
    DatasetSpecification spec = datasetFramework.getDatasetSpec(createInstanceId(name));
    if (spec == null) {
      throw new InstanceNotFoundException(name);
    }
    return DatasetProperties.of(spec.getOriginalProperties());
  }, retryStrategy);
}
/**
 * Writes the given payload (with headers) to the named stream over HTTP, retrying per the
 * configured {@code retryStrategy}.
 *
 * @param stream the stream name to write to
 * @param data the event body
 * @param headers event headers; each key is sent prefixed with the stream name
 * @throws IOException if the write fails after all retries
 */
private void write(final String stream, final ByteBuffer data, final Map<String, String> headers) throws IOException {
  // Lambda instead of an anonymous Retries.Callable, consistent with the other retry call
  // sites in this file. The explicit type witness pins the exception type for inference.
  Retries.<Void, IOException>callWithRetries(() -> {
    HttpRequest.Builder requestBuilder = remoteClient.requestBuilder(HttpMethod.POST, stream).withBody(data);
    // NOTE(review): header names are prefixed with "<stream>." — presumably the stream
    // service's expected header convention; confirm against the receiving handler.
    for (Map.Entry<String, String> header : headers.entrySet()) {
      requestBuilder.addHeader(stream + "." + header.getKey(), header.getValue());
    }
    writeToStream(namespace.stream(stream), requestBuilder);
    return null;
  }, retryStrategy);
}
// Convenience wrapper: runs the callable with this class's retryStrategy, retrying only
// failures that the IS_RETRYABLE predicate accepts; rethrows the callable's exception type.
protected <V, T extends Throwable> V callWithRetries(Retries.Callable<V, T> callable) throws T { return Retries.callWithRetries(callable, retryStrategy, IS_RETRYABLE); } }
/**
 * Lists the artifacts visible in the given namespace, including system artifacts, retrying
 * per the configured retry strategy.
 *
 * @param namespaceId the namespace whose artifacts to list
 * @return artifacts from the namespace followed by artifacts from the system namespace
 * @throws IOException if listing fails after all retries
 */
private List<ArtifactInfo> listArtifacts(NamespaceId namespaceId) throws IOException {
  return Retries.callWithRetries(() -> {
    try {
      // Namespace-local artifacts first, then the shared system artifacts.
      List<ArtifactInfo> combined = new ArrayList<>();
      combined.addAll(artifactRepository.getArtifactsInfo(namespaceId));
      combined.addAll(artifactRepository.getArtifactsInfo(NamespaceId.SYSTEM));
      return combined;
    } catch (IOException | RuntimeException e) {
      // Already the declared/unchecked types; propagate unchanged.
      throw e;
    } catch (Exception e) {
      // Anything else gets wrapped to satisfy the IOException contract.
      throw new IOException(e);
    }
  }, retryStrategy);
}
/**
 * Publishes the given metadata operation, attributed to the publishing entity, to the
 * configured messaging topic.
 *
 * @param publisher the entity performing the operation
 * @param operation the metadata operation to publish
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void publish(EntityId publisher, MetadataOperation operation) {
  MetadataMessage message = new MetadataMessage(Type.METADATA_OPERATION, publisher, GSON.toJsonTree(operation));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  LOG.trace("Publishing message {} to topic {}", message, topic);
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish metadata operation: " + operation, e);
  }
}
/**
 * Publishes the given metadata operation, attributed to the program run, to the configured
 * messaging topic.
 *
 * @param programRunId the program run performing the operation
 * @param operation the metadata operation to publish
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void publish(ProgramRunId programRunId, MetadataOperation operation) {
  MetadataMessage message = new MetadataMessage(Type.METADATA_OPERATION, programRunId, GSON.toJsonTree(operation));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  LOG.trace("Publishing message {} to topic {}", message, topic);
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish metadata operation: " + operation, e);
  }
}
/**
 * Publishes the given dataset instance operation, attributed to the entity, to the
 * configured messaging topic.
 *
 * @param entityId the entity the operation applies to
 * @param datasetInstanceOperation the dataset operation to publish
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void publish(EntityId entityId, DatasetInstanceOperation datasetInstanceOperation) {
  MetadataMessage message =
    new MetadataMessage(Type.DATASET_OPERATION, entityId, GSON.toJsonTree(datasetInstanceOperation));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  LOG.trace("Publishing message {} to topic {}", message, topic);
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish metadata operation: " + datasetInstanceOperation, e);
  }
}
}
@Override public void setWorkflowToken(ProgramRunId workflowRunId, WorkflowToken token) { MetadataMessage message = new MetadataMessage(MetadataMessage.Type.WORKFLOW_TOKEN, workflowRunId, GSON.toJsonTree(token)); StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build(); try { Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE); } catch (Exception e) { // Don't log the workflow token, as it can be large and may contain sensitive data throw new RuntimeException("Failed to publish workflow token for workflow run " + workflowRunId, e); } }
/**
 * Publishes the given metadata message to the configured messaging topic, retrying on any
 * failure per the retry strategy.
 *
 * @param message the message to publish; its entity id is expected to identify a program run
 * @throws RuntimeException if publishing fails after all retries
 */
private void publish(MetadataMessage message) {
  StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    LOG.trace("Failed to publish metadata message: {}", message);
    // No cast to ProgramRunId here: %s only needs toString(), and a cast could throw
    // ClassCastException in this error path, masking the original publish failure.
    throw new RuntimeException(String.format("Failed to publish metadata message of type '%s' for program "
                                               + "run '%s'.", message.getType(), message.getEntityId()), e);
  }
}
}
/**
 * Publishes the given metadata message to the configured messaging topic, retrying on any
 * failure per the retry strategy.
 *
 * @param message the message to publish; its entity id is expected to identify a program run
 * @throws RuntimeException if publishing fails after all retries
 */
private void publish(MetadataMessage message) {
  StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    LOG.trace("Failed to publish metadata message: {}", message);
    // No cast to ProgramRunId here: %s only needs toString(), and a cast could throw
    // ClassCastException in this error path, masking the original publish failure.
    throw new RuntimeException(String.format("Failed to publish metadata message of type '%s' for program "
                                               + "run '%s'.", message.getType(), message.getEntityId()), e);
  }
}
}
@Override public void setWorkflowToken(ProgramRunId workflowRunId, WorkflowToken token) { MetadataMessage message = new MetadataMessage(MetadataMessage.Type.WORKFLOW_TOKEN, workflowRunId, GSON.toJsonTree(token)); StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build(); try { Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE); } catch (Exception e) { // Don't log the workflow token, as it can be large and may contain sensitive data throw new RuntimeException("Failed to publish workflow token for workflow run " + workflowRunId, e); } }
/**
 * Publishes a usage record stating that the given program uses the given dataset.
 *
 * @param programId the program using the dataset
 * @param datasetId the dataset being used
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void register(ProgramId programId, DatasetId datasetId) {
  MetadataMessage message =
    new MetadataMessage(MetadataMessage.Type.USAGE, programId, GSON.toJsonTree(new DatasetUsage(datasetId)));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish usage for " + datasetId + " for program " + programId, e);
  }
}
/**
 * Publishes a usage record stating that the given program uses the given dataset.
 *
 * @param programId the program using the dataset
 * @param datasetId the dataset being used
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void register(ProgramId programId, DatasetId datasetId) {
  MetadataMessage message =
    new MetadataMessage(MetadataMessage.Type.USAGE, programId, GSON.toJsonTree(new DatasetUsage(datasetId)));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish usage for " + datasetId + " for program " + programId, e);
  }
}
/**
 * Publishes a usage record stating that the given program uses the given stream.
 *
 * @param programId the program using the stream
 * @param streamId the stream being used
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void register(ProgramId programId, StreamId streamId) {
  MetadataMessage message =
    new MetadataMessage(MetadataMessage.Type.USAGE, programId, GSON.toJsonTree(new DatasetUsage(streamId)));
  StoreRequest storeRequest = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    // Every exception is treated as retryable (ALWAYS_TRUE); the retry strategy decides when to stop.
    Retries.callWithRetries(() -> messagingService.publish(storeRequest), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to publish usage for " + streamId + " for program " + programId, e);
  }
}
/**
 * Publishes a workflow node state update for the given workflow run to the configured
 * messaging topic.
 *
 * @param workflowRunId the workflow run the node state belongs to
 * @param state the node state detail to publish
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void addWorkflowNodeState(ProgramRunId workflowRunId, WorkflowNodeStateDetail state) {
  MetadataMessage message =
    new MetadataMessage(MetadataMessage.Type.WORKFLOW_STATE, workflowRunId, GSON.toJsonTree(state));
  StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    // Fix: the original concatenation was missing the space before "of node", producing
    // "...workflow run <id>of node ..." in the exception message.
    throw new RuntimeException("Failed to publish workflow node state for workflow run " + workflowRunId
                                 + " of node " + state.getNodeId() + " with state " + state.getNodeStatus(), e);
  }
}
}
/**
 * Publishes a workflow node state update for the given workflow run to the configured
 * messaging topic.
 *
 * @param workflowRunId the workflow run the node state belongs to
 * @param state the node state detail to publish
 * @throws RuntimeException if publishing fails after all retries
 */
@Override
public void addWorkflowNodeState(ProgramRunId workflowRunId, WorkflowNodeStateDetail state) {
  MetadataMessage message =
    new MetadataMessage(MetadataMessage.Type.WORKFLOW_STATE, workflowRunId, GSON.toJsonTree(state));
  StoreRequest request = StoreRequestBuilder.of(topic).addPayload(GSON.toJson(message)).build();
  try {
    Retries.callWithRetries(() -> messagingService.publish(request), retryStrategy, Retries.ALWAYS_TRUE);
  } catch (Exception e) {
    // Fix: the original concatenation was missing the space before "of node", producing
    // "...workflow run <id>of node ..." in the exception message.
    throw new RuntimeException("Failed to publish workflow node state for workflow run " + workflowRunId
                                 + " of node " + state.getNodeId() + " with state " + state.getNodeStatus(), e);
  }
}
}
/**
 * Resolves the Kafka broker list from the broker service and writes it into every configured
 * broker-list parameter of the given configuration.
 *
 * @param cConf the configuration to update
 * @param brokerService the service that knows the current broker list
 * @return the resolved broker list
 */
private String updateKafkaBrokerList(CConfiguration cConf, BrokerService brokerService) {
  // Fetch the broker list, retrying with exponential backoff (100ms..2s) for at most 10 seconds.
  String brokerList = Retries.callWithRetries(
    brokerService::getBrokerList,
    RetryStrategies.timeLimit(10, TimeUnit.SECONDS,
                              RetryStrategies.exponentialDelay(100, 2000, TimeUnit.MILLISECONDS)));
  // Propagate the resolved broker list to every configured parameter key.
  for (String key : kafkaBrokerListParams) {
    cConf.set(key, brokerList);
  }
  return brokerList;
}