/**
 * Deserializes a msgpack array into a Java collection.
 *
 * <p>Uses a {@link LinkedHashSet} when the collection type is a set (preserving encounter
 * order while de-duplicating) and an {@link ArrayList} otherwise; each element is converted
 * via {@code doDeserialize} using the collection's declared element type.
 *
 * @param module the module descriptor passed through to element deserialization
 * @param collectionType describes whether this is a set and what the element type is
 * @param value the msgpack array to convert
 * @return the deserialized collection (insertion-ordered)
 * @throws IOException if element deserialization fails
 */
private Collection<?> deserializeCollection( ModuleDescriptor module, CollectionType collectionType, ArrayValue value )
    throws IOException
{
    // Build as Collection<Object>, not Collection<?>: add() on a wildcard-typed
    // collection does not compile, and the raw-typed constructors lose type safety.
    Collection<Object> collection = collectionType.isSet()
                                    ? new LinkedHashSet<>( value.size() )
                                    : new ArrayList<>( value.size() );
    for( Value element : value.list() )
    {
        collection.add( doDeserialize( module, collectionType.collectedType(), element ) );
    }
    return collection;
}
private static List<ArrayValue> downloadFirstResults(TDJobOperator j, int max, TaskState state, String stateKey, DurationInterval retryInterval) { return pollingRetryExecutor(state, stateKey) .retryUnless(TDOperator::isDeterministicClientException) .withRetryInterval(retryInterval) .withErrorMessage("Failed to download result of job '%s'", j.getJobId()) .run(s -> { try { return j.getResult(ite -> { List<ArrayValue> results = new ArrayList<>(max); for (int i = 0; i < max; i++) { if (ite.hasNext()) { ArrayValue row = ite.next().asArrayValue(); results.add(row); } else { break; } } return results; }); } catch (TDClientHttpNotFoundException ex) { // this happens if query is INSERT or CREATE. return empty results return ImmutableList.of(); } }); }
/**
 * Downloads all result rows of a job as {@link Config} objects keyed by column name,
 * retrying transient client failures.
 *
 * <p>Aborts with a {@link TaskExecutionException} (caused by
 * {@link TaskLimitExceededException}) as soon as the row count exceeds
 * {@code Limits.maxWorkflowTasks()}, since each row fans out into a task.
 *
 * @param job the job whose result is converted
 * @return one {@code Config} per result row
 */
private List<Config> fetchRows(TDJobOperator job)
{
    return PollingRetryExecutor.pollingRetryExecutor(state, RESULT)
            .retryUnless(TDOperator::isDeterministicClientException)
            .withErrorMessage("Failed to download result of job '%s'", job.getJobId())
            .run(s -> {
                List<String> columnNames = job.getResultColumnNames();
                return job.getResult(ite -> {
                    List<Config> rows = new ArrayList<>();
                    while (ite.hasNext()) {
                        rows.add(row(columnNames, ite.next().asArrayValue()));
                        // Checked after the add: up to the limit is allowed, limit+1 aborts.
                        if (rows.size() > Limits.maxWorkflowTasks()) {
                            throw new TaskExecutionException(
                                    new TaskLimitExceededException("Too many tasks. Limit: " + Limits.maxWorkflowTasks()));
                        }
                    }
                    return rows;
                });
            });
}
private boolean fetchJobResult(TDJobOperator job) { Optional<ArrayValue> firstRow = pollingRetryExecutor(state, RESULT) .retryUnless(TDOperator::isDeterministicClientException) .withErrorMessage("Failed to download result of job '%s'", job.getJobId()) .run(s -> job.getResult( ite -> ite.hasNext() ? Optional.of(ite.next()) : Optional.absent())); // There must be at least one row in the result for the wait condition to be fulfilled. if (!firstRow.isPresent()) { return false; } ArrayValue row = firstRow.get(); if (row.size() < 1) { throw new TaskExecutionException("Got empty row in result of query"); } Value firstCol = row.get(0); return isTruthy(firstCol); }
/**
 * Streams a job's result into a CSV file in the workspace, retrying transient
 * client failures. No-op when no download file was requested.
 *
 * @param j the job whose result is written
 * @param workspace workspace in which the output file is created
 * @param downloadFile target file name; absent disables the download
 * @param state task state used by the retry executor
 * @param retryInterval backoff configuration between retries
 */
static void downloadJobResult(TDJobOperator j, Workspace workspace, Optional<String> downloadFile, TaskState state, DurationInterval retryInterval)
{
    if (downloadFile.isPresent()) {
        pollingRetryExecutor(state, DOWNLOAD)
                .retryUnless(TDOperator::isDeterministicClientException)
                .withRetryInterval(retryInterval)
                .withErrorMessage("Failed to download result of job '%s'", j.getJobId())
                .runOnce(s -> j.getResult(rows -> {
                    // Header first, then one CSV line per result row; the writer is
                    // closed by try-with-resources even if a row fails to serialize.
                    try (BufferedWriter writer = workspace.newBufferedWriter(downloadFile.get(), UTF_8)) {
                        addCsvHeader(writer, j.getResultColumnNames());
                        while (rows.hasNext()) {
                            addCsvRow(writer, rows.next().asArrayValue());
                        }
                        return true;
                    }
                    catch (IOException ex) {
                        // getResult's callback cannot throw checked IOException.
                        throw new UncheckedIOException(ex);
                    }
                }));
    }
}
/**
 * Builds the {@code store_params} config for a finished job. When
 * {@code storeLastResults} is set, exposes the first result row under
 * {@code td.last_results} as a column-name → value map; otherwise returns an
 * empty config.
 *
 * @param cf factory used to create config objects
 * @param j the job whose first result row is captured
 * @param storeLastResults whether to capture the first row at all
 * @param state task state used by the retry executor
 * @param retryInterval backoff configuration between retries
 * @return a config containing {@code td.last_results}, or an empty config
 */
static Config buildStoreParams(ConfigFactory cf, TDJobOperator j, boolean storeLastResults, TaskState state, DurationInterval retryInterval)
{
    if (!storeLastResults) {
        return cf.create();
    }

    List<ArrayValue> results = downloadFirstResults(j, 1, state, RESULT, retryInterval);
    Map<RawValue, Value> lastResultMap = new LinkedHashMap<>();
    if (!results.isEmpty()) {
        ArrayValue firstRow = results.get(0);
        List<String> columnNames = j.getResultColumnNames();
        // Pair columns with values positionally; ignore whichever side is longer.
        int pairs = Math.min(firstRow.size(), columnNames.size());
        for (int i = 0; i < pairs; i++) {
            lastResultMap.put(ValueFactory.newString(columnNames.get(i)), firstRow.get(i));
        }
    }

    Config td = cf.create();
    try {
        // Round-trip through JSON to convert the msgpack map into a Jackson tree.
        td.set("last_results", new ObjectMapper().readTree(ValueFactory.newMap(lastResultMap).toJson()));
    }
    catch (IOException ex) {
        throw Throwables.propagate(ex);
    }
    return cf.create().set("td", td);
}
/**
 * Checks whether a count job reported at least {@code rows} rows.
 *
 * <p>Expects the job result to be exactly one row with exactly one integer column
 * (the count); anything else aborts the task.
 *
 * @param rows the minimum row count required
 * @param job the finished count job
 * @return {@code true} if the reported count is {@code >= rows}
 * @throws TaskExecutionException on empty result, wrong row width, or non-integer count
 */
private boolean fetchJobResult(int rows, TDJobOperator job)
{
    Optional<ArrayValue> firstRow = pollingRetryExecutor(state, RESULT)
            .retryUnless(TDOperator::isDeterministicClientException)
            .withErrorMessage("Failed to download result of job '%s'", job.getJobId())
            .withRetryInterval(retryInterval)
            .run(s -> job.getResult(
                    ite -> ite.hasNext() ? Optional.of(ite.next()) : Optional.absent()));

    if (!firstRow.isPresent()) {
        throw new TaskExecutionException("Got unexpected empty result for count job: " + job.getJobId());
    }

    ArrayValue countRow = firstRow.get();
    if (countRow.size() != 1) {
        throw new TaskExecutionException("Got unexpected result row size for count job: " + countRow.size());
    }

    Value countValue = countRow.get(0);
    BigInteger actualRows;
    try {
        actualRows = countValue.asIntegerValue().asBigInteger();
    }
    catch (MessageTypeCastException e) {
        throw new TaskExecutionException("Got unexpected value type count job: " + countValue.getValueType());
    }
    // Satisfied when requested <= actual, using BigInteger to avoid overflow.
    return BigInteger.valueOf(rows).compareTo(actualRows) <= 0;
}
// NOTE(review): this line appears to be a garbled paste, not a valid method. `visitArray`
// is missing its opening brace, `i` is referenced before the `for` loop that declares it,
// the `jsonDropColumns` branch and two near-duplicate `jsonColumn`/`src` fragments are
// interleaved out of order, and the braces do not balance — it cannot compile as written.
// Do not edit in place; recover the original `visitArray` implementation from version
// control and replace this line wholesale. Left byte-identical below for reference.
Value visitArray(String rootPath, ArrayValue arrayValue) int size = arrayValue.size(); ArrayList<Value> newValue = new ArrayList<>(size); int j = 0; String newPath = newArrayJsonPath(rootPath, i); if (! jsonDropColumns.contains(newPath)) { Value v = arrayValue.get(i); newValue.add(j++, visit(newPath, v)); int src = jsonColumn.getSrcTailIndex().intValue(); if (src == JsonColumn.WILDCARD_INDEX) { Value v = arrayValue.get(i); if (v == null) { v = jsonColumn.getDefaultValue(); Value v = (src < arrayValue.size() ? arrayValue.get(src) : null); if (v == null) { v = jsonColumn.getDefaultValue(); for (int i = 0; i < size; i++) { String newPath = newArrayJsonPath(rootPath, i); Value v = arrayValue.get(i); newValue.add(j++, visit(newPath, v)); Value v = (src < arrayValue.size() ? arrayValue.get(src) : null); if (v == null) { v = jsonColumn.getDefaultValue();