/**
 * Reads a job batch record out of the given logged event and maps it to its
 * immutable record-value representation.
 *
 * <p>The repeated job-key and job properties are materialized into plain lists
 * before the value object is constructed.
 */
private RecordValue ofJobBatchRecord(LoggedEvent event) {
  final JobBatchRecord batch = new JobBatchRecord();
  event.readValue(batch);

  // Copy the repeated job-key property into a plain list of longs.
  final List<Long> keys =
      StreamSupport.stream(batch.jobKeys().spliterator(), false)
          .map(LongValue::getValue)
          .collect(Collectors.toList());

  // Map each nested job record to its value representation.
  final List<JobRecordValue> jobValues =
      StreamSupport.stream(batch.jobs().spliterator(), false)
          .map(this::ofJobRecord)
          .collect(Collectors.toList());

  return new JobBatchRecordValueImpl(
      objectMapper,
      asString(batch.getType()),
      asString(batch.getWorker()),
      Duration.ofMillis(batch.getTimeout()),
      batch.getAmount(),
      keys,
      jobValues,
      batch.getTruncated());
}
/**
 * Sets the amount on the underlying request DTO.
 *
 * @param amount value forwarded to the request DTO
 * @return this request, for fluent chaining
 */
public BrokerActivateJobsRequest setAmount(int amount) {
  requestDto.setAmount(amount);
  return this;
}
/**
 * Sets the timeout on the underlying request DTO.
 *
 * @param timeout value forwarded to the request DTO (unit defined by the DTO;
 *     presumably milliseconds — confirm against the protocol)
 * @return this request, for fluent chaining
 */
public BrokerActivateJobsRequest setTimeout(long timeout) {
  requestDto.setTimeout(timeout);
  return this;
}
/** Deserializes the broker's response buffer into a {@link JobBatchRecord} DTO. */
@Override
protected JobBatchRecord toResponseDto(DirectBuffer buffer) {
  final JobBatchRecord dto = new JobBatchRecord();
  dto.wrap(buffer);
  return dto;
}
}
// NOTE(review): this span is a garbled/partial extraction — the statements below are
// disconnected fragments (a builder chain, array mutations, and dangling call arguments),
// presumably from a test building a JobBatchRecord. Restore the original file before editing.
new JobBatchRecord() .setAmount(amount) .setTimeout(timeout) .setType(type) .setWorker(worker) .setTruncated(true); record.jobKeys().add().setValue(3L); final JobRecord jobRecord = record.jobs().add(); Arrays.asList(3L), Arrays.asList(jobRecordValue), record.getTruncated());
// NOTE(review): incomplete fragment of collectJobsToActivate — the visitor lambda is opened
// but never closed, a `&&` condition dangles without its left-hand side, and referenced names
// (arrayValueJob, buffer) are not declared here. Do not edit without the full method body.
private void collectJobsToActivate(TypedRecord<JobBatchRecord> record, AtomicInteger amount) { final JobBatchRecord value = record.getValue(); final ValueArray<JobRecord> jobIterator = value.jobs(); final ValueArray<LongValue> jobKeyIterator = value.jobKeys(); final ValueArray<StringValue> variables = value.variables(); value.getType(), (key, jobRecord) -> { int remainingAmount = amount.get(); final long deadline = currentTimeMillis() + value.getTimeout(); jobRecord.setDeadline(deadline).setWorker(value.getWorker()); && value.getLength() + Long.BYTES + jobRecord.getLength() <= record.getMaxValueLength()) { remainingAmount = amount.decrementAndGet(); arrayValueJob.wrap(buffer); } else { value.setTruncated(true);
private void activateJobs(TypedStreamWriter streamWriter, JobBatchRecord value) { final Iterator<JobRecord> iterator = value.jobs().iterator(); final Iterator<LongValue> keyIt = value.jobKeys().iterator(); while (iterator.hasNext() && keyIt.hasNext()) { final JobRecord jobRecord = iterator.next(); final LongValue next1 = keyIt.next(); final long key = next1.getValue(); // update state and write follow up event for job record final long elementInstanceKey = jobRecord.getHeaders().getElementInstanceKey(); if (elementInstanceKey >= 0) { final DirectBuffer payload = collectPayload(variableNames, elementInstanceKey); jobRecord.setPayload(payload); } else { jobRecord.setPayload(WorkflowInstanceRecord.EMPTY_PAYLOAD); } // we have to copy the job record because #write will reset the iterator state final ExpandableArrayBuffer copy = new ExpandableArrayBuffer(); jobRecord.write(copy, 0); final JobRecord copiedJob = new JobRecord(); copiedJob.wrap(copy, 0, jobRecord.getLength()); state.activate(key, copiedJob); streamWriter.appendFollowUpEvent(key, JobIntent.ACTIVATED, copiedJob); } }
/**
 * Registers one mutable record prototype per value type; these instances are
 * reused to deserialize events of the corresponding type.
 */
private void initEventTypeMap() {
  // Job-related records.
  recordsByType.put(ValueType.JOB, new JobRecord());
  recordsByType.put(ValueType.JOB_BATCH, new JobBatchRecord());
  // Workflow and deployment records.
  recordsByType.put(ValueType.DEPLOYMENT, new DeploymentRecord());
  recordsByType.put(ValueType.WORKFLOW_INSTANCE, new WorkflowInstanceRecord());
  // Messaging and incident records.
  recordsByType.put(ValueType.MESSAGE, new MessageRecord());
  recordsByType.put(ValueType.INCIDENT, new IncidentRecord());
}
// NOTE(review): incomplete fragment — trailing arguments of a call plus the opening of an
// error-handler lambda, with no enclosing statement visible. Restore the full call site
// from the original file before editing.
jobType, responseObserver, response.getTruncated()); }, error -> {
/**
 * Sets the worker name on the underlying request DTO.
 *
 * @param worker value forwarded to the request DTO
 * @return this request, for fluent chaining
 */
public BrokerActivateJobsRequest setWorker(String worker) {
  requestDto.setWorker(worker);
  return this;
}
/**
 * Creates a request targeting the JOB_BATCH value type with the ACTIVATE intent.
 *
 * @param jobType job type stored on the request DTO
 */
public BrokerActivateJobsRequest(String jobType) {
  super(ValueType.JOB_BATCH, JobBatchIntent.ACTIVATE);
  requestDto.setType(jobType);
}
/**
 * Adds each requested variable name to the request DTO's repeated variables
 * property, wrapping it as a buffer.
 *
 * @param fetchVariables variable names to fetch on activation
 * @return this request, for fluent chaining
 */
public BrokerActivateJobsRequest setVariables(List<String> fetchVariables) {
  final ValueArray<StringValue> variables = requestDto.variables();
  for (final String variableName : fetchVariables) {
    variables.add().wrap(BufferUtil.wrapString(variableName));
  }
  return this;
}
// NOTE(review): this span is a garbled/partial extraction — the statements below are
// disconnected fragments (a builder chain, array mutations, and dangling call arguments),
// presumably from a test building a JobBatchRecord. Restore the original file before editing.
new JobBatchRecord() .setAmount(amount) .setTimeout(timeout) .setType(type) .setWorker(worker) .setTruncated(true); record.jobKeys().add().setValue(3L); final JobRecord jobRecord = record.jobs().add(); Arrays.asList(3L), Arrays.asList(jobRecordValue), record.getTruncated());
// NOTE(review): incomplete fragment of collectJobsToActivate — the visitor lambda is opened
// but never closed, a `&&` condition dangles without its left-hand side, and referenced names
// (arrayValueJob, buffer) are not declared here. Do not edit without the full method body.
private void collectJobsToActivate(TypedRecord<JobBatchRecord> record, AtomicInteger amount) { final JobBatchRecord value = record.getValue(); final ValueArray<JobRecord> jobIterator = value.jobs(); final ValueArray<LongValue> jobKeyIterator = value.jobKeys(); final ValueArray<StringValue> variables = value.variables(); value.getType(), (key, jobRecord) -> { int remainingAmount = amount.get(); final long deadline = currentTimeMillis() + value.getTimeout(); jobRecord.setDeadline(deadline).setWorker(value.getWorker()); && value.getLength() + Long.BYTES + jobRecord.getLength() <= record.getMaxValueLength()) { remainingAmount = amount.decrementAndGet(); arrayValueJob.wrap(buffer); } else { value.setTruncated(true);
private void activateJobs(TypedStreamWriter streamWriter, JobBatchRecord value) { final Iterator<JobRecord> iterator = value.jobs().iterator(); final Iterator<LongValue> keyIt = value.jobKeys().iterator(); while (iterator.hasNext() && keyIt.hasNext()) { final JobRecord jobRecord = iterator.next(); final LongValue next1 = keyIt.next(); final long key = next1.getValue(); // update state and write follow up event for job record final long elementInstanceKey = jobRecord.getHeaders().getElementInstanceKey(); if (elementInstanceKey >= 0) { final DirectBuffer payload = collectPayload(variableNames, elementInstanceKey); jobRecord.setPayload(payload); } else { jobRecord.setPayload(WorkflowInstanceRecord.EMPTY_PAYLOAD); } // we have to copy the job record because #write will reset the iterator state final ExpandableArrayBuffer copy = new ExpandableArrayBuffer(); jobRecord.write(copy, 0); final JobRecord copiedJob = new JobRecord(); copiedJob.wrap(copy, 0, jobRecord.getLength()); state.activate(key, copiedJob); streamWriter.appendFollowUpEvent(key, JobIntent.ACTIVATED, copiedJob); } }
/**
 * Populates the value-type-to-record map with a fresh prototype instance for
 * every supported value type.
 */
private void initEventTypeMap() {
  recordsByType.put(ValueType.DEPLOYMENT, new DeploymentRecord());
  recordsByType.put(ValueType.WORKFLOW_INSTANCE, new WorkflowInstanceRecord());
  recordsByType.put(ValueType.INCIDENT, new IncidentRecord());
  recordsByType.put(ValueType.MESSAGE, new MessageRecord());
  recordsByType.put(ValueType.JOB, new JobRecord());
  recordsByType.put(ValueType.JOB_BATCH, new JobBatchRecord());
}
/**
 * Deserializes a job batch record from the logged event and converts it into
 * the corresponding immutable record value.
 */
private RecordValue ofJobBatchRecord(LoggedEvent event) {
  final JobBatchRecord jobBatch = new JobBatchRecord();
  event.readValue(jobBatch);

  // Materialize the repeated properties before building the immutable value.
  final List<Long> keyList =
      StreamSupport.stream(jobBatch.jobKeys().spliterator(), false)
          .map(LongValue::getValue)
          .collect(Collectors.toList());
  final List<JobRecordValue> jobList =
      StreamSupport.stream(jobBatch.jobs().spliterator(), false)
          .map(this::ofJobRecord)
          .collect(Collectors.toList());

  return new JobBatchRecordValueImpl(
      objectMapper,
      asString(jobBatch.getType()),
      asString(jobBatch.getWorker()),
      Duration.ofMillis(jobBatch.getTimeout()),
      jobBatch.getAmount(),
      keyList,
      jobList,
      jobBatch.getTruncated());
}
/**
 * Maps a broker job batch response to the gRPC {@code ActivateJobsResponse}.
 *
 * <p>Job keys and job records are parallel lists on the broker response; they
 * are zipped pairwise until either list is exhausted.
 *
 * @param key the partition/request key (currently unused in the mapping)
 * @param brokerResponse the broker's job batch record
 * @return the gRPC response containing one ActivatedJob per key/job pair
 */
public static ActivateJobsResponse toActivateJobsResponse(
    long key, JobBatchRecord brokerResponse) {
  final ActivateJobsResponse.Builder response = ActivateJobsResponse.newBuilder();

  final Iterator<LongValue> keyIterator = brokerResponse.jobKeys().iterator();
  final Iterator<JobRecord> jobIterator = brokerResponse.jobs().iterator();

  while (keyIterator.hasNext() && jobIterator.hasNext()) {
    final long jobKey = keyIterator.next().getValue();
    final JobRecord job = jobIterator.next();

    final ActivatedJob.Builder activatedJob = ActivatedJob.newBuilder();
    activatedJob.setKey(jobKey);
    activatedJob.setType(bufferAsString(job.getType()));
    activatedJob.setJobHeaders(fromBrokerJobHeaders(job.getHeaders()));
    activatedJob.setCustomHeaders(bufferAsJson(job.getCustomHeaders()));
    activatedJob.setWorker(bufferAsString(job.getWorker()));
    activatedJob.setRetries(job.getRetries());
    activatedJob.setDeadline(job.getDeadline());
    activatedJob.setPayload(bufferAsJson(job.getPayload()));

    response.addJobs(activatedJob.build());
  }

  return response.build();
}