/**
 * Starts a fresh Unit of Work that carries no message and returns it to the caller.
 *
 * @return the newly started Unit of Work
 */
private UnitOfWork<?> startAndGetUnitOfWork() {
    return DefaultUnitOfWork.startAndGet(null);
}
/**
 * Runs the given validator against the current aggregate state inside a throwaway
 * Unit of Work, which is always rolled back so the validation cannot leak changes
 * into subsequent assertions.
 *
 * @param aggregateStateValidator Consumer that asserts on the aggregate's state
 * @return this validator, for fluent chaining
 */
@Override
public ResultValidator<T> expectState(Consumer<T> aggregateStateValidator) {
    DefaultUnitOfWork<Message<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
    try {
        state.get().execute(aggregateStateValidator);
    } finally {
        // Validation must be side-effect free: discard anything the validator touched.
        unitOfWork.rollback();
    }
    return this;
}
/**
 * Handles the given {@code event} in the scope of a Unit of Work. If handling the event results in an exception
 * the exception will be wrapped in a {@link FixtureExecutionException}.
 *
 * @param event The event message to handle
 */
protected void handleInSaga(EventMessage<?> event) {
    ensureSagaResourcesInitialized();
    ResultMessage<?> resultMessage = DefaultUnitOfWork.startAndGet(event).executeWithResult(() -> {
        sagaManager.handle(event, Segment.ROOT_SEGMENT);
        return null;
    });
    if (resultMessage.isExceptional()) {
        Throwable e = resultMessage.exceptionResult();
        // Errors (e.g. assertion failures from the test framework) must propagate as-is.
        // `instanceof` is the idiomatic, equivalent form of Error.class.isAssignableFrom(e.getClass()).
        if (e instanceof Error) {
            throw (Error) e;
        }
        throw new FixtureExecutionException("Exception occurred while handling an event", e);
    }
}
/**
 * Performs the actual handling logic: runs the command through the interceptor chain
 * inside a transactional Unit of Work and reports the outcome to the callback.
 *
 * @param command  The actual command to handle
 * @param handler  The handler that must be invoked for this command
 * @param callback The callback to notify of the result
 * @param <C>      The type of payload of the command
 * @param <R>      The type of result expected from the command handler
 */
@SuppressWarnings({"unchecked"})
protected <C, R> void handle(CommandMessage<C> command,
                             MessageHandler<? super CommandMessage<?>> handler,
                             CommandCallback<? super C, ? super R> callback) {
    if (logger.isDebugEnabled()) {
        logger.debug("Handling command [{}]", command.getCommandName());
    }
    UnitOfWork<CommandMessage<?>> uow = DefaultUnitOfWork.startAndGet(command);
    uow.attachTransaction(transactionManager);
    InterceptorChain interceptorChain = new DefaultInterceptorChain<>(uow, handlerInterceptors, handler);
    // executeWithResult commits or rolls back per rollbackConfiguration; the outcome
    // (successful or exceptional) is wrapped in a CommandResultMessage either way.
    CommandResultMessage<R> result =
            asCommandResultMessage(uow.executeWithResult(interceptorChain::proceed, rollbackConfiguration));
    callback.onResult(command, result);
}
// NOTE(review): fragment of a larger query-dispatch method — the enclosing
// definition is not visible in this chunk, so only comments are added here.
// Remaining time budget for this handler invocation, derived from the overall deadline.
long leftTimeout = getRemainingOfDeadline(deadline);
// Invoke the handler inside a fresh Unit of Work scoped to the intercepted query.
ResultMessage<CompletableFuture<QueryResponseMessage<R>>> resultMessage =
        interceptAndInvoke(DefaultUnitOfWork.startAndGet(interceptedQuery), handler);
QueryResponseMessage<R> response = null;
/**
 * Quartz entry point: rebuilds the scheduled event from the job data and publishes it
 * on the event bus within a (optionally transactional) Unit of Work.
 *
 * @param context The Quartz execution context carrying the job data and scheduler resources
 * @throws JobExecutionException when publication of the event fails
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("Starting job to publish a scheduled event");
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
        Object event = jobDataBinder.fromJobData(jobData);
        EventMessage<?> eventMessage = createMessage(event);
        // Reuse the SchedulerContext resolved above instead of calling
        // context.getScheduler().getContext() again for every lookup.
        EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
        // The transaction manager is optional; only attach a transaction when one is configured.
        if (txManager != null) {
            unitOfWork.attachTransaction(txManager);
        }
        unitOfWork.execute(() -> eventBus.publish(eventMessage));
        if (logger.isInfoEnabled()) {
            logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
                        eventMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        logger.error("Exception occurred while publishing scheduled event [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}
// NOTE(review): fragment of a Quartz deadline-job method — the last statement is cut
// off mid-assignment and the enclosing definition is not visible here; comments only.
// Restore the scope descriptor (the component that scheduled this deadline) from the job data.
ScopeDescriptor deadlineScope = deadlineScope(serializer, jobData);
// Handle the deadline message inside a transactional Unit of Work.
DefaultUnitOfWork<DeadlineMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(deadlineMessage);
unitOfWork.attachTransaction(transactionManager);
InterceptorChain chain =
/**
 * Resets the fixture and seeds it with the aggregate instance produced by the given supplier.
 * Initialization happens inside a Unit of Work; an in-memory repository is registered first
 * when no repository has been configured yet.
 *
 * @param aggregate Supplier of the initial aggregate state
 * @return this executor, for fluent chaining
 */
@Override
public TestExecutor<T> givenState(Supplier<T> aggregate) {
    clearGivenWhenState();
    DefaultUnitOfWork.startAndGet(null).execute(() -> {
        // Lazily fall back to an in-memory repository when none was registered explicitly.
        if (repository == null) {
            registerRepository(new InMemoryRepository<>(aggregateType, eventStore, getRepositoryProvider()));
        }
        try {
            repository.newInstance(aggregate::get);
        } catch (Exception e) {
            throw new FixtureExecutionException(
                    "An exception occurred while trying to initialize repository with given aggregate (using 'givenState')",
                    e);
        }
    });
    return this;
}
// NOTE(review): fragment of a query-dispatch loop — cut off mid-loop; the enclosing
// definition is not visible here, so only comments are added.
// Tracks whether any handler has produced a usable result yet.
boolean invocationSuccess = false;
// Try each candidate handler in turn until one of them succeeds.
while (!invocationSuccess && handlerIterator.hasNext()) {
    // Each attempt runs in its own Unit of Work so a failed attempt rolls back independently.
    DefaultUnitOfWork<QueryMessage<Q, R>> uow = DefaultUnitOfWork.startAndGet(interceptedQuery);
    ResultMessage<CompletableFuture<QueryResponseMessage<R>>> resultMessage =
            interceptAndInvoke(uow, handlerIterator.next());
/**
 * Compares the working aggregate's state against an aggregate freshly reconstructed from its
 * events, to detect illegal state changes (state kept outside of applied events). The check
 * runs in a separate Unit of Work that is always rolled back, so it has no lasting effect.
 *
 * @param fieldFilter      Filter determining which fields take part in the comparison
 * @param workingAggregate The aggregate instance that was used during command execution
 */
private void detectIllegalStateChanges(MatchAllFieldFilter fieldFilter, Aggregate<T> workingAggregate) {
    logger.debug("Starting separate Unit of Work for the purpose of checking illegal state changes in Aggregate");
    // The check only makes sense when an aggregate was actually touched and reporting is enabled.
    if (aggregateIdentifier != null && workingAggregate != null && reportIllegalStateChange) {
        UnitOfWork<?> uow = DefaultUnitOfWork.startAndGet(null);
        try {
            Aggregate<T> aggregate2 = repository.delegate.load(aggregateIdentifier);
            // A deleted working aggregate must also be deleted in the repository's copy.
            if (workingAggregate.isDeleted()) {
                throw new AxonAssertionError("The working aggregate was considered deleted, "
                        + "but the Repository still contains a non-deleted copy of "
                        + "the aggregate. Make sure the aggregate explicitly marks "
                        + "itself as deleted in an EventHandler.");
            }
            assertValidWorkingAggregateState(aggregate2, fieldFilter, workingAggregate);
        } catch (AggregateNotFoundException notFound) {
            // Acceptable only when the working aggregate considers itself deleted too.
            if (!workingAggregate.isDeleted()) {
                throw new AxonAssertionError("The working aggregate was not considered deleted, " //NOSONAR
                        + "but the Repository cannot recover the state of the "
                        + "aggregate, as it is considered deleted there.");
            }
        } catch (Exception e) {
            throw new FixtureExecutionException("An Exception occurred while reconstructing the Aggregate from "
                        + "given and published events. This may be an indication "
                        + "that the aggregate cannot be recreated from its events.",
                                                e);
        } finally {
            // rollback to prevent changes being pushed to event store
            uow.rollback();
        }
    }
}
/**
 * Consumes a scheduled deadline by routing it through the registered handler interceptors
 * to the given consumer, inside a dedicated Unit of Work.
 *
 * @param deadlineConsumer      Dispatches the deadline message into its originating scope
 * @param scheduledDeadlineInfo Holds the deadline message and the scope it was scheduled in
 * @return the (possibly intercepted) deadline message that was handled
 */
private DeadlineMessage<?> consumeDeadline(DeadlineConsumer deadlineConsumer,
                                           ScheduledDeadlineInfo scheduledDeadlineInfo) {
    DefaultUnitOfWork<? extends DeadlineMessage<?>> uow =
            DefaultUnitOfWork.startAndGet(scheduledDeadlineInfo.deadlineMessage());
    InterceptorChain chain = new DefaultInterceptorChain<>(uow, handlerInterceptors, deadlineMessage -> {
        deadlineConsumer.consume(scheduledDeadlineInfo.getDeadlineScope(), deadlineMessage);
        return deadlineMessage;
    });
    ResultMessage<?> resultMessage = uow.executeWithResult(chain::proceed);
    if (resultMessage.isExceptional()) {
        Throwable e = resultMessage.exceptionResult();
        // Wrap any handling failure so fixture users get a uniform exception type.
        throw new FixtureExecutionException("Exception occurred while handling the deadline", e);
    }
    return (DeadlineMessage<?>) resultMessage.getPayload();
}
}
/**
 * Loading the same aggregate twice within one Unit of Work must still yield consecutive
 * event sequence numbers — both loads resolve to the same aggregate instance.
 */
@Test
public void testCommandHandlerLoadsSameAggregateTwice() throws Exception {
    // First unit of work: create the aggregate and apply one event.
    DefaultUnitOfWork.startAndGet(null);
    repository.newInstance(() -> new StubAggregate(aggregateIdentifier)).execute(StubAggregate::doSomething);
    CurrentUnitOfWork.commit();

    // Second unit of work: load the same aggregate twice and mutate it via each reference.
    DefaultUnitOfWork.startAndGet(null);
    repository.load(aggregateIdentifier).execute(StubAggregate::doSomething);
    repository.load(aggregateIdentifier).execute(StubAggregate::doSomething);
    CurrentUnitOfWork.commit();

    // Exactly three events must be stored, with sequence numbers 0, 1 and 2.
    Iterator<? extends DomainEventMessage<?>> events = stubEventStore.readEvents(aggregateIdentifier);
    for (long expectedSequence = 0; expectedSequence <= 2; expectedSequence++) {
        assertTrue(events.hasNext());
        assertEquals((Object) expectedSequence, events.next().getSequenceNumber());
    }
    assertFalse(events.hasNext());
}
/**
 * Verifies that the commands handled by an event sourced aggregate are recorded in
 * dispatch order, also when the aggregate is subsequently loaded from its repository.
 */
@Test
public void orderInEventSourcedAggregate() {
    Repository<MyAggregate> repository = configuration.repository(MyAggregate.class);
    configuration.commandGateway().sendAndWait(command);
    // Parameterize the UnitOfWork instead of using the raw type, avoiding unchecked warnings.
    UnitOfWork<CommandMessage<?>> unitOfWork =
            DefaultUnitOfWork.startAndGet(GenericCommandMessage.asCommandMessage("loading"));
    MyAggregate loadedAggregate = repository.load(aggregateIdentifier).invoke(Function.identity());
    unitOfWork.commit();
    assertEquals(expectedDescriptions(command), loadedAggregate.getHandledCommands());
}
/**
 * Dispatches the given query to every matching handler and returns the successful
 * responses as a stream. Handlers that fail or exceed the remaining deadline are
 * reported to the error handler and omitted from the result.
 *
 * @param query   The query to dispatch
 * @param timeout Maximum total time to wait for responses
 * @param unit    The unit of {@code timeout}
 * @param <Q>     The query payload type
 * @param <R>     The response type
 * @return a stream of the query responses that completed successfully
 */
@Override
public <Q, R> Stream<QueryResponseMessage<R>> scatterGather(QueryMessage<Q, R> query, long timeout, TimeUnit unit) {
    MessageMonitor.MonitorCallback monitorCallback = messageMonitor.onMessageIngested(query);
    QueryMessage<Q, R> interceptedQuery = intercept(query);
    List<MessageHandler<? super QueryMessage<?, ?>>> handlers = getHandlersForMessage(interceptedQuery);
    if (handlers.isEmpty()) {
        monitorCallback.reportIgnored();
        return Stream.empty();
    }
    // Absolute point in time by which all handlers must have answered.
    long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
    return handlers.stream()
                   .map(queryHandler -> {
                       try {
                           long remaining = getRemainingOfDeadline(deadline);
                           QueryResponseMessage<R> answer =
                                   interceptAndInvoke(DefaultUnitOfWork.startAndGet(interceptedQuery), queryHandler)
                                           .get(remaining, TimeUnit.MILLISECONDS);
                           monitorCallback.reportSuccess();
                           return answer;
                       } catch (Exception e) {
                           monitorCallback.reportFailure(e);
                           errorHandler.onError(e, interceptedQuery, queryHandler);
                           // Failed invocations are filtered out below.
                           return null;
                       }
                   })
                   .filter(Objects::nonNull);
}
/**
 * Performs the actual handling logic.
 *
 * @param command  The actual command to handle
 * @param handler  The handler that must be invoked for this command
 * @param callback The callback to notify of the result
 * @param <C>      The type of payload of the command
 * @param <R>      The type of result expected from the command handler
 */
@SuppressWarnings({"unchecked"})
protected <C, R> void handle(CommandMessage<C> command,
                             MessageHandler<? super CommandMessage<?>> handler,
                             CommandCallback<? super C, ? super R> callback) {
    if (logger.isDebugEnabled()) {
        logger.debug("Handling command [{}]", command.getCommandName());
    }
    // Each command is processed in its own Unit of Work, bound to a transaction.
    UnitOfWork<CommandMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(command);
    unitOfWork.attachTransaction(transactionManager);
    // Route the command through the configured handler interceptors before the handler itself.
    InterceptorChain chain = new DefaultInterceptorChain<>(unitOfWork, handlerInterceptors, handler);
    // executeWithResult commits or rolls back according to rollbackConfiguration; the
    // (possibly exceptional) outcome is wrapped into a CommandResultMessage either way.
    CommandResultMessage<R> resultMessage =
            asCommandResultMessage(unitOfWork.executeWithResult(chain::proceed, rollbackConfiguration));
    // The callback is always notified, whether the result is successful or exceptional.
    callback.onResult(command, resultMessage);
}
/**
 * Performs the actual handling logic: runs the command through the interceptor chain
 * inside a transactional Unit of Work, reporting success or failure via the callback.
 *
 * @param command  The actual command to handle
 * @param handler  The handler that must be invoked for this command
 * @param callback The callback to notify of the result
 * @param <C>      The type of payload of the command
 * @param <R>      The type of result expected from the command handler
 */
@SuppressWarnings({"unchecked"})
protected <C, R> void handle(CommandMessage<C> command,
                             MessageHandler<? super CommandMessage<?>> handler,
                             CommandCallback<? super C, R> callback) {
    if (logger.isDebugEnabled()) {
        logger.debug("Handling command [{}]", command.getCommandName());
    }
    try {
        UnitOfWork<CommandMessage<?>> uow = DefaultUnitOfWork.startAndGet(command);
        uow.attachTransaction(transactionManager);
        InterceptorChain interceptorChain = new DefaultInterceptorChain<>(uow, handlerInterceptors, handler);
        R commandResult = (R) uow.executeWithResult(interceptorChain::proceed, rollbackConfiguration);
        callback.onSuccess(command, commandResult);
    } catch (Exception e) {
        // Any failure — from interceptors, handler or Unit of Work — goes to the failure callback.
        callback.onFailure(command, e);
    }
}
/**
 * Quartz entry point: deserializes the scheduled deadline from the job data and processes
 * it within a transactional Unit of Work. Failures are rethrown as JobExecutionException
 * so Quartz registers the job as failed.
 *
 * @param context The Quartz execution context carrying the job data and scheduler resources
 * @throws JobExecutionException when processing the deadline fails
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Starting a deadline job");
    }
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        // All shared resources were placed in the SchedulerContext at configuration time.
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        Serializer serializer = (Serializer) schedulerContext.get(JOB_DATA_SERIALIZER);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        ScopeAwareProvider scopeAwareProvider = (ScopeAwareProvider) schedulerContext.get(SCOPE_AWARE_RESOLVER);
        DeadlineMessage<?> deadlineMessage = deadlineMessage(serializer, jobData);
        ScopeDescriptor deadlineScope = deadlineScope(serializer, jobData);
        DefaultUnitOfWork<DeadlineMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(deadlineMessage);
        unitOfWork.attachTransaction(txManager);
        unitOfWork.execute(() -> executeScheduledDeadline(scopeAwareProvider, deadlineMessage, deadlineScope));
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Job successfully executed. Deadline message [{}] processed.",
                        deadlineMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        LOGGER.error("Exception occurred during processing a deadline job [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}
// NOTE(review): fragment of a larger query-dispatch method — the enclosing
// definition is not visible in this chunk, so only comments are added here.
// Remaining time budget for this handler invocation, derived from the overall deadline.
long leftTimeout = getRemainingOfDeadline(deadline);
// Invoke the handler inside a fresh Unit of Work scoped to the intercepted query.
ResultMessage<CompletableFuture<QueryResponseMessage<R>>> resultMessage =
        interceptAndInvoke(DefaultUnitOfWork.startAndGet(interceptedQuery), handler);
QueryResponseMessage<R> response = null;
/**
 * Quartz entry point: rebuilds the scheduled event from the job data and publishes it
 * on the event bus within a (optionally transactional) Unit of Work.
 *
 * @param context The Quartz execution context carrying the job data and scheduler resources
 * @throws JobExecutionException when publication of the event fails
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("Starting job to publish a scheduled event");
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
        Object event = jobDataBinder.fromJobData(jobData);
        EventMessage<?> eventMessage = createMessage(event);
        // Reuse the SchedulerContext resolved above instead of calling
        // context.getScheduler().getContext() again for every lookup.
        EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
        // The transaction manager is optional; only attach a transaction when one is configured.
        if (txManager != null) {
            unitOfWork.attachTransaction(txManager);
        }
        unitOfWork.execute(() -> eventBus.publish(eventMessage));
        if (logger.isInfoEnabled()) {
            logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
                        eventMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        logger.error("Exception occurred while publishing scheduled event [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}
/**
 * Quartz entry point: rebuilds the scheduled event from the job data and publishes it
 * on the event bus within a (optionally transactional) Unit of Work.
 *
 * @param context The Quartz execution context carrying the job data and scheduler resources
 * @throws JobExecutionException when publication of the event fails
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("Starting job to publish a scheduled event");
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
        Object event = jobDataBinder.fromJobData(jobData);
        EventMessage<?> eventMessage = createMessage(event);
        // Reuse the SchedulerContext resolved above instead of calling
        // context.getScheduler().getContext() again for every lookup.
        EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
        // The transaction manager is optional; only attach a transaction when one is configured.
        if (txManager != null) {
            unitOfWork.attachTransaction(txManager);
        }
        unitOfWork.execute(() -> eventBus.publish(eventMessage));
        if (logger.isInfoEnabled()) {
            logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
                        eventMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        logger.error("Exception occurred while publishing scheduled event [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}