/**
 * Assembles a {@link StringTemplate3StatementLocator} from the values collected on this builder:
 * template group path, optional super-group path, error listener, and the three feature flags
 * (implicit template group, literals-as-templates, locator caching).
 */
public StringTemplate3StatementLocator build() {
    final StringTemplate3StatementLocator locator =
            new StringTemplate3StatementLocator(basePath,
                                                superGroupPath,
                                                errorListener,
                                                allowImplicitTemplateGroupEnabled,
                                                treatLiteralsAsTemplatesEnabled,
                                                shouldCacheEnabled);
    return locator;
} }
/**
 * Creates the configured {@link StringTemplate3StatementLocator}. All six constructor arguments
 * come straight from the builder's fields; no validation is performed here.
 */
public StringTemplate3StatementLocator build() {
    final StringTemplate3StatementLocator result = new StringTemplate3StatementLocator(
            basePath, superGroupPath, errorListener,
            allowImplicitTemplateGroupEnabled, treatLiteralsAsTemplatesEnabled, shouldCacheEnabled);
    return result;
} }
public SqlStatementCustomizer createForType(final Annotation annotation, final Class sqlObjectType) { new StringTemplate3StatementLocator(QueueSqlDao.class, true, true); l = new StringTemplate3StatementLocator(sqlObjectType, true, true); } else { l = new StringTemplate3StatementLocator(a.value(), true, true);
@Override public Void withHandle(final Handle handle) throws Exception { // MySQL needs special setup to make it stream the results. See: // http://javaquirks.blogspot.com/2007/12/mysql-streaming-result-set.html // http://stackoverflow.com/questions/2447324/streaming-large-result-sets-with-mysql final Query<Map<String, Object>> query = handle.createQuery("getStreamingAggregationCandidates") .setFetchSize(Integer.MIN_VALUE) .bind("aggregationLevel", aggregationLevel) .bind("tenantRecordId", createCallContext().getTenantRecordId()); query.setStatementLocator(new StringTemplate3StatementLocator(TimelineAggregatorSqlDao.class)); ResultIterator<TimelineChunk> iterator = null; try { iterator = query .map(timelineChunkMapper) .iterator(); while (iterator.hasNext()) { aggregationConsumer.processTimelineChunk(iterator.next()); } } catch (Exception e) { log.error(String.format("Exception during aggregation of level %d", aggregationLevel), e); } finally { if (iterator != null) { iterator.close(); } } return null; }
@Override public Void withHandle(final Handle handle) throws Exception { handle.setStatementLocator(new StringTemplate3StatementLocator(TimelineSqlDao.class));