/**
 * Creates a limit which accepts only the very first item and skips everything afterwards.
 * <p>
 * Equivalent to {@code new Limit(0, 1)} (no items skipped, at most one accepted).
 *
 * @return a new limit which will only accept the first item
 */
public static Limit singleItem() {
    return new Limit(0, 1);
}
/**
 * Iterates over the given result set and forwards each row to the given handler.
 * <p>
 * Iteration stops as soon as the handler returns <tt>false</tt>, the given limit is
 * exhausted or the surrounding task is no longer active.
 *
 * @param handler        invoked for each accepted row; returning <tt>false</tt> aborts iteration
 * @param effectiveLimit the (stateful) limit deciding which rows are accepted
 * @param resultSet      the result set to consume
 * @param taskContext    used to detect cancellation by the user
 * @throws SQLException in case of a database error while reading the result set
 */
protected void processResultSet(Function<Row, Boolean> handler,
                                Limit effectiveLimit,
                                ResultSet resultSet,
                                TaskContext taskContext) throws SQLException {
    while (resultSet.next() && taskContext.isActive()) {
        Row currentRow = loadIntoRow(resultSet);
        // nextRow() decides whether this row lies within the skip/limit window...
        boolean abortRequested = effectiveLimit.nextRow() && !handler.apply(currentRow);
        // ...and shouldContinue() signals once the limit has been fully consumed.
        if (abortRequested || !effectiveLimit.shouldContinue()) {
            return;
        }
    }
}
/**
 * Propagates the total number of items of the given limit to the statement as max rows.
 * <p>
 * A non-positive total (i.e. an unlimited query) leaves the statement untouched.
 *
 * @param stmt           the statement to tune
 * @param effectiveLimit the limit to derive the max row count from
 * @throws SQLException in case of a database error
 */
protected void applyMaxRows(PreparedStatement stmt, Limit effectiveLimit) throws SQLException {
    int totalItems = effectiveLimit.getTotalItems();
    if (totalItems > 0) {
        stmt.setMaxRows(totalItems);
    }
}
TaskContext ctx = TaskContext.get(); RateLimit rateLimit = RateLimit.timeInterval(1, TimeUnit.SECONDS); Limit effectiveLimit = new Limit(skip, limit); scrollResponse = executeScroll(entity -> { if (!effectiveLimit.nextRow()) { return true; return effectiveLimit.shouldContinue(); }, scrollResponse); } finally {
/**
 * Wraps this limit as a {@link Predicate}.
 * <p>
 * Note that the limit is stateful and therefore the returned predicate is as well -
 * asPredicate should only be called once per limit.
 *
 * @param <C> type used by the stream using this predicate. Ignored as we do not operate on the
 *            items itself
 * @return a predicate implementing the limit
 */
public <C> Predicate<C> asPredicate() {
    // The tested object itself is irrelevant - only the call count matters.
    return ignored -> nextRow();
}
/**
 * Parses the previously supplied input and calls the given consumer for each row.
 * <p>
 * Note that this method will close the given input.
 *
 * @param consumer the consumer to call for each row
 * @throws IOException if an IO error occurs while reading from the given input
 */
public void execute(Consumer<Values> consumer) throws IOException {
    try {
        this.consumer = consumer;
        TaskContext taskContext = TaskContext.get();
        // Perform the initial read before consuming any rows.
        read();
        // Keep reading rows until the input is exhausted, the task was cancelled
        // or the limit no longer permits further rows.
        while (taskContext.isActive() && !isEOF() && limit.shouldContinue()) {
            readRow();
            consumeNewLine();
        }
    } finally {
        input.close();
    }
}
/**
 * Lists all stored objects which match the given query, applying the given limit.
 * <p>
 * Leave the query empty to get all files.
 *
 * @param query the query to filter for
 * @param limit the limit to apply
 * @return all files which contain the query
 */
public List<StoredObject> getObjects(@Nonnull String query, Limit limit) {
    try (Stream<Path> children = Files.list(file.toPath())) {
        return children.map(Path::toFile)
                       .filter(candidate -> isMatchingObject(query, candidate))
                       // The limit predicate is stateful and enforces skip/limit...
                       .filter(limit.asPredicate())
                       .map(StoredObject::new)
                       .collect(Collectors.toList());
    } catch (IOException e) {
        throw Exceptions.handle(e);
    }
}
/**
 * Executes the given query returning the first matching row.
 * <p>
 * If the resulting row contains a {@link Blob} an {@link OutputStream} has to be passed in as
 * parameter with the same name as the column. The contents of the blob will then be written
 * into the given output stream (without closing it).
 *
 * @return the first matching row for the given query or <tt>null</tt> if no matching row was found
 * @throws SQLException in case of a database error
 */
@Nullable
public Row queryFirst() throws SQLException {
    // Limit.singleItem() makes iterateAll stop after the first accepted row.
    ValueHolder<Row> result = ValueHolder.of(null);
    iterateAll(result, Limit.singleItem());
    return result.get();
}
/**
 * Reads a single row (all fields up to the next newline) and forwards it to the consumer.
 * <p>
 * Rows outside the window defined by the limit are read but not forwarded.
 *
 * @throws IOException if an IO error occurs while reading from the input
 */
private void readRow() throws IOException {
    List<String> fields = Lists.newArrayList();
    while (!isEOF() && !isAtNewline()) {
        fields.add(readField());
        // Skip over the separator character between two fields.
        if (buffer == separator) {
            read();
        }
    }
    if (limit.nextRow()) {
        consumer.accept(Values.of(fields));
    }
}
/**
 * Iterates over the given result set and forwards each row to the given handler.
 * <p>
 * Stops once the handler returns <tt>false</tt>, the limit is exhausted or the task
 * was cancelled by the user.
 *
 * @param handler        invoked per accepted row; returning <tt>false</tt> aborts iteration
 * @param effectiveLimit the (stateful) limit deciding which rows are accepted
 * @param resultSet      the result set to consume
 * @param taskContext    used to detect user cancellation
 * @throws SQLException in case of a database error while reading the result set
 */
@Override
protected void processResultSet(Function<Row, Boolean> handler,
                                Limit effectiveLimit,
                                ResultSet resultSet,
                                TaskContext taskContext) throws SQLException {
    while (resultSet.next() && taskContext.isActive()) {
        Row row = loadIntoRow(resultSet);
        // Only rows within the skip/limit window reach the handler.
        if (effectiveLimit.nextRow() && !handler.apply(row)) {
            return;
        }
        if (!effectiveLimit.shouldContinue()) {
            return;
        }
    }
}
/**
 * Creates the effective limit for this query.
 *
 * @return a new {@link Limit} combining the <tt>skip</tt> and <tt>limit</tt> settings of this query
 */
protected Limit getLimit() {
    return new Limit(skip, limit);
}
/**
 * Tunes the given statement by applying max rows and an appropriate fetch size.
 * <p>
 * Max rows is only applied if the database did not already enforce the limit natively.
 * For unlimited or large (&gt; 1000 items) queries an appropriate fetch size is selected:
 * row-by-row streaming if the database supports it, a fixed window of 1000 rows otherwise.
 *
 * @param stmt        the statement to tune
 * @param limit       the limit to derive max rows and fetch size from
 * @param nativeLimit <tt>true</tt> if the limit was already applied within the SQL statement
 * @throws SQLException in case of a database error
 */
protected void tuneStatement(PreparedStatement stmt, Limit limit, boolean nativeLimit) throws SQLException {
    int totalItems = limit.getTotalItems();
    if (!nativeLimit && totalItems > 0) {
        stmt.setMaxRows(totalItems);
    }
    if (totalItems > 1000 || totalItems <= 0) {
        stmt.setFetchSize(db.hasCapability(Capability.STREAMING) ? Integer.MIN_VALUE : 1000);
    }
}
/**
 * Processes a single entity while honoring de-duplication, the given limit and cancellation.
 *
 * @param consumer           invoked for each accepted entity; returning <tt>false</tt> aborts processing
 * @param lim                the (stateful) limit deciding which entities are accepted
 * @param ctx                used to detect cancellation by the user
 * @param rateLimit          throttles how often the cancellation check is performed
 * @param entityDeDuplicator ids of entities which were already processed and must be skipped
 * @param entity             the entity to process
 * @return <tt>true</tt> to continue processing further entities, <tt>false</tt> to abort
 */
private boolean processEntity(Function<? super E, Boolean> consumer,
                              Limit lim,
                              TaskContext ctx,
                              RateLimit rateLimit,
                              Set<String> entityDeDuplicator,
                              E entity) {
    if (!entityDeDuplicator.contains(entity.getId())) {
        if (lim.nextRow()) {
            if (!consumer.apply(entity)) {
                return false;
            }
            if (!lim.shouldContinue()) {
                return false;
            }
        }
        if (rateLimit.check()) {
            // Check if the user tried to cancel this task
            if (!ctx.isActive()) {
                return false;
            }
        }
    }
    return true;
}
return; Limit lim = new Limit(0, limit); TaskContext ctx = TaskContext.get(); RateLimit rateLimit = RateLimit.timeInterval(1, TimeUnit.SECONDS);
/**
 * Selects an appropriate fetch size for the given statement.
 * <p>
 * For unlimited or large (&gt; 1000 items) queries, row-by-row streaming is enabled if the
 * datasource supports it; otherwise a fixed window of 1000 rows is fetched at a time.
 *
 * @param stmt           the statement to tune
 * @param effectiveLimit the limit to derive the fetch size from
 * @throws SQLException in case of a database error
 */
protected void applyFetchSize(PreparedStatement stmt, Limit effectiveLimit) throws SQLException {
    int totalItems = effectiveLimit.getTotalItems();
    if (totalItems <= 0 || totalItems > 1000) {
        // Integer.MIN_VALUE signals row-by-row streaming to the driver.
        stmt.setFetchSize(ds.hasCapability(Capability.STREAMING) ? Integer.MIN_VALUE : 1000);
    }
}
/**
 * Iterates over the given result set, materializes an entity per row and invokes the handler.
 * <p>
 * If <tt>nativeLimit</tt> is <tt>true</tt>, skip/limit were already enforced by the database,
 * so the given {@link Limit} is bypassed entirely (neither nextRow nor shouldContinue is
 * consulted). Otherwise the limit decides per row whether the entity is materialized and
 * when iteration must stop.
 *
 * @param handler     invoked per entity; returning <tt>false</tt> aborts iteration
 * @param compiler    used to resolve join fetches for each materialized entity
 * @param limit       the (stateful) limit to apply if no native limit is in place
 * @param nativeLimit <tt>true</tt> if the database already applied skip/limit natively
 * @param rs          the result set to consume
 * @throws Exception in case of a database error or a failure while materializing an entity
 */
@SuppressWarnings("unchecked")
protected void execIterate(Function<E, Boolean> handler, Compiler compiler, Limit limit, boolean nativeLimit, ResultSet rs) throws Exception {
    TaskContext tc = TaskContext.get();
    Set<String> columns = dbs.readColumns(rs);
    while (rs.next() && tc.isActive()) {
        // Note: short-circuit keeps the limit untouched when the limit is native.
        if (nativeLimit || limit.nextRow()) {
            SQLEntity e = makeEntity(descriptor, null, columns, rs);
            compiler.executeJoinFetches(e, columns, rs);
            if (!handler.apply((E) e)) {
                return;
            }
        }
        if (!nativeLimit && !limit.shouldContinue()) {
            return;
        }
    }
}
/**
 * Consumes an Elasticsearch scroll query by repeatedly fetching further batches of hits.
 * <p>
 * Iteration stops when {@link #processHit} signals an abort (handler returned <tt>false</tt>,
 * limit exhausted or task cancelled) or when a scroll response contains no more hits.
 *
 * @param initialSearchResponse the response of the initial scroll request
 * @param handler               invoked for each entity built from a hit
 * @param entityDescriptor      describes how to map hit sources into entities
 */
private void executeScroll(SearchResponse initialSearchResponse, ResultHandler<? super E> handler, EntityDescriptor entityDescriptor) {
    SearchResponse searchResponse = initialSearchResponse;
    TaskContext ctx = TaskContext.get();
    RateLimit rateLimit = RateLimit.timeInterval(1, TimeUnit.SECONDS);
    long lastScroll = 0;
    Limit lim = new Limit(start, limit);
    while (true) {
        // Warn about / monitor long-running scrolls (see performScrollMonitoring).
        lastScroll = performScrollMonitoring(lastScroll);
        for (SearchHit hit : searchResponse.getHits()) {
            if (!processHit(handler, entityDescriptor, ctx, rateLimit, lim, hit)) {
                return;
            }
        }
        // An empty batch marks the end of the scroll.
        if (searchResponse.getHits().getHits().length == 0) {
            return;
        }
        searchResponse = scrollFurther(entityDescriptor, searchResponse.getScrollId());
    }
}
/**
 * Transforms a single search hit into an entity and forwards it to the given handler.
 * <p>
 * Honors the given limit (skip/limit window) and periodically (throttled by the given
 * rate limit) checks whether the user cancelled the surrounding task. Failures while
 * materializing a single hit are logged and skipped so that one broken document does
 * not abort the whole scroll.
 *
 * @param handler          invoked for each accepted entity; returning <tt>false</tt> aborts the scroll
 * @param entityDescriptor describes how to map the hit source into the entity
 * @param ctx              used to detect cancellation by the user
 * @param rateLimit        throttles how often the cancellation check is performed
 * @param lim              the (stateful) limit deciding which hits are accepted
 * @param hit              the search hit to process
 * @return <tt>true</tt> to continue scrolling, <tt>false</tt> to abort
 */
private boolean processHit(ResultHandler<? super E> handler,
                           EntityDescriptor entityDescriptor,
                           TaskContext ctx,
                           RateLimit rateLimit,
                           Limit lim,
                           SearchHit hit) {
    try {
        // Class.newInstance() is deprecated (it silently rethrows checked exceptions) -
        // use the canonical Constructor-based instantiation instead. Any reflective
        // failure is still caught by the catch block below.
        E entity = clazz.getDeclaredConstructor().newInstance();
        entity.setId(hit.getId());
        entity.initSourceTracing();
        entity.setVersion(hit.getVersion());
        entity.setMatchedNamedQueries(hit.getMatchedQueries());
        entityDescriptor.readSource(entity, hit.getSourceAsMap());
        if (lim.nextRow()) {
            if (!handler.handleRow(entity)) {
                return false;
            }
            if (!lim.shouldContinue()) {
                return false;
            }
        }
        if (rateLimit.check() && !ctx.isActive()) {
            return false;
        }
    } catch (Exception e) {
        Exceptions.handle().to(IndexAccess.LOG).error(e).handle();
    }
    return true;
}