Refine search
/**
 * Fetches all pending segment identifiers whose interval overlaps {@code interval}.
 *
 * <p>The SQL predicate compares the lexicographic string bounds stored in the
 * pending-segments table, which can over-match, so each candidate is re-checked
 * with {@link Interval#overlaps} before being returned.
 *
 * @param handle     open JDBI handle to run the query on
 * @param dataSource data source whose pending segments are scanned
 * @param interval   interval to match against
 * @return identifiers of pending segments overlapping the interval
 * @throws IOException if a stored payload cannot be deserialized
 */
private List<SegmentIdWithShardSpec> getPendingSegmentsForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval
) throws IOException
{
  final List<SegmentIdWithShardSpec> identifiers = new ArrayList<>();

  final ResultIterator<byte[]> dbSegments =
      handle.createQuery(
          StringUtils.format(
              // "end" is quoted via the connector's quote string because it is a
              // reserved word on some databases.
              "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start",
              dbTables.getPendingSegmentsTable(),
              connector.getQuoteString()
          )
      )
            .bind("dataSource", dataSource)
            .bind("start", interval.getStart().toString())
            .bind("end", interval.getEnd().toString())
            .map(ByteArrayMapper.FIRST)
            .iterator();

  // Close the iterator even if deserialization throws; otherwise the underlying
  // statement/result set leaks (the original closed it only on the happy path).
  try {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      final SegmentIdWithShardSpec identifier = jsonMapper.readValue(payload, SegmentIdWithShardSpec.class);
      // Precise overlap check; the SQL string comparison is only a coarse filter.
      if (interval.overlaps(identifier.getInterval())) {
        identifiers.add(identifier);
      }
    }
  }
  finally {
    dbSegments.close();
  }

  return identifiers;
}
/**
 * Registering handle cleanup twice must neither break iteration nor cause a
 * double-close: after the iterator is exhausted, the connection is closed once.
 */
@Test
public void testDoubleCleanup() throws Exception
{
    final Handle handle = dbi.open();
    final Query<Integer> query = handle.createQuery("SELECT id FROM something")
                                       .cleanupHandle()
                                       .cleanupHandle()
                                       .mapTo(Integer.class);

    final ResultIterator<Integer> results = query.iterator();
    while (results.hasNext()) {
        results.next();
    }

    assertFalse(results.hasNext());
    // Exhausting the iterator should have triggered the registered cleanup.
    assertTrue(handle.getConnection().isClosed());
}
/**
 * Calling next() directly — without ever calling hasNext() — must still walk
 * the result set one row at a time.
 */
@Test
public void testJustNext() throws Exception
{
    // Seed exactly three rows so next() can be invoked three times.
    final String[] seeds = {
        "insert into something (id, name) values (1, 'eric')",
        "insert into something (id, name) values (2, 'brian')",
        "insert into something (id, name) values (3, 'john')"
    };
    for (String sql : seeds) {
        h.createStatement(sql).execute();
    }

    final ResultIterator<Map<String, Object>> rows =
        h.createQuery("select * from something order by id")
         .cleanupHandle()
         .iterator();

    rows.next();
    rows.next();
    rows.next();
}
final Handle h = dbi.open(); try { binds.add(query -> { String bindPath = baseDBPath + incrementKey(startAfter); query.bind("startAfter", bindPath); }); Query<Map<String, Object>> query = h.createQuery(sql.toString()).bind("like", like); for (Consumer<Query<Map<String, Object>>> bind : binds) { bind.accept(query); if (iterator.hasNext()) { result = output -> { try (JsonRecordConsumer toJson = new JsonRecordConsumer(baseDBPath, output, o)) { while ( !toJson.isClosed() && iterator.hasNext() ) { toJson.accept(iterator.next()); iterator.close(); h.close(); iterator.close(); h.close();
Optional<LocalDate> endDate, Optional<String> filter) { long numRows; try (Handle handle = dbi.open()) { numRows = handle.createQuery("select sum(shards.row_count) as row_count from tables join shards on (shards.table_id = tables.table_id) where table_name = :collection and schema_name = :schema") .bind("collection", checkProject(collection)) .bind("schema", checkProject(project)) .map(LongMapper.FIRST).iterator().next();
/**
 * Verifies the iterator contract over a two-row result set: hasNext() is
 * idempotent (repeated calls do not advance the cursor), next() yields rows in
 * order, and the iterator reports exhaustion after the last row.
 */
@Test
public void testIteratorBehavior() throws Exception
{
    h.insert("insert into something (id, name) values (1, 'eric')");
    h.insert("insert into something (id, name) values (2, 'brian')");

    final ResultIterator<Something> rows = h.createQuery("select * from something order by id")
                                            .map(Something.class)
                                            .iterator();

    // hasNext() twice in a row must not consume an element.
    assertTrue(rows.hasNext());
    assertTrue(rows.hasNext());

    final Something eric = rows.next();
    assertEquals("eric", eric.getName());

    assertTrue(rows.hasNext());
    final Something brian = rows.next();
    assertEquals(2, brian.getId());

    assertFalse(rows.hasNext());
    rows.close();
}
@Override public Void withHandle(final Handle handle) throws Exception { // MySQL needs special setup to make it stream the results. See: // http://javaquirks.blogspot.com/2007/12/mysql-streaming-result-set.html // http://stackoverflow.com/questions/2447324/streaming-large-result-sets-with-mysql final Query<Map<String, Object>> query = handle.createQuery("getStreamingAggregationCandidates") .setFetchSize(Integer.MIN_VALUE) .bind("aggregationLevel", aggregationLevel) .bind("tenantRecordId", createCallContext().getTenantRecordId()); query.setStatementLocator(new StringTemplate3StatementLocator(TimelineAggregatorSqlDao.class)); ResultIterator<TimelineChunk> iterator = null; try { iterator = query .map(timelineChunkMapper) .iterator(); while (iterator.hasNext()) { aggregationConsumer.processTimelineChunk(iterator.next()); } } catch (Exception e) { log.error(String.format("Exception during aggregation of level %d", aggregationLevel), e); } finally { if (iterator != null) { iterator.close(); } } return null; }
@Override public Void withHandle(final Handle handle) throws Exception { handle.setStatementLocator(new StringTemplate3StatementLocator(TimelineSqlDao.class)); .createQuery("getSamplesBySourceRecordIdsAndMetricRecordIds") .bind("startTime", DateTimeUtils.unixSeconds(startTime)) .bind("endTime", DateTimeUtils.unixSeconds(endTime)) .bind("tenantRecordId", context.getTenantRecordId()) .define("sourceIds", JOINER.join(sourceIdList)); .iterator(); while (iterator.hasNext()) { chunkConsumer.processTimelineChunk(iterator.next()); if (iterator != null) { try { iterator.close(); } catch (Exception e) { log.error("Exception closing TimelineChunkAndTimes iterator for sourceIds {} and metricIds {}", sourceIdList, metricIdList);
@Override public void write(OutputStream os) throws IOException { Writer writer = new BufferedWriter(new OutputStreamWriter(os)); Handle h = db.open(); ResultIterator<Map<String, Object>> rs = h.createQuery( "SELECT user_id, bucket_label, timestamp, 'ACTION' AS event_type, action AS name, payload" + " FROM event_action" + "and timestamp >= '" + fromTsFinal + "'" + " and timestamp <= '" + toTsFinal + "'" + " ORDER BY user_id, timestamp") .iterator(); String header = "userId" + "\t" + "bucketLabel" + "\t" + "payload" + "\n"; writer.write(header); while (rs.hasNext()) { Map<String, Object> row = rs.next(); writer.write(row.get("user_id") + "\t" + row.get("bucket_label") + "\t" + ); rs.close(); h.close(); writer.flush();
/**
 * With a fetch size of 1 the driver retrieves rows one at a time; the iterator
 * must still traverse the full (two-row) default data set correctly.
 */
@Test
public void testFetchSize() throws Exception
{
    h.createScript("default-data").execute();
    final Query<Something> q = h.createQuery("select id, name from something order by id").map(Something.class);
    q.setFetchSize(1);

    final ResultIterator<Something> r = q.iterator();
    // Close the iterator even if an assertion fails, so the underlying
    // statement/result set is released (the original leaked it).
    try {
        assertTrue(r.hasNext());
        r.next();
        assertTrue(r.hasNext());
        r.next();
        assertFalse(r.hasNext());
    }
    finally {
        r.close();
    }
}
jdbi.registerArgumentFactory(new JodaArgumentFactory()); JDBILogFileDAO dao = jdbi.onDemand(DefaultJDBILogFileDAO.class); try (ResultIterator<LogFile> iter = (states.isEmpty()) ? JDBILogFileDAOHelper.listLogFilesByDate(dao, startDate, endDate) : JDBILogFileDAOHelper.listLogFilesByDateAndState(dao, states, startDate, endDate)) { if (iter.hasNext()) { System.out.println(String.format("%-14s | %-20s | %10s | %15s | %-25s", "COHORT", "STATE", "SERIAL", "SIZE", "OWNER")); System.out.println(StringUtils.repeat("_", 14) + "_|_" + StringUtils.repeat("_", 25)); while (iter.hasNext()) { LogFile lf = iter.next(); System.out.println(String.format("%-14s | %20s | %10d | %15d | %-25s", lf.getRollingCohort(),
/**
 * Command entry point: connects to the read-only log-file database and prints
 * every distinct owner URI, one per line, closing the streaming iterator via
 * try-with-resources.
 */
@Override
protected void run(Environment env, Namespace namespace, T configuration) throws Exception
{
    CliConveniences.quietLogging("ifar");
    final DBIFactory factory = new DBIFactory();
    final DBI jdbi = factory.build(
        env,
        getSkidRoadReadOnlyConfiguration(configuration).getDatabaseConfiguration(),
        "logfile");
    final JDBILogFileDAO dao = jdbi.onDemand(DefaultJDBILogFileDAO.class);

    try (ResultIterator<String> uris = dao.listOwnerUris()) {
        while (uris.hasNext()) {
            System.out.println(uris.next());
        }
    }
}
}
@Test public void testEmptyExplosion() throws Exception { ResultIterator<Map<String, Object>> it = h.createQuery("select * from something order by id") .cleanupHandle() .iterator(); try { it.next(); fail("Expected IllegalStateException did not show up!"); } catch (IllegalStateException iae) { // TestCase does not deal with the annotations... } }
/**
 * An iterator over an empty result set must report exhaustion immediately.
 */
@Test
public void testEmptyWorksToo() throws Exception
{
    final ResultIterator<Map<String, Object>> rows =
        h.createQuery("select * from something order by id")
         .cleanupHandle()
         .iterator();

    // Nothing was inserted, so the very first hasNext() is false.
    assertFalse(rows.hasNext());
}
/**
 * Finished exactly when the backing iterator has no more elements.
 */
@Override
public boolean isFinished()
{
    final boolean hasMore = iterator.hasNext();
    return !hasMore;
}
/**
 * Returns the next element, failing fast once close() has run.
 *
 * @throws IllegalStateException if this iterator has already been closed
 */
@Override
public synchronized T next()
{
    // Same contract as Preconditions.checkState(!closed, "already closed").
    if (closed) {
        throw new IllegalStateException("already closed");
    }
    return iterator.next();
}
/**
 * Idempotent close: only the first invocation closes the underlying iterator.
 */
@Override
public synchronized void close()
{
    if (closed) {
        return;
    }
    closed = true;
    iterator.close();
}
}
// Iterate through the results. final ResultIterator iterator = baseApi.getResultIterator(); String lastUTF8Text; float lastConfidence; int count = 0; iterator.begin(); do { lastUTF8Text = iterator.getUTF8Text(PageIteratorLevel.RIL_WORD); lastConfidence = iterator.confidence(PageIteratorLevel.RIL_WORD); count++; } while (iterator.next(PageIteratorLevel.RIL_WORD));