/**
 * Static factory that converts a map keyed by {@link SegmentIdWithShardSpec} into a
 * {@code Committed} keyed by the identifier's string form.
 *
 * @param hydrants0 hydrant counts keyed by segment identifier
 * @param metadata  opaque commit metadata, stored as-is
 * @return a new Committed holding an immutable string-keyed copy of {@code hydrants0}
 */
public static Committed create(
    Map<SegmentIdWithShardSpec, Integer> hydrants0,
    Object metadata
)
{
  final ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
  // Re-key by the identifier's string representation; builder.build() will throw
  // if two identifiers stringify identically, same as the original loop.
  hydrants0.forEach((identifier, count) -> builder.put(identifier.toString(), count));
  return new Committed(builder.build(), metadata);
}
/**
 * Returns the on-disk persist directory for the given segment: the tuning config's
 * base persist directory with the identifier's string form appended as a child path.
 */
private File computePersistDir(SegmentIdWithShardSpec identifier)
{
  final File baseDir = tuningConfig.getBasePersistDirectory();
  return new File(baseDir, identifier.toString());
}
.addData("identifier", entry.getKey().toString()) .emit();
currentHydrants.put(identifier.toString(), hydrants.size()); numPersistedRows += sink.getNumRowsInMemory(); bytesPersisted += sink.getBytesInMemory();
); final int committedHydrants = committed.getCommittedHydrants(identifier.toString());
/**
 * Registers {@code identifier} as the appending segment for its interval, creating the
 * per-interval state holder on first use, and records it as the last segment added.
 */
void add(SegmentIdWithShardSpec identifier)
{
  final Interval interval = identifier.getInterval();
  // Per-interval state is keyed by the interval's start millis; create lazily.
  final SegmentsOfInterval segmentsOfInterval = intervalToSegmentStates.computeIfAbsent(
      interval.getStartMillis(),
      ignored -> new SegmentsOfInterval(interval)
  );
  segmentsOfInterval.setAppendingSegment(SegmentWithState.newSegment(identifier));
  lastSegmentId = identifier.toString();
}
); Assert.assertEquals("ds_2017-01-01T00:00:00.000Z_2017-02-01T00:00:00.000Z_version", identifier.toString()); identifier.toString(), interval, identifier.getVersion(), ); Assert.assertEquals("ds_2017-01-01T00:00:00.000Z_2017-02-01T00:00:00.000Z_version_1", identifier1.toString()); identifier1.toString(), interval, identifier1.getVersion(), ); Assert.assertEquals("ds_2017-01-01T00:00:00.000Z_2017-02-01T00:00:00.000Z_version_2", identifier2.toString()); identifier1.toString(), interval, identifier1.getVersion(), Assert.assertEquals("ds_2017-01-01T00:00:00.000Z_2017-02-01T00:00:00.000Z_version_2", identifier3.toString()); Assert.assertEquals(identifier2, identifier3); Assert.assertEquals("ds_2017-01-01T00:00:00.000Z_2017-02-01T00:00:00.000Z_version_3", identifier4.toString());
/**
 * Inserts a pending-segment row into the metadata store via the given JDBI handle.
 * All values are bound as named parameters (no string-concatenated SQL).
 *
 * @param newIdentifier           segment identifier; its string form is the row id and its
 *                                JSON serialization is stored as the payload
 * @param dataSource              datasource name
 * @param interval                segment interval; start/end stored as strings
 * @param previousSegmentId       previous segment id in the sequence
 * @param sequenceName            allocation sequence name
 * @param sequenceNamePrevIdSha1  SHA-1 of (sequence name, previous id)
 * @throws JsonProcessingException if the identifier cannot be serialized to JSON
 */
private void insertToMetastore(
    Handle handle,
    SegmentIdWithShardSpec newIdentifier,
    String dataSource,
    Interval interval,
    String previousSegmentId,
    String sequenceName,
    String sequenceNamePrevIdSha1
) throws JsonProcessingException
{
  // %2$s wraps the reserved column name "end" in the connector-specific quote string.
  final String sql = StringUtils.format(
      "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, sequence_name, sequence_prev_id, sequence_name_prev_id_sha1, payload) "
      + "VALUES (:id, :dataSource, :created_date, :start, :end, :sequence_name, :sequence_prev_id, :sequence_name_prev_id_sha1, :payload)",
      dbTables.getPendingSegmentsTable(),
      connector.getQuoteString()
  );
  handle.createStatement(sql)
        .bind("id", newIdentifier.toString())
        .bind("dataSource", dataSource)
        .bind("created_date", DateTimes.nowUtc().toString())
        .bind("start", interval.getStart().toString())
        .bind("end", interval.getEnd().toString())
        .bind("sequence_name", sequenceName)
        .bind("sequence_prev_id", previousSegmentId)
        .bind("sequence_name_prev_id_sha1", sequenceNamePrevIdSha1)
        .bind("payload", jsonMapper.writeValueAsBytes(newIdentifier))
        .execute();
}
.addData("segment", identifier.toString()) .addData("count", indexToPersist.getCount()) .emit();
Granularities.HOUR, "s1", id1.toString() ); final SegmentIdWithShardSpec id3 = allocate( Granularities.HOUR, "s1", id2.toString() ); final SegmentIdWithShardSpec id4 = allocate( Granularities.HOUR, "s1", id1.toString() ); final SegmentIdWithShardSpec id5 = allocate( Granularities.HOUR, "s1", id1.toString() ); final SegmentIdWithShardSpec id6 = allocate( Granularities.MINUTE, "s1", id1.toString() ); final SegmentIdWithShardSpec id7 = allocate(
Granularities.HOUR, "s1", id1.toString() ); final SegmentIdWithShardSpec id4 = allocate( Granularities.HOUR, "s1", id3.toString() ); final SegmentIdWithShardSpec id5 = allocate( Granularities.HOUR, "s2", id2.toString() ); final SegmentIdWithShardSpec id6 = allocate(task, PARTY_TIME, Granularities.NONE, Granularities.HOUR, "s1", null);
Granularities.HOUR, "s1", id1.toString() );
Granularities.HOUR, "s1", id1.toString() );