Refine search
/**
 * Decides whether a workunit needs to be created.
 *
 * <p>Returns {@code true} only when {@code updateTime} is after both the low watermark and the
 * configured {@code maxLookBackTime}. The {@code createTime} parameter is not used; it exists
 * solely for backward compatibility with existing callers.
 */
protected boolean shouldCreateWorkunit(long createTime, long updateTime, LongWatermark lowWatermark) {
  DateTime updated = new DateTime(updateTime);
  // Records older than the look-back horizon are never picked up.
  boolean withinLookBack = !updated.isBefore(this.maxLookBackTime);
  return withinLookBack && updated.isAfter(lowWatermark.getValue());
}
/**
 * Returns {@code true} iff {@code folderTime} lies inside the window bounded by
 * compaction.timebased.min.time.ago and compaction.timebased.max.time.ago, logging
 * the reason whenever the folder is rejected.
 */
protected boolean folderWithinAllowedPeriod(Path inputFolder, DateTime folderTime) {
  DateTime now = new DateTime(this.timeZone);
  PeriodFormatter formatter = getPeriodFormatter();
  DateTime earliestAllowed = getEarliestAllowedFolderTime(now, formatter);
  DateTime latestAllowed = getLatestAllowedFolderTime(now, formatter);

  boolean tooOld = folderTime.isBefore(earliestAllowed);
  boolean tooNew = folderTime.isAfter(latestAllowed);
  if (tooOld) {
    log.info(String.format("Folder time for %s is %s, earlier than the earliest allowed folder time, %s. Skipping",
        inputFolder, folderTime, earliestAllowed));
  } else if (tooNew) {
    log.info(String.format("Folder time for %s is %s, later than the latest allowed folder time, %s. Skipping",
        inputFolder, folderTime, latestAllowed));
  }
  return !tooOld && !tooNew;
}
/**
 * Determines whether the given {@link Partition} should be validated.
 *
 * <p>A partition is skipped when its data location contains any token from
 * {@code ignoreDataPathIdentifierList} (case-insensitive), or when its create time does not lie
 * strictly between {@code maxLookBackTime} and {@code skipRecentThanTime}.
 *
 * @param partition the Hive partition under consideration
 * @return {@code true} iff the partition should be validated
 */
private boolean shouldValidate(Partition partition) {
  // Skip partitions whose data location matches any configured ignore token.
  for (String pathToken : this.ignoreDataPathIdentifierList) {
    if (partition.getDataLocation().toString().toLowerCase().contains(pathToken.toLowerCase())) {
      log.info("Skipping partition " + partition.getCompleteName() + " containing invalid token " + pathToken
          .toLowerCase());
      return false;
    }
  }
  try {
    long createTime = getPartitionCreateTime(partition.getName());
    // Build the DateTime once instead of three times.
    DateTime createDateTime = new DateTime(createTime);
    boolean withinTimeWindow =
        createDateTime.isAfter(this.maxLookBackTime) && createDateTime.isBefore(this.skipRecentThanTime);
    if (withinTimeWindow) {
      log.info("Validating partition " + partition.getCompleteName());
    } else {
      log.info("Skipping partition " + partition.getCompleteName() + " as create time " + createDateTime
          .toString() + " is not within validation time window ");
    }
    return withinTimeWindow;
  } catch (ParseException e) {
    // Throw (not just call) propagate so readers and the compiler see this path never falls
    // through; the previous code relied on an unreachable trailing "return false".
    throw Throwables.propagate(e);
  }
}
/*
 * NOTE(review): fragment — the enclosing method (and the declarations of remainingStart,
 * filteredIntervals, skipIntervals) starts before this view; the loop body is also truncated.
 * The visible logic subtracts each skipInterval from the remaining [remainingStart, remainingEnd)
 * span. Presumably skipIntervals is sorted and non-overlapping — TODO confirm from the caller.
 */
DateTime remainingEnd = totalInterval.getEnd();
for (Interval skipInterval : skipIntervals) {
  if (skipInterval.getStart().isBefore(remainingStart) && skipInterval.getEnd().isAfter(remainingStart)) {
    // Skip interval clips the left edge: advance the start past it.
    remainingStart = skipInterval.getEnd();
  } else if (skipInterval.getStart().isBefore(remainingEnd) && skipInterval.getEnd().isAfter(remainingEnd)) {
    // Skip interval clips the right edge: pull the end back before it.
    remainingEnd = skipInterval.getStart();
  } else if (!remainingStart.isAfter(skipInterval.getStart()) && !remainingEnd.isBefore(skipInterval.getEnd())) {
    // Skip interval sits strictly inside the remaining span: emit the prefix, keep the suffix.
    filteredIntervals.add(new Interval(remainingStart, skipInterval.getStart()));
    remainingStart = skipInterval.getEnd();
/**
 * Computes the smallest interval that covers every segment in {@code segments}:
 * from the earliest segment start to the latest segment end.
 *
 * @param segments a non-empty list of segments
 * @return the merged covering interval
 * @throws IllegalArgumentException if {@code segments} is empty
 */
private static Interval computeMergedInterval(final List<DataSegment> segments) {
  Preconditions.checkArgument(segments.size() > 0, "segments.size() > 0");
  DateTime earliest = null;
  DateTime latest = null;
  for (final DataSegment segment : segments) {
    final Interval segmentInterval = segment.getInterval();
    if (earliest == null || segmentInterval.getStart().isBefore(earliest)) {
      earliest = segmentInterval.getStart();
    }
    if (latest == null || segmentInterval.getEnd().isAfter(latest)) {
      latest = segmentInterval.getEnd();
    }
  }
  return new Interval(earliest, latest);
}
/**
 * Returns whether the row's timestamp lies within the optional minimum/maximum message-time
 * bounds configured in {@code ioConfig}.
 *
 * @throws ParseException if the row's timestamp cannot be represented as a long
 *         (i.e. it falls outside {@code Intervals.ETERNITY})
 */
public boolean withinMinMaxRecordTime(final InputRow row) {
  final boolean tooEarly = ioConfig.getMinimumMessageTime().isPresent()
      && ioConfig.getMinimumMessageTime().get().isAfter(row.getTimestamp());
  final boolean tooLate = ioConfig.getMaximumMessageTime().isPresent()
      && ioConfig.getMaximumMessageTime().get().isBefore(row.getTimestamp());

  // Reject timestamps that a long cannot represent before doing anything else with the row.
  if (!Intervals.ETERNITY.contains(row.getTimestamp())) {
    throw new ParseException(
        StringUtils.format("Encountered row with timestamp that cannot be represented as a long: [%s]", row));
  }

  if (log.isDebugEnabled()) {
    if (tooEarly) {
      log.debug("CurrentTimeStamp[%s] is before MinimumMessageTime[%s]",
          row.getTimestamp(), ioConfig.getMinimumMessageTime().get());
    } else if (tooLate) {
      log.debug("CurrentTimeStamp[%s] is after MaximumMessageTime[%s]",
          row.getTimestamp(), ioConfig.getMaximumMessageTime().get());
    }
  }
  return !tooEarly && !tooLate;
}
/*
 * NOTE(review): fragment — the enclosing conditional/loop and the declarations of
 * timelineObjects and mergedUnderlyingInterval are outside this view.
 */
timelineObjects.add(Pair.of(timelineObject, underlyingInterval));
} else {
  // Grow the merged interval to the union with underlyingInterval:
  // take the earlier of the two starts ...
  final DateTime start = underlyingInterval.getStart().isBefore(mergedUnderlyingInterval.getStart())
      ? underlyingInterval.getStart()
      : mergedUnderlyingInterval.getStart();
  // ... and the later of the two ends.
  final DateTime end = underlyingInterval.getEnd().isAfter(mergedUnderlyingInterval.getEnd())
      ? underlyingInterval.getEnd()
      : mergedUnderlyingInterval.getEnd();
/**
 * Orders events primarily by effective date; ties are broken by event type (PHASE first),
 * then by API event sub-type for API_USER events, and finally by UUID for a stable ordering.
 *
 * @throws IllegalArgumentException if {@code other} is {@code null}
 */
@Override
public int compareTo(final SubscriptionBaseEvent other) {
  if (other == null) {
    throw new IllegalArgumentException("IEvent is compared to a null instance");
  }
  final DateTime otherDate = other.getEffectiveDate();
  // Primary ordering: effective date.
  if (effectiveDate.isBefore(otherDate)) {
    return -1;
  }
  if (effectiveDate.isAfter(otherDate)) {
    return 1;
  }
  // Same instant: PHASE events sort before all other event types.
  if (getType() != other.getType()) {
    return getType() == EventType.PHASE ? -1 : 1;
  }
  // Same type: API events order by their API event sub-type.
  if (getType() == EventType.API_USER) {
    return ((ApiEvent) this).getApiEventType().compareTo(((ApiEvent) other).getApiEventType());
  }
  // Final tie-break: stable ordering by UUID.
  return uuid.compareTo(other.getId());
}
/*
 * NOTE(review): fragment — this span appears to fuse two disjoint snippets; the condition
 * after "oldestCatalog ||" is truncated and the surrounding method is not visible. The
 * single-line collapse also ran the "//" comments into the following code; the line breaks
 * below restore the evidently intended layout without changing any token.
 */
final boolean catalogOlderThanSubscriptionStartDate = !subscriptionStartDate.isBefore(catalogEffectiveDate);
if (oldestCatalog || // Prevent issue with time granularity -- see #760
// NOTE(review): rest of this condition is missing from this view — do not infer it.
if (plan.getEffectiveDateForExistingSubscriptions() != null) { // If it is null, any change to this catalog does not apply to existing subscriptions
  final DateTime existingSubscriptionDate =
      CatalogDateHelper.toUTCDateTime(plan.getEffectiveDateForExistingSubscriptions());
  if (requestedDate.isAfter(existingSubscriptionDate)) { // This plan is now applicable to existing subs
    return new CatalogPlanEntry(c, plan);
/*
 * NOTE(review): fragment of timeline interval-overlap handling — the enclosing method and at
 * least one branch boundary are outside this view, so the brace structure below is incomplete
 * (note the dangling "} else {" followed by "} else if"). Comments only; no token changed.
 */
if (currKey.contains(entryInterval)) {
  // Existing key fully covers the new interval: nothing left to insert.
  return true;
} else if (currKey.getStart().isBefore(entryInterval.getStart())) {
  // Overlap on the left: keep only the part of entryInterval after currKey.
  entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
} else {
  // entryInterval starts first: emit the uncovered prefix before currKey.
  addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);
  if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
    // A suffix beyond currKey remains to be processed.
    entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
  } else {
// NOTE(review): the branches below likely belong to a different conditional over oldEntry.
} else if (currKey.getStart().isBefore(entryInterval.getStart())) {
  addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
} else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
  addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
/**
 * Returns whether {@code inputDir} should be processed: its timestamp must fall within the
 * window [maxDaysAgo, minDaysAgo], where {@code maxDaysAgo} is the earliest allowed instant
 * and {@code minDaysAgo} the latest. Rejections are logged with the reason.
 */
private boolean shouldProcessDir(Path inputDir, DateTime inputDate, DateTime minDaysAgo, DateTime maxDaysAgo) {
  final boolean tooNew = inputDate.isAfter(minDaysAgo);
  if (tooNew) {
    LOG.info(String.format("folder %s with timestamp %s is later than latest allowed timestamp %s. Skipping",
        inputDir, inputDate, minDaysAgo));
    return false;
  }
  final boolean tooOld = inputDate.isBefore(maxDaysAgo);
  if (tooOld) {
    LOG.info(String.format("folder %s with timestamp %s is earlier than earliest allowed timestamp %s. Skipping",
        inputDir, inputDate, maxDaysAgo));
    return false;
  }
  return true;
}
/**
 * Runs a timeseries query with no aggregators and verifies that result timestamps are
 * monotonic in the query's direction and that each result value is empty.
 */
@Test
public void testTimeseriesNoAggregators() {
  Granularity gran = Granularities.DAY;
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(gran)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .descending(descending)
      .build();

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  final DateTime expectedLast = descending ? QueryRunnerTestHelper.earliest : QueryRunnerTestHelper.last;
  // Was a raw Result; parameterize to avoid unchecked use and keep type safety.
  Result<TimeseriesResultValue> lastResult = null;
  for (Result<TimeseriesResultValue> result : results) {
    DateTime current = result.getTimestamp();
    // No result may appear past the expected final timestamp in iteration order.
    Assert.assertFalse(
        StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
        descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast)
    );
    // With no aggregators, each bucket's value map must be empty.
    Assert.assertEquals(ImmutableMap.of(), result.getValue().getBaseObject());
    lastResult = result;
  }
  Assert.assertEquals(lastResult.toString(), expectedLast, lastResult.getTimestamp());
}