/**
 * Creates a builder with no key, audit info or payload set; the audit time
 * is stamped with the current UTC instant at construction.
 */
private Builder()
{
  // Capture the creation time first; the remaining fields start unset.
  this.auditTime = DateTimes.nowUtc();
  this.key = null;
  this.auditInfo = null;
  this.payload = null;
}
/**
 * Convenience constructor that stamps the alert with the current UTC time.
 * Delegates to the full constructor with all other arguments passed through.
 */
public AlertEvent( String service, String host, Severity severity, String description, Map<String, Object> dataMap ) { this(DateTimes.nowUtc(), service, host, severity, description, dataMap); }
@Override
public DateTime getCurrMaxTime()
{
  // For this implementation, the "current max time" is simply wall-clock now in UTC.
  final DateTime currentUtcTime = DateTimes.nowUtc();
  return currentUtcTime;
}
/**
 * Convenience constructor using the current UTC time and {@link Severity#DEFAULT}.
 * Delegates to the full constructor with all other arguments passed through.
 */
public AlertEvent( String service, String host, String description, Map<String, Object> dataMap ) { this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, dataMap); }
/**
 * Minimal convenience constructor: current UTC time, {@link Severity#DEFAULT},
 * and an empty data map.
 */
public AlertEvent( String service, String host, String description ) { this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, ImmutableMap.of()); }
/**
 * Stores the work item under the given key, first stamping it with the current
 * UTC time as its queue-insertion time. Returns whatever the superclass
 * {@code put} returns (the previously mapped value, if any).
 */
@Override
public RemoteTaskRunnerWorkItem put(String s, RemoteTaskRunnerWorkItem taskRunnerWorkItem)
{
  final RemoteTaskRunnerWorkItem stampedItem = taskRunnerWorkItem.withQueueInsertionTime(DateTimes.nowUtc());
  return super.put(s, stampedItem);
}
}
/**
 * Enqueues an incoming channel message paired with its UTC arrival time.
 *
 * Fix: the original caught {@link InterruptedException} and rethrew without
 * restoring the thread's interrupt status, hiding the interruption from
 * callers further up the stack. We now re-interrupt before rethrowing.
 *
 * @throws RuntimeException (wrapping the InterruptedException) if interrupted
 *                          while waiting for queue capacity
 */
@Override
public void onChannelMessage(ChannelPrivMsg aMsg)
{
  try {
    queue.put(Pair.of(DateTimes.nowUtc(), aMsg));
  }
  catch (InterruptedException e) {
    // Preserve the interrupt flag so the interruption remains observable.
    Thread.currentThread().interrupt();
    throw new RuntimeException("interrupted adding message to queue", e);
  }
}
}
/**
 * Builds the alert event, stamping it with the current UTC time and the
 * severity, description and data accumulated in this builder.
 */
@Override
public AlertEvent build(ImmutableMap<String, String> serviceDimensions)
{
  final DateTime createdAt = DateTimes.nowUtc();
  return new AlertEvent(createdAt, serviceDimensions, severity, description, dataMap);
}
/**
 * Post-processes the given runner using the current wall-clock time (UTC
 * epoch millis) as the reference timestamp.
 */
@Override
public QueryRunner<T> postProcess(QueryRunner<T> baseQueryRunner)
{
  final long nowMillis = DateTimes.nowUtc().getMillis();
  return postProcess(baseQueryRunner, nowMillis);
}
/**
 * Builds a test-only worker snapshot: fixed ip "-2", zero used capacity,
 * empty running-task and datasource sets, last-seen time of "now" (UTC).
 */
private static ImmutableWorkerInfo createDummyWorker(String scheme, String host, int capacity, String version)
{
  final Worker dummyWorker = new Worker(scheme, host, "-2", capacity, version);
  return new ImmutableWorkerInfo(
      dummyWorker,
      0,
      new HashSet<>(),
      new HashSet<>(),
      DateTimes.nowUtc()
  );
}
}
/**
 * Records a PROVISION scaling event for the given auto-scaling data,
 * timestamped with the current UTC time. The shared lock guards the
 * mutation of {@code recentEvents}.
 */
public void addProvisionEvent(AutoScalingData data)
{
  final ScalingEvent provisionEvent = new ScalingEvent(data, DateTimes.nowUtc(), EVENT.PROVISION);
  synchronized (lock) {
    recentEvents.add(provisionEvent);
  }
}
/**
 * Returns the supplied id when one is given; otherwise derives an id by
 * joining the type name, datasource, interval endpoints and the current
 * UTC timestamp.
 */
public static String makeId(String id, final String typeName, String dataSource, Interval interval)
{
  if (id != null) {
    return id;
  }
  return joinId(
      typeName,
      dataSource,
      interval.getStart(),
      interval.getEnd(),
      DateTimes.nowUtc().toString()
  );
}
/**
 * Returns the given interval, or — when null — the default audit-history
 * window: the configured number of millis ending at the current UTC time.
 */
private Interval getIntervalOrDefault(Interval interval)
{
  if (interval != null) {
    return interval;
  }
  final DateTime endOfWindow = DateTimes.nowUtc();
  return new Interval(endOfWindow.minus(config.getAuditHistoryMillis()), endOfWindow);
}
private String makeServedSegmentPath() { // server.getName() is already in the zk path return makeServedSegmentPath( UUIDUtils.generateUuid( server.getHost(), server.getType().toString(), server.getTier(), DateTimes.nowUtc().toString() ) ); }
/**
 * Verifies that COMMON_DATE_TIME_PATTERN matches timestamps rendered in UTC,
 * a named zone ("IST"), and an arbitrary fixed offset (+01:30).
 */
@Test
public void testCommonDateTimePattern()
{
  final DateTime utcNow = DateTimes.nowUtc();
  final DateTime namedZoneNow = new DateTime(System.currentTimeMillis(), DateTimes.inferTzFromString("IST"));
  final DateTime fixedOffsetNow = new DateTime(System.currentTimeMillis(), DateTimeZone.forOffsetHoursMinutes(1, 30));
  for (DateTime dateTime : new DateTime[] {utcNow, namedZoneNow, fixedOffsetNow}) {
    Assert.assertTrue(DateTimes.COMMON_DATE_TIME_PATTERN.matcher(dateTime.toString()).matches());
  }
}
}
/**
 * Verifies that Jackson serializes a DateTime as its quoted string form.
 */
@Test
public void testDateTime() throws Exception
{
  final DateTime time = DateTimes.nowUtc();
  final String expectedJson = StringUtils.format("\"%s\"", time);
  Assert.assertEquals(expectedJson, mapper.writeValueAsString(time));
}
}
/**
 * Derives a task id from the fire department's datasource and shard partition
 * number, stamped with the current UTC time and a random suffix; delegates to
 * the full makeTaskId overload.
 */
private static String makeTaskId(FireDepartment fireDepartment)
{
  final String dataSource = fireDepartment.getDataSchema().getDataSource();
  final int partitionNum = fireDepartment.getTuningConfig().getShardSpec().getPartitionNum();
  return makeTaskId(dataSource, partitionNum, DateTimes.nowUtc(), makeRandomId());
}
/**
 * Formats a realtime-index task id from the spec's datasource and shard
 * partition number, the current UTC time, and a random suffix.
 */
private static String makeTaskId(RealtimeAppenderatorIngestionSpec spec)
{
  final String dataSource = spec.getDataSchema().getDataSource();
  final int partitionNum = spec.getTuningConfig().getShardSpec().getPartitionNum();
  return StringUtils.format(
      "index_realtime_%s_%d_%s_%s",
      dataSource,
      partitionNum,
      DateTimes.nowUtc(),
      RealtimeIndexTask.makeRandomId()
  );
}
/**
 * Test fixture setup: registers and starts the emitter, creates a dedicated
 * single-threaded executor for the indexing task, and captures a baseline
 * "now" timestamp shared by the tests.
 */
@Before public void setUp() { EmittingLogger.registerEmitter(emitter); emitter.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); now = DateTimes.nowUtc(); }