/**
 * Creates an alert stamped with the current UTC time and the default severity.
 *
 * @param service     originating service name
 * @param host        originating host
 * @param description human-readable alert description
 * @param dataMap     extra payload attached to the alert
 */
public AlertEvent(
    String service,
    String host,
    String description,
    Map<String, Object> dataMap
)
{
  this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, dataMap);
}
/**
 * Returns the current UTC instant as the maximum time.
 */
@Override
public DateTime getCurrMaxTime()
{
  return DateTimes.nowUtc();
}
/**
 * Creates an alert stamped with the current UTC time and an explicit severity.
 *
 * @param service     originating service name
 * @param host        originating host
 * @param severity    alert severity level
 * @param description human-readable alert description
 * @param dataMap     extra payload attached to the alert
 */
public AlertEvent(
    String service,
    String host,
    Severity severity,
    String description,
    Map<String, Object> dataMap
)
{
  this(DateTimes.nowUtc(), service, host, severity, description, dataMap);
}
/**
 * Creates a work item, passing the current UTC time to the delegated constructor
 * (presumably its creation/queue time — confirm against the three-arg constructor).
 */
public TaskRunnerWorkItem(String taskId, ListenableFuture<TaskStatus> result)
{
  this(taskId, result, DateTimes.nowUtc());
}
/**
 * Returns the current UTC time.
 * NOTE(review): protected + trivial — likely an overridable clock seam for tests; confirm.
 */
protected DateTime getCurrentTime()
{
  return DateTimes.nowUtc();
}
/**
 * Creates an alert with the default severity, an empty payload, and the current UTC time.
 *
 * @param service     originating service name
 * @param host        originating host
 * @param description human-readable alert description
 */
public AlertEvent(
    String service,
    String host,
    String description
)
{
  // No payload supplied: delegate with an empty immutable map.
  this(DateTimes.nowUtc(), service, host, Severity.DEFAULT, description, ImmutableMap.<String, Object>of());
}
/**
 * Creates a versioning policy pinned to the supplied version string.
 *
 * @param version the version to use; when absent (null), the UTC timestamp at
 *                construction time is used instead
 */
@JsonCreator
public CustomVersioningPolicy(
    @JsonProperty("version") String version
)
{
  // Fall back to a construction-time UTC timestamp when no version is supplied.
  this.version = (version != null) ? version : DateTimes.nowUtc().toString();
}
/**
 * Enqueues the incoming message paired with its receipt time (current UTC).
 *
 * @param aMsg the channel message to enqueue
 * @throws RuntimeException if interrupted while waiting for queue space
 */
@Override
public void onChannelMessage(ChannelPrivMsg aMsg)
{
  try {
    queue.put(Pair.of(DateTimes.nowUtc(), aMsg));
  }
  catch (InterruptedException e) {
    // Restore the interrupt flag so code further up the stack can observe the
    // interruption; the original swallowed it before rethrowing.
    Thread.currentThread().interrupt();
    throw new RuntimeException("interrupted adding message to queue", e);
  }
}
}
/**
 * Post-processes the base runner using the current wall-clock time (UTC millis).
 */
@Override
public QueryRunner<T> postProcess(QueryRunner<T> baseQueryRunner)
{
  final long nowMillis = DateTimes.nowUtc().getMillis();
  return postProcess(baseQueryRunner, nowMillis);
}
/**
 * Stores the work item after stamping it with its queue-insertion time (current UTC).
 */
@Override
public RemoteTaskRunnerWorkItem put(String s, RemoteTaskRunnerWorkItem taskRunnerWorkItem)
{
  final RemoteTaskRunnerWorkItem stamped = taskRunnerWorkItem.withQueueInsertionTime(DateTimes.nowUtc());
  return super.put(s, stamped);
}
}
/**
 * Builds the alert, stamping it with the current UTC time.
 *
 * @param serviceDimensions service-identifying dimensions merged into the event
 */
@Override
public AlertEvent build(ImmutableMap<String, String> serviceDimensions)
{
  return new AlertEvent(
      DateTimes.nowUtc(),
      serviceDimensions,
      severity,
      description,
      dataMap
  );
}
/**
 * Schedules the next flush one intermediate-persist period from now (UTC millis).
 */
private void resetNextFlush()
{
  nextFlush = DateTimes.nowUtc()
      .plus(config.getIntermediatePersistPeriod())
      .getMillis();
}
/**
 * Records a provision event, stamped with the current UTC time.
 * The event is constructed inside the lock so timestamps follow list order.
 */
public void addProvisionEvent(AutoScalingData data)
{
  synchronized (lock) {
    final ScalingEvent event = new ScalingEvent(data, DateTimes.nowUtc(), EVENT.PROVISION);
    recentEvents.add(event);
  }
}
/**
 * Builds an id by joining TYPE, the datasource, the interval bounds, and the
 * current UTC time (the time suffix keeps repeated calls distinct).
 *
 * @throws NullPointerException if dataSource or interval is null
 */
protected static String makeId(String dataSource, Interval interval)
{
  Preconditions.checkNotNull(dataSource, "dataSource");
  Preconditions.checkNotNull(interval, "interval");
  return joinId(
      TYPE,
      dataSource,
      interval.getStart(),
      interval.getEnd(),
      DateTimes.nowUtc()
  );
}
/**
 * Builds a placeholder worker with zero running tasks and empty task sets,
 * stamped with the current UTC time.
 * NOTE(review): "-2" fills the third Worker constructor argument — confirm its
 * meaning (likely a sentinel host/ip value) against Worker's constructor.
 */
private static ImmutableWorkerInfo createDummyWorker(String scheme, String host, int capacity, String version)
{
  final Worker worker = new Worker(scheme, host, "-2", capacity, version);
  return new ImmutableWorkerInfo(
      worker,
      0,
      Sets.<String>newHashSet(),
      Sets.<String>newHashSet(),
      DateTimes.nowUtc()
  );
}
}
/**
 * Records a terminate event, stamped with the current UTC time.
 * The event is constructed inside the lock so timestamps follow list order.
 */
public void addTerminateEvent(AutoScalingData data)
{
  synchronized (lock) {
    final ScalingEvent event = new ScalingEvent(data, DateTimes.nowUtc(), EVENT.TERMINATE);
    recentEvents.add(event);
  }
}
/**
 * Returns the explicit id when present; otherwise derives one by joining the
 * type name, datasource, interval bounds, and the current UTC time string.
 */
public static String makeId(String id, final String typeName, String dataSource, Interval interval)
{
  if (id != null) {
    return id;
  }
  return joinId(
      typeName,
      dataSource,
      interval.getStart(),
      interval.getEnd(),
      DateTimes.nowUtc().toString()
  );
}
/**
 * Returns the given interval, or — when null — the trailing audit-history
 * window ending at the current UTC time.
 */
private Interval getIntervalOrDefault(Interval interval)
{
  if (interval != null) {
    return interval;
  }
  final DateTime now = DateTimes.nowUtc();
  return new Interval(now.minus(config.getAuditHistoryMillis()), now);
}
/**
 * Derives a task id from the fire department's datasource, shard partition
 * number, the current UTC time, and a random suffix.
 */
private static String makeTaskId(FireDepartment fireDepartment)
{
  return makeTaskId(
      fireDepartment.getDataSchema().getDataSource(),
      fireDepartment.getTuningConfig().getShardSpec().getPartitionNum(),
      DateTimes.nowUtc(),
      random.nextInt()
  );
}
private String makeServedSegmentPath() { // server.getName() is already in the zk path return makeServedSegmentPath( UUIDUtils.generateUuid( server.getHost(), server.getType().toString(), server.getTier(), DateTimes.nowUtc().toString() ) ); }