// Constructor-interior fragment (the enclosing definition starts outside this chunk).
// Rejects a null properties map up front, then requires the bootstrap-servers entry to exist.
// NOTE(review): presumably Kafka consumer configuration — confirm against the enclosing class.
this.consumerProperties = Preconditions.checkNotNull(consumerProperties, "consumerProperties"); Preconditions.checkNotNull(consumerProperties.get(BOOTSTRAP_SERVERS_KEY), StringUtils.format("consumerProperties must contain entry for [%s]", BOOTSTRAP_SERVERS_KEY));
@Override
public String toString()
{
  // Human-readable tag for this server-time window, e.g. "serverTime-PT10M".
  final String description = StringUtils.format("serverTime-%s", windowPeriod);
  return description;
}
};
@Override
public String toString()
{
  // Renders both the verdict and its accompanying explanation.
  final String rendered = StringUtils.format("Allowed:%s, Message:%s", allowed, message);
  return rendered;
}
}
@Override
public String toString()
{
  // Human-readable tag for this message-time window, e.g. "messageTime-PT10M".
  final String description = StringUtils.format("messageTime-%s", windowPeriod);
  return description;
}
}
public static File makeMetricFile(File dir, String metricName, ByteOrder order)
{
  // Metric files are named "met_<metricName>_<byteOrder>.drd" inside the given directory.
  final String fileName = StringUtils.format("met_%s_%s.drd", metricName, order);
  return new File(dir, fileName);
}
}
@Override
public String toString()
{
  // Parenthesized, OR-joined rendering of the child filters.
  final String joined = OR_JOINER.join(filters);
  return StringUtils.format("(%s)", joined);
}
}
@VisibleForTesting
File getPersistFile(final String tier)
{
  // Per-tier snapshots are persisted at "<persistDirectory>/<tier>.<PERSIST_FILE_SUFFIX>".
  final String fileName = StringUtils.format("%s.%s", tier, PERSIST_FILE_SUFFIX);
  return new File(persistDirectory, fileName);
}
}
/**
 * Lists the segments currently being processed for the given tier, each rendered
 * as a "&lt;segmentId&gt; ON &lt;host&gt;" string.
 *
 * @param tier the tier whose in-flight segments should be listed
 * @return a mutable list of descriptions; empty when the tier has nothing in flight
 */
public List<String> getCurrentlyProcessingSegmentsAndHosts(String tier)
{
  Map<String, String> segments = currentlyProcessingSegments.get(tier);
  List<String> retVal = Lists.newArrayList();
  // Guard against an unknown tier: the original code would NPE on a null map here.
  if (segments == null) {
    return retVal;
  }
  for (Map.Entry<String, String> entry : segments.entrySet()) {
    retVal.add(
        StringUtils.format("%s ON %s", entry.getKey(), entry.getValue())
    );
  }
  return retVal;
}
}
public static String getSequenceName(Interval interval, String version, ShardSpec shardSpec)
{
  // Sequence names look like "index_<interval>_<version>_<partitionNum>".
  final int partitionNum = shardSpec.getPartitionNum();
  return StringUtils.format("index_%s_%s_%d", interval, version, partitionNum);
}
}
@Override
public boolean canAppendRow()
{
  // Appends are allowed until the configured row limit is hit; once full,
  // record a human-readable reason for callers to surface.
  if (size() < maxRowCount) {
    return true;
  }
  outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount);
  return false;
}
private void addCachePopulator(
    Cache.NamedKey segmentCacheKey,
    String segmentIdentifier,
    Interval segmentQueryInterval
)
{
  // Keyed by "<segmentId>_<interval>" so each (segment, interval) pair gets its own populator.
  final String populatorKey = StringUtils.format("%s_%s", segmentIdentifier, segmentQueryInterval);
  cachePopulatorMap.put(populatorKey, new CachePopulator(cache, objectMapper, segmentCacheKey));
}
@Override
public String asString()
{
  // Renders this action as "DROP: <segmentId>".
  final String identifier = segment.getIdentifier();
  return StringUtils.format("DROP: %s", identifier);
}
@Override
public String toString()
{
  // "<scheme>:<host:port>"; the explicit toString() call preserves an NPE on a null hostAndPort.
  final String hostPort = hostAndPort.toString();
  return StringUtils.format("%s:%s", scheme, hostPort);
}
public Path makeIntervalInfoPath()
{
  // The interval metadata file sits directly under the intermediate working directory.
  final String intervalsFile = StringUtils.format("%s/intervals.json", makeIntermediatePath());
  return new Path(intervalsFile);
}
@SuppressWarnings(value = "unchecked")
@Override
public void open() throws IOException
{
  // The complex column's backing entry is named "<filenameBase>.complex_column".
  final String columnName = StringUtils.format("%s.complex_column", filenameBase);
  writer = new GenericIndexedWriter(segmentWriteOutMedium, columnName, strategy);
  writer.open();
}
private FullResponseHolder fetchLookupsForTier(String tier) throws ExecutionException, InterruptedException, IOException
{
  // Ask the coordinator leader for the detailed lookup config of a single tier.
  final String path = StringUtils.format("/druid/coordinator/v1/lookups/config/%s?detailed=true", tier);
  return druidLeaderClient.go(druidLeaderClient.makeRequest(HttpMethod.GET, path));
}
@Override
public void open() throws IOException
{
  // Float values are serialized to "<filenameBase>.float_column" with the configured
  // byte order and compression strategy.
  final String columnName = StringUtils.format("%s.float_column", filenameBase);
  writer = CompressionFactory.getFloatSerializer(segmentWriteOutMedium, columnName, byteOrder, compression);
  writer.open();
}
@Override
public void open() throws IOException
{
  // Double values are serialized to "<filenameBase>.double_column" with the configured
  // byte order and compression strategy.
  final String columnName = StringUtils.format("%s.double_column", filenameBase);
  writer = CompressionFactory.getDoubleSerializer(segmentWriteOutMedium, columnName, byteOrder, compression);
  writer.open();
}
private long checkQueryTimeout()
{
  // Time remaining before the query deadline; a non-positive value means we timed out.
  final long timeLeft = timeoutAt - System.currentTimeMillis();
  if (timeLeft > 0) {
    return timeLeft;
  }
  final String msg = StringUtils.format("Query[%s] url[%s] timed out.", query.getId(), url);
  // Record the failure before throwing so callers observing response state see the reason.
  setupResponseReadFailure(msg, null);
  throw new RE(msg);
}