/**
 * Creates a file-backed data store rooted at {@code dataDirectory}, loading any
 * previously persisted key/value map from disk.
 *
 * @param dataDirectory directory holding the serialized store file
 * @throws IOException if the backing file is a symbolic link or cannot be read
 */
CustomDataStore( CustomDataStoreFactory dataStore, File dataDirectory, String id ) throws IOException {
  super( dataStore, id );
  this.dataDirectory = dataDirectory;
  this.dataFile = new File( this.dataDirectory, getId() );
  // Refuse symlinks so we never read/write through a redirected path.
  if ( IOUtils.isSymbolicLink( this.dataFile ) ) {
    throw new IOException( "unable to use a symbolic link: " + this.dataFile );
  }
  this.keyValueMap = Maps.newHashMap();
  if ( this.dataFile.exists() ) {
    // Fix: the original opened a FileInputStream inline and never closed it,
    // leaking the file handle on both success and failure paths.
    // NOTE(review): IOUtils.deserialize looks like native Java deserialization;
    // confirm the file can never contain untrusted data.
    try ( FileInputStream in = new FileInputStream( this.dataFile ) ) {
      this.keyValueMap = (HashMap) IOUtils.deserialize( in );
    }
  }
}
/**
 * Picks a server by grouping the known servers by priority (ascending, via the
 * TreeMap) and delegating the final choice to the configured strategy.
 */
public QueryableDruidServer pick()
{
  synchronized (this) {
    TreeMap<Integer, Set<QueryableDruidServer>> prioritizedServers = Maps.newTreeMap();
    for (QueryableDruidServer server : servers) {
      // computeIfAbsent replaces the original get/null-check/put dance.
      prioritizedServers
          .computeIfAbsent(server.getServer().getPriority(), priority -> Sets.<QueryableDruidServer>newHashSet())
          .add(server);
    }
    return strategy.pick(prioritizedServers, segment);
  }
}
}
// Report that the requested RPC-style method could not be resolved.
// (Fragment: the enclosing method and the use of these locals are outside this view.)
error("call", "method not found: " + fullMethodName);
// Parameters collected for the call; populated later.
// NOTE(review): declared as concrete HashMap — Map would be the conventional type.
HashMap<String, Object> parameters = Maps.newHashMap();
File requestBodyFile = null;              // optional request body file, absent by default
String contentType = "application/json";  // default content type unless overridden later
/**
 * Assigns a distinct integer label to every tuple tag. The main tag is always
 * mapped to {@code MAIN_TAG_INDEX}; secondary tags receive consecutive labels
 * starting at {@code MAIN_TAG_INDEX + 1}, skipping any tag already labeled.
 *
 * @param mainTag the primary output tag
 * @param secondaryTags additional output tags (may overlap with {@code mainTag})
 * @return map from each tag to its unique label
 */
private Map<TupleTag<?>, Integer> transformTupleTagsToLabels(TupleTag<?> mainTag, Set<TupleTag<?>> secondaryTags) {
  Map<TupleTag<?>, Integer> labels = Maps.newHashMap();
  labels.put(mainTag, MAIN_TAG_INDEX);
  int nextLabel = MAIN_TAG_INDEX + 1;
  for (TupleTag<?> tag : secondaryTags) {
    if (labels.containsKey(tag)) {
      // mainTag (or a repeated tag) keeps its first label.
      continue;
    }
    labels.put(tag, nextLabel);
    nextLabel++;
  }
  return labels;
}
/**
 * Parses raw JSON into a map of ID -> Value.
 * Allows null and empty values (must be handled later).
 *
 * @param jsonAttributeValues JSON documents, each expected to carry "id" and "value" fields
 * @return map from each parsed id to its (possibly null) textual value
 * @throws IOException if any input string is not valid JSON
 */
private Map<Integer, String> jsonToMap( List<String> jsonAttributeValues ) throws IOException {
  Map<Integer, String> parsed = Maps.newHashMap();
  ObjectMapper mapper = new ObjectMapper();
  for ( String jsonString : jsonAttributeValues ) {
    JsonNode node = mapper.readValue( jsonString, JsonNode.class );
    JsonNode nId = node.get( "id" );
    JsonNode nValue = node.get( "value" );
    // Entries without a usable id are skipped entirely.
    if ( nId == null || nId.isNull() ) {
      continue;
    }
    // Fix: get("value") returns null when the key is absent, so the original
    // unconditional nValue.asText() threw a NullPointerException — contradicting
    // the documented "allows null values" contract. Store null instead.
    parsed.put( nId.asInt(), nValue == null ? null : nValue.asText() );
  }
  return parsed;
}
}
/**
 * POSTs the given payload to the webhook URL, JSON-encoded and wrapped in a
 * single form field named "payload".
 *
 * @param webHookUrl destination webhook endpoint
 * @param payload message content to JSON-encode and send
 * @throws IOException if the HTTP request fails
 */
public void execute(String webHookUrl, Map<String, Object> payload) throws IOException {
  final String jsonEncodedMessage = new Gson().toJson(payload);
  final Map<Object, Object> form = Maps.newHashMap();
  form.put("payload", jsonEncodedMessage);
  final GenericUrl url = new GenericUrl(webHookUrl);
  requestFactory.buildPostRequest(url, new UrlEncodedContent(form)).execute();
}
}
/**
 * Sends the payload to the webhook endpoint as a URL-encoded POST whose single
 * "payload" field carries the JSON-serialized message.
 *
 * @param webHookUrl destination webhook endpoint
 * @param payload message content to JSON-encode and send
 * @throws IOException if the HTTP request fails
 */
public void execute(String webHookUrl, Map<String, Object> payload) throws IOException {
  final Map<Object, Object> body = Maps.newHashMap();
  body.put("payload", new Gson().toJson(payload));
  requestFactory
      .buildPostRequest(new GenericUrl(webHookUrl), new UrlEncodedContent(body))
      .execute();
}
}
/**
 * Returns the set of server tiers, or — when {@code simple} is supplied — a map
 * of tier name to aggregated {@code currSize}/{@code maxSize} totals.
 *
 * @param simple when non-null, return per-tier size metadata instead of the tier names
 */
@GET
@Produces("application/json")
public Response getTiers(
    @QueryParam("simple") String simple
)
{
  Response.ResponseBuilder builder = Response.status(Response.Status.OK);
  if (simple != null) {
    Map<String, Map<String, Long>> metadata = Maps.newHashMap();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      // computeIfAbsent + merge replace the original null-check/put and
      // (x == null ? 0 : x) accumulation boilerplate.
      Map<String, Long> tierMetadata =
          metadata.computeIfAbsent(druidServer.getTier(), tier -> Maps.<String, Long>newHashMap());
      tierMetadata.merge("currSize", druidServer.getCurrSize(), Long::sum);
      tierMetadata.merge("maxSize", druidServer.getMaxSize(), Long::sum);
    }
    return builder.entity(metadata).build();
  }
  // No "simple" flag: just the distinct tier names.
  Set<String> tiers = Sets.newHashSet();
  for (DruidServer server : serverInventoryView.getInventory()) {
    tiers.add(server.getTier());
  }
  return builder.entity(tiers).build();
}
/**
 * Returns an unmodifiable snapshot of all stored values, rebuilding the
 * memcache (when one is configured) from the datastore as a side effect.
 */
@Override
public Collection<V> values() throws IOException {
  lock.lock();
  try {
    // Unfortunately no getKeys() method on MemcacheService, so the only option is to clear all
    // and re-populate the memcache from scratch. This is clearly inefficient.
    if (memcache != null) {
      memcache.clearAll();
    }
    List<V> result = Lists.newArrayList();
    // Only collect key->value pairs when there is a memcache to repopulate.
    Map<String, V> map = memcache != null ? Maps.<String, V>newHashMap() : null;
    for (Entity entity : query(false)) {
      V value = deserialize(entity);
      result.add(value);
      if (map != null) {
        map.put(entity.getKey().getName(), value);
      }
    }
    if (memcache != null) {
      // Single batch write restores the cache cleared above.
      memcache.putAll(map, memcacheExpiration);
    }
    return Collections.unmodifiableList(result);
  } finally {
    lock.unlock();
  }
}
// Look up the properties for this (dataSource, interval) pair in the two-key
// table, creating and registering an empty map on first encounter.
// (Fragment: retVal's declaration and the if-block's close are outside this view.)
Map<String, Object> properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval());
if (properties == null) {
  properties = Maps.newHashMap();
  retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties);
// Maps each output tuple tag to an integer index.
// (Fragment: how the map is populated is outside this view.)
Map<TupleTag<?>, Integer> outputMap = Maps.newHashMap();
// Snapshot the status future of every task currently known to the runner,
// keyed by task id. (Fragment: the loop's closing brace is outside this view.)
final Map<String, ListenableFuture<TaskStatus>> runnerTaskFutures = Maps.newHashMap();
for (final TaskRunnerWorkItem workItem : taskRunner.getKnownTasks()) {
  runnerTaskFutures.put(workItem.getTaskId(), workItem.getResult());
// Index the target repository's atomic attributes by name for constant-time lookup.
// NOTE(review): newHashMap() is statically imported here — presumably Guava's
// Maps.newHashMap; confirm against the file's import block.
Map<String, Attribute> targetRepositoryAttributeMap = newHashMap();
targetRepositoryEntityType.getAtomicAttributes()
    .forEach(attribute -> targetRepositoryAttributeMap.put(attribute.getName(), attribute));
// Mutable accumulator of load-status counts.
// (Fragment: key semantics and population are outside this view — cannot tell
// from here what the String keys represent.)
final Map<String, Integer> loadStatus = Maps.newHashMap();
// Collect the SAM record's tag values, keyed by tag.
// (Fragment: the map population and the loop's close are outside this view.)
Map<String, List<Object>> attributes = Maps.newHashMap();
for( SAMRecord.SAMTagAndValue tagAndValue: record.getAttributes()) {
  // String form of the tag's value; usage continues past this fragment.
  String s = tagAndValue.value.toString();