/**
 * Creates an HTTP-based load queue peon for the server at {@code baseUrl}.
 * Pre-builds the JSON writer for change-request bodies and the change-request
 * URL (including the configured host timeout as a query parameter).
 *
 * @throws RuntimeException wrapping {@link MalformedURLException} if
 *         {@code baseUrl} cannot be combined into a valid endpoint URL
 */
public HttpLoadQueuePeon(
    String baseUrl,
    ObjectMapper jsonMapper,
    HttpClient httpClient,
    DruidCoordinatorConfig config,
    ScheduledExecutorService processingExecutor,
    ExecutorService callBackExecutor
)
{
  this.jsonMapper = jsonMapper;
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+); same semantics.
  this.requestBodyWriter = jsonMapper.writerFor(REQUEST_ENTITY_TYPE_REF);
  this.httpClient = httpClient;
  this.config = config;
  this.processingExecutor = processingExecutor;
  this.callBackExecutor = callBackExecutor;

  this.serverId = baseUrl;
  try {
    this.changeRequestURL = new URL(
        new URL(baseUrl),
        StringUtils.nonStrictFormat(
            "druid-internal/v1/segments/changeRequests?timeout=%d",
            config.getHttpLoadQueuePeonHostTimeout().getMillis()
        )
    );
  }
  catch (MalformedURLException ex) {
    // Throwables.propagate is deprecated in Guava; wrapping the checked
    // exception in a RuntimeException directly is the documented replacement.
    throw new RuntimeException(ex);
  }
}
/**
 * Writes the load/drop request statuses back to the async HTTP client as a
 * 200 response. Failures (e.g. the async context already timed out or the
 * client disconnected) are deliberately best-effort and only logged at debug.
 */
@Override
public void onSuccess(List<SegmentLoadDropHandler.DataSegmentChangeRequestAndStatus> result)
{
  try {
    HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
    response.setStatus(HttpServletResponse.SC_OK);
    // writerFor replaces the deprecated writerWithType (Jackson 2.5+); reuse the
    // already-fetched response rather than calling asyncContext.getResponse() twice.
    context.inputMapper.writerFor(HttpLoadQueuePeon.RESPONSE_ENTITY_TYPE_REF)
                       .writeValue(response.getOutputStream(), result);
    asyncContext.complete();
  }
  catch (Exception ex) {
    // Nothing to recover: the response channel is gone.
    log.debug(ex, "Request timed out or closed already.");
  }
}
/**
 * Persists the current in-memory sequence metadata list to its on-disk file.
 * Synchronized so concurrent callers cannot interleave writes to the file.
 *
 * @throws IOException if writing the persist file fails
 */
private synchronized void persistSequences() throws IOException
{
  log.info("Persisting Sequences Metadata [%s]", sequences);
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+).
  toolbox.getObjectMapper()
         .writerFor(new TypeReference<List<SequenceMetadata>>() {})
         .writeValue(getSequencesPersistFile(toolbox), sequences);
}
/**
 * Streams the segment change-request snapshot back to the async HTTP client
 * as a 200 response. Failures (client timeout/disconnect) are best-effort
 * and only logged at debug.
 */
@Override
public void onSuccess(ChangeRequestsSnapshot<DataSegmentChangeRequest> result)
{
  try {
    HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
    response.setStatus(HttpServletResponse.SC_OK);
    // writerFor replaces the deprecated writerWithType (Jackson 2.5+); reuse the
    // already-fetched response instead of calling asyncContext.getResponse() again.
    context.inputMapper.writerFor(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF)
                       .writeValue(response.getOutputStream(), result);
    asyncContext.complete();
  }
  catch (Exception ex) {
    // Nothing to recover: the response channel is gone.
    log.debug(ex, "Request timed out or closed already.");
  }
}
/**
 * Streams the worker sync snapshot back to the async HTTP client as a 200
 * response. Failures (client timeout/disconnect) are best-effort and only
 * logged at debug.
 *
 * NOTE(review): the parameter uses the raw {@code ChangeRequestsSnapshot}
 * type; parameterizing it would change the override signature, so it is
 * left as-is here.
 */
@Override
public void onSuccess(ChangeRequestsSnapshot result)
{
  try {
    HttpServletResponse response = (HttpServletResponse) asyncContext.getResponse();
    response.setStatus(HttpServletResponse.SC_OK);
    // writerFor replaces the deprecated writerWithType (Jackson 2.5+); reuse the
    // already-fetched response instead of calling asyncContext.getResponse() again.
    context.inputMapper.writerFor(WorkerHolder.WORKER_SYNC_RESP_TYPE_REF)
                       .writeValue(response.getOutputStream(), result);
    asyncContext.complete();
  }
  catch (Exception ex) {
    // Nothing to recover: the response channel is gone.
    log.debug(ex, "Request timed out or closed already.");
  }
}
/**
 * Runs the parent mapper logic, then — when interval determination is
 * enabled — writes the collected intervals to the configured interval-info
 * path as a JSON list.
 *
 * @throws IOException          if the output path cannot be written
 * @throws InterruptedException if the underlying Hadoop task is interrupted
 */
@Override
public void run(Context context) throws IOException, InterruptedException
{
  super.run(context);

  if (determineIntervals) {
    final Path outPath = config.makeIntervalInfoPath();
    // try-with-resources replaces the manual Closeables.close(out, false):
    // close failures still propagate as IOException, matching old behavior.
    try (OutputStream out = Utils.makePathAndOutputStream(context, outPath, config.isOverwriteFiles())) {
      // writerFor replaces the deprecated writerWithType (Jackson 2.5+).
      HadoopDruidIndexerConfig.JSON_MAPPER
          .writerFor(new TypeReference<List<Interval>>() {})
          .writeValue(out, intervals);
    }
  }
}
}
// NOTE(review): fragment of a larger method — the signature above and the closing
// of this TypeReference/serialization call lie outside this view; left byte-identical.
// writerWithType is deprecated in Jackson 2.5+; presumably should migrate to
// writerFor alongside the production call sites — TODO confirm Jackson version.
) throws JsonProcessingException final String checkpoints = sortingMapper.writerWithType(new TypeReference<TreeMap<Integer, Map<Integer, Long>>>()
// NOTE(review): fragment — the receiver of this chained call is outside this view;
// left byte-identical. writerWithType is deprecated in Jackson 2.5+ (→ writerFor).
.writerWithType(Long.class) .writeValue(out, aggregate.estimateCardinalityRound());
// NOTE(review): fragment — this ByteArrayInputStream expression is not closed within
// this view; left byte-identical. writerWithType is deprecated (→ writerFor).
new ByteArrayInputStream( ServerTestHelper.MAPPER .writerWithType(HttpLoadQueuePeon.RESPONSE_ENTITY_TYPE_REF) .writeValueAsBytes(statuses)
// NOTE(review): fragment — receiver and closing of this call are outside this view;
// left byte-identical. writerWithType is deprecated in Jackson 2.5+ (→ writerFor).
.writerWithType( new TypeReference<List<ShardSpec>>()
// NOTE(review): repeated, unterminated test-fixture fragments (each ChangeRequestsSnapshot
// constructor call is cut off mid-argument-list); left byte-identical — a rewrite is unsafe.
// jsonMapper.writerWithType(typeRef) is deprecated in Jackson 2.5+ (→ writerFor(typeRef)).
Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(typeRef).writeValueAsBytes( new ChangeRequestsSnapshot( false, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(typeRef).writeValueAsBytes( new ChangeRequestsSnapshot( false, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(typeRef).writeValueAsBytes( new ChangeRequestsSnapshot( true, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(typeRef).writeValueAsBytes( new ChangeRequestsSnapshot( false, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(typeRef).writeValueAsBytes( new ChangeRequestsSnapshot( false,
/**
 * Serializes {@code value} to a JSON string using a writer bound to the given
 * static type, so serialization is driven by {@code type} rather than the
 * runtime class of {@code value}.
 *
 * @throws IOException if serialization fails
 */
private <T> String toJson(T value, Class<T> type) throws IOException
{
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+).
  return mapper.writerFor(type).writeValueAsString(value);
}
// NOTE(review): repeated, unterminated test-fixture fragments (each ChangeRequestsSnapshot
// constructor call is cut off mid-argument-list); left byte-identical — a rewrite is unsafe.
// writerWithType(SEGMENT_LIST_RESP_TYPE_REF) is deprecated in Jackson 2.5+ (→ writerFor).
Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes( new ChangeRequestsSnapshot( false, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes( new ChangeRequestsSnapshot( false, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes( new ChangeRequestsSnapshot( true, Futures.immediateFuture( new ByteArrayInputStream( jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes( new ChangeRequestsSnapshot( false,
// Builds jMock expectations: the mapper is expected to hand out a mocked ObjectWriter
// for clazz, and that writer is expected to serialize `output` to the RESULT constant.
// NOTE(review): this expectation names the deprecated writerWithType(Class) method;
// it must be updated in lockstep with the production code if that call site migrates
// to writerFor — changing only one side would make this test fail. Left as-is here.
private <T> Expectations toJson(final T output, final Class<T> clazz) throws JsonProcessingException { final ObjectWriter writer = context.mock(ObjectWriter.class); return new Expectations() {{ oneOf(mapper).writerWithType(clazz); will(returnValue(writer)); oneOf(writer).writeValueAsString(output); will(returnValue(RESULT)); }}; }
/**
 * Builds a converter with a reader and writer pre-bound to
 * {@code ClusterStatusMessage}, so per-message conversion avoids re-resolving
 * the type.
 */
public ClusterMessageConverter(ObjectMapper mapper)
{
  // readerFor/writerFor replace the deprecated reader(Class)/writerWithType(Class)
  // methods (Jackson 2.5+); behavior is identical.
  _reader = mapper.readerFor(ClusterStatusMessage.class);
  _writer = mapper.writerFor(ClusterStatusMessage.class);
}
/**
 * Serializes {@code entity} to a JSON string using a writer bound to
 * {@code clazz}, so serialization follows the declared type rather than the
 * runtime class.
 *
 * @throws IOException if serialization fails
 */
@Override
public String marshal(T entity) throws IOException
{
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+).
  return objectMapper.writerFor(clazz).writeValueAsString(entity);
}
public String getObjectAsString() { //orderObjs : Considering this is the list of objects of class Order ObjectMapper objMapper = new ObjectMapper(); returnValue = objMapper.writerWithType( objMapper.getTypeFactory().constructCollectionType( List.class, Order.class)).writeValueAsString( orderObjs); return returnValue; }
/**
 * Serializes {@code dto} to a JSON string using a writer bound to its runtime
 * class.
 *
 * @throws JsonProcessingException if serialization fails
 */
public static String toJson(Object dto) throws JsonProcessingException
{
  final Class<?> clazz = dto.getClass();
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+).
  return objectMapper.writerFor(clazz).writeValueAsString(dto);
}
// Configure a mapper whose module supplies a custom key serializer for CustomType1.
final ObjectMapper mapper = new ObjectMapper();
final SimpleModule module = new SimpleModule("myModule", Version.unknownVersion());
module.addKeySerializer(CustomType1.class, new CustomType1Serializer());
mapper.registerModule(module);

// Build the Map<CustomType1, CustomType2> type explicitly so the writer is bound to it.
final MapType type = mapper.getTypeFactory().constructMapType(
    Map.class, CustomType1.class, CustomType2.class);
final Map<CustomType1, CustomType2> map = new HashMap<CustomType1, CustomType2>(4);

// writerFor replaces the deprecated writerWithType (Jackson 2.5+).
final ObjectWriter writer = mapper.writerFor(type);
final String json = writer.writeValueAsString(map);
/**
 * Writes {@code rQueries} to {@code w} as a JSON array of
 * {@code com.bigdata.rdf.sail.model.RunningQuery} objects.
 *
 * @throws IOException (and its Jackson subclasses JsonGenerationException /
 *         JsonMappingException) if serialization or writing fails
 */
public static void writeRunningQueryList(Writer w, List<RunningQuery> rQueries)
    throws JsonGenerationException, JsonMappingException, IOException
{
  final ObjectMapper mapper = new ObjectMapper();
  final TypeFactory typeFactory = mapper.getTypeFactory();
  // writerFor replaces the deprecated writerWithType (Jackson 2.5+); the writer
  // is bound to List<com.bigdata.rdf.sail.model.RunningQuery>.
  final ObjectWriter writer = mapper.writerFor(
      typeFactory.constructCollectionType(List.class, com.bigdata.rdf.sail.model.RunningQuery.class));
  writer.writeValue(w, rQueries);
}