/**
 * Lazily build the shared ObjectMappersSuite, since instances are heavy-weight.
 *
 * <p>NOTE(review): the lazy initialization is unsynchronized and the field is not volatile —
 * presumably this is only reached from a single thread; confirm before relying on it concurrently.
 *
 * @return The instance of ObjectMappersSuite
 */
protected final ObjectMappersSuite getMappers() {
    if (objectMappers != null) {
        return objectMappers;
    }
    objectMappers = new ObjectMappersSuite();
    return objectMappers;
}
/**
 * Constructor.
 *
 * @param entries The data entries to generate the response for
 * @param pages The paginated set of results containing the pages being linked to.
 * @param uriInfo UriInfo to generate the URL for the page links
 * @param responseName Top level name of json object described the response data.
 * @param objectMappers Suite of Object Mappers to use when serializing the response
 */
public JsonResponse(
        Stream<T> entries,
        Pagination<?> pages,
        UriInfo uriInfo,
        String responseName,
        ObjectMappersSuite objectMappers
) {
    super(entries, pages, uriInfo, objectMappers);
    // Build the factory from the suite's mapper so generated JSON shares its configuration
    this.jsonFactory = new JsonFactory(objectMappers.getMapper());
    this.responseName = responseName;
}
/**
 * Writes CSV response.
 *
 * <p>The CSV schema is derived from the first row seen in the stream (set or guessed
 * against the configured column names), and the header line is emitted only for the
 * first row written.
 *
 * @param os The output stream to write document bytes to
 *
 * @throws IOException If an error occurs while writing this stream
 */
@Override
public void write(OutputStream os) throws IOException {
    // Holds the schema once computed from the first row; compareAndSet below ensures
    // it is only ever set once
    AtomicReference<CsvSchema> schema = new AtomicReference<>();
    // True until the first row (which carries the header line) has been written
    AtomicBoolean isFirstRow = new AtomicBoolean(true);
    ObjectMapper csvMapper = objectMappers.getCsvMapper();
    try {
        // peek sees each row before the terminal consumer; compareAndSet(null, ...) only
        // succeeds for the first row, so later rows reuse the established schema
        entries.peek(row -> schema.compareAndSet(null, setOrGuessHeader(row, columnNames)))
                .forEachOrdered(
                        row -> {
                            try {
                                // Only the very first row is serialized with the header line
                                boolean addHeader = isFirstRow.getAndSet(false);
                                csvMapper.writer().with(schema.get().withUseHeader(addHeader)).writeValue(os, row);
                            } catch (IOException ioe) {
                                // A checked IOException cannot escape the Consumer; wrap it...
                                String msg = String.format("Unable to write CSV data row: %s", row);
                                LOG.error(msg, ioe);
                                throw new RuntimeException(msg, ioe);
                            }
                        }
                );
    } catch (RuntimeException re) {
        // ...and unwrap it back into an IOException at the method boundary
        throw new IOException(re);
    }
}
/**
 * Writes the result set as CSV to the given output stream.
 *
 * <p>The header line is produced first by serializing an empty map with
 * {@code withSkipFirstDataRow(true)} (header only, no data line); every result
 * row is then written with the header suppressed.
 *
 * @param request The API request being responded to
 * @param responseData The response data whose result set is serialized into rows
 * @param outputStream The output stream to write CSV bytes to
 *
 * @throws IOException If an error occurs while writing this stream
 */
@Override
public void write(
        ApiRequest request,
        ResponseData responseData,
        OutputStream outputStream
) throws IOException {
    // Just write the header first: an empty map with skipFirstDataRow yields only the header line
    CsvSchema schema = buildCsvHeaders(responseData);
    CsvMapper csvMapper = objectMappers.getCsvMapper();
    csvMapper.writer().with(schema.withSkipFirstDataRow(true))
            .writeValue(outputStream, Collections.emptyMap());

    // All data rows are written without repeating the header
    ObjectWriter writer = csvMapper.writer().with(schema.withoutHeader());
    try {
        responseData.getResultSet().stream()
                .map(responseData::buildResultRow)
                .forEachOrdered(
                        row -> {
                            try {
                                writer.writeValue(outputStream, row);
                            } catch (IOException ioe) {
                                // A checked IOException cannot escape the Consumer; wrap it...
                                String msg = String.format("Unable to write CSV data row: %s", row);
                                LOG.error(msg, ioe);
                                throw new RuntimeException(msg, ioe);
                            }
                        }
                );
    } catch (RuntimeException re) {
        // ...and unwrap it back into an IOException at the method boundary
        throw new IOException(re);
    }
}
/**
 * Constructor.
 *
 * @param dimensionDictionary Set of dimensions to be aware of
 * @param dataCache A cache that we can clear if told to
 * @param objectMappers Shares mappers for dealing with JSON
 */
@Inject
public DimensionCacheLoaderServlet(
        DimensionDictionary dimensionDictionary,
        @NotNull DataCache<?> dataCache,
        ObjectMappersSuite objectMappers
) {
    this.dimensionDictionary = dimensionDictionary;
    this.dataCache = dataCache;
    // Only the JSON mapper is retained; the rest of the suite is not needed here
    this.mapper = objectMappers.getMapper();
}
/**
 * Fetch the ObjectMapper held by the current ObjectMappersSuite.
 *
 * @return The instance of ObjectMapper
 */
protected final ObjectMapper getMapper() {
    // Delegate through the (lazily built) suite accessor
    return this.getMappers().getMapper();
}
/**
 * Given a JobRow, and the URI of the request, serializes the JobRow into the version to be sent to the user.
 *
 * @param jobRow The row to be serialized
 * @param uriInfo The URI of the request
 *
 * @return A String that should be sent back to the user describing the asynchronous job
 *
 * @throws RuntimeException Wrapping the JsonProcessingException if the payload cannot be serialized
 */
private String serializeJobRow(JobRow jobRow, UriInfo uriInfo) {
    try {
        return objectMappers.getMapper().writeValueAsString(
                jobPayloadBuilder.buildPayload(jobRow, uriInfo)
        );
    } catch (JsonProcessingException e) {
        // Fix: the original used a String.format-style "%s" placeholder, which SLF4J never
        // interpolates, and passed the exception as the (consumed) argument — so neither the
        // row nor a useful message was logged. Use the SLF4J "{}" placeholder for the row and
        // pass the exception last so the stack trace is logged.
        LOG.error("Error serializing JobRow: {}", jobRow, e);
        throw new RuntimeException(e);
    }
}
this.preResponseStore = preResponseStore; this.broadcastChannel = broadcastChannel; this.writer = objectMappers.getMapper().writer(); this.httpResponseMaker = httpResponseMaker; this.formatResolver = formatResolver;
/**
 * Prepare Response object from error details with reason and description.
 *
 * @param statusCode Error status code
 * @param reason Brief reason about the error
 * @param description Description of the error
 * @param druidQuery Druid query associated with the an error
 *
 * @return Publishable Response object
 */
public javax.ws.rs.core.Response buildErrorResponse(
        int statusCode,
        String reason,
        String description,
        DruidQuery<?> druidQuery
) {
    // Delegate entirely to the shared helper, serializing with the suite's JSON writer
    return RequestHandlerUtils.makeErrorResponse(
            statusCode,
            reason,
            description,
            druidQuery,
            objectMappers.getMapper().writer()
    );
}
/**
 * Map a Throwable raised while processing a request onto an HTTP error Response.
 *
 * @param e The throwable being handled
 * @param request The api request in flight when the throwable arose, if any
 * @param requestContext The container request context
 *
 * @return An error Response describing the failure
 */
@Override
public Response handleThrowable(
        Throwable e,
        Optional<? extends ApiRequest> request,
        ContainerRequestContext requestContext
) {
    // Validation failures carry their own HTTP status; surface it directly
    if (e instanceof RequestValidationException) {
        LOG.debug(e.getMessage(), e);
        RequestValidationException validationException = (RequestValidationException) e;
        return RequestHandlerUtils.makeErrorResponse(
                validationException.getStatus(),
                validationException,
                mappers.getMapper().writer()
        );
    }
    // Anything else is reported as an internal server error
    String msg = ErrorMessageFormat.REQUEST_PROCESSING_EXCEPTION.format(e.getMessage());
    LOG.info(msg, e);
    return Response.status(INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
}
}
@Override
public void invoke(Throwable error) {
    // The query never reached Druid (or failed in transit); log the failure and
    // propagate it to subscribers as a 500 ResponseException
    LOG.error(ErrorMessageFormat.FAILED_TO_SEND_QUERY_TO_DRUID.logFormat(druidQuery), error);
    responseEmitter.onError(new ResponseException(
            Status.INTERNAL_SERVER_ERROR,
            druidQuery,
            error,
            objectMappers.getMapper().writer()
    ));
}
};
/** * Process a request to get job payload. * * @param ticket The ticket that can uniquely identify a Job * @param apiRequest JobsApiRequestImpl object with all the associated info in it * @param uriInfo The Uri Info needed to build response links * * @return an observable response to be consumed. */ protected Observable<Response> handleJobResponse(String ticket, JobsApiRequestImpl apiRequest, UriInfo uriInfo) { return apiRequest.getJobViewObservable(ticket) //map the job to Json String .map( job -> { try { return objectMappers.getMapper().writeValueAsString(job); } catch (JsonProcessingException e) { LOG.error(e.getMessage(), e); throw Exceptions.propagate(e); } } ) //map the jsonResponse String to a Response .map(this::getResponse) .onErrorReturn(this::getErrorResponse); }
@Override
public void invoke(int statusCode, String reason, String responseBody) {
    // Druid answered with an error status; log it and propagate the details to
    // subscribers as a ResponseException
    LOG.error(ErrorMessageFormat.ERROR_FROM_DRUID.logFormat(responseBody, statusCode, reason, druidQuery));
    responseEmitter.onError(new ResponseException(
            statusCode,
            reason,
            responseBody,
            druidQuery,
            null,  // no underlying Throwable cause for an HTTP error status
            getObjectMappers().getMapper().writer()
    ));
}
};
OutputStream os ) throws IOException { JsonFactory jsonFactory = new JsonFactory(getObjectMappers().getMapper()); try (JsonGenerator g = jsonFactory.createGenerator(os)) { g.writeStartObject();
FeatureFlagEntry status = new FeatureFlagEntry(flag.getName(), flag.isOn()); String output = objectMappers.getMapper().writeValueAsString(status);
); String output = objectMappers.getMapper().writeValueAsString(result); LOG.debug("Dimension Endpoint Response: {}", output); responseSender = () -> Response.status(Status.OK).entity(output).build();
OutputStream os ) throws IOException { JsonFactory jsonFactory = new JsonFactory(getObjectMappers().getMapper()); try (JsonGenerator generator = jsonFactory.createGenerator(os)) {
containerRequestContext.getUriInfo() ); String output = objectMappers.getMapper().writeValueAsString(result); LOG.debug("Tables Endpoint Response: {}", output); return Response.status(OK).entity(output).build();
String output = objectMappers.getMapper().writeValueAsString(apiRequest.getSlice()); LOG.debug("Slice Endpoint Response: {}", output); return Response.status(Response.Status.OK).entity(output).build();
String output = objectMappers.getMapper().writeValueAsString(result); LOG.debug("Tables Endpoint Response: {}", output); return Response.status(Response.Status.OK).entity(output).build();