/**
 * Checks whether any feature collection in the response is backed by a non-simple (complex)
 * feature type.
 *
 * @param results the feature collection response to inspect
 * @return {@code true} if at least one collection's schema is not a {@link SimpleFeatureTypeImpl}
 */
public static boolean isComplexFeature(FeatureCollectionResponse results) {
    for (FeatureCollection collection : results.getFeature()) {
        // a single non-simple schema is enough to flag the whole response as complex
        if (!(collection.getSchema() instanceof SimpleFeatureTypeImpl)) {
            return true;
        }
    }
    return false;
}
/**
 * Alias for {@link #getFeatures()} — callers use both names interchangeably.
 *
 * @return the feature collections held by this response
 */
public List<FeatureCollection> getFeature() { // alias
    return getFeatures();
}
/** Calls through to {@link #write(FeatureCollectionResponse, OutputStream, Operation)}. */ public void write(Object value, OutputStream output, Operation operation) throws IOException, ServiceException { // for WFS 2.0 we changed the input object type to be the request object adapter, but there // is other code (like WMS GetFeatureInfo) that passes in the old objects, so do a check if (value instanceof FeatureCollectionResponse) { write((FeatureCollectionResponse) value, output, operation); } else { write(FeatureCollectionResponse.adapt(value), output, operation); } }
public void write(Object value, OutputStream output, Operation operation) throws IOException, ServiceException { WFSInfo wfs = getInfo(); FeatureCollectionResponse featureCollection = (FeatureCollectionResponse) value; // create a new feautre collcetion type with just the numbers FeatureCollectionResponse hits = featureCollection.create(); if (GML3OutputFormat.isComplexFeature(featureCollection)) { // we have to count the number of features here manually because complex feature // collection size() now returns 0. In order to count the number of features, // we have to build the features to count them and this has great performance // impact. Unless we introduce joins in our fetching of // data, we will have to count the number of features manually when needed. In // GML3Outputformat I use xslt to populate numberOfFeatures attribute. hits.setNumberOfFeatures(countFeature(featureCollection)); } else { hits.setNumberOfFeatures(featureCollection.getNumberOfFeatures()); } hits.setTotalNumberOfFeatures(featureCollection.getTotalNumberOfFeatures()); hits.setNext(featureCollection.getNext()); hits.setPrevious(featureCollection.getPrevious()); hits.setTimeStamp(featureCollection.getTimeStamp()); encode(hits, output, wfs); }
@Override public Response responseDispatched( Request request, Operation operation, Object result, Response response) { // is this a feature response we are about to encode? if (result instanceof FeatureCollectionResponse) { HttpServletResponse httpResponse = request.getHttpResponse(); FeatureCollectionResponse fcr = (FeatureCollectionResponse) result; String contentType = response.getMimeType(result, operation); if (fcr.getPrevious() != null) { addLink(httpResponse, "prev", contentType, fcr.getPrevious()); } if (fcr.getNext() != null) { addLink(httpResponse, "next", contentType, fcr.getNext()); } } return response; }
@Override public Object operationExecuted(Request request, Operation operation, Object result) { TimeoutVerifier timeoutVerifier = TIMEOUT_VERIFIER.get(); if (timeoutVerifier != null) { // check before encode timeoutVerifier.checkTimeout(); // wrap if needed if (result instanceof FeatureCollectionResponse) { FeatureCollectionResponse featureCollectionResponse = (FeatureCollectionResponse) result; List<FeatureCollection> collections = featureCollectionResponse.getFeatures(); List<FeatureCollection> wrappers = collections .stream() .map(fc -> TimeoutFeatureCollection.wrap(timeoutVerifier, fc)) .collect(Collectors.toList()); featureCollectionResponse.setFeatures(wrappers); } } return result; }
/**
 * Saves the feature source contents into a zipped shapefile and returns the output as a
 * byte array.
 *
 * @param fc the features to encode
 * @return the zip archive bytes
 * @throws IOException if the shapefile encoding fails
 */
byte[] writeOut(FeatureCollection fc) throws IOException {
    FeatureCollectionResponse response =
            FeatureCollectionResponse.adapt(WfsFactory.eINSTANCE.createFeatureCollectionType());
    response.getFeature().add(fc);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    new ShapeZipOutputFormat().write(response, buffer, op);
    return buffer.toByteArray();
}
// Populate the response with the query results plus paging and locking metadata.
result.setNumberOfFeatures(BigInteger.valueOf(count)); // features actually returned
result.setTotalNumberOfFeatures(total); // total matches regardless of paging
result.setTimeStamp(Calendar.getInstance()); // response generation time
result.setLockId(lockId); // NOTE(review): presumably null when no lock was requested — confirm
result.getFeature().addAll(results);
result.setGetFeatureById(getFeatureById);
// NOTE(review): featureCollections appears unused within this visible span — presumably
// referenced further down in the method; the raw List type is kept as-is.
List featureCollections = results.getFeature();
// encode the underlying adaptee as a WFS 1.0 FeatureCollection document
encoder.encode(results.getAdaptee(), org.geotools.wfs.v1_0.WFS.FeatureCollection, output);
@Override protected void encode(FeatureCollectionResponse results, OutputStream output, Encoder encoder) throws IOException { // evil behavior... if the query was a GetFeatureById then we have to write out a single // feature // without the feature collection wrapper if (results.isGetFeatureById()) { List<FeatureCollection> features = results.getFeatures(); Feature next = DataUtilities.first(features.get(0)); if (next == null) { throw new WFSException( (EObject) null, "No feature matching the requested id found", WFSException.NOT_FOUND); } else { encoder.encode(next, GML.AbstractFeature, output); } } else { encoder.encode( results.unadapt(FeatureCollectionType.class), WFS.FeatureCollection, output); } }
// NOTE(review): this span looks truncated — the conditional initializing featureCount has
// no '?'/':' branches before the jsonWriter calls; confirm against the complete source.
BigInteger totalNumberOfFeatures = featureCollection.getTotalNumberOfFeatures();
BigInteger featureCount = (totalNumberOfFeatures != null && totalNumberOfFeatures.longValue() < 0)
// open the GeoJSON "features" array and stream out each collection
jsonWriter.key("features");
jsonWriter.array();
List<FeatureCollection> resultsList = featureCollection.getFeature();
FeaturesInfo featuresInfo = writeFeatures(resultsList, operation, isComplex, jsonWriter);
// Number formatting defaults; NOTE(review): presumably overridden later from configuration.
boolean padWithZeros = false;
boolean forcedDecimal = false;
// configure the transformer once per collection in the response
for (int i = 0; i < results.getFeature().size(); i++) {
    FeatureCollection features = (FeatureCollection) results.getFeature().get(i);
    SimpleFeatureType featureType = (SimpleFeatureType) features.getSchema();
    // CITE compliance forces GML prefixing; otherwise honor the GML override setting
    transformer.setGmlPrefixing(wfs.isCiteCompliant() || !gml.getOverrideGMLAttributes());
    if (results.getLockId() != null) {
        transformer.setLockId(results.getLockId());
/**
 * Encodes a WFS 2.0 "hits" response: a FeatureCollection document carrying counts only.
 */
@Override
protected void encode(FeatureCollectionResponse hits, OutputStream output, WFSInfo wfs)
        throws IOException {
    // a hits response reports zero returned features by definition
    hits.setNumberOfFeatures(BigInteger.valueOf(0));

    Encoder encoder = new Encoder(new WFSConfiguration());
    encoder.setEncoding(Charset.forName(wfs.getGeoServer().getSettings().getCharset()));
    encoder.setSchemaLocation(
            WFS.NAMESPACE,
            ResponseUtils.appendPath(wfs.getSchemaBaseURL(), "wfs/2.0/wfs.xsd"));
    encoder.encode(hits.getAdaptee(), WFS.FeatureCollection, output);
}
}
@Override protected Object getValueInternal(ProgressListener listener) throws Exception { WebFeatureService wfs = (WebFeatureService) context.getBean("wfsServiceTarget"); GetFeatureType gft = null; InputReferenceType ref = input.getReference(); if (ref.getMethod() == MethodType.POST_LITERAL) { gft = (GetFeatureType) ref.getBody(); } else { GetFeatureKvpRequestReader reader = (GetFeatureKvpRequestReader) context.getBean("getFeatureKvpReader"); gft = (GetFeatureType) kvpParse(ref.getHref(), reader); } FeatureCollectionResponse featureCollectionType = wfs.getFeature(gft); // this will also deal with axis order issues return ((ComplexPPIO) ppio).decode(featureCollectionType.getAdaptee()); }
// Previous page: rewind the window to the prior offset.
kvp.put("startIndex", String.valueOf(prevOffset));
kvp.put("limit", String.valueOf(offset - prevOffset));
result.setPrevious(buildURL(request, itemsPath, kvp));
// Next page: advance past the features already returned.
// NOTE(review): both ternary branches reduce to offset + count when offset is 0, so the
// condition looks redundant — confirm the intended semantics.
kvp.put("startIndex", String.valueOf(offset > 0 ? offset + count : count));
kvp.put("limit", String.valueOf(maxFeatures));
result.setNext(buildURL(request, itemsPath, kvp));
/**
 * Saves the feature source contents into a zipped shapefile and returns the output as a
 * byte array.
 *
 * @param fc the features to encode
 * @param maxShpSize maximum .shp size handed to the output format
 * @param maxDbfSize maximum .dbf size handed to the output format
 * @return the zip archive bytes
 * @throws IOException if the shapefile encoding fails
 */
byte[] writeOut(FeatureCollection fc, long maxShpSize, long maxDbfSize) throws IOException {
    ShapeZipOutputFormat format = new ShapeZipOutputFormat();
    format.setMaxDbfSize(maxDbfSize);
    format.setMaxShpSize(maxShpSize);

    FeatureCollectionResponse response =
            FeatureCollectionResponse.adapt(WfsFactory.eINSTANCE.createFeatureCollectionType());
    response.getFeature().add(fc);

    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    format.write(response, buffer, op);
    return buffer.toByteArray();
}
/**
 * When a delay is configured, wraps every returned feature collection so iteration is
 * artificially slowed down (test/throttling hook).
 */
@Override
public Object operationExecuted(Request request, Operation operation, Object result) {
    if (delaySeconds > 0 && result instanceof FeatureCollectionResponse) {
        FeatureCollectionResponse response = (FeatureCollectionResponse) result;
        List<FeatureCollection> delayed =
                response.getFeatures()
                        .stream()
                        .map(fc -> new DelayFeatureCollection((SimpleFeatureCollection) fc))
                        .collect(Collectors.toList());
        response.setFeatures(delayed);
    }
    return super.operationExecuted(request, operation, result);
}
@Override protected void encode(FeatureCollectionResponse hits, OutputStream output, WFSInfo wfs) throws IOException { hits.setNumberOfFeatures(BigInteger.ZERO); // instantiate the XML encoder Encoder encoder = new Encoder(new WFSConfiguration()); encoder.setEncoding(Charset.forName(wfs.getGeoServer().getSettings().getCharset())); encoder.setSchemaLocation( WFS.NAMESPACE, ResponseUtils.appendPath(wfs.getSchemaBaseURL(), "wfs/2.0/wfs.xsd")); Document document; try { // encode the HITS result using FeatureCollection as the root XML element document = encoder.encodeAsDOM(hits.getAdaptee(), WFS.FeatureCollection); } catch (Exception exception) { throw new RuntimeException("Error encoding INDEX result.", exception); } // add the resultSetID attribute to the result addResultSetIdElement(document, resultSetId); // write the XML document to response output stream writeDocument(document, output); }
/** * Process the request as an internal one, without going through GML encoding/decoding * * @param ppio * @param ref * @param method * @return * @throws Exception */ Object handleAsInternalWFS(ProcessParameterIO ppio, InputReferenceType ref) throws Exception { WebFeatureService wfs = (WebFeatureService) context.getBean("wfsServiceTarget"); GetFeatureType gft = null; if (ref.getMethod() == MethodType.POST_LITERAL) { gft = (GetFeatureType) ref.getBody(); } else { GetFeatureKvpRequestReader reader = (GetFeatureKvpRequestReader) context .getBean("getFeatureKvpReader"); gft = (GetFeatureType) kvpParse(ref.getHref(), reader); } FeatureCollectionResponse featureCollectionType = wfs.getFeature(gft); // this will also deal with axis order issues return ((ComplexPPIO) ppio).decode(featureCollectionType.getAdaptee()); }
/**
 * Writes WFS3 compliant paging links into the GeoJSON output when a previous or next page
 * exists.
 *
 * @param response the feature collection response carrying the paging URLs
 * @param operation the current operation, used to derive the link MIME type
 * @param jw the JSON builder receiving the "links" array
 */
protected void writePagingLinks(
        FeatureCollectionResponse response, Operation operation, GeoJSONBuilder jw) {
    if (response.getPrevious() == null && response.getNext() == null) {
        return;
    }
    String mimeType = getMimeType(response, operation);
    jw.key("links");
    jw.array();
    // both links are emitted; writeLink handles a null URL for the missing side
    writeLink(jw, "previous page", mimeType, "previous", response.getPrevious());
    writeLink(jw, "next page", mimeType, "next", response.getNext());
    jw.endArray();
}