/**
 * Adds a chunk to the store and, once the write has succeeded, builds an
 * index message from the supplied metadata.
 *
 * NOTE(review): this line appears to be a garbled extraction of the original
 * method — text is missing (the success-branch opener, the declaration of
 * {@code indexMsg}, the closing braces, and whatever publishes/completes at
 * the end). The comments below document only the visible logic; restore the
 * method from the original source before compiling.
 *
 * @param chunk the chunk contents to add
 * @param chunkMeta metadata describing the chunk itself
 * @param path the path where the chunk should be stored
 * @param indexMeta metadata affecting how the chunk will be indexed
 * @param handler invoked when the operation has finished or failed
 */
@Override public void add(String chunk, ChunkMeta chunkMeta, String path, IndexMeta indexMeta, Handler<AsyncResult<Void>> handler) {
  // Store the chunk first; index-message construction only happens afterwards
  doAddChunk(chunk, path, indexMeta.getCorrelationId(), ar -> {
    if (ar.failed()) {
      // Propagate the storage failure to the caller unchanged
      handler.handle(Future.failedFuture(ar.cause()));
      // NOTE(review): text missing here — presumably the else-branch that
      // creates the `indexMsg` JsonObject this orphaned .put() chains onto.
      // TODO: confirm against the original source.
      .put("meta", chunkMeta.toJsonObject());
      // Optional metadata fields are copied only when actually set
      if (indexMeta.getCorrelationId() != null) {
        indexMsg.put("correlationId", indexMeta.getCorrelationId());
      // NOTE(review): closing braces for these if-blocks are missing in the
      // extracted text; tokens are preserved as found.
      if (indexMeta.getFilename() != null) {
        indexMsg.put("filename", indexMeta.getFilename());
      // Timestamp is a primitive and therefore always written
      indexMsg.put("timestamp", indexMeta.getTimestamp());
      if (indexMeta.getTags() != null) {
        // Tags list is wrapped in a JsonArray for transport
        indexMsg.put("tags", new JsonArray(indexMeta.getTags()));
      if (indexMeta.getFallbackCRSString() != null) {
        indexMsg.put("fallbackCRSString", indexMeta.getFallbackCRSString());
      if (indexMeta.getProperties() != null) {
        // Properties map is wrapped in a JsonObject for transport
        indexMsg.put("properties", new JsonObject(indexMeta.getProperties()));
/**
 * Called for each chunk that is about to be indexed. Collects the chunk's
 * location and metadata into {@code result}.
 *
 * NOTE(review): `result` is declared outside this method (field or captured
 * variable) — presumably a JsonObject; verify against the enclosing class.
 *
 * @param path the path where the chunk was stored
 * @param chunkMeta metadata describing the chunk itself
 * @param indexMeta metadata affecting how the chunk will be indexed
 */
@Override public void onIndexChunk(String path, ChunkMeta chunkMeta, IndexMeta indexMeta) {
  // Path and chunk metadata are always recorded
  result.put("path", path);
  result.put("chunkMeta", chunkMeta.toJsonObject());
  // Optional index metadata: copy only the fields that are actually set.
  // Note the key is "props" here, not "properties".
  if (indexMeta.getTags() != null) {
    result.put("tags", indexMeta.getTags());
  }
  if (indexMeta.getProperties() != null) {
    result.put("props", indexMeta.getProperties());
  }
  if (indexMeta.getCorrelationId() != null) {
    result.put("correlationId", indexMeta.getCorrelationId());
  }
}
// NOTE(review): this brace closes an enclosing scope not visible in this chunk
}
/**
 * Imports a JSON file from the given input stream into the store. The stream
 * is filtered for a UTF-8 BOM, split into GeoJSON chunks, and each chunk is
 * added to the store while the input is paused as necessary.
 * @param f the JSON file to read
 * @param correlationId a unique identifier for this import process
 * @param filename the name of the file currently being imported
 * @param timestamp denotes when the import process has started
 * @param layer the layer where the file should be stored (may be null)
 * @param tags the list of tags to attach to the file (may be null)
 * @param properties the map of properties to attach to the file (may be null)
 * @return an observable that will emit the number 1 when a chunk has been imported
 */
protected Observable<Integer> importJSON(ReadStream<Buffer> f, String correlationId, String filename, long timestamp, String layer, List<String> tags, Map<String, Object> properties) {
  UTF8BomFilter utf8Filter = new UTF8BomFilter();
  StringWindow slidingWindow = new StringWindow();
  GeoJsonSplitter geoJsonSplitter = new GeoJsonSplitter(slidingWindow);
  // number of chunks currently being processed (shared with addToStoreWithPause)
  AtomicInteger chunksBeingProcessed = new AtomicInteger(0);
  return f.toObservable()
      .map(Buffer::getDelegate)
      .map(utf8Filter::filter)
      .doOnNext(slidingWindow::append)
      .compose(new JsonParserTransformer())
      .flatMap(geoJsonSplitter::onEventObservable)
      .flatMapSingle(splitResult -> {
        // GeoJSON carries no fallback CRS, hence the trailing null
        IndexMeta indexMeta = new IndexMeta(correlationId, filename, timestamp, tags, properties, null);
        return addToStoreWithPause(splitResult, layer, indexMeta, f, chunksBeingProcessed)
            .toSingleDefault(1);
      });
}
// NOTE(review): fragment of a larger method; start and end are not visible here.
// Convert the chunk to an indexable document, passing the import's fallback
// CRS string along, and close the chunk once the pipeline terminates
// (whether it succeeded or failed).
return chunkToDocument(chunk, indexMeta.getFallbackCRSString(), parserTransformer, factories)
    .doAfterTerminate(chunk::close)
// NOTE(review): fragment of a larger method; surrounding code not visible here.
// Bundle all per-import metadata — including the fallback CRS — for indexing.
IndexMeta indexMeta = new IndexMeta(correlationId, filename, timestamp, tags, properties, fallbackCRSString);
// NOTE(review): fragment of a larger method; surrounding code not visible here.
// Use the CRS reported by crsIndexer. The assignment is unconditional in this
// fragment — presumably guarded by a null-check in the original source; verify.
crsString = crsIndexer.getCRS();
IndexMeta indexMeta = new IndexMeta(correlationId, filename, timestamp, tags, properties, crsString);
// Statement continues past this fragment (no trailing semicolon visible)
return addToStoreWithPause(result, layer, indexMeta, f, processing)
// NOTE(review): fragment of a unit test; the expression this line continues
// and the closing braces are not visible here.
+ "Store::add was called!"));
// Exercise Store.add with fully populated index metadata, then validate the
// stored result asynchronously once the add has succeeded.
IndexMeta indexMeta = new IndexMeta(IMPORT_ID, ID, TIMESTAMP, TAGS, PROPERTIES, FALLBACK_CRS_STRING);
store.add(CHUNK_CONTENT, META, path, indexMeta, context.asyncAssertSuccess(err -> {
validateAfterStoreAdd(context, vertx, path, context.asyncAssertSuccess(v -> {