/**
 * Bulk listener hook invoked when a whole bulk request fails with a Throwable
 * (as opposed to per-item failures). An ActionRequestValidationException is
 * treated as benign and only traced; any other failure marks the river status
 * as IMPORT_FAILED, drops pending requests and shuts the river down.
 */
@Override
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
    // FIX: instanceof is null-safe and covers subclasses, unlike the original
    // failure.getClass().equals(...) which throws NPE if failure is null.
    if (failure instanceof ActionRequestValidationException) {
        if (logger.isTraceEnabled()) {
            logger.trace("Ignore ActionRequestValidationException : {}", failure);
        }
    } else {
        logger.error("afterBulk - Bulk request failed: {} - {} - {}", executionId, request, failure);
        MongoDBRiverHelper.setRiverStatus(client, definition.getRiverName(), Status.IMPORT_FAILED);
        request.requests().clear();
        bulkProcessor.close();
        river.close();
    }
}
// Fragment (truncated in this view): builds a status/source document carrying
// the last oplog timestamp and the current index document count, then traces
// the fetched search hit. NOTE(review): enclosing method not visible here —
// confirm context before modifying.
source.put("lastTimestamp", lastTimestamp); source.put("indexCount", MongoDBRiver.getIndexCount(esClient, definition)); if (logger.isTraceEnabled()) { logger.trace("source: {}", hit.getSourceAsString());
/**
 * Bulk listener hook invoked after each bulk execution completes. On success
 * the document counter and per-cycle statistics are updated; if any item
 * failed, the river is flagged as failed and torn down.
 */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    if (!response.hasFailures()) {
        documentCount.addAndGet(response.getItems().length);
        logStatistics(response.getTookInMillis());
        // Reset the per-cycle operation counters for the next batch.
        deletedDocuments.set(0);
        updatedDocuments.set(0);
        insertedDocuments.set(0);
        if (logger.isTraceEnabled()) {
            logger.trace("afterBulk - bulk [{}] success [{} items] [{} ms] total [{}]", executionId,
                    response.getItems().length, response.getTookInMillis(), documentCount.get());
        }
        return;
    }
    // At least one item failed: record the aggregated failure message, mark
    // the river as failed, discard pending requests and shut everything down.
    logger.error("Bulk processor failed. {}", response.buildFailureMessage());
    MongoDBRiverHelper.setRiverStatus(client, definition.getRiverName(), Status.IMPORT_FAILED);
    request.requests().clear();
    bulkProcessor.close();
    river.close();
}
};
/**
 * Enqueues an oplog entry on the shared stream. A DROP_DATABASE operation is
 * translated into DROP_COLLECTION entries: one per collection when the river
 * imports all collections, otherwise a single entry for the given collection.
 * Blocks if the stream queue is bounded and currently full.
 */
private void addToStream(final Operation operation, final Timestamp<?> currentTimestamp, final DBObject data,
        final String collection) throws InterruptedException {
    if (logger.isTraceEnabled()) {
        final String dataString = data.toString();
        // Avoid flooding the trace log with very large serialized documents.
        if (dataString.length() > 400) {
            logger.trace("addToStream - operation [{}], currentTimestamp [{}], data (_id:[{}], serialized length:{}), collection [{}]",
                    operation, currentTimestamp, data.get("_id"), dataString.length(), collection);
        } else {
            logger.trace("addToStream - operation [{}], currentTimestamp [{}], data [{}], collection [{}]",
                    operation, currentTimestamp, dataString, collection);
        }
    }
    if (operation != Operation.DROP_DATABASE) {
        context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, operation, data, collection));
        return;
    }
    logger.info("addToStream - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]",
            currentTimestamp, data, collection);
    if (!definition.isImportAllCollections()) {
        context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, collection));
        return;
    }
    // Database drop while importing all collections: fan out into one
    // DROP_COLLECTION entry per known collection.
    for (final String name : slurpedDb.getCollectionNames()) {
        logger.info("addToStream - isImportAllCollections - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]",
                currentTimestamp, data, name);
        context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, name));
    }
}
// Enqueues an oplog entry on the shared stream; a DROP_DATABASE operation is
// expanded into DROP_COLLECTION entries (one per collection when importing all
// collections, otherwise a single entry). Blocks when the queue is full.
// NOTE(review): byte-identical to another addToStream definition visible in
// this chunk — presumably the same method captured twice; confirm and dedupe
// at the file level.
private void addToStream(final Operation operation, final Timestamp<?> currentTimestamp, final DBObject data, final String collection) throws InterruptedException { if (logger.isTraceEnabled()) { String dataString = data.toString(); if (dataString.length() > 400) { logger.trace("addToStream - operation [{}], currentTimestamp [{}], data (_id:[{}], serialized length:{}), collection [{}]", operation, currentTimestamp, data.get("_id"), dataString.length(), collection); } else { logger.trace("addToStream - operation [{}], currentTimestamp [{}], data [{}], collection [{}]", operation, currentTimestamp, dataString, collection); } } if (operation == Operation.DROP_DATABASE) { logger.info("addToStream - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]", currentTimestamp, data, collection); if (definition.isImportAllCollections()) { for (String name : slurpedDb.getCollectionNames()) { logger.info("addToStream - isImportAllCollections - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]", currentTimestamp, data, name); context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, name)); } } else { context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, collection)); } } else { context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, operation, data, collection)); } }
/**
 * Routes an update query to the stream: either to the single named collection
 * or, when {@code collection} is null, fanned out to every collection of the
 * slurped database.
 */
private void addQueryToStream(final Operation operation, final Timestamp<?> currentTimestamp, final DBObject update,
        final String collection) throws InterruptedException {
    if (logger.isTraceEnabled()) {
        logger.trace("addQueryToStream - operation [{}], currentTimestamp [{}], update [{}]", operation,
                currentTimestamp, update);
    }
    if (collection != null) {
        addQueryToStream(operation, currentTimestamp, update, collection, slurpedDb.getCollection(collection));
        return;
    }
    // No target collection supplied: apply the query to all collections.
    for (final String name : slurpedDb.getCollectionNames()) {
        addQueryToStream(operation, currentTimestamp, update, name, slurpedDb.getCollection(name));
    }
}
/**
 * Creates the river: stores the injected Elasticsearch client, script service
 * and MongoDB client service, parses the river definition from the settings,
 * and builds the shared oplog-entry stream (bounded when a throttle size is
 * configured, unbounded otherwise). The river starts in STOPPED state.
 */
@Inject
public MongoDBRiver(RiverName riverName, RiverSettings settings, @RiverIndexName String riverIndexName,
        Client esClient, ScriptService scriptService, MongoClientService mongoClientService) {
    super(riverName, settings);
    if (logger.isTraceEnabled()) {
        logger.trace("Initializing");
    }
    this.esClient = esClient;
    this.scriptService = scriptService;
    this.mongoClientService = mongoClientService;
    this.definition = MongoDBRiverDefinition.parseSettings(riverName.name(), riverIndexName, settings, scriptService);
    // Throttle size -1 means unbounded (LinkedTransferQueue); any other value
    // bounds the number of in-flight entries with an ArrayBlockingQueue.
    final BlockingQueue<QueueEntry> stream;
    if (definition.getThrottleSize() == -1) {
        stream = new LinkedTransferQueue<QueueEntry>();
    } else {
        stream = new ArrayBlockingQueue<QueueEntry>(definition.getThrottleSize());
    }
    this.context = new SharedContext(stream, Status.STOPPED);
}
/**
 * Handles an admin-command oplog entry. Only the renameCollection command is
 * acted on, and only when the river imports all collections: the rename
 * target (if it belongs to the watched database) is re-imported.
 *
 * @param entry          the raw admin oplog document
 * @param startTimestamp the timestamp from which the re-import proceeds
 */
private void processAdminCommandOplogEntry(final DBObject entry, final Timestamp<?> startTimestamp)
        throws InterruptedException {
    if (logger.isTraceEnabled()) {
        logger.trace("processAdminCommandOplogEntry - [{}]", entry);
    }
    DBObject object = (DBObject) entry.get(MongoDBRiver.OPLOG_OBJECT);
    if (definition.isImportAllCollections()) {
        if (object.containsField(MongoDBRiver.OPLOG_RENAME_COLLECTION_COMMAND_OPERATION)
                && object.containsField(MongoDBRiver.OPLOG_TO)) {
            String to = object.get(MongoDBRiver.OPLOG_TO).toString();
            // Only react to renames that land inside the watched database.
            if (to.startsWith(definition.getMongoDb())) {
                String newCollection = getCollectionFromNamespace(to);
                DBCollection coll = slurpedDb.getCollection(newCollection);
                CollectionSlurper importer = new CollectionSlurper(river, mongoClusterClient);
                // FIX: use the startTimestamp parameter. The original passed an
                // outer "timestamp", leaving the parameter entirely unused and
                // likely re-importing from the wrong point in time.
                importer.importCollection(coll, startTimestamp);
            }
        }
    }
}
/**
 * Queues a delete operation on the bulk processor for the given document.
 * When the type is configured as a parent type, all child documents that
 * reference the deleted parent are searched for and deleted first.
 */
private void deleteBulkRequest(String objectId, String index, String type, String routing, String parent) {
    if (logger.isTraceEnabled()) {
        logger.trace("bulkDeleteRequest - objectId: {} - index: {} - type: {} - routing: {} - parent: {}", objectId,
                index, type, routing, parent);
    }
    final boolean parentType = definition.getParentTypes() != null && definition.getParentTypes().contains(type);
    if (parentType) {
        // Locate every child document whose parent is the deleted document...
        QueryBuilder builder = QueryBuilders.hasParentQuery(type,
                QueryBuilders.termQuery(MongoDBRiver.MONGODB_ID_FIELD, objectId));
        SearchResponse response = esClient.prepareSearch(index).setQuery(builder).setRouting(routing)
                .addField(MongoDBRiver.MONGODB_ID_FIELD).execute().actionGet();
        // ...and queue a delete for each (routed and parented accordingly).
        for (SearchHit hit : response.getHits().getHits()) {
            getBulkProcessor(index, hit.getType()).deleteBulkRequest(hit.getId(), routing, objectId);
        }
    }
    getBulkProcessor(index, type).deleteBulkRequest(objectId, routing, parent);
}
// Fragment (truncated in this view): part of the advanced-transformation path.
// Extracts the document _id, builds and runs the configured inline script
// (exposing "logger" to it), sets SCRIPT_IMPORT_FAILED on script failure, and
// re-extracts the objectId from each resulting item map. NOTE(review): control
// flow is incomplete here — several braces close outside this chunk.
objectId = entry.getData().get(MongoDBRiver.MONGODB_ID_FIELD).toString(); if (logger.isTraceEnabled()) { logger.trace("applyAdvancedTransformation for id: [{}], operation: [{}]", objectId, operation); ExecutableScript executableScript = scriptService.executable(definition.getScriptType(), definition.getScript(), ScriptService.ScriptType.INLINE, ImmutableMap.<String, Object>of("logger", logger)); if (logger.isTraceEnabled()) { logger.trace("Script to be executed: {} - {}", definition.getScriptType(), definition.getScript()); logger.trace("Context before script executed: {}", ctx); MongoDBRiverHelper.setRiverStatus(esClient, definition.getRiverName(), Status.SCRIPT_IMPORT_FAILED); if (logger.isTraceEnabled()) { logger.trace("Context after script executed: {}", ctx); if (object instanceof Map<?, ?>) { Map<String, Object> item = (Map<String, Object>) object; if (logger.isTraceEnabled()) { logger.trace("item: {}", item); Map<String, Object> data = (Map<String, Object>) item.get("data"); objectId = extractObjectId(data, objectId); if (logger.isTraceEnabled()) { logger.trace( "#### - Id: {} - operation: {} - ignore: {} - index: {} - type: {} - routing: {} - parent: {}",
// Fragment (truncated in this view): opening of updateBulkRequest, which
// dispatches insert/update operations to the bulk processor and traces
// whether the payload is a GridFS attachment. NOTE(review): body is cut off —
// the insert/update branches close outside this chunk.
private void updateBulkRequest(DBObject data, String objectId, Operation operation, String index, String type, String routing, String parent) throws IOException { if (logger.isTraceEnabled()) { logger.trace("Operation: {} - index: {} - type: {} - routing: {} - parent: {}", operation, index, type, routing, parent); if (logger.isTraceEnabled()) { logger.trace("Insert operation - id: {} - contains attachment: {}", objectId, (data instanceof GridFSDBFile)); if (logger.isTraceEnabled()) { logger.trace("Update operation - id: {} - contains attachment: {}", objectId, (data instanceof GridFSDBFile));
// Fragment (truncated in this view): trace of a small serialized object,
// an NPE thrown for a missing _id field, and a trace when an attachment is
// added. NOTE(review): these statements appear jammed together by extraction —
// they presumably come from different spots in the enclosing method; verify
// against the full source before editing.
if (logger.isTraceEnabled()) { String deserialized = object.toString(); if (deserialized.length() < 400) { throw new NullPointerException(MongoDBRiver.MONGODB_ID_FIELD); if (logger.isTraceEnabled()) { logger.trace("Add attachment: {}", objectId);
// Fragment (truncated in this view): traces a MongoDB serverStatus command
// result (cr) and the extracted "process" value. NOTE(review): enclosing
// method and variable declarations are not visible here.
if (logger.isTraceEnabled()) { logger.trace("serverStatus: {}", cr); if (logger.isTraceEnabled()) { logger.trace("process: {}", process);
/**
 * Adds an index request operation to a bulk request, updating the last
 * timestamp for a given namespace (ie: host:dbName.collectionName).
 *
 * @param time          the oplog timestamp to persist
 * @param bulkProcessor the bulk processor the index request is added to
 */
void setLastTimestamp(final Timestamp<?> time, final BulkProcessor bulkProcessor) {
    try {
        if (logger.isTraceEnabled()) {
            logger.trace("setLastTimestamp [{}] [{}]", definition.getMongoOplogNamespace(), time);
        }
        bulkProcessor.add(indexRequest(definition.getRiverIndexName()).type(definition.getRiverName())
                .id(definition.getMongoOplogNamespace()).source(source(time)));
    } catch (IOException e) {
        // FIX: pass the exception to the logger so the cause and stack trace
        // are not silently dropped (ESLogger: error(msg, throwable, params)).
        logger.error("error updating last timestamp for namespace {}", e, definition.getMongoOplogNamespace());
    }
}
// Fragment (truncated in this view): script-transformation step of the update
// path — seeds the script context with the document id, traces it before and
// after execution, and flags SCRIPT_IMPORT_FAILED on failure. NOTE(review):
// the try/catch structure closes outside this chunk.
if (logger.isTraceEnabled()) { logger.trace("updateBulkRequest for id: [{}], operation: [{}]", objectId, operation); ctx.put("id", objectId); if (logger.isTraceEnabled()) { logger.trace("Script to be executed: {} - {}", definition.getScriptType(), definition.getScript()); logger.trace("Context before script executed: {}", ctx); MongoDBRiverHelper.setRiverStatus(esClient, definition.getRiverName(), Status.SCRIPT_IMPORT_FAILED); if (logger.isTraceEnabled()) { logger.trace("Context after script executed: {}", ctx);
// Fragment (truncated in this view): opening of a trace-level guard; the
// guarded statements and closing brace are outside this chunk.
if (logger.isTraceEnabled()) {
/**
 * Failure callback for an asynchronous bulk execution. At trace level the
 * full throwable is logged; otherwise a warning is emitted.
 */
@Override
public void onFailure(Throwable e) {
    if (logger.isTraceEnabled()) {
        logger.trace("failed to execute bulk", e);
    } else {
        // FIX: attach the throwable so the stack trace survives at warn level
        // too (previously only e.getMessage() was logged, losing the cause).
        logger.warn("failed to execute bulk: [{}]", e, e.getMessage());
    }
} });
/**
 * Triggers an engine refresh attributed to the given source, recording the
 * elapsed time in the refresh metric. Fails if the shard is already closed.
 */
public void refresh(String source) {
    verifyNotClosed();
    if (logger.isTraceEnabled()) {
        logger.trace("refresh with source: {}", source);
    }
    final long startNanos = System.nanoTime();
    engine().refresh(source);
    refreshMetric.inc(System.nanoTime() - startNanos);
}
/**
 * Channel-close callback: removes the channel from the open-channels map and
 * decrements the open-channels gauge if it was still being tracked.
 */
@Override
public void operationComplete(ChannelFuture future) throws Exception {
    final Channel removed = openChannels.remove(future.getChannel().getId());
    if (removed != null) {
        openChannelsMetric.dec();
    }
    if (logger.isTraceEnabled()) {
        logger.trace("channel closed: {}", future.getChannel());
    }
}
};
/**
 * Post-execution bulk hook: traces the action count, estimated size and
 * latency of the request, and warns with the aggregated failure message when
 * any item of the bulk failed.
 */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    if (logger.isTraceEnabled()) {
        logger.trace("[{}] executed [{}]/[{}], took [{}]", executionId, request.numberOfActions(),
                new ByteSizeValue(request.estimatedSizeInBytes()), response.getTook());
    }
    if (response.hasFailures()) {
        logger.warn("[{}] failed to execute bulk request: {}", executionId, response.buildFailureMessage());
    }
}