private void addToStream(final Operation operation, final Timestamp<?> currentTimestamp, final DBObject data, final String collection)
        throws InterruptedException {
    if (logger.isTraceEnabled()) {
        String dataString = data.toString();
        if (dataString.length() > 400) {
            // Avoid dumping very large documents into the trace log: log only the _id and serialized length.
            logger.trace("addToStream - operation [{}], currentTimestamp [{}], data (_id:[{}], serialized length:{}), collection [{}]",
                    operation, currentTimestamp, data.get("_id"), dataString.length(), collection);
        } else {
            logger.trace("addToStream - operation [{}], currentTimestamp [{}], data [{}], collection [{}]", operation, currentTimestamp,
                    dataString, collection);
        }
    }
    if (operation == Operation.DROP_DATABASE) {
        logger.info("addToStream - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]", currentTimestamp, data,
                collection);
        if (definition.isImportAllCollections()) {
            // A database drop is fanned out into one DROP_COLLECTION entry per collection,
            // so downstream consumers only ever see collection-level drops.
            for (String name : slurpedDb.getCollectionNames()) {
                logger.info("addToStream - isImportAllCollections - Operation.DROP_DATABASE, currentTimestamp [{}], data [{}], collection [{}]",
                        currentTimestamp, data, name);
                context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, name));
            }
        } else {
            context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, Operation.DROP_COLLECTION, data, collection));
        }
    } else {
        context.getStream().put(new MongoDBRiver.QueueEntry(currentTimestamp, operation, data, collection));
    }
}
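// A minimal, self-contained sketch of the DROP_DATABASE fan-out above. All names
// here (DropDatabaseFanOutSketch, DemoEntry, Op, the collection names) are
// hypothetical stand-ins for MongoDBRiver.QueueEntry, Operation, and
// context.getStream(); this illustrates the queueing pattern, not the river's API.
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

class DropDatabaseFanOutSketch {
    enum Op { DROP_COLLECTION }

    // Stand-in for MongoDBRiver.QueueEntry: an operation plus its target collection.
    static class DemoEntry {
        final Op operation;
        final String collection;
        DemoEntry(Op operation, String collection) {
            this.operation = operation;
            this.collection = collection;
        }
        @Override
        public String toString() {
            return operation + "[" + collection + "]";
        }
    }

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<DemoEntry> stream = new LinkedBlockingQueue<DemoEntry>();
        // Stand-in for slurpedDb.getCollectionNames() when importAllCollections is set.
        List<String> collectionNames = Arrays.asList("users", "orders");

        // The fan-out: one DROP_COLLECTION entry per collection is queued instead of
        // a single database-level entry.
        for (String name : collectionNames) {
            stream.put(new DemoEntry(Op.DROP_COLLECTION, name));
        }

        DemoEntry entry;
        while ((entry = stream.poll()) != null) {
            System.out.println(entry); // DROP_COLLECTION[users], DROP_COLLECTION[orders]
        }
    }
}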
@Override
public void run() {
    while (context.getStatus() == Status.RUNNING) {
        try {
            Timestamp<?> lastTimestamp = null;

            // 1. Attempt to fill as much of the bulk request as possible
            QueueEntry entry = context.getStream().take();
            lastTimestamp = processBlockingQueue(entry);
            while ((entry = context.getStream().poll(definition.getBulk().getFlushInterval().millis(), MILLISECONDS)) != null) {
                lastTimestamp = processBlockingQueue(entry);
            }

            // 2. Update the timestamp
            if (lastTimestamp != null) {
                river.setLastTimestamp(lastTimestamp,
                        getBulkProcessor(definition.getIndexName(), definition.getTypeName()).getBulkProcessor());
            }
        } catch (InterruptedException e) {
            logger.info("river-mongodb indexer interrupted");
            releaseProcessors();
            Thread.currentThread().interrupt();
            break;
        }
    }
}
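// A minimal, runnable sketch of the drain pattern used in run() above: take()
// blocks until the first entry arrives, then poll() with the flush-interval
// timeout keeps batching until the stream stays quiet for one interval, so a
// burst of entries becomes one bulk request rather than one request per entry.
// Names here are hypothetical; flushIntervalMillis stands in for
// definition.getBulk().getFlushInterval().millis().
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import static java.util.concurrent.TimeUnit.MILLISECONDS;

class BatchDrainSketch {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> stream = new LinkedBlockingQueue<String>(Arrays.asList("op1", "op2", "op3"));
        long flushIntervalMillis = 50; // hypothetical flush interval

        List<String> batch = new ArrayList<String>();
        String entry = stream.take();  // blocks until at least one entry is available
        batch.add(entry);
        // Keep draining; stop once nothing arrives for a full flush interval.
        while ((entry = stream.poll(flushIntervalMillis, MILLISECONDS)) != null) {
            batch.add(entry);
        }
        System.out.println(batch);     // [op1, op2, op3], flushed after ~50 ms of silence
    }
}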