@Inject
public MongoDBRiver(RiverName riverName, RiverSettings settings, @RiverIndexName String riverIndexName,
        Client esClient, ScriptService scriptService, MongoClientService mongoClientService) {
    super(riverName, settings);
    if (logger.isTraceEnabled()) {
        logger.trace("Initializing");
    }
    this.esClient = esClient;
    this.scriptService = scriptService;
    this.mongoClientService = mongoClientService;
    this.definition = MongoDBRiverDefinition.parseSettings(riverName.name(), riverIndexName, settings, scriptService);

    BlockingQueue<QueueEntry> stream = definition.getThrottleSize() == -1
            ? new LinkedTransferQueue<QueueEntry>()
            : new ArrayBlockingQueue<QueueEntry>(definition.getThrottleSize());

    this.context = new SharedContext(stream, Status.STOPPED);
}
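For context, a minimal sketch of how a river like this is usually wired into Elasticsearch 1.x; the plugin class name and module class are assumptions, not taken from the fragment above.

// Hypothetical plugin class (names assumed) showing the typical ES 1.x river registration hook.
public class MongoDBRiverPlugin extends AbstractPlugin {

    @Override
    public String name() {
        return "river-mongodb";
    }

    @Override
    public String description() {
        return "MongoDB River Plugin";
    }

    // Maps the river "type" declared in the _meta document to the Guice module that binds MongoDBRiver.
    public void onModule(RiversModule module) {
        module.registerRiver("mongodb", MongoDBRiverModule.class);
    }
}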
public static void delete(final Client client, final RiverName riverName) {
    DeleteMappingResponse deleteMappingResponse;
    try {
        deleteMappingResponse = client.admin().indices()
                .prepareDeleteMapping("_river").setType(riverName.name())
                .execute().actionGet();
    } catch (final ElasticsearchException e) {
        throw new EsUtilSystemException("Failed to delete " + riverName.name(), e);
    }
    if (!deleteMappingResponse.isAcknowledged()) {
        throw new EsUtilSystemException("Failed to delete " + riverName.name() + ". "
                + deleteMappingResponse.toString());
    }
}
/** Update river last changes id value. */
private void updateRiver(String lastScanTimeField, Long lastScanTime) throws Exception {
    if (logger.isDebugEnabled()) {
        logger.debug("Updating {} to {}", lastScanTimeField, lastScanTime);
    }
    // We store the last update date and some stats
    XContentBuilder xb = jsonBuilder()
            .startObject()
                .startObject("amazon-s3")
                    .field("feedname", feedDefinition.getFeedname())
                    .field(lastScanTimeField, lastScanTime)
                .endObject()
            .endObject();
    esIndex("_river", riverName.name(), lastScanTimeField, xb);
}
private String getLastUpdatedTimestamp() {
    GetResponse lastUpdatedTimestampResponse = client.prepareGet()
            .setIndex(riverIndexName).setType(riverName.name()).setId("lastUpdatedTimestamp")
            .execute().actionGet();
    if (lastUpdatedTimestampResponse.isExists()
            && lastUpdatedTimestampResponse.getSource().containsKey("lastUpdatedTimestamp")) {
        return lastUpdatedTimestampResponse.getSource().get("lastUpdatedTimestamp").toString();
    }
    return null;
}
private boolean isStarted() {
    // Refresh index before querying it.
    client.admin().indices().prepareRefresh("_river").execute().actionGet();
    GetResponse isStartedGetResponse = client.prepareGet("_river", riverName().name(), "_s3status")
            .execute().actionGet();
    try {
        if (!isStartedGetResponse.isExists()) {
            XContentBuilder xb = jsonBuilder().startObject()
                    .startObject("amazon-s3")
                        .field("feedname", feedDefinition.getFeedname())
                        .field("status", "STARTED")
                    .endObject()
                    .endObject();
            client.prepareIndex("_river", riverName.name(), "_s3status").setSource(xb).execute();
            return true;
        } else {
            String status = (String) XContentMapValues.extractValue("amazon-s3.status",
                    isStartedGetResponse.getSourceAsMap());
            if ("STOPPED".equals(status)) {
                return false;
            }
        }
    } catch (Exception e) {
        logger.warn("failed to get status for " + riverName().name() + ", throttling....", e);
    }
    return true;
}
@SuppressWarnings("unchecked") private int getLastStateFromRiver(int settingId) { int skipCount=1; try { client.admin().indices().prepareRefresh("_river").execute().actionGet(); GetResponse lastSeqGetResponse = client.prepareGet("_river", riverName().name(), String.valueOf(settingId)).execute().actionGet(); if (lastSeqGetResponse.isExists()) { Map<String, Object> rssState = (Map<String, Object>) lastSeqGetResponse.getSource().get("email"); if (rssState != null) { Object skip_count = rssState.get("skip_count"); if (skip_count != null) { skipCount= Integer.parseInt(skip_count.toString()); } } } else { if (logger.isDebugEnabled()) logger.debug("{} doesn't exist", settingId); } } catch (Exception e) { logger.error("email-river",e); logger.warn("failed to get last_skip_count, throttling....", e); } return skipCount; } }
GetResponse lastSeqGetResponse = client.prepareGet("_river", riverName().name(), lastScanTimeField).execute().actionGet(); if (lastSeqGetResponse.isExists()) {
    indexName = XContentMapValues.nodeStringValue(indexSettings.get(INDEX_NAME), riverName.name());
    typeName = XContentMapValues.nodeStringValue(indexSettings.get(MAPPING_TYPE), "status");
    bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get(BULK_SIZE), 100);
    flushInterval = TimeValue.parseTimeValue(
            XContentMapValues.nodeStringValue(indexSettings.get(FLUSH_INTERVAL), "12h"), FLUSH_12H);
} else {
    indexName = riverName.name();
    typeName = "status";
    bulkSize = 100;
/**
 * Reconfigure jira river. Must be stopped!
 */
public synchronized void reconfigure() {
    if (!closed)
        throw new IllegalStateException("Jira River must be stopped to reconfigure it!");

    logger.info("reconfiguring JIRA River");
    String riverIndexName = getRiverIndexName();
    refreshSearchIndex(riverIndexName);
    GetResponse resp = client.prepareGet(riverIndexName, riverName().name(), "_meta").execute().actionGet();
    if (resp.isExists()) {
        if (logger.isDebugEnabled()) {
            logger.debug("Configuration document: {}", resp.getSourceAsString());
        }
        Map<String, Object> newset = resp.getSource();
        configure(newset);
    } else {
        throw new IllegalStateException("Configuration document not found to reconfigure jira river "
                + riverName().name());
    }
}
private void storeLastUpdatedTimestamp(String exportTimestamp) {
    String json = "{ \"lastUpdatedTimestamp\" : \"" + exportTimestamp + "\" }";
    IndexRequest updateTimestampRequest = indexRequest(riverIndexName).type(riverName.name())
            .id("lastUpdatedTimestamp").source(json);
    client.index(updateTimestampRequest).actionGet();
}
mysqlUser = XContentMapValues.nodeStringValue(mysqlSettings.get("user"), "root");
mysqlPassword = XContentMapValues.nodeStringValue(mysqlSettings.get("password"), "");
mysqlDb = XContentMapValues.nodeStringValue(mysqlSettings.get("db"), riverName.name());
client.admin().indices().prepareDeleteMapping("_river").setType(riverName.name()).execute();
this.columnSeparator = readConfig("columnSeparator", null);
this.idField = normalizeField(readConfig("idField", null));
this.index = normalizeField(readConfig("index", riverName.name()));
this.type = normalizeField(readConfig("type", this.table));
this.interval = Long.parseLong(readConfig("interval", "600000"));
@Override
public void storeDatetimeValue(String projectKey, String propertyName, Date datetime, BulkRequestBuilder esBulk)
        throws IOException {
    String documentName = prepareValueStoreDocumentName(projectKey, propertyName);

    if (logger.isDebugEnabled())
        logger.debug(
                "Going to write {} property with datetime value {} for project {} using {} update. Document name is {}.",
                propertyName, datetime, projectKey, (esBulk != null ? "bulk" : "direct"), documentName);

    if (esBulk != null) {
        esBulk.add(indexRequest(getRiverIndexName()).type(riverName.name()).id(documentName)
                .source(storeDatetimeValueBuildDocument(projectKey, propertyName, datetime)));
    } else {
        client.prepareIndex(getRiverIndexName(), riverName.name(), documentName)
                .setSource(storeDatetimeValueBuildDocument(projectKey, propertyName, datetime))
                .execute().actionGet();
    }
}
@Inject
public MysqlRiver(final RiverName riverName, final RiverSettings settings, final Client esClient) {
    super(riverName, settings);
    this.esClient = esClient;
    this.logger.info("Creating MySQL Stream River");
    this.index = readConfig("index", riverName.name());
    this.type = readConfig("type", "data");
    this.url = "jdbc:mysql://" + readConfig("hostname") + "/" + readConfig("database");
    this.username = readConfig("username");
    this.password = readConfig("password");
    this.query = readConfig("query");
    this.uniqueIdField = readConfig("uniqueIdField", null);
    this.deleteOldEntries = Boolean.parseBoolean(readConfig("deleteOldEntries", "true"));
    this.interval = Long.parseLong(readConfig("interval", "600000"));
}
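The readConfig helper used above is not included in these fragments. A plausible sketch, assuming the river's options sit under a "mysql" object in the river settings (the block name, default handling, and exception choice are assumptions):

// Hypothetical helper (not in the fragments above): read an optional setting with a default,
// assuming the options are nested under a "mysql" object in the river's _meta document.
@SuppressWarnings("unchecked")
private String readConfig(final String key, final String defaultValue) {
    Object block = settings.settings().get("mysql");
    if (block instanceof Map) {
        return XContentMapValues.nodeStringValue(((Map<String, Object>) block).get(key), defaultValue);
    }
    return defaultValue;
}

// Required-setting variant: fail fast when the key is missing.
private String readConfig(final String key) {
    String value = readConfig(key, null);
    if (value == null) {
        throw new IllegalArgumentException("Missing required river setting: " + key);
    }
    return value;
}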
@Override
public Date readDatetimeValue(String projectKey, String propertyName) throws IOException {
    Date lastDate = null;
    String documentName = prepareValueStoreDocumentName(projectKey, propertyName);

    if (logger.isDebugEnabled())
        logger.debug("Going to read datetime value from {} property for project {}. Document name is {}.",
                propertyName, projectKey, documentName);

    refreshSearchIndex(getRiverIndexName());
    GetResponse lastSeqGetResponse = client.prepareGet(getRiverIndexName(), riverName.name(), documentName)
            .execute().actionGet();
    if (lastSeqGetResponse.isExists()) {
        Object timestamp = lastSeqGetResponse.getSourceAsMap().get(STORE_FIELD_VALUE);
        if (timestamp != null) {
            lastDate = DateTimeUtils.parseISODateTime(timestamp.toString());
        }
    } else {
        if (logger.isDebugEnabled())
            logger.debug("{} document doesn't exist in JIRA river persistent store", documentName);
    }
    return lastDate;
}
    Map<String, Object> indexSettings = (Map<String, Object>) settings.settings().get("index");
    indexName = XContentMapValues.nodeStringValue(indexSettings.get("index"), riverName.name());
    typeName = XContentMapValues.nodeStringValue(indexSettings.get("type"), S3RiverUtil.INDEX_TYPE_DOC);
    bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get("bulk_size"), 100);
} else {
    indexName = riverName.name();
    typeName = S3RiverUtil.INDEX_TYPE_DOC;
    bulkSize = 100;
@Override
public boolean deleteDatetimeValue(String projectKey, String propertyName) {
    String documentName = prepareValueStoreDocumentName(projectKey, propertyName);

    if (logger.isDebugEnabled())
        logger.debug("Going to delete datetime value from {} property for project {}. Document name is {}.",
                propertyName, projectKey, documentName);

    refreshSearchIndex(getRiverIndexName());
    DeleteResponse deleteResponse = client.prepareDelete(getRiverIndexName(), riverName.name(), documentName)
            .execute().actionGet();
    if (!deleteResponse.isFound()) {
        if (logger.isDebugEnabled()) {
            logger.debug("{} document doesn't exist in JIRA river persistent store", documentName);
        }
        return false;
    } else {
        return true;
    }
}
updateRiver("_lastScanTime", lastScanTime); } else { logger.info("Amazon S3 River is disabled for {}", riverName().name());