/**
 * Persists the given alert and returns the ID it was stored under.
 *
 * @param alert the alert to persist; must be an {@link AlertImpl}
 * @return the ID of the saved alert document
 * @throws ValidationException if the alert cannot be saved
 */
@Override
public String save(Alert alert) throws ValidationException {
    checkArgument(alert instanceof AlertImpl,
            "Supplied argument must be of type " + AlertImpl.class + ", and not " + alert.getClass());
    final AlertImpl alertImpl = (AlertImpl) alert;
    return this.coll.save(alertImpl).getSavedId();
}
/**
 * Stores the given {@link DTO} in the database.
 *
 * @param dto the {@link DTO} to save
 * @return the newly saved {@link DTO}
 */
public DTO save(DTO dto) {
    // mongojack returns the persisted object (with any generated id) on the WriteResult.
    return db.save(dto).getSavedObject();
}
/**
 * Persists the given index field types record.
 *
 * @param dto the record to save
 * @return the saved record, including any database-generated fields
 */
public IndexFieldTypesDTO save(IndexFieldTypesDTO dto) {
    return db.save(dto).getSavedObject();
}
@Override public RoleImpl save(Role role1) throws ValidationException { // sucky but necessary because of graylog2-shared not knowing about mongodb :( if (!(role1 instanceof RoleImpl)) { throw new IllegalArgumentException("invalid Role implementation class"); } RoleImpl role = (RoleImpl) role1; final Set<ConstraintViolation<Role>> violations = validate(role); if (!violations.isEmpty()) { throw new ValidationException("Validation failed.", violations.toString()); } final WriteResult<RoleImpl, ObjectId> writeResult = dbCollection.save(role); return writeResult.getSavedObject(); }
/**
 * Persists a new output.
 *
 * @param request the output to create; must be convertible to {@code OutputImpl}
 * @return the saved output
 * @throws ValidationException if the output cannot be created
 */
@Override
public Output create(Output request) throws ValidationException {
    final OutputImpl impl = implOrFail(request);
    return coll.save(impl).getSavedObject();
}
/**
 * Persists the given alarm callback history entry.
 *
 * @param alarmCallbackHistory the history entry to save
 * @return the saved history entry
 */
@Override
public AlarmCallbackHistory save(AlarmCallbackHistory alarmCallbackHistory) {
    return coll.save(implOrFail(alarmCallbackHistory)).getSavedObject();
}
/**
 * Persists the given alarm callback configuration.
 *
 * @param model the configuration to save
 * @return the ID of the saved configuration document
 * @throws ValidationException if the configuration cannot be saved
 */
@Override
public String save(AlarmCallbackConfiguration model) throws ValidationException {
    final String savedId = coll.save(implOrFail(model)).getSavedId();
    return savedId;
}
/**
 * Publishes the given event to the cluster by persisting it to the cluster events
 * collection. {@link DeadEvent}s are skipped; Mongo failures are logged, not rethrown.
 *
 * @param event the event to publish cluster-wide
 */
@Subscribe
public void publishClusterEvent(Object event) {
    if (event instanceof DeadEvent) {
        LOG.debug("Skipping DeadEvent on cluster event bus");
        return;
    }
    final String eventTypeName = AutoValueUtils.getCanonicalName(event.getClass());
    final ClusterEvent wrapped = ClusterEvent.create(nodeId.toString(), eventTypeName, event);
    try {
        // JOURNALED write concern so the event survives a mongod crash.
        final String savedId = dbCollection.save(wrapped, WriteConcern.JOURNALED).getSavedId();
        LOG.debug("Published cluster event with ID <{}> and type <{}>", savedId, eventTypeName);
    } catch (MongoException e) {
        LOG.error("Couldn't publish cluster event of type <" + eventTypeName + ">", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public IndexSetConfig save(IndexSetConfig indexSetConfig) {
    final IndexSetConfig saved = collection.save(indexSetConfig).getSavedObject();
    // Tell the rest of the cluster about the new index set.
    clusterEventBus.post(IndexSetCreatedEvent.create(saved));
    return saved;
}
/**
 * Persists the given lookup table and notifies the cluster about the change.
 *
 * @param table the lookup table to save
 * @return the saved lookup table
 */
public LookupTableDto save(LookupTableDto table) {
    final LookupTableDto saved = db.save(table).getSavedObject();
    clusterEventBus.post(LookupTablesUpdated.create(saved));
    return saved;
}
/**
 * Saves the given decorator: updates in place when it already has an ID,
 * otherwise inserts it as a new document.
 *
 * @param decorator the decorator to save; must be a {@link DecoratorImpl}
 * @return the persisted decorator as read back from the collection
 */
@Override
public Decorator save(Decorator decorator) {
    checkArgument(decorator instanceof DecoratorImpl,
            "Argument must be an instance of DecoratorImpl, not %s", decorator.getClass());
    final DecoratorImpl impl = (DecoratorImpl) decorator;
    if (!Strings.isNullOrEmpty(decorator.id())) {
        // Existing decorator: update by ID and return the stored version.
        this.coll.updateById(decorator.id(), impl);
        return this.coll.findOneById(decorator.id());
    }
    // New decorator: insert and return the saved object.
    return this.coll.save(impl).getSavedObject();
}
/**
 * Persists the given index range, replacing any existing range for the same index.
 *
 * @param indexRange the index range to save
 * @return the write result of the save operation
 */
@Override
public WriteResult<MongoIndexRange, ObjectId> save(IndexRange indexRange) {
    // Drop any stale range for this index before writing the new one.
    remove(indexRange.indexName());
    return collection.save(MongoIndexRange.create(indexRange));
}
/**
 * Persists the given pipeline and notifies the cluster that it changed.
 *
 * @param pipeline the pipeline to save
 * @return the saved pipeline
 */
@Override
public PipelineDao save(PipelineDao pipeline) {
    final PipelineDao saved = dbCollection.save(pipeline).getSavedObject();
    clusterBus.post(PipelinesChangedEvent.updatedPipelineId(saved.id()));
    return saved;
}
/**
 * Persists the given data adapter and notifies the cluster about the change.
 *
 * @param table the data adapter to save
 * @return the saved data adapter
 */
public DataAdapterDto save(DataAdapterDto table) {
    final DataAdapterDto saved = db.save(table).getSavedObject();
    clusterEventBus.post(DataAdaptersUpdated.create(saved.id()));
    return saved;
}
/**
 * Persists the given cache configuration and notifies the cluster about the change.
 *
 * @param table the cache configuration to save
 * @return the saved cache configuration
 */
public CacheDto save(CacheDto table) {
    final CacheDto saved = db.save(table).getSavedObject();
    clusterEventBus.post(CachesUpdated.create(saved.id()));
    return saved;
}
/**
 * Persists the given rule and notifies the cluster that it changed.
 *
 * @param rule the rule to save
 * @return the saved rule
 */
@Override
public RuleDao save(RuleDao rule) {
    final RuleDao saved = dbCollection.save(rule).getSavedObject();
    clusterBus.post(RulesChangedEvent.updatedRuleId(saved.id()));
    return saved;
}
/**
 * Validates and persists all given Grok patterns.
 *
 * <p>When {@code replace} is {@code false}, the whole batch is rejected if any incoming
 * pattern name collides with an already stored pattern. When {@code replace} is
 * {@code true}, all existing patterns are deleted first.
 *
 * @param patterns the patterns to save
 * @param replace whether to replace the full set of stored patterns
 * @return the saved patterns
 * @throws ValidationException if a name collision occurs or any pattern is invalid
 */
@Override
public List<GrokPattern> saveAll(Collection<GrokPattern> patterns, boolean replace) throws ValidationException {
    if (!replace) {
        // Reject the batch if any incoming pattern name collides with an existing one.
        for (GrokPattern pattern : loadAll()) {
            final boolean patternExists = patterns.stream().anyMatch(p -> p.name().equals(pattern.name()));
            if (patternExists) {
                throw new ValidationException("Grok pattern " + pattern.name() + " already exists");
            }
        }
    }
    try {
        if (!validateAll(patterns)) {
            throw new ValidationException("Invalid patterns");
        }
    } catch (GrokException | PatternSyntaxException e) {
        throw new ValidationException("Invalid patterns.\n" + e.getMessage());
    }
    if (replace) {
        deleteAll();
    }
    final ImmutableList.Builder<GrokPattern> savedPatterns = ImmutableList.builder();
    final ImmutableSet.Builder<String> patternNames = ImmutableSet.builder();
    for (final GrokPattern pattern : patterns) {
        final GrokPattern savedGrokPattern = dbCollection.save(pattern).getSavedObject();
        savedPatterns.add(savedGrokPattern);
        patternNames.add(savedGrokPattern.name());
    }
    // Fix: the collected pattern names were built but never used, so other nodes were
    // not told about bulk changes. Post the update event, consistent with save(GrokPattern).
    clusterBus.post(GrokPatternsUpdatedEvent.create(patternNames.build()));
    return savedPatterns.build();
}
/**
 * Marks the given alert as resolved (resolution time = now) and persists it.
 * A {@code null} or already-resolved alert is returned unchanged.
 *
 * @param alert the alert to resolve, may be {@code null}
 * @return the resolved alert, or the input when there was nothing to do
 */
@Override
public Alert resolveAlert(Alert alert) {
    // Nothing to do for missing or already-resolved alerts.
    if (alert == null || isResolved(alert)) {
        return alert;
    }
    final AlertImpl resolved = ((AlertImpl) alert).toBuilder()
            .resolvedAt(Tools.nowUTC())
            .build();
    this.coll.save(resolved);
    return resolved;
}
/**
 * Validates and persists a single Grok pattern, then notifies the cluster.
 *
 * @param pattern the pattern to save
 * @return the saved pattern
 * @throws ValidationException if the pattern is invalid or its name already exists
 */
@Override
public GrokPattern save(GrokPattern pattern) throws ValidationException {
    try {
        if (!validate(pattern)) {
            throw new ValidationException("Invalid pattern " + pattern);
        }
    } catch (GrokException | PatternSyntaxException e) {
        throw new ValidationException("Invalid pattern " + pattern + "\n" + e.getMessage());
    }
    if (loadByName(pattern.name()).isPresent()) {
        throw new ValidationException("Grok pattern " + pattern.name() + " already exists");
    }
    final GrokPattern saved = dbCollection.save(pattern).getSavedObject();
    // Let other nodes know this pattern changed so they reload it.
    clusterBus.post(GrokPatternsUpdatedEvent.create(ImmutableSet.of(saved.name())));
    return saved;
}
/**
 * Upserts the pipeline connections for a stream: reuses the existing document for the
 * stream if present, otherwise creates a new one, then replaces its pipeline IDs and
 * notifies the cluster about the change.
 *
 * @param connections the connections to save, keyed by stream ID
 * @return the saved connections
 */
@Override
public PipelineConnections save(PipelineConnections connections) {
    PipelineConnections existing = dbCollection.findOne(DBQuery.is("stream_id", connections.streamId()));
    if (existing == null) {
        // No document for this stream yet: start from an empty connection set.
        existing = PipelineConnections.create(null, connections.streamId(), Collections.emptySet());
    }
    final PipelineConnections toSave = existing.toBuilder()
            .pipelineIds(connections.pipelineIds())
            .build();
    final PipelineConnections saved = dbCollection.save(toSave).getSavedObject();
    clusterBus.post(PipelineConnectionsChangedEvent.create(saved.streamId(), saved.pipelineIds()));
    return saved;
}