/**
 * Rethrows node and task failures for the named operation, if any occurred.
 * The first failure becomes the primary exception; the rest are attached as
 * suppressed exceptions.
 *
 * @param operationName human-readable operation name used in the exception message
 */
public void rethrowFailures(String operationName) {
    // Wrap each task failure in an ElasticsearchException carrying the operation
    // name and the failing task's id, then rethrow together with any node failures.
    rethrowAndSuppress(
        Stream.concat(
            getNodeFailures().stream(),
            getTaskFailures().stream()
                .map(failure -> new ElasticsearchException(
                    "{} of [{}] failed",
                    failure.getCause(),
                    operationName,
                    new TaskId(failure.getNodeId(), failure.getTaskId()))))
            .collect(toList()));
}
/** * Validates the given settings by running it through all update listeners without applying it. This * method will not change any settings but will fail if any of the settings can't be applied. */ public synchronized Settings validateUpdate(Settings settings) { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List<RuntimeException> exceptions = new ArrayList<>(); for (SettingUpdater<?> settingUpdater : settingUpdaters) { try { // ensure running this through the updater / dynamic validator // don't check if the value has changed we wanna test this anyways settingUpdater.getValue(current, previous); } catch (RuntimeException ex) { exceptions.add(ex); logger.debug(() -> new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex); } } // here we are exhaustive and record all settings that failed. ExceptionsHelper.rethrowAndSuppress(exceptions); return current; }
ExceptionsHelper.rethrowAndSuppress(ex);
ExceptionsHelper.rethrowAndSuppress(exceptions);
/**
 * Verifies that every processor of the pipeline described by {@code request} is
 * installed on each of the given nodes. Conditional processors are exempt from
 * the check. All missing-processor failures are collected and rethrown together.
 *
 * @param ingestInfos per-node ingest capabilities; must not be empty
 * @param request the put-pipeline request whose processors are checked
 * @throws Exception if parsing fails or any processor is missing on any node
 */
void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
    if (ingestInfos.isEmpty()) {
        throw new IllegalStateException("Ingest info is empty");
    }
    Map<String, Object> pipelineConfig =
        XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
    Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories, scriptService);
    List<Exception> missingProcessors = new ArrayList<>();
    for (Processor processor : pipeline.flattenAllProcessors()) {
        String type = processor.getType();
        // the conditional wrapper is built-in, so it is never reported as missing
        if (ConditionalProcessor.TYPE.equals(type)) {
            continue;
        }
        for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
            if (entry.getValue().containsProcessor(type) == false) {
                String message = "Processor type [" + type + "] is not installed on node [" + entry.getKey() + "]";
                missingProcessors.add(
                    ConfigurationUtils.newConfigurationException(type, processor.getTag(), null, message));
            }
        }
    }
    ExceptionsHelper.rethrowAndSuppress(missingProcessors);
}
ExceptionsHelper.rethrowAndSuppress(exceptions); return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), null); } catch (final Exception e) {
/**
 * Rebuilds the in-memory pipeline map from the ingest metadata in {@code state}.
 * Pipelines that fail to parse are replaced by a placeholder pipeline (so they can
 * still be deleted), and the collected parse failures are rethrown at the end,
 * after the new map has been installed.
 */
private void innerUpdatePipelines(ClusterState previousState, ClusterState state) {
    if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        return;
    }
    IngestMetadata newMetadata = state.getMetaData().custom(IngestMetadata.TYPE);
    IngestMetadata oldMetadata = previousState.getMetaData().custom(IngestMetadata.TYPE);
    if (Objects.equals(newMetadata, oldMetadata)) {
        return; // nothing changed, keep the current map
    }
    Map<String, Pipeline> updatedPipelines = new HashMap<>();
    List<ElasticsearchParseException> failures = new ArrayList<>();
    for (PipelineConfiguration config : newMetadata.getPipelines().values()) {
        try {
            updatedPipelines.put(
                config.getId(),
                Pipeline.create(config.getId(), config.getConfigAsMap(), processorFactories, scriptService));
        } catch (ElasticsearchParseException e) {
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), e));
            failures.add(e);
        } catch (Exception e) {
            ElasticsearchParseException wrapped = new ElasticsearchParseException(
                "Error updating pipeline with id [" + config.getId() + "]", e);
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), wrapped));
            failures.add(wrapped);
        }
    }
    // install the new map first so substituted placeholders are visible even when we rethrow
    this.pipelines = Collections.unmodifiableMap(updatedPipelines);
    ExceptionsHelper.rethrowAndSuppress(failures);
}
/** * * Validates that all given settings are registered and valid */ public final void validate(Settings settings) { List<RuntimeException> exceptions = new ArrayList<>(); for (String key : settings.keySet()) { // settings iterate in deterministic fashion try { validate(key, settings); } catch (RuntimeException ex) { exceptions.add(ex); } } ExceptionsHelper.rethrowAndSuppress(exceptions); }
/**
 * Closes all remaining pages. Only the first caller performs the cleanup;
 * subsequent calls are no-ops. Failures while closing individual pages are
 * collected and rethrown together at the end so every page gets a close attempt.
 */
@Override
public void close() {
    if (isClosed.compareAndSet(false, true) == false) {
        return; // already closed by another caller
    }
    final List<RuntimeException> failures = new ArrayList<>();
    for (Page page = pages.pollFirst(); page != null; page = pages.pollFirst()) {
        try {
            page.close();
        } catch (RuntimeException e) {
            failures.add(e);
        }
    }
    ExceptionsHelper.rethrowAndSuppress(failures);
}
/**
 * Rethrows node and task failures for the named operation, if any occurred.
 * The first failure becomes the primary exception; the rest are attached as
 * suppressed exceptions.
 *
 * @param operationName human-readable operation name used in the exception message
 */
public void rethrowFailures(String operationName) {
    // Wrap each task failure in an ElasticsearchException carrying the operation
    // name and the failing task's id, then rethrow together with any node failures.
    rethrowAndSuppress(
        Stream.concat(
            getNodeFailures().stream(),
            getTaskFailures().stream()
                .map(failure -> new ElasticsearchException(
                    "{} of [{}] failed",
                    failure.getCause(),
                    operationName,
                    new TaskId(failure.getNodeId(), failure.getTaskId()))))
            .collect(toList()));
}
/**
 * Rethrows node and task failures for the named operation, if any occurred.
 * The first failure becomes the primary exception; the rest are attached as
 * suppressed exceptions.
 *
 * @param operationName human-readable operation name used in the exception message
 */
public void rethrowFailures(String operationName) {
    // Wrap each task failure in an ElasticsearchException carrying the operation
    // name and the failing task's id, then rethrow together with any node failures.
    rethrowAndSuppress(
        Stream.concat(
            getNodeFailures().stream(),
            getTaskFailures().stream()
                .map(failure -> new ElasticsearchException(
                    "{} of [{}] failed",
                    failure.getCause(),
                    operationName,
                    new TaskId(failure.getNodeId(), failure.getTaskId()))))
            .collect(toList()));
}
/**
 * Rethrows node and task failures for the named operation, if any occurred.
 * The first failure becomes the primary exception; the rest are attached as
 * suppressed exceptions.
 *
 * @param operationName human-readable operation name used in the exception message
 */
public void rethrowFailures(String operationName) {
    // Wrap each task failure in an ElasticsearchException carrying the operation
    // name and the failing task's id, then rethrow together with any node failures.
    rethrowAndSuppress(
        Stream.concat(
            getNodeFailures().stream(),
            getTaskFailures().stream()
                .map(failure -> new ElasticsearchException(
                    "{} of [{}] failed",
                    failure.getCause(),
                    operationName,
                    new TaskId(failure.getNodeId(), failure.getTaskId()))))
            .collect(toList()));
}
/** * Validates the given settings by running it through all update listeners without applying it. This * method will not change any settings but will fail if any of the settings can't be applied. */ public synchronized Settings validateUpdate(Settings settings) { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List<RuntimeException> exceptions = new ArrayList<>(); for (SettingUpdater<?> settingUpdater : settingUpdaters) { try { // ensure running this through the updater / dynamic validator // don't check if the value has changed we wanna test this anyways settingUpdater.getValue(current, previous); } catch (RuntimeException ex) { exceptions.add(ex); logger.debug(() -> new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex); } } // here we are exhaustive and record all settings that failed. ExceptionsHelper.rethrowAndSuppress(exceptions); return current; }
/** * Validates the given settings by running it through all update listeners without applying it. This * method will not change any settings but will fail if any of the settings can't be applied. */ public synchronized Settings validateUpdate(Settings settings) { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List<RuntimeException> exceptions = new ArrayList<>(); for (SettingUpdater<?> settingUpdater : settingUpdaters) { try { // ensure running this through the updater / dynamic validator // don't check if the value has changed we wanna test this anyways settingUpdater.getValue(current, previous); } catch (RuntimeException ex) { exceptions.add(ex); logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex); } } // here we are exhaustive and record all settings that failed. ExceptionsHelper.rethrowAndSuppress(exceptions); return current; }
/**
 * Verifies that every processor of the pipeline described by {@code request} is
 * installed on each of the given nodes. Conditional processors are exempt from
 * the check. All missing-processor failures are collected and rethrown together.
 *
 * @param ingestInfos per-node ingest capabilities; must not be empty
 * @param request the put-pipeline request whose processors are checked
 * @throws Exception if parsing fails or any processor is missing on any node
 */
void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
    if (ingestInfos.isEmpty()) {
        throw new IllegalStateException("Ingest info is empty");
    }
    Map<String, Object> pipelineConfig =
        XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
    Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories, scriptService);
    List<Exception> missingProcessors = new ArrayList<>();
    for (Processor processor : pipeline.flattenAllProcessors()) {
        String type = processor.getType();
        // the conditional wrapper is built-in, so it is never reported as missing
        if (ConditionalProcessor.TYPE.equals(type)) {
            continue;
        }
        for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
            if (entry.getValue().containsProcessor(type) == false) {
                String message = "Processor type [" + type + "] is not installed on node [" + entry.getKey() + "]";
                missingProcessors.add(
                    ConfigurationUtils.newConfigurationException(type, processor.getTag(), null, message));
            }
        }
    }
    ExceptionsHelper.rethrowAndSuppress(missingProcessors);
}
/**
 * Verifies that every processor of the pipeline described by {@code request} is
 * installed on each of the given nodes. All missing-processor failures are
 * collected and rethrown together.
 *
 * @param ingestInfos per-node ingest capabilities; must not be empty
 * @param request the put-pipeline request whose processors are checked
 * @throws Exception if parsing fails or any processor is missing on any node
 */
void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
    if (ingestInfos.isEmpty()) {
        throw new IllegalStateException("Ingest info is empty");
    }
    Map<String, Object> pipelineConfig =
        XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
    Pipeline pipeline = factory.create(request.getId(), pipelineConfig, processorFactories);
    List<Exception> missingProcessors = new ArrayList<>();
    for (Processor processor : pipeline.flattenAllProcessors()) {
        String type = processor.getType();
        for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
            if (entry.getValue().containsProcessor(type) == false) {
                String message = "Processor type [" + type + "] is not installed on node [" + entry.getKey() + "]";
                missingProcessors.add(
                    ConfigurationUtils.newConfigurationException(type, processor.getTag(), null, message));
            }
        }
    }
    ExceptionsHelper.rethrowAndSuppress(missingProcessors);
}
/**
 * Verifies that every processor of the pipeline described by {@code request} is
 * installed on each of the given nodes. All missing-processor failures are
 * collected and rethrown together.
 *
 * @param ingestInfos per-node ingest capabilities; must not be empty
 * @param request the put-pipeline request whose processors are checked
 * @throws Exception if parsing fails or any processor is missing on any node
 */
void validatePipeline(Map<DiscoveryNode, IngestInfo> ingestInfos, PutPipelineRequest request) throws Exception {
    if (ingestInfos.isEmpty()) {
        throw new IllegalStateException("Ingest info is empty");
    }
    Map<String, Object> pipelineConfig =
        XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2();
    Pipeline pipeline = factory.create(request.getId(), pipelineConfig, processorFactories);
    List<Exception> missingProcessors = new ArrayList<>();
    for (Processor processor : pipeline.flattenAllProcessors()) {
        String type = processor.getType();
        for (Map.Entry<DiscoveryNode, IngestInfo> entry : ingestInfos.entrySet()) {
            if (entry.getValue().containsProcessor(type) == false) {
                String message = "Processor type [" + type + "] is not installed on node [" + entry.getKey() + "]";
                missingProcessors.add(
                    ConfigurationUtils.newConfigurationException(type, processor.getTag(), null, message));
            }
        }
    }
    ExceptionsHelper.rethrowAndSuppress(missingProcessors);
}
ExceptionsHelper.rethrowAndSuppress(exceptions); return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), null); } catch (final Exception e) {
/**
 * Rebuilds the in-memory pipeline map from the ingest metadata in {@code state}.
 * Pipelines that fail to parse are replaced by a placeholder pipeline (so they can
 * still be deleted), and the collected parse failures are rethrown at the end,
 * after the new map has been installed.
 */
void innerUpdatePipelines(ClusterState previousState, ClusterState state) {
    if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        return;
    }
    IngestMetadata newMetadata = state.getMetaData().custom(IngestMetadata.TYPE);
    IngestMetadata oldMetadata = previousState.getMetaData().custom(IngestMetadata.TYPE);
    if (Objects.equals(newMetadata, oldMetadata)) {
        return; // nothing changed, keep the current map
    }
    Map<String, Pipeline> updatedPipelines = new HashMap<>();
    List<ElasticsearchParseException> failures = new ArrayList<>();
    for (PipelineConfiguration config : newMetadata.getPipelines().values()) {
        try {
            updatedPipelines.put(
                config.getId(),
                factory.create(config.getId(), config.getConfigAsMap(), processorFactories));
        } catch (ElasticsearchParseException e) {
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), e));
            failures.add(e);
        } catch (Exception e) {
            ElasticsearchParseException wrapped = new ElasticsearchParseException(
                "Error updating pipeline with id [" + config.getId() + "]", e);
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), wrapped));
            failures.add(wrapped);
        }
    }
    // install the new map first so substituted placeholders are visible even when we rethrow
    this.pipelines = Collections.unmodifiableMap(updatedPipelines);
    ExceptionsHelper.rethrowAndSuppress(failures);
}
/**
 * Rebuilds the in-memory pipeline map from the ingest metadata in {@code state}.
 * Pipelines that fail to parse are replaced by a placeholder pipeline (so they can
 * still be deleted), and the collected parse failures are rethrown at the end,
 * after the new map has been installed.
 */
private void innerUpdatePipelines(ClusterState previousState, ClusterState state) {
    if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
        return;
    }
    IngestMetadata newMetadata = state.getMetaData().custom(IngestMetadata.TYPE);
    IngestMetadata oldMetadata = previousState.getMetaData().custom(IngestMetadata.TYPE);
    if (Objects.equals(newMetadata, oldMetadata)) {
        return; // nothing changed, keep the current map
    }
    Map<String, Pipeline> updatedPipelines = new HashMap<>();
    List<ElasticsearchParseException> failures = new ArrayList<>();
    for (PipelineConfiguration config : newMetadata.getPipelines().values()) {
        try {
            updatedPipelines.put(
                config.getId(),
                Pipeline.create(config.getId(), config.getConfigAsMap(), processorFactories, scriptService));
        } catch (ElasticsearchParseException e) {
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), e));
            failures.add(e);
        } catch (Exception e) {
            ElasticsearchParseException wrapped = new ElasticsearchParseException(
                "Error updating pipeline with id [" + config.getId() + "]", e);
            updatedPipelines.put(config.getId(), substitutePipeline(config.getId(), wrapped));
            failures.add(wrapped);
        }
    }
    // install the new map first so substituted placeholders are visible even when we rethrow
    this.pipelines = Collections.unmodifiableMap(updatedPipelines);
    ExceptionsHelper.rethrowAndSuppress(failures);
}