/**
 * Backwards-compatibility constructor kept for plugins (BWC).
 * Delegates to the full constructor, passing -1 for the three numeric
 * arguments and a zero task-max-waiting time.
 * NOTE(review): the meaning of the -1 sentinels (presumably "unknown"/
 * "not computed") is defined by the delegated constructor, which is not
 * visible here — confirm there before relying on it.
 */
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); }
// FRAGMENT(review): mid-method excerpt — the `if` that guards this `throw`
// and the enclosing method are outside this view; the statements after the
// `throw` belong to the branch where "scale" WAS set.
// Parses the decay "scale" and "offset" strings as time values (both
// defaulting to 24h) and widens the millisecond counts to double for the
// decay computation; setting names are passed so parse errors identify the
// offending parameter.
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".scale"); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
/**
 * Backwards-compatibility constructor kept for plugins (BWC).
 * Delegates to the full constructor with -1 for the three numeric
 * arguments and a zero task-max-waiting time.
 * NOTE(review): -1 sentinel semantics are defined by the delegated
 * constructor (not visible here) — confirm there.
 */
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); }
/**
 * Backwards-compatibility constructor kept for plugins (BWC).
 * Delegates to the full constructor with -1 for the three numeric
 * arguments and a zero task-max-waiting time.
 * NOTE(review): -1 sentinel semantics are defined by the delegated
 * constructor (not visible here) — confirm there.
 */
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); }
/**
 * Backwards-compatibility constructor kept for plugins (BWC).
 * Delegates to the full constructor with -1 for the three numeric
 * arguments and a zero task-max-waiting time.
 * NOTE(review): -1 sentinel semantics are defined by the delegated
 * constructor (not visible here) — confirm there.
 */
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); }
/**
 * Backwards-compatibility constructor kept for plugins (BWC).
 * Delegates to the full constructor with -1 for the three numeric
 * arguments and a zero task-max-waiting time.
 * NOTE(review): -1 sentinel semantics are defined by the delegated
 * constructor (not visible here) — confirm there.
 */
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); }
/**
 * Starts the service: logs the start-up and schedules the first {@link Task}
 * run on the generic thread pool after the delay configured by the
 * "initial" setting (default: 1 hour).
 *
 * @throws ElasticsearchException declared for the lifecycle contract
 */
@Override
protected void doStart() throws ElasticsearchException {
    logger.info("Starting {}", getClass().getSimpleName());
    // Pass the setting name so a malformed value yields a descriptive parse
    // error; the bare two-argument parseTimeValue overload is the deprecated
    // form (the decay-function parser in this codebase already uses the
    // three-argument form).
    TimeValue interval = TimeValue.parseTimeValue(settings.get("initial"), TimeValue.timeValueHours(1), "initial");
    future = threadPool.schedule(interval, ThreadPool.Names.GENERIC, new Task());
}
/**
 * Starts the service: schedules the first {@link Task} run on the generic
 * thread pool after the delay configured by the "initial" setting
 * (default: 1 hour).
 *
 * @throws ElasticsearchException declared for the lifecycle contract
 */
@Override
protected void doStart() throws ElasticsearchException {
    // Pass the setting name so a malformed value yields a descriptive parse
    // error; the bare two-argument parseTimeValue overload is the deprecated
    // form (the decay-function parser in this codebase already uses the
    // three-argument form).
    TimeValue interval = TimeValue.parseTimeValue(settings.get("initial"), TimeValue.timeValueHours(1), "initial");
    future = threadPool.schedule(interval, ThreadPool.Names.GENERIC, new Task());
}
/**
 * Creates the housekeeper service, reading its configuration from the
 * "insight.indices.management." settings prefix:
 * "prefix" (default "insight"), "opened" (default 7), "closed" (default 14),
 * "stored" (default 0), and "interval" (default 1 hour).
 * NOTE(review): the day counts look like retention windows for opened/
 * closed/stored indices — confirm against the task that consumes them,
 * which is not visible here.
 */
@Inject
protected InsightIndicesHousekeeperService(Settings settings, ThreadPool threadPool, AdminClient adminClient) {
    super(settings);
    this.threadPool = threadPool;
    this.adminClient = adminClient;
    this.settings = settings.getByPrefix("insight.indices.management.");
    indicesPrefix = this.settings.get("prefix", "insight");
    daysOpened = this.settings.getAsInt("opened", 7);
    daysClosed = this.settings.getAsInt("closed", 14);
    daysStored = this.settings.getAsInt("stored", 0);
    // BUG FIX: this previously read the unprefixed constructor parameter
    // (`settings.get("interval")`), so "insight.indices.management.interval"
    // was silently ignored — every other option above reads the prefixed
    // view; do the same here.
    interval = TimeValue.parseTimeValue(this.settings.get("interval"), TimeValue.timeValueHours(1));
    logger.info("Initialized {}", getClass().getSimpleName());
}
// FRAGMENT(review): excerpt from a transport test — the start of the
// connection-profile builder call and the surrounding test method are not
// visible here.
// Uses a deliberately long (1h) handshake timeout so the expected failure
// comes from the connect attempt itself rather than a timeout, then asserts
// that connecting to the dummy node throws ConnectTransportException.
TransportRequestOptions.Type.REG, TransportRequestOptions.Type.STATE); builder.setHandshakeTimeout(TimeValue.timeValueHours(1)); ConnectTransportException ex = expectThrows(ConnectTransportException.class, () -> serviceA.connectToNode(dummy, builder.build()));
// FRAGMENT(review): mid-method excerpt — the `if` that guards this `throw`
// and the enclosing method are outside this view; the statements after the
// `throw` belong to the branch where "scale" WAS set.
// Variant of the decay-date parsing seen elsewhere in these hits: builds the
// error-message prefix from getClass() rather than a fixed parser class, and
// resolves field data via `parseContext` instead of `context`.
// Parses "scale" and "offset" as time values (default 24h each) and widens
// the millisecond counts to double for the decay computation.
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale"); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType);
// FRAGMENT(review): mid-method excerpt — the `if` that guards this `throw`
// and the enclosing method are outside this view; the statements after the
// `throw` belong to the branch where "scale" WAS set.
// Parses the decay "scale" and "offset" strings as time values (both
// defaulting to 24h) and widens the millisecond counts to double for the
// decay computation; setting names are passed so parse errors identify the
// offending parameter.
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".scale"); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
// FRAGMENT(review): mid-method excerpt — the `if` that guards this `throw`
// and the enclosing method are outside this view; the statements after the
// `throw` belong to the branch where "scale" WAS set.
// Parses the decay "scale" and "offset" strings as time values (both
// defaulting to 24h) and widens the millisecond counts to double for the
// decay computation; setting names are passed so parse errors identify the
// offending parameter.
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".scale"); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
// FRAGMENT(review): mid-method excerpt — the `if` that guards this `throw`
// and the enclosing method are outside this view; the statements after the
// `throw` belong to the branch where "scale" WAS set.
// Parses the decay "scale" and "offset" strings as time values (both
// defaulting to 24h) and widens the millisecond counts to double for the
// decay computation; setting names are passed so parse errors identify the
// offending parameter.
throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".scale"); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
// FRAGMENT(review): statements from the middle of an enclosing method — the
// surrounding definition is not visible here.
// Parses the "interval" setting (default 8h) and schedules `this` (the
// enclosing class is presumably a Runnable — confirm) once on the generic
// thread pool after that delay.
// NOTE(review): the two-argument parseTimeValue omits the setting name used
// for error reporting elsewhere in these hits — consider the 3-arg form.
TimeValue interval = TimeValue.parseTimeValue(settings.get("interval"), TimeValue.timeValueHours(8)); future = threadPool.schedule(interval, ThreadPool.Names.GENERIC, this);
// FRAGMENT(review): the first call is the continuation of a field
// initializer whose declaration (presumably the default keep-alive setting)
// starts on a previous line outside this view.
// Declares three positive time settings: a dynamic node-scope default
// keep-alive (5m), MAX_KEEPALIVE_SETTING — the dynamic upper bound on any
// requested keep-alive (24h) — and KEEPALIVE_INTERVAL_SETTING, the fixed
// reaping interval for expired contexts (1m).
Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope, Property.Dynamic); public static final Setting<TimeValue> MAX_KEEPALIVE_SETTING = Setting.positiveTimeSetting("search.max_keep_alive", timeValueHours(24), Property.NodeScope, Property.Dynamic); public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope);
// FRAGMENT(review): continuation of a Setting field declaration whose
// left-hand side is on a previous line outside this view.
// Dynamic, index-scoped translog retention age: default 12h, minimum -1ms
// — presumably the "disabled/unbounded" sentinel; confirm against the
// Setting.timeSetting contract.
Setting.timeSetting("index.translog.retention.age", TimeValue.timeValueHours(12), TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope);