/**
 * Parses a histogram-style bucket from the given parser, reading the bucket key as a double.
 *
 * @param parser the parser positioned at the bucket content
 * @param keyed  whether the enclosing aggregation response is rendered in keyed form
 * @return the parsed bucket
 * @throws IOException if reading from the parser fails
 */
static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException {
    return parseXContent(parser, keyed, ParsedBucket::new, (p, b) -> b.key = p.doubleValue());
}
}
/**
 * Reads the current token as a double (honoring the coercion flag) and validates it
 * before returning.
 *
 * @param parser the parser positioned at the value to read
 * @param coerce whether non-numeric tokens may be coerced into a number
 * @return the validated double value
 * @throws IOException if reading from the parser fails
 */
@Override
public Double parse(XContentParser parser, boolean coerce) throws IOException {
    final double value = parser.doubleValue(coerce);
    validateParsed(value);
    return value;
}
/**
 * Parses a terms-style bucket from the given parser, reading the bucket key as a double.
 *
 * @param parser the parser positioned at the bucket content
 * @return the parsed bucket
 * @throws IOException if reading from the parser fails
 */
static ParsedBucket fromXContent(XContentParser parser) throws IOException {
    return parseTermsBucketXContent(parser, ParsedBucket::new, (p, b) -> b.key = p.doubleValue());
}
}
/**
 * Reads the current token as a double when it is of type
 * {@code XContentParser.Token.VALUE_NUMBER} or {@code XContentParser.Token.VALUE_STRING};
 * for any other token type the supplied default value is returned instead.
 *
 * @param parser           the parser whose current token is inspected
 * @param defaultNullValue the value returned for non-numeric, non-string tokens
 * @return the parsed double, or {@code defaultNullValue}
 * @throws IOException if reading from the parser fails
 */
protected static double parseDouble(XContentParser parser, double defaultNullValue) throws IOException {
    final XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_NUMBER || token == XContentParser.Token.VALUE_STRING) {
        return parser.doubleValue();
    }
    return defaultNullValue;
}
}
/**
 * Consumes a numeric "sigma" field, storing its value in the params map.
 *
 * @return {@code true} when the token was recognized and consumed, {@code false} otherwise
 * @throws IOException if reading from the parser fails
 */
@Override
protected boolean token(XContentParser parser, String field, XContentParser.Token token,
        Map<String, Object> params) throws IOException {
    // Keep the original evaluation order: the field-name match (which may emit a
    // deprecation notice) runs before the token-type check.
    if (SIGMA.match(field, parser.getDeprecationHandler()) == false) {
        return false;
    }
    if (token != XContentParser.Token.VALUE_NUMBER) {
        return false;
    }
    params.put(SIGMA.getPreferredName(), parser.doubleValue());
    return true;
}
}
// NOTE(review): fragment of a decay-function parameter loop (enclosing method not visible here).
// Reads scale/decay/origin/offset as doubles, tracks that the required scale and origin were
// seen via scaleFound/refFound, and rejects any other parameter name.
parameterName = parser.currentName(); } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) { scale = parser.doubleValue(); scaleFound = true; } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { decay = parser.doubleValue(); } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) { origin = parser.doubleValue(); refFound = true; } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { offset = parser.doubleValue(); } else { throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
private static Coordinate parseCoordinate(XContentParser parser, boolean ignoreZValue) throws IOException { if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { throw new ElasticsearchParseException("geo coordinates must be numbers"); } double lon = parser.doubleValue(); if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) { throw new ElasticsearchParseException("geo coordinates must be numbers"); } double lat = parser.doubleValue(); XContentParser.Token token = parser.nextToken(); // alt (for storing purposes only - future use includes 3d shapes) double alt = Double.NaN; if (token == XContentParser.Token.VALUE_NUMBER) { alt = GeoPoint.assertZValue(ignoreZValue, parser.doubleValue()); parser.nextToken(); } // do not support > 3 dimensions if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { throw new ElasticsearchParseException("geo coordinates greater than 3 dimensions are not supported"); } return new Coordinate(lon, lat, alt); }
// NOTE(review): this fragment appears truncated — the conditions selecting bigram_lambda and
// unigram_lambda (and several closing braces) are missing from this view. Each visible lambda
// is read as a double and rejected with an IllegalArgumentException when negative.
} else if (token.isValue()) { if (TRIGRAM_FIELD.match(fieldName, parser.getDeprecationHandler())) { trigramLambda = parser.doubleValue(); if (trigramLambda < 0) { throw new IllegalArgumentException("trigram_lambda must be positive"); bigramLambda = parser.doubleValue(); if (bigramLambda < 0) { throw new IllegalArgumentException("bigram_lambda must be positive"); unigramLambda = parser.doubleValue(); if (unigramLambda < 0) { throw new IllegalArgumentException("unigram_lambda must be positive");
/**
 * Consumes a "percents" array field, collecting its numeric elements into a double
 * array stored in the params map.
 *
 * @return {@code true} when the token was recognized and consumed, {@code false} otherwise
 * @throws IOException if reading from the parser fails
 */
@Override
protected boolean token(XContentParser parser, String field, XContentParser.Token token,
        Map<String, Object> params) throws IOException {
    if (PERCENTS_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.START_ARRAY) {
        final DoubleArrayList values = new DoubleArrayList(10);
        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
            values.add(parser.doubleValue());
        }
        params.put(PERCENTS_FIELD.getPreferredName(), values.toArray());
        return true;
    }
    return false;
}
static void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException { while (!parser.nextToken().equals(XContentParser.Token.END_ARRAY)) { if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { // we might get here if the geo point is " number, number] " and the parser already moved over the // opening bracket in this case we cannot use GeoUtils.parseGeoPoint(..) because this expects an opening // bracket double lon = parser.doubleValue(); parser.nextToken(); if (!parser.currentToken().equals(XContentParser.Token.VALUE_NUMBER)) { throw new ElasticsearchParseException( "geo point parsing: expected second number but got [{}] instead", parser.currentToken()); } double lat = parser.doubleValue(); GeoPoint point = new GeoPoint(); point.reset(lat, lon); geoPoints.add(point); } else { GeoPoint point = new GeoPoint(); GeoUtils.parseGeoPoint(parser, point); geoPoints.add(point); } } }
/**
 * Parses a Laplace smoothing model from the parser, reading an optional "alpha"
 * field; when absent, {@code DEFAULT_LAPLACE_ALPHA} is used.
 *
 * @param parser the parser positioned inside the smoothing-model object
 * @return the parsed {@link Laplace} model
 * @throws IOException if reading from the parser fails
 */
public static SmoothingModel fromXContent(XContentParser parser) throws IOException {
    String currentFieldName = null;
    double alpha = DEFAULT_LAPLACE_ALPHA;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        }
        if (token.isValue() && ALPHA_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
            alpha = parser.doubleValue();
        }
    }
    return new Laplace(alpha);
}
// NOTE(review): fragment of a larger switch/loop — collects the current numeric token
// into the values list, then leaves the switch.
values.add(parser.doubleValue()); break;
/**
 * Parses a StupidBackoff smoothing model from the parser, reading an optional
 * "discount" field; when absent, {@code DEFAULT_BACKOFF_DISCOUNT} is used.
 *
 * @param parser the parser positioned inside the smoothing-model object
 * @return the parsed {@link StupidBackoff} model
 * @throws IOException if reading from the parser fails
 */
public static SmoothingModel fromXContent(XContentParser parser) throws IOException {
    String currentFieldName = null;
    double discount = DEFAULT_BACKOFF_DISCOUNT;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        }
        if (token.isValue() && DISCOUNT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
            discount = parser.doubleValue();
        }
    }
    return new StupidBackoff(discount);
}
// NOTE(review): fragment of a significant-terms bucket parser (enclosing loop not visible).
// Reads the bucket's score as a double and the background-set document count as a long.
bucket.setDocCount(value); } else if (InternalSignificantTerms.SCORE.equals(currentFieldName)) { bucket.score = parser.doubleValue(); } else if (InternalSignificantTerms.BG_COUNT.equals(currentFieldName)) { bucket.supersetDf = parser.longValue();
// NOTE(review): fragment of a decay-function parameter loop (geo variant, by the look of
// GeoUtils.parseGeoPoint) — origin is parsed as a geo point, decay as a double, and offset
// kept as raw text, presumably for later distance-unit parsing; verify against the full method.
origin = GeoUtils.parseGeoPoint(parser); } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { decay = parser.doubleValue(); } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { offsetString = parser.text();
// NOTE(review): fragment of a range-bucket parser — reads the numeric from/to bounds as
// doubles and their string renderings (from_as_string/to_as_string) as text.
bucket.setDocCount(parser.longValue()); } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) { bucket.from = parser.doubleValue(); } else if (CommonFields.FROM_AS_STRING.getPreferredName().equals(currentFieldName)) { bucket.fromAsString = parser.text(); } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) { bucket.to = parser.doubleValue(); } else if (CommonFields.TO_AS_STRING.getPreferredName().equals(currentFieldName)) { bucket.toAsString = parser.text();
// NOTE(review): fragment of a decay-function parameter loop (string-origin variant, e.g. dates)
// — origin and offset are kept as raw text while decay is read as a double.
originString = parser.text(); } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { decay = parser.doubleValue(); } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { offsetString = parser.text();
// NOTE(review): fragment of a field_value_factor query parser — reads the modifier from its
// string form and "missing" (the fallback for documents without the field) as a double;
// any other field name is rejected with a ParsingException.
modifier = FieldValueFactorFunction.Modifier.fromString(parser.text()); } else if ("missing".equals(currentFieldName)) { missing = parser.doubleValue(); } else { throw new ParsingException(parser.getTokenLocation(), NAME + " query does not support [" + currentFieldName + "]");
// NOTE(review): fragment of an object parser (enclosing loop not visible) — on a numeric
// token, reads "from"/"to" bounds as doubles; any other numeric field is an unknown field.
} else if (token == XContentParser.Token.VALUE_NUMBER) { if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from = parser.doubleValue(); } else if (TO_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { to = parser.doubleValue(); } else { XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());
// NOTE(review): fragment identical in shape to another from/to parser in this extraction —
// on a numeric token, reads "from"/"to" bounds as doubles, rejecting unknown numeric fields.
} else if (token == XContentParser.Token.VALUE_NUMBER) { if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from = parser.doubleValue(); } else if (TO_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { to = parser.doubleValue(); } else { XContentParserUtils.throwUnknownField(currentFieldName, parser.getTokenLocation());