/**
 * Reads the state from a given file and compares the expected version against the actual version of
 * the state.
 *
 * @param file path to the on-disk state file; its parent directory is opened as a Lucene {@code Directory}
 * @return the parsed state object produced by {@link #fromXContent}
 * @throws IOException if the file cannot be read
 * @throws CorruptStateException if the file fails its checksum/header validation or has an
 *         incompatible index format version
 */
public final T read(Path file) throws IOException {
    try (Directory dir = newDirectory(file.getParent())) {
        try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
            // The header is followed by the content type ordinal and an (unused) version long.
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            indexInput.readLong(); // version currently unused
            // The XContent payload sits between the current position and the Lucene footer.
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(new InputStreamIndexInput(slice, contentSize))) {
                    return fromXContent(parser);
                }
            }
        } catch(CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
/**
 * Records the parser's current text value under the dotted key built from {@code path}
 * and {@code fieldName}, rejecting duplicate keys.
 */
private void serializeValue(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName) throws IOException {
    // Rebuild the dotted settings key into the shared, reusable buffer.
    sb.setLength(0);
    for (String segment : path) {
        sb.append(segment).append('.');
    }
    sb.append(fieldName);
    final String key = sb.toString();
    final String value = parser.text();
    final String existing = settings.put(key, value);
    if (existing == null) {
        return;
    }
    // Each settings key may only appear once; report both conflicting values with the parse location.
    throw new ElasticsearchParseException(
        "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]",
        key,
        parser.getTokenLocation().lineNumber,
        parser.getTokenLocation().columnNumber,
        existing,
        value
    );
}
public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple<GetResult, GetResult> tuple = randomGetResult(xContentType); GetResult getResult = tuple.v1(); GetResult expectedGetResult = tuple.v2(); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toXContent(getResult, xContentType, humanReadable); //test that we can parse what we print out GetResult parsedGetResult; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { parsedGetResult = GetResult.fromXContent(parser); assertNull(parser.nextToken()); } assertEquals(expectedGetResult, parsedGetResult); //print the parsed object out and test that the output is the same as the original output BytesReference finalBytes = toXContent(parsedGetResult, xContentType, humanReadable); assertToXContentEquivalent(originalBytes, finalBytes, xContentType); //check that the source stays unchanged, no shuffling of keys nor anything like that assertEquals(expectedGetResult.sourceAsString(), parsedGetResult.sourceAsString()); }
public void testParseInvalidPoint() throws IOException { // test case 1: create an invalid point object with multipoint data format XContentBuilder invalidPoint1 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); XContentParser parser = createParser(invalidPoint1); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() .startObject() .field("type", "point") .startArray("coordinates") .endArray() .endObject(); parser = createParser(invalidPoint2); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); }
private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, MultiValueMode mode) throws IOException { // now, the field must exist, else we cannot read the value for // the doc later MappedFieldType fieldType = parseContext.fieldMapper(fieldName); if (fieldType == null) { throw new QueryParsingException(parseContext, "unknown field [{}]", fieldName); } // dates and time need special handling parser.nextToken(); if (fieldType instanceof DateFieldMapper.DateFieldType) { return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper.DateFieldType) fieldType, mode); } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) { return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode); } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode); } else { throw new QueryParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType); } }
public void testParseXContentForAnalyzeRequest() throws Exception {
    // Minimal analyze request body: text + keyword tokenizer + one token filter.
    final XContentParser bodyParser = createParser(XContentFactory.jsonBuilder()
        .startObject()
        .field("text", "THIS IS A TEST")
        .field("tokenizer", "keyword")
        .array("filter", "lowercase")
        .endObject());
    final AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
    RestAnalyzeAction.buildFromContent(bodyParser, analyzeRequest);
    assertThat(analyzeRequest.text().length, equalTo(1));
    assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
    assertThat(analyzeRequest.tokenizer().name, equalTo("keyword"));
    assertThat(analyzeRequest.tokenFilters().size(), equalTo(1));
    for (AnalyzeRequest.NameOrDefinition tokenFilter : analyzeRequest.tokenFilters()) {
        assertThat(tokenFilter.name, equalTo("lowercase"));
    }
}
/**
 * Populates this request from an XContent body containing either a "docs" array
 * of item objects or an "ids" array of plain ids.
 */
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, XContentParser parser, boolean allowExplicitIndex) throws IOException {
    String field = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            field = parser.currentName();
            continue;
        }
        if (token != XContentParser.Token.START_ARRAY) {
            // Anything other than the two known arrays is silently skipped.
            continue;
        }
        if ("docs".equals(field)) {
            parseDocuments(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting, allowExplicitIndex);
        } else if ("ids".equals(field)) {
            parseIds(parser, this.items, defaultIndex, defaultType, defaultFields, defaultFetchSource, defaultRouting);
        }
    }
    return this;
}
/**
 * Parses a geo_centroid aggregation. All recognized keys are handled by the
 * values-source parser; anything else is an error.
 */
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
    final ValuesSourceParser<ValuesSource.GeoPoint> vsParser =
        ValuesSourceParser.geoPoint(aggregationName, InternalGeoCentroid.TYPE, context)
            .targetValueType(ValueType.GEOPOINT)
            .formattable(true)
            .build();
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
            continue;
        }
        if (vsParser.token(currentFieldName, token, parser) == false) {
            throw new SearchParseException(context, "Unknown key for a " + token + " in aggregation [" + aggregationName + "]: [" + currentFieldName + "].", parser.getTokenLocation());
        }
    }
    return new GeoCentroidAggregator.Factory(aggregationName, vsParser.config());
}
/**
 * Parses a wrapper query of the form {@code {"wrapper" : {"query" : BASE64}}},
 * where the value is the raw (base64-encoded) inner query source.
 */
public static WrapperQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
    final XContentParser parser = parseContext.parser();
    if (parser.nextToken() != XContentParser.Token.FIELD_NAME) {
        throw new ParsingException(parser.getTokenLocation(), "[wrapper] query malformed");
    }
    final String fieldName = parser.currentName();
    if (QUERY_FIELD.match(fieldName) == false) {
        throw new ParsingException(parser.getTokenLocation(), "[wrapper] query malformed, expected `query` but was " + fieldName);
    }
    // Read the raw query bytes, then step past them to the closing token.
    parser.nextToken();
    final byte[] source = parser.binaryValue();
    parser.nextToken();
    if (source == null) {
        throw new ParsingException(parser.getTokenLocation(), "wrapper query has no [query] specified");
    }
    return new WrapperQueryBuilder(source);
}
/**
 * Splits the parser's current text token of the form {@code "field^boost"} into a field
 * name and an optional boost, then registers the pair (expanding simple wildcard
 * patterns against the index mappings).
 */
private void extractFieldAndBoost(QueryParseContext parseContext, XContentParser parser, Map<String, Float> fieldNameWithBoosts) throws IOException {
    String fField = null;
    Float fBoost = null;
    // Work directly on the parser's character buffer; the token occupies
    // [textOffset, textOffset + textLength).
    char[] fieldText = parser.textCharacters();
    int end = parser.textOffset() + parser.textLength();
    for (int i = parser.textOffset(); i < end; i++) {
        if (fieldText[i] == '^') {
            // relativeLocation = length of the field-name part before '^'.
            int relativeLocation = i - parser.textOffset();
            fField = new String(fieldText, parser.textOffset(), relativeLocation);
            // Everything after '^' is the boost; its length is the token length
            // minus the field-name part minus the caret itself.
            fBoost = Float.parseFloat(new String(fieldText, i + 1, parser.textLength() - relativeLocation - 1));
            break;
        }
    }
    if (fField == null) {
        // No caret found: the whole token is the field name, boost stays null.
        fField = parser.text();
    }
    if (Regex.isSimpleMatchPattern(fField)) {
        // Wildcard pattern: apply the same boost to every matching index field.
        for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
            fieldNameWithBoosts.put(field, fBoost);
        }
    } else {
        fieldNameWithBoosts.put(fField, fBoost);
    }
}
/**
 * Handles the significant-terms specific keys: a registered significance
 * heuristic object or the background filter. Everything else is an error.
 */
@Override
public void parseSpecial(String aggregationName, XContentParser parser, SearchContext context, XContentParser.Token token, String currentFieldName) throws IOException {
    if (token == XContentParser.Token.START_OBJECT) {
        final SignificanceHeuristicParser heuristicParser = significanceHeuristicParserMapper.get(currentFieldName);
        if (heuristicParser != null) {
            significanceHeuristic = heuristicParser.parse(parser, context.parseFieldMatcher(), context);
            return;
        }
        if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
            filter = context.queryParserService().parseInnerFilter(parser).query();
            return;
        }
    }
    // Unrecognized key or wrong token type - same failure either way.
    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].", parser.getTokenLocation());
}
/**
 * Applies one of the common suggester fields (analyzer/field/size/shard_size)
 * to the suggestion context.
 *
 * @return true if the field was recognized and consumed, false otherwise
 */
public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName, SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
    if ("analyzer".equals(fieldName)) {
        final String analyzerName = parser.text();
        final Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
        if (analyzer == null) {
            throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
        }
        suggestion.setAnalyzer(analyzer);
        return true;
    }
    if ("field".equals(fieldName)) {
        suggestion.setField(parser.text());
        return true;
    }
    if ("size".equals(fieldName)) {
        suggestion.setSize(parser.intValue());
        return true;
    }
    if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) {
        suggestion.setShardSize(parser.intValue());
        return true;
    }
    // Not one of ours; let the caller handle it.
    return false;
}
/**
 * Flattens an XContent array into dotted settings keys by appending a zero-based
 * element counter to the field name (e.g. "foo.0", "foo.1"), recursing for
 * nested objects and arrays.
 */
private void serializeArray(Map<String, String> settings, StringBuilder sb, List<String> path, XContentParser parser, String fieldName) throws IOException {
    XContentParser.Token token;
    int counter = 0;
    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
        if (token == XContentParser.Token.START_OBJECT) {
            // Nested object element: recurse with "name.N" as its field name.
            serializeObject(settings, sb, path, parser, fieldName + '.' + (counter++));
        } else if (token == XContentParser.Token.START_ARRAY) {
            // Nested array element: recurse likewise.
            serializeArray(settings, sb, path, parser, fieldName + '.' + (counter++));
        } else if (token == XContentParser.Token.FIELD_NAME) {
            // NOTE(review): a FIELD_NAME inside an array is unexpected for well-formed
            // content; this branch just adopts it as the new field name - confirm intent.
            fieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_NULL) {
            serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), true); // ignore
        } else {
            // Scalar value: emit it under "name.N".
            serializeValue(settings, sb, path, parser, fieldName + '.' + (counter++), false);
        }
    }
}
/**
 * Parses the template query replacing template parameters with provided
 * values. Handles both submitting the template as part of the request as
 * well as referencing only the template name.
 *
 * @param parseContext parse context containing the templated query.
 */
@Override
@Nullable
public Query parse(QueryParseContext parseContext) throws IOException {
    final XContentParser parser = parseContext.parser();
    final Template template = parse(parser, parseContext.parseFieldMatcher());
    // Render the template into concrete query source bytes.
    final ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, SearchContext.current(), Collections.<String, String>emptyMap());
    final BytesReference querySource = (BytesReference) executable.run();
    // Parse the rendered source as a regular inner query.
    try (XContentParser sourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
        final QueryParseContext innerContext = new QueryParseContext(parseContext.index(), parseContext.indexQueryParserService());
        innerContext.reset(sourceParser);
        return innerContext.parseInnerQuery();
    }
}
public void testXContentRoundTrip() throws Exception {
    final ExtendedBounds orig = randomExtendedBounds();
    try (XContentBuilder out = JsonXContent.contentBuilder()) {
        out.startObject();
        orig.toXContent(out, ToXContent.EMPTY_PARAMS);
        out.endObject();
        try (XContentParser in = createParser(JsonXContent.jsonXContent, out.bytes())) {
            // Before the first nextToken() the parser has no current token.
            assertNull(in.currentToken());
            assertThat(in.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
            assertThat(in.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
            assertThat(in.currentName(), equalTo(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName()));
            final ExtendedBounds read = ExtendedBounds.PARSER.apply(in, null);
            assertEquals(orig, read);
        } catch (Exception e) {
            // Include the serialized form so a failure is diagnosable.
            throw new Exception("Error parsing [" + out.bytes().utf8ToString() + "]", e);
        }
    }
}
/**
 * Round-trips an {@link IndexId} through XContent and checks the parsed
 * name/id reconstruct an equal instance.
 */
public void testXContent() throws IOException {
    final IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID());
    final XContentBuilder builder = JsonXContent.contentBuilder();
    indexId.toXContent(builder, ToXContent.EMPTY_PARAMS);
    // FIX: close the parser via try-with-resources instead of leaking it.
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes())) {
        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
        String name = null;
        String id = null;
        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            final String currentFieldName = parser.currentName();
            parser.nextToken();
            if (currentFieldName.equals(IndexId.NAME)) {
                name = parser.text();
            } else if (currentFieldName.equals(IndexId.ID)) {
                id = parser.text();
            }
        }
        assertNotNull(name);
        assertNotNull(id);
        assertEquals(indexId, new IndexId(name, id));
    }
}
/**
 * Core token loop for parsing a document object: dispatches each token to the
 * object/array/null/value handlers until the enclosing END_OBJECT is reached.
 *
 * @param currentFieldName the field name most recently seen; updated in place as
 *        FIELD_NAME tokens stream by and passed to every value handler
 * @throws MapperParsingException on metadata fields inside the document or on
 *         premature EOF (null token)
 */
private static void innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser, String currentFieldName, XContentParser.Token token) throws IOException {
    while (token != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.START_OBJECT) {
            parseObject(context, mapper, currentFieldName);
        } else if (token == XContentParser.Token.START_ARRAY) {
            parseArray(context, mapper, currentFieldName);
        } else if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
            // Metadata fields (_id, _routing, ...) may only come from the API, never the body.
            if (MapperService.isMetadataField(context.path().pathAsText(currentFieldName))) {
                throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside a document. Use the index API request parameters.");
            }
        } else if (token == XContentParser.Token.VALUE_NULL) {
            parseNullValue(context, mapper, currentFieldName);
        } else if (token == null) {
            // The stream ended before the object was closed.
            throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
        } else if (token.isValue()) {
            parseValue(context, mapper, currentFieldName, token);
        }
        token = parser.nextToken();
    }
}
/**
 * Parses shard state metadata (version, primary flag, index UUID) from XContent.
 * Both version and primary are mandatory; the index UUID defaults to the
 * "not available" sentinel.
 */
@Override
public ShardStateMetaData fromXContent(XContentParser parser) throws IOException {
    if (parser.nextToken() == null) {
        // Empty content - nothing to restore.
        return null;
    }
    long version = -1;
    Boolean primary = null;
    String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE;
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
            continue;
        }
        if (token.isValue() == false) {
            throw new CorruptStateException("unexpected token in shard state [" + token.name() + "]");
        }
        if (VERSION_KEY.equals(currentFieldName)) {
            version = parser.longValue();
        } else if (PRIMARY_KEY.equals(currentFieldName)) {
            primary = parser.booleanValue();
        } else if (INDEX_UUID_KEY.equals(currentFieldName)) {
            indexUUID = parser.text();
        } else {
            throw new CorruptStateException("unexpected field in shard state [" + currentFieldName + "]");
        }
    }
    if (primary == null) {
        throw new CorruptStateException("missing value for [primary] in shard state");
    }
    if (version == -1) {
        throw new CorruptStateException("missing value for [version] in shard state");
    }
    return new ShardStateMetaData(version, primary, indexUUID);
}
/**
 * Applies the mapping's source transform to the document held by {@code parser}
 * and returns a fresh parser over the transformed content.
 */
private static XContentParser transform(Mapping mapping, XContentParser parser) throws IOException {
    Map<String, Object> transformed;
    // try-with-resources is used solely to guarantee the original parser is closed
    // once its content has been read into a map; the variable itself is unused.
    try (XContentParser ignored = parser) {
        transformed = transformSourceAsMap(mapping, parser.mapOrdered());
    }
    // NOTE(review): contentType() is read after the parser is closed - presumably
    // it remains valid on a closed parser; confirm against the parser implementation.
    XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()).value(transformed);
    return parser.contentType().xContent().createParser(builder.bytes());
}
/**
 * Parses an inner query within a freshly reset parse context, substituting a
 * match-no-docs query when nothing was parsed. The context is always cleared
 * again afterwards.
 */
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
    parseContext.reset(parser);
    try {
        parseContext.parseFieldMatcher(parseFieldMatcher);
        final Query parsed = parseContext.parseInnerQuery();
        final Query query = parsed == null ? Queries.newMatchNoDocsQuery() : parsed;
        return new ParsedQuery(query, parseContext.copyNamedQueries());
    } finally {
        // Always detach the parser so the context cannot leak stale state.
        parseContext.reset(null);
    }
}
/**
 * Test that creates new sort from a random test sort and checks both for equality
 */
public void testFromXContent() throws IOException {
    for (int run = 0; run < NUMBER_OF_TESTBUILDERS; run++) {
        final T testItem = createTestItem();
        final XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        if (randomBoolean()) {
            builder.prettyPrint();
        }
        testItem.toXContent(builder, ToXContent.EMPTY_PARAMS);
        final XContentParser itemParser = createParser(shuffleXContent(builder));
        itemParser.nextToken();
        // Skip past the wrapper: the name of the sort, or the field name for a field sort.
        itemParser.nextToken();
        final String elementName = itemParser.currentName();
        itemParser.nextToken();
        final QueryParseContext context = new QueryParseContext(itemParser);
        final T parsedItem = fromXContent(context, elementName);
        assertNotSame(testItem, parsedItem);
        assertEquals(testItem, parsedItem);
        assertEquals(testItem.hashCode(), parsedItem.hashCode());
    }
}
public void testTimeoutWithoutUnits() throws IOException {
    final int timeout = randomIntBetween(1, 1024);
    final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) {
        // A bare number is not a valid timeout - the parser must demand a time unit.
        final ElasticsearchParseException e = expectThrows(
            ElasticsearchParseException.class,
            () -> SearchSourceBuilder.fromXContent(createParseContext(parser)));
        assertThat(e, hasToString(containsString("unit is missing or unrecognized")));
    }
}
/**
 * Parses a "highlight" object into a map keyed by highlight-field name.
 */
private static Map<String, HighlightField> parseHighlightFields(XContentParser parser) throws IOException {
    final Map<String, HighlightField> fields = new HashMap<>();
    for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
        final HighlightField field = HighlightField.fromXContent(parser);
        fields.put(field.getName(), field);
    }
    return fields;
}
@Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { UpdateSettingsRequest updateSettingsRequest = updateSettingsRequest(Strings.splitStringByCommaToArray(request.param("index"))); updateSettingsRequest.timeout(request.paramAsTime("timeout", updateSettingsRequest.timeout())); updateSettingsRequest.setPreserveExisting(request.paramAsBoolean("preserve_existing", updateSettingsRequest.isPreserveExisting())); updateSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", updateSettingsRequest.masterNodeTimeout())); updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions())); Map<String, Object> settings = new HashMap<>(); if (request.hasContent()) { try (XContentParser parser = request.contentParser()) { Map<String, Object> bodySettings = parser.map(); Object innerBodySettings = bodySettings.get("settings"); // clean up in case the body is wrapped with "settings" : { ... } if (innerBodySettings instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> innerBodySettingsMap = (Map<String, Object>) innerBodySettings; settings.putAll(innerBodySettingsMap); } else { settings.putAll(bodySettings); } } } updateSettingsRequest.settings(settings); return channel -> client.admin().indices().updateSettings(updateSettingsRequest, new AcknowledgedRestListener<>(channel)); }
/**
 * Creates a parse context for a single document, starting with one root Lucene
 * document (nested documents are appended during parsing).
 */
public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, SourceToParse source, XContentParser parser) {
    this.indexSettings = indexSettings;
    this.docMapperParser = docMapperParser;
    this.docMapper = docMapper;
    this.sourceToParse = source;
    this.parser = parser;
    this.path = new ContentPath(0);
    this.document = new Document();
    this.documents = new ArrayList<>();
    this.documents.add(this.document);
    this.version = null;
    this.allEntries = new AllEntries();
    this.dynamicMappers = new ArrayList<>();
}
private void registerScoreFunction(ScoreFunctionSpec<?> scoreFunction) { namedWriteables.add(new NamedWriteableRegistry.Entry( ScoreFunctionBuilder.class, scoreFunction.getName().getPreferredName(), scoreFunction.getReader())); // TODO remove funky contexts namedXContents.add(new NamedXContentRegistry.Entry( ScoreFunctionBuilder.class, scoreFunction.getName(), (XContentParser p, Object c) -> scoreFunction.getParser().fromXContent((QueryParseContext) c))); }
public void testParseInnerQueryBuilder() throws IOException {
    final QueryBuilder expected = new MatchQueryBuilder("foo", "bar");
    // Serialize, then parse back and compare against the original builder.
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, expected.toString())) {
        final QueryParseContext context = new QueryParseContext(parser);
        assertEquals(expected, context.parseInnerQueryBuilder());
    }
}
/**
 * Reads the SQL statement from the parser's current value token into the source
 * context, rejecting non-value tokens and empty statements.
 */
@Override
public void parse(XContentParser parser, SQLXContentSourceContext context) throws Exception {
    final XContentParser.Token token = parser.currentToken();
    if (token.isValue() == false) {
        throw new SQLParseSourceException(context, "Field [" + parser.currentName() + "] has an invalid value");
    }
    final String stmt = parser.text();
    if (stmt == null || stmt.length() == 0) {
        throw new SQLParseSourceException(context, "Field [" + parser.currentName() + "] has no value");
    }
    // FIX: reuse the statement already read instead of calling parser.text() again.
    context.stmt(stmt);
}
/**
 * Parses the given aggregation JSON, builds its single top-level aggregator,
 * and reports whether that aggregator requires scores.
 */
private boolean needsScores(IndexService index, String agg) throws IOException {
    final XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg);
    final QueryParseContext parseContext = new QueryParseContext(aggParser);
    aggParser.nextToken();
    final SearchContext context = createSearchContext(index);
    final Aggregator[] aggregators = AggregatorFactories.parseAggregators(parseContext)
        .build(context, null)
        .createTopLevelAggregators();
    assertEquals(1, aggregators.length);
    return aggregators[0].needsScores();
}
/**
 * Parses a template, treating each of the extra {@code parameters} as an
 * inline script parameter on top of the default parameter/type mapping.
 */
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
    // Start from the default mapping and force INLINE for the extra names.
    final Map<String, ScriptService.ScriptType> types = new HashMap<>(parametersToTypes);
    for (String name : parameters) {
        types.put(name, ScriptService.ScriptType.INLINE);
    }
    return parse(parser, types, parseFieldMatcher);
}
/**
 * Parses a scalar value token: routes it to the existing sub-mapper when one is
 * registered for the field, otherwise falls back to dynamic-value mapping.
 *
 * @return a mapping update to apply, or null when no update is needed
 */
static ObjectMapper parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
    if (currentFieldName == null) {
        throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
    }
    final Mapper mapper = parentMapper.getMapper(currentFieldName);
    if (mapper == null) {
        // No mapper yet for this field - attempt dynamic mapping.
        return parseDynamicValue(context, parentMapper, currentFieldName, token);
    }
    final Mapper subUpdate = parseObjectOrField(context, mapper);
    return subUpdate == null ? null : parentMapper.mappingUpdate(subUpdate);
}
/**
 * Parses a parent/child inner-hits sub-context for the given document type.
 */
private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String type) throws Exception {
    final ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser);
    final DocumentMapper documentMapper = searchContext.mapperService().documentMapper(type);
    if (documentMapper == null) {
        throw new IllegalArgumentException("type [" + type + "] doesn't exist");
    }
    return new InnerHitsContext.ParentChildInnerHits(
        parseResult.context(), parseResult.query(), parseResult.childInnerHits(),
        parseContext.mapperService(), documentMapper);
}
public void testParseBadFieldNameExceptions() throws IOException {
    final String scriptSort = "{\"_script\" : {" + "\"bad_field\" : \"number\"" + "} }";
    final XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort);
    // Advance into the _script object, onto the unknown field name.
    parser.nextToken();
    parser.nextToken();
    parser.nextToken();
    final QueryParseContext context = new QueryParseContext(parser);
    final Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(context, null));
    assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage());
}
/**
 * Parses a Laplace smoothing model, reading an optional "alpha" value and
 * falling back to the default; unrecognized fields are silently skipped.
 */
public static SmoothingModel fromXContent(XContentParser parser) throws IOException {
    double alpha = DEFAULT_LAPLACE_ALPHA;
    String currentField = null;
    for (XContentParser.Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentField = parser.currentName();
        }
        if (token.isValue() && ALPHA_FIELD.match(currentField)) {
            alpha = parser.doubleValue();
        }
    }
    return new Laplace(alpha);
}
/**
 * Parses the given bytes and asserts the result equals the expected instance
 * (equal but not the same reference, with matching hash codes).
 */
private void assertParsedInstance(XContentType xContentType, BytesReference instanceAsBytes, T expectedInstance) throws IOException {
    final XContentParser parser = createParser(XContentFactory.xContent(xContentType), instanceAsBytes);
    final T parsed = parseInstance(parser);
    assertNotSame(parsed, expectedInstance);
    assertEquals(expectedInstance, parsed);
    assertEquals(expectedInstance.hashCode(), parsed.hashCode());
}
/**
 * Builds a parser positioned on a {@code {"lat": ..., "lon": ...}} object.
 */
private XContentParser objectLatLon(double lat, double lon) throws IOException {
    final XContentBuilder content = JsonXContent.contentBuilder()
        .startObject()
        .field("lat", lat)
        .field("lon", lon)
        .endObject();
    final XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes());
    // Position the parser on the opening token for the caller.
    parser.nextToken();
    return parser;
}
/**
 * Parses an index response object field-by-field via {@code parseXContentFields}.
 */
public static IndexResponse fromXContent(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
    final Builder context = new Builder();
    for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
        parseXContentFields(parser, context);
    }
    return context.build();
}
/**
 * Extracts the binary value for this field and adds it as a stored field and/or
 * a doc-values field, depending on the field type. Null values are skipped.
 */
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    // Nothing to do unless the field is stored or has doc values.
    if (fieldType().stored() == false && fieldType().hasDocValues() == false) {
        return;
    }
    byte[] value = context.parseExternalValue(byte[].class);
    if (value == null) {
        if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
            return;
        }
        value = context.parser().binaryValue();
    }
    if (value == null) {
        return;
    }
    if (fieldType().stored()) {
        fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
    }
    if (fieldType().hasDocValues() == false) {
        return;
    }
    // Accumulate all binary values for the same key into one doc-values field.
    final String indexName = fieldType().names().indexName();
    CustomBinaryDocValuesField dvField = (CustomBinaryDocValuesField) context.doc().getByKey(indexName);
    if (dvField == null) {
        dvField = new CustomBinaryDocValuesField(indexName, value);
        context.doc().addWithKey(indexName, dvField);
    } else {
        dvField.add(value);
    }
}
/**
 * Reads the array at the parser's current position into a list of values,
 * tolerating a leading field name and/or array-start marker.
 */
static List<Object> readList(XContentParser parser, MapFactory mapFactory) throws IOException {
    XContentParser.Token token = parser.currentToken();
    // Skip over a leading field name and the array-start marker, when present.
    if (token == XContentParser.Token.FIELD_NAME) {
        token = parser.nextToken();
    }
    if (token == XContentParser.Token.START_ARRAY) {
        token = parser.nextToken();
    }
    final ArrayList<Object> values = new ArrayList<>();
    while (token != XContentParser.Token.END_ARRAY) {
        values.add(readValue(parser, mapFactory, token));
        token = parser.nextToken();
    }
    return values;
}
/**
 * Parses a children aggregation, which accepts exactly one string field:
 * the mandatory "type" (the child type to join on).
 */
public static ChildrenAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
    final XContentParser parser = context.parser();
    String childType = null;
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_STRING) {
            if ("type".equals(currentFieldName) == false) {
                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
            }
            childType = parser.text();
        } else {
            throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + aggregationName + "].");
        }
    }
    if (childType == null) {
        throw new ParsingException(parser.getTokenLocation(), "Missing [child_type] field for children aggregation [" + aggregationName + "]");
    }
    return new ChildrenAggregationBuilder(aggregationName, childType);
}