Java code examples for the class org.apache.lucene.search.Query

Project: ait-platform    File: AitFTSRepo.java
@Override
public FullTextQuery getFTSQuery(String filterText, final Class<?> entityType, Integer maxResults, String... fields) {

    // entity manager for full-text (FTS) searches
    final FullTextEntityManager fullTextEntityManager = org.hibernate.search.jpa.Search.getFullTextEntityManager(entityManager);

    // build the query using the Hibernate Search query DSL
    final QueryBuilder queryBuilder = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity(entityType).get();

    // build the keyword query over the given fields
    final Query query = queryBuilder.keyword().onFields(fields).matching(filterText.trim()).createQuery();

    // wrap the Lucene query in a Hibernate Search FullTextQuery
    final FullTextQuery jpaQuery = fullTextEntityManager.createFullTextQuery(query, entityType);

    // apply the maximum result count if it is greater than zero
    if (maxResults > 0) {
        jpaQuery.setMaxResults(maxResults);
    }
    // return the query, ready to execute or to have criteria attached
    return jpaQuery;
}
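
A minimal usage sketch for the helper above; the repository variable (aitFtsRepo) and the Article entity are hypothetical placeholders, not part of ait-platform:

// hypothetical caller; Article stands in for any indexed entity
FullTextQuery ftq = aitFtsRepo.getFTSQuery("lucene", Article.class, 10, "title", "body");
@SuppressWarnings("unchecked")
List<Article> matches = ftq.getResultList(); // runs the wrapped Lucene query through JPA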
Project: ir-generalized-translation-models    File: SimilarityParserPluginIntegrationTest.java
/**
 * Basic test that checks that the "/similarity-query" handler
 * defined in resources/solrconfig.xml sets the query parser
 * to the <code>{@link SimilarityParserPlugin}</code>.
 */
public void test_QParserGetsCalled() throws Exception {

    // arrange
    SolrQueryRequest queryRequest = req("testQueryString");

    // act
    SolrQueryResponse resp = h.queryAndResponse("/similarity-query", queryRequest);

    // assert - the only way to check that the similarity parser was used is to check
    //          the type of the query returned by the similarity parser (for a single term): AugmentedTermQuery
    BasicResultContext basicResultContext = (BasicResultContext)resp.getResponse();
    Query usedLuceneQuery = basicResultContext.getQuery();
    assertTrue(usedLuceneQuery instanceof AugmentedTermQuery);

    // cleanup
    queryRequest.close();
}
Project: elasticsearch_my    File: SimpleQueryParserTests.java
public void testAnalyzeWildcard() {
    SimpleQueryParser.Settings settings = new SimpleQueryParser.Settings();
    settings.analyzeWildcard(true);
    Map<String, Float> weights = new HashMap<>();
    weights.put("field1", 1.0f);
    SimpleQueryParser parser = new MockSimpleQueryParser(new StandardAnalyzer(), weights, -1, settings);
    for (Operator op : Operator.values()) {
        BooleanClause.Occur defaultOp = op.toBooleanClauseOccur();
        parser.setDefaultOperator(defaultOp);
        Query query = parser.parse("first foo-bar-foobar* last");
        Query expectedQuery =
            new BooleanQuery.Builder()
                .add(new BooleanClause(new TermQuery(new Term("field1", "first")), defaultOp))
                .add(new BooleanQuery.Builder()
                    .add(new BooleanClause(new TermQuery(new Term("field1", "foo")), defaultOp))
                    .add(new BooleanClause(new TermQuery(new Term("field1", "bar")), defaultOp))
                    .add(new BooleanClause(new PrefixQuery(new Term("field1", "foobar")), defaultOp))
                    .build(), defaultOp)
                .add(new BooleanClause(new TermQuery(new Term("field1", "last")), defaultOp))
                .build();
        assertThat(query, equalTo(expectedQuery));
    }
}
Project: lams    File: DisjunctionMaxQueryBuilder.java
@Override
public Query getQuery(Element e) throws ParserException {
  float tieBreaker = DOMUtils.getAttribute(e, "tieBreaker", 0.0f); 
  DisjunctionMaxQuery dq = new DisjunctionMaxQuery(tieBreaker);
  dq.setBoost(DOMUtils.getAttribute(e, "boost", 1.0f));

  NodeList nl = e.getChildNodes();
  for (int i = 0; i < nl.getLength(); i++) {
    Node node = nl.item(i);
    if (node instanceof Element) { // all elements are disjuncts.
      Element queryElem = (Element) node;
      Query q = factory.getQuery(queryElem);
      dq.add(q);
    }
  }

  return dq;
}
Project: Elasticsearch    File: PercolatorQueriesRegistry.java
private int loadQueries(IndexShard shard) {
    shard.refresh("percolator_load_queries");
    // NOTE: we acquire the searcher via the engine directly here since this is executed right
    // before the shard is marked as POST_RECOVERY
    try (Engine.Searcher searcher = shard.engine().acquireSearcher("percolator_load_queries")) {
        Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME));
        QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
        IndexSearcher indexSearcher = new IndexSearcher(searcher.reader());
        indexSearcher.setQueryCache(null);
        indexSearcher.search(query, queryCollector);
        Map<BytesRef, Query> queries = queryCollector.queries();
        for (Map.Entry<BytesRef, Query> entry : queries.entrySet()) {
            Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
            shardPercolateService.addedQuery(entry.getKey(), previousQuery, entry.getValue());
        }
        return queries.size();
    } catch (Exception e) {
        throw new PercolatorException(shardId.index(), "failed to load queries from percolator index", e);
    }
}
Project: lams    File: SpanNotQuery.java
@Override
public Query rewrite(IndexReader reader) throws IOException {
  SpanNotQuery clone = null;

  SpanQuery rewrittenInclude = (SpanQuery) include.rewrite(reader);
  if (rewrittenInclude != include) {
    clone = this.clone();
    clone.include = rewrittenInclude;
  }
  SpanQuery rewrittenExclude = (SpanQuery) exclude.rewrite(reader);
  if (rewrittenExclude != exclude) {
    if (clone == null) clone = this.clone();
    clone.exclude = rewrittenExclude;
  }

  if (clone != null) {
    return clone;                        // some clauses rewrote
  } else {
    return this;                         // no clauses rewrote
  }
}
Project: sjk    File: QuickTipsService1Impl.java
@Override
public ScoreDoc[] prefixSearch(String q) throws IOException {
    if (StringUtils.isEmpty(q) || q.length() > appConfig.getKeywordMaxLength()) {
        logger.error("empty keywords or over-length! {}", q);
        return null;
    }

    final TopDocs[] rstTopDocs = new TopDocs[2];
    final Query nameFldQuery = new PrefixQuery(new Term(NAME.getName(), q));
    rstTopDocs[0] = indexSearcher.search(nameFldQuery, appConfig.getQuickTipsNum() * 2, sort);

    final Query downLoadRankQuery = NumericRangeQuery.newIntRange(DOWNOLOAD_RANK.getName(), MIN_DOWNLOAD_RANK,
            Integer.MAX_VALUE, true, false);
    // from the top 1000 records by download count, filter those matching the keyword
    rstTopDocs[1] = indexSearcher.search(downLoadRankQuery, MAX_TOP, sort);
    TopDocs rst = TopDocsUtil.mergeDuplicateDocId(TopDocs.merge(sort, MAX_TOP + appConfig.getQuickTipsNum() * 2,
            rstTopDocs));
    if (rst != null) {
        return rst.scoreDocs;
    }
    return null;
}
Project: elasticsearch_my    File: Lucene.java
/**
 * Checks whether one or more documents match the provided query.
 */
public static boolean exists(IndexSearcher searcher, Query query) throws IOException {
    final Weight weight = searcher.createNormalizedWeight(query, false);
    // the scorer API should be more efficient at stopping after the first
    // match than the bulk scorer API
    for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
        final Scorer scorer = weight.scorer(context);
        if (scorer == null) {
            continue;
        }
        final Bits liveDocs = context.reader().getLiveDocs();
        final DocIdSetIterator iterator = scorer.iterator();
        for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
            if (liveDocs == null || liveDocs.get(doc)) {
                return true;
            }
        }
    }
    return false;
}
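
For comparison, a self-contained sketch of the same kind of existence check against a plain Lucene index, written with stock Lucene APIs only (Lucene 5/6-era signatures assumed; the field name and document content are illustrative). Note that IndexSearcher.count visits every match, whereas the scorer loop above returns as soon as it finds one live document:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class ExistsSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        // index a single illustrative document
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("body", "the quick brown fox", Store.NO));
            writer.addDocument(doc);
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // count() scans all matches; exists() above stops at the first live hit
            boolean exists = searcher.count(new TermQuery(new Term("body", "fox"))) > 0;
            System.out.println(exists); // true
        }
    }
}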
Project: elasticsearch_my    File: TermsQueryBuilder.java
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    if (termsLookup != null) {
        throw new UnsupportedOperationException("query must be rewritten first");
    }
    if (values == null || values.isEmpty()) {
        return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query.");
    }
    MappedFieldType fieldType = context.fieldMapper(fieldName);

    if (fieldType != null) {
        return fieldType.termsQuery(values, context);
    } else {
        BytesRef[] filterValues = new BytesRef[values.size()];
        for (int i = 0; i < filterValues.length; i++) {
            filterValues[i] = BytesRefs.toBytesRef(values.get(i));
        }
        return new TermInSetQuery(fieldName, filterValues);
    }
}
Project: Elasticsearch    File: FreqTermsEnum.java
public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq, @Nullable Query filter, BigArrays bigArrays) throws IOException {
    super(reader, field, needTotalTermFreq ? PostingsEnum.FREQS : PostingsEnum.NONE, filter);
    this.bigArrays = bigArrays;
    this.needDocFreqs = needDocFreq;
    this.needTotalTermFreqs = needTotalTermFreq;
    if (needDocFreq) {
        termDocFreqs = bigArrays.newIntArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termDocFreqs = null;
    }
    if (needTotalTermFreq) {
        termsTotalFreqs = bigArrays.newLongArray(INITIAL_NUM_TERM_FREQS_CACHED, false);
    } else {
        termsTotalFreqs = null;
    }
    cachedTermOrds = new BytesRefHash(INITIAL_NUM_TERM_FREQS_CACHED, bigArrays);
}
Project: sjk    File: SearchServiceImpl.java
/**
 * Search the index
 * 
 * @param typeId
 * @param keywords
 * @return
 * @throws Exception
 */
public List<Document> searchIndex(Integer typeId, String keywords) throws Exception {
    // 1.init searcher
    Analyzer analyzer = new PaodingAnalyzer();
    IndexReader reader = IndexReader.open(typeId == appConfig.getGameTypeId() ? appConfig.getGameIndexDir()
            : appConfig.getSoftIndexDir());
    BooleanClause.Occur[] flags = new BooleanClause.Occur[] { BooleanClause.Occur.SHOULD,
            BooleanClause.Occur.SHOULD };
    Query query = MultiFieldQueryParser.parse(keywords, appConfig.getQueryFields(), flags, analyzer);
    query = query.rewrite(reader);

    // 2.search
    List<Document> docs = new ArrayList<Document>();
    Hits hits = (typeId == appConfig.getGameTypeId() ? gameSearcher.search(query, Sort.RELEVANCE) : softSearcher
            .search(query, Sort.RELEVANCE));// searcher.search(query,
                                            // Sort.RELEVANCE);
    for (int i = 0; i < hits.length(); i++) {
        docs.add(hits.doc(i));
    }

    // 3.return
    reader.close();
    return docs;
}
Project: Elasticsearch    File: MapperQueryParser.java
private Query applySlop(Query q, int slop) {
    if (q instanceof PhraseQuery) {
        PhraseQuery pq = (PhraseQuery) q;
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.setSlop(slop);
        final Term[] terms = pq.getTerms();
        final int[] positions = pq.getPositions();
        for (int i = 0; i < terms.length; ++i) {
            builder.add(terms[i], positions[i]);
        }
        pq = builder.build();
        pq.setBoost(q.getBoost());
        return pq;
    } else if (q instanceof MultiPhraseQuery) {
        ((MultiPhraseQuery) q).setSlop(slop);
        return q;
    } else {
        return q;
    }
}
Project: para-search-elasticsearch    File: ElasticSearchUtils.java
static Query qsParsed(String query) {
    if (StringUtils.isBlank(query) || "*".equals(query.trim())) {
        return null;
    }
    try {
        StandardQueryParser parser = new StandardQueryParser();
        parser.setAllowLeadingWildcard(false);
        return parser.parse(query, "");
    } catch (Exception ex) {
        logger.warn("Failed to parse query string '{}'.", query);
    }
    return null;
}
Project: elasticsearch_my    File: StringFieldType.java
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
    failIfNotIndexed();
    return new TermRangeQuery(name(),
        lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
        upperTerm == null ? null : indexedValueForSearch(upperTerm),
        includeLower, includeUpper);
}
Project: elasticsearch_my    File: Queries.java
public static BooleanQuery filtered(@Nullable Query query, @Nullable Query filter) {
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    if (query != null) {
        builder.add(new BooleanClause(query, Occur.MUST));
    }
    if (filter != null) {
        builder.add(new BooleanClause(filter, Occur.FILTER));
    }
    return builder.build();
}
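
A short usage sketch for the helper above (the two TermQuery instances are illustrative):

Query userQuery = new TermQuery(new Term("title", "lucene"));
Query statusFilter = new TermQuery(new Term("status", "published"));
BooleanQuery filtered = Queries.filtered(userQuery, statusFilter);
// the MUST clause contributes to scoring, while the FILTER clause only restricts matches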
Project: amanda    File: ArticleRepositoryImpl.java
@Override
public Page<SearchResult<Article>> search(String query, Pageable pageable) {
    FullTextEntityManager manager = this.getFullTextEntityManager();

    QueryBuilder builder = manager.getSearchFactory().buildQueryBuilder()
            .forEntity(Article.class).get();

    Query luceneQuery = builder.keyword()
            .onFields("title", "keywords", "abstractContent", "category.name", "category.zhName", "mdBody")
            .matching(query)
            .createQuery();

    FullTextQuery fullTextQuery = manager.createFullTextQuery(luceneQuery, Article.class);
    // fullTextQuery.enableFullTextFilter("published").setParameter( "published", true );

    fullTextQuery.setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);

    long total = fullTextQuery.getResultSize();

    fullTextQuery.setFirstResult(pageable.getOffset())
            .setMaxResults(pageable.getPageSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = fullTextQuery.getResultList();
    List<SearchResult<Article>> list = new ArrayList<>();
    results.forEach(o -> list.add(
            new SearchResult<>((Article) o[0], (Float)o[1])
    ));

    return new PageImpl<>(list, pageable, total);
}
Project: incubator-netbeans    File: PersistentClassIndex.java
@Override
public <T> void getDeclaredElements (
        final String ident,
        final ClassIndex.NameKind kind,
        final Convertor<? super Document, T> convertor,
        final Map<T,Set<String>> result) throws InterruptedException, IOException {
    final Pair<Convertor<? super Document, T>,Index> ctu = indexPath.getPatch(convertor);
    try {
        IndexManager.priorityAccess(() -> {
            final Query query = Queries.createTermCollectingQuery(
                    DocumentUtil.FIELD_FEATURE_IDENTS,
                    DocumentUtil.FIELD_CASE_INSENSITIVE_FEATURE_IDENTS,
                    ident,
                    DocumentUtil.translateQueryKind(kind));
            index.queryDocTerms(
                    result,
                    ctu.first(),
                    Term::text,
                    DocumentUtil.declaredTypesFieldSelector(false, false),
                    cancel.get(),
                    query);
            if (ctu.second() != null) {
                ctu.second().queryDocTerms(
                        result,
                        convertor,
                        Term::text,
                        DocumentUtil.declaredTypesFieldSelector(false, false),
                        cancel.get(),
                        query);
            }
            return null;
        });
    } catch (IOException ioe) {
        this.<Void,IOException>handleException(null, ioe, root);
    }
}
Project: lams    File: MultiFieldQueryParser.java
@Override
protected Query getRegexpQuery(String field, String termStr)
    throws ParseException {
  if (field == null) {
    List<BooleanClause> clauses = new ArrayList<>();
    for (int i = 0; i < fields.length; i++) {
      clauses.add(new BooleanClause(getRegexpQuery(fields[i], termStr),
          BooleanClause.Occur.SHOULD));
    }
    return getBooleanQuery(clauses, true);
  }
  return super.getRegexpQuery(field, termStr);
}
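
A hedged sketch of how this override is exercised through the classic query parser's regexp syntax; the field names and pattern are illustrative, and the two-argument MultiFieldQueryParser constructor assumes Lucene 5/6 (older releases also take a Version argument):

MultiFieldQueryParser parser = new MultiFieldQueryParser(new String[] {"title", "body"}, new StandardAnalyzer());
// a regexp term with no explicit field reaches getRegexpQuery(null, ...) and is
// expanded into one SHOULD clause per configured field
Query q = parser.parse("/luc[ei]ne/");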
Project: Elasticsearch    File: MapperQueryParser.java
/**
 * We override this one so we can get the fuzzy part to be treated as string, so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
 */
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
    if (fuzzySlop.image.length() == 1) {
        return getFuzzyQuery(qfield, termImage, Float.toString(fuzzyMinSim));
    }
    return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1));
}
Project: joai-project    File: LuceneGeospatialQueryConverter.java
/**
 *  Test driver.
 *
 * @param  args  The command line arguments
 */
public static void main(String[] args) {
    if (args.length != 5) {
        System.err.println(
            "usage: java edu.ucsb.adl.LuceneGeospatialQueryConverter " +
            "predicate north south east west");
        System.exit(1);
    }
    Query q = convertQuery("northCoord", "southCoord", "eastCoord", "westCoord", args[0],
        Double.valueOf(args[1]).doubleValue(),
        Double.valueOf(args[2]).doubleValue(),
        Double.valueOf(args[3]).doubleValue(),
        Double.valueOf(args[4]).doubleValue());
    System.out.println(q.toString());
}
Project: Elasticsearch    File: DefaultSearchContext.java
@Override
public Query searchFilter(String[] types) {
    Query filter = mapperService().searchFilter(types);
    if (filter == null && aliasFilter == null) {
        return null;
    }
    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    if (filter != null) {
        bq.add(filter, Occur.MUST);
    }
    if (aliasFilter != null) {
        bq.add(aliasFilter, Occur.MUST);
    }
    return new ConstantScoreQuery(bq.build());
}
Project: para-search-elasticsearch    File: ElasticSearchUtils.java
/**
 * @param query query string
 * @return a list of composite queries for matching nested objects
 */
static QueryBuilder convertQueryStringToNestedQuery(String query) {
    String queryStr = StringUtils.trimToEmpty(query).replaceAll("\\[(\\d+)\\]", "-$1");
    Query q = qsParsed(queryStr);
    if (q == null) {
        return matchAllQuery();
    }
    try {
        return rewriteQuery(q, 0);
    } catch (Exception e) {
        logger.warn(e.getMessage());
        return null;
    }
}
Project: jdg-lab    File: IndexedCacheBeerService.java
public List<Beer> getAllBeers(boolean desc) {
    SearchManager searchManager = Search.getSearchManager(cache);
    QueryBuilder qb = searchManager.buildQueryBuilderForClass(Beer.class).get();
    Query query = qb.all().createQuery();
    CacheQuery cq = searchManager.getQuery(query); //cq is a CacheQuery wrapper of a Lucene query
    if(desc){
        Sort sort = new Sort(new SortField("id", SortField.Type.LONG));
        cq.sort(sort);
    }
    List<Beer> result = (List<Beer>)(List)cq.list();
    return result;
}
Project: lams    File: SimpleQueryParser.java
/**
 * Factory method to generate a prefix query.
 */
protected Query newPrefixQuery(String text) {
  BooleanQuery bq = new BooleanQuery(true);
  for (Map.Entry<String,Float> entry : weights.entrySet()) {
    PrefixQuery prefix = new PrefixQuery(new Term(entry.getKey(), text));
    prefix.setBoost(entry.getValue());
    bq.add(prefix, BooleanClause.Occur.SHOULD);
  }
  return simplify(bq);
}
Project: elasticsearch_my    File: MultiMatchQueryTests.java
public void testMultiMatchCrossFieldsWithSynonyms() throws IOException {
    QueryShardContext queryShardContext = indexService.newQueryShardContext(
        randomInt(20), null, () -> { throw new UnsupportedOperationException(); });

    // check that synonym query is used for a single field
    Query parsedQuery =
        multiMatchQuery("quick").field("name.first")
            .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
    Term[] terms = new Term[2];
    terms[0] = new Term("name.first", "quick");
    terms[1] = new Term("name.first", "fast");
    Query expectedQuery = new SynonymQuery(terms);
    assertThat(parsedQuery, equalTo(expectedQuery));

    // check that blended term query is used for multiple fields
    parsedQuery =
        multiMatchQuery("quick").field("name.first").field("name.last")
            .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
    terms = new Term[4];
    terms[0] = new Term("name.first", "quick");
    terms[1] = new Term("name.first", "fast");
    terms[2] = new Term("name.last", "quick");
    terms[3] = new Term("name.last", "fast");
    float[] boosts = new float[4];
    Arrays.fill(boosts, 1.0f);
    expectedQuery = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f);
    assertThat(parsedQuery, equalTo(expectedQuery));

}
Project: elasticsearch_my    File: MultiMatchQueryBuilderTests.java
public void testToQueryMultipleTermsBooleanQuery() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query query = multiMatchQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext());
    assertThat(query, instanceOf(BooleanQuery.class));
    BooleanQuery bQuery = (BooleanQuery) query;
    assertThat(bQuery.clauses().size(), equalTo(2));
    assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test1")));
    assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test2")));
}
Project: elasticsearch_my    File: IdsQueryBuilderTests.java
@Override
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
    if (queryBuilder.ids().size() == 0) {
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
    } else {
        assertThat(query, instanceOf(TermInSetQuery.class));
    }
}
Project: dacapobench    File: Search.java
public void run() throws java.io.IOException {
  Analyzer analyzer = new StandardAnalyzer();
  QueryParser parser = new QueryParser(field, analyzer);

  while (true) {
    String line = in.readLine();

    if (line == null || line.length() == -1)
      break;

    line = line.trim();
    if (line.length() == 0)
      break;

    Query query = null;
    try {
      query = parser.parse(line);
    } catch (Exception e) {
      e.printStackTrace();
    }
    searcher.search(query, null, 10);

    doPagingSearch(query);
  }

  reader.close();
  out.flush();
  out.close();
  synchronized (parent) {
    parent.completed++;
    if (parent.completed % 4 == 0) {
      System.out.println(parent.completed + " query batches completed");
    }
    parent.notify();
  }
}
Project: elasticsearch_my    File: MapperQueryParser.java
/**
 * We override this one so we can get the fuzzy part to be treated as string,
 * so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
 */
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
    if (fuzzySlop.image.length() == 1) {
        return getFuzzyQuery(qfield, termImage, Float.toString(settings.fuzziness().asDistance(termImage)));
    }
    return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1));
}
Project: Elasticsearch    File: MultiMatchQuery.java
public List<Query> buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException{
    List<Query> queries = new ArrayList<>();
    for (String fieldName : fieldNames.keySet()) {
        Float boostValue = fieldNames.get(fieldName);
        Query query = parseGroup(type.matchQueryType(), fieldName, boostValue, value, minimumShouldMatch);
        if (query != null) {
            queries.add(query);
        }
    }
    return queries;
}
Project: Elasticsearch    File: MatchAllQueryParser.java
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
    XContentParser parser = parseContext.parser();

    float boost = 1.0f;
    String currentFieldName = null;

    XContentParser.Token token;
    while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if ("boost".equals(currentFieldName)) {
                boost = parser.floatValue();
            } else {
                throw new QueryParsingException(parseContext, "[match_all] query does not support [" + currentFieldName + "]");
            }
        }
    }

    if (boost == 1.0f) {
        return Queries.newMatchAllQuery();
    }

    MatchAllDocsQuery query = new MatchAllDocsQuery();
    query.setBoost(boost);
    return query;
}
Project: TextHIN    File: FbEntitySearcher.java
private ScoreDoc[] getHits(String question) throws IOException {
    try {
        Query luceneQuery = queryParser.parse(question);
        ScoreDoc[] hits = indexSearcher.search(luceneQuery, numOfDocs).scoreDocs;
        return hits;
    } catch (ParseException e) {
        System.out.println("ParseException question = " + question);
        e.printStackTrace();
        return new ScoreDoc[0];
    }
}
Project: Elasticsearch    File: ChildrenConstantScoreQuery.java
public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
    this.parentChildIndexFieldData = parentChildIndexFieldData;
    this.parentFilter = parentFilter;
    this.parentType = parentType;
    this.childType = childType;
    this.childQuery = childQuery;
    this.shortCircuitParentDocSet = shortCircuitParentDocSet;
    this.nonNestedDocsFilter = nonNestedDocsFilter;
}
Project: elasticsearch_my    File: FiltersFunctionScoreQuery.java
public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost, Float minScore, CombineFunction combineFunction) {
    this.subQuery = subQuery;
    this.scoreMode = scoreMode;
    this.filterFunctions = filterFunctions;
    this.maxBoost = maxBoost;
    this.combineFunction = combineFunction;
    this.minScore = minScore;
}
Project: Elasticsearch    File: DocumentMapper.java
/**
 * Returns the most specific (deepest) nested {@link ObjectMapper} instance that is in the scope of the specified nested docId.
 */
public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, LeafReaderContext context) throws IOException {
    ObjectMapper nestedObjectMapper = null;
    for (ObjectMapper objectMapper : objectMappers().values()) {
        if (!objectMapper.nested().isNested()) {
            continue;
        }

        Query filter = objectMapper.nestedTypeFilter();
        if (filter == null) {
            continue;
        }
        // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
        // therefore is guaranteed to be a live doc.
        final Weight nestedWeight = filter.createWeight(sc.searcher(), false);
        Scorer scorer = nestedWeight.scorer(context);
        if (scorer == null) {
            continue;
        }

        if (scorer.iterator().advance(nestedDocId) == nestedDocId) {
            if (nestedObjectMapper == null) {
                nestedObjectMapper = objectMapper;
            } else {
                if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) {
                    nestedObjectMapper = objectMapper;
                }
            }
        }
    }
    return nestedObjectMapper;
}
Project: marathonv5    File: IndexSearcher.java
public Map<DocumentType, List<SearchResult>> search(String searchString) throws ParseException {
    Map<DocumentType, List<SearchResult>> resultMap = new TreeMap<DocumentType, List<SearchResult>>();
    try {
        Query query = parser.parse(searchString);
        final SecondPassGroupingCollector collector = new SecondPassGroupingCollector("documentType", searchGroups,
                Sort.RELEVANCE, null, 5, true, false, true);
        searcher.search(query, collector);
        final TopGroups groups = collector.getTopGroups(0);
        for (GroupDocs groupDocs : groups.groups) {
            DocumentType docType = DocumentType.valueOf(groupDocs.groupValue);
            List<SearchResult> results = new ArrayList<SearchResult>();
            for (ScoreDoc scoreDoc : groupDocs.scoreDocs) {
                Document doc = searcher.doc(scoreDoc.doc);
                SearchResult result = new SearchResult(
                        docType,
                        doc.get("name"),
                        doc.get("url"),
                        doc.get("className"),
                        doc.get("package"),
                        doc.get("ensemblePath"),
                        doc.get("shortDescription")
                );
                results.add(result);
            }
            resultMap.put(docType, results);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return resultMap;
}
Project: elasticsearch_my    File: MultiMatchQueryBuilderTests.java
@Override
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
    // we rely on integration tests for deeper checks here
    assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
            .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
            .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
            .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
            .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(PhraseQuery.class))
            .or(instanceOf(LegacyNumericRangeQuery.class))
            .or(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
}
Project: elasticsearch_my    File: MatchPhrasePrefixQueryBuilder.java
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    // validate context specific fields
    if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
        throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
    }

    MatchQuery matchQuery = new MatchQuery(context);
    matchQuery.setAnalyzer(analyzer);
    matchQuery.setPhraseSlop(slop);
    matchQuery.setMaxExpansions(maxExpansions);

    return matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, fieldName, value);
}
Project: elasticsearch_my    File: MultiMatchQueryBuilderTests.java
public void testToQueryMultipleFieldsBooleanQuery() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext());
    assertThat(query, instanceOf(BooleanQuery.class));
    BooleanQuery bQuery = (BooleanQuery) query;
    assertThat(bQuery.clauses().size(), equalTo(2));
    assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}
Project: elasticsearch_my    File: QueryStringQueryBuilderTests.java
public void testToQueryMultipleFieldsDisMaxQuery() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query query = queryStringQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2)
        .useDisMax(true)
        .toQuery(createShardContext());
    assertThat(query, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query;
    List<Query> disjuncts = disMaxQuery.getDisjuncts();
    assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}