Java 类org.apache.lucene.analysis.MockAnalyzer 实例源码

项目:search    文件:TestParallelAtomicReader.java   
/**
 * Builds a fresh two-document index in {@code dir}, where each document
 * carries the same value ("v1" or "v2") in fields f1..f4, and returns a
 * searcher over it. Used as the non-parallel baseline for comparison.
 */
private IndexSearcher single(Random random) throws IOException {
  dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)));
  for (String value : new String[] {"v1", "v2"}) {
    Document doc = new Document();
    for (int fieldNum = 1; fieldNum <= 4; fieldNum++) {
      doc.add(newTextField("f" + fieldNum, value, Field.Store.YES));
    }
    w.addDocument(doc);
  }
  w.close();

  DirectoryReader ir = DirectoryReader.open(dir);
  return newSearcher(ir);
}
项目:search    文件:TestIndexWriterReader.java   
// Prepares the fixture for the add-directories threads: builds an auxiliary
// index (addDir) containing NUM_INIT_DOCS documents and opens numDirs readers
// over it, which the worker threads will add into mainWriter.
public AddDirectoriesThreads(int numDirs, IndexWriter mainWriter) throws Throwable {
  this.numDirs = numDirs;
  this.mainWriter = mainWriter;
  addDir = newDirectory();
  // maxBufferedDocs=2 forces frequent flushes; reduceOpenFiles keeps the
  // resulting file count manageable for the test.
  IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig(new MockAnalyzer(random()))
                                                 .setMaxBufferedDocs(2));
  TestUtil.reduceOpenFiles(writer);
  for (int i = 0; i < NUM_INIT_DOCS; i++) {
    Document doc = DocHelper.createDocument(i, "addindex", 4);
    writer.addDocument(doc);
  }

  writer.close();

  // One reader per slot, all over the same finished directory.
  readers = new IndexReader[numDirs];
  for (int i = 0; i < numDirs; i++)
    readers[i] = DirectoryReader.open(addDir);
}
项目:search    文件:TestDocValuesIndexing.java   
// Verifies that re-adding field "foo" with a conflicting doc-values type after
// closing and reopening the writer is rejected, and that the writer stays
// usable after the rejected document.
public void testMixedTypesAfterReopenAppend2() throws IOException {
  assumeTrue("codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))) ;
  // First session: "foo" is committed as SORTED_SET doc values.
  Document doc = new Document();
  doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
  w.addDocument(doc);
  w.close();

  doc = new Document();
  w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  // Second session: "foo" first appears as a plain posting field, then as
  // BINARY doc values — conflicting with the committed SORTED_SET type.
  doc.add(new StringField("foo", "bar", Field.Store.NO));
  doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
  try {
    // NOTE: this case follows a different code path inside
    // DefaultIndexingChain/FieldInfos, because the field (foo)
    // is first added without DocValues:
    w.addDocument(doc);
    fail("did not get expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }
  // Writer must survive the rejected document.
  w.forceMerge(1);
  w.close();
  dir.close();
}
项目:search    文件:TestDirectoryReader.java   
/**
 * Indexes two documents with three tokens each in field "f" (six tokens
 * total) and checks that the reader reports a sum total term frequency of 6.
 */
public void testGetSumTotalTermFreq() throws Exception {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  for (String text : new String[] {"a b b", "a a b"}) {
    Document doc = new Document();
    doc.add(newTextField("f", text, Field.Store.NO));
    writer.addDocument(doc);
  }
  DirectoryReader r = writer.getReader();
  writer.close();
  try {
    // Make sure codec impls getSumDocFreq (eg PreFlex doesn't)
    Assume.assumeTrue(r.getSumTotalTermFreq("f") != -1);
    assertEquals(6, r.getSumTotalTermFreq("f"));
  } finally {
    r.close();
    dir.close();
  }
}
项目:search    文件:TestTaxonomyFacetCounts.java   
public void testChildCount() throws Exception {
  // LUCENE-4885: FacetResult.numValidDescendants was not set properly by FacetsAccumulator
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();

  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
  FacetsConfig config = new FacetsConfig();
  // Ten documents, each under a distinct child ("0".."9") of dimension "a".
  for (int i = 0; i < 10; i++) {
    Document doc = new Document();
    doc.add(new FacetField("a", Integer.toString(i)));
    iw.addDocument(config.build(taxoWriter, doc));
  }

  DirectoryReader r = DirectoryReader.open(iw, true);
  DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  FacetsCollector sfc = new FacetsCollector();
  newSearcher(r).search(new MatchAllDocsQuery(), sfc);
  Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);

  // childCount must report all 10 children even though only topN=2 are returned.
  assertEquals(10, facets.getTopChildren(2, "a").childCount);

  IOUtils.close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
}
项目:search    文件:TestTransactionRollback.java   
// Builds the fixture index for the rollback tests: records 1..100, committing
// after every batch of 10, under a deletion policy that preserves every
// commit so earlier commit points remain available to roll back to.
@Override
public void setUp() throws Exception {
  super.setUp();
  dir = newDirectory();

  //Build index, of records 1 to 100, committing after each batch of 10
  IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
  IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
                                        .setIndexDeletionPolicy(sdp));

  for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
    Document doc=new Document();
    doc.add(newTextField(FIELD_RECORD_ID, ""+currentRecordId, Field.Store.YES));
    w.addDocument(doc);

    if (currentRecordId%10 == 0) {
      // Label each commit point with the record range it contains, so the
      // tests can locate a specific commit later.
      Map<String,String> data = new HashMap<>();
      data.put("index", "records 1-"+currentRecordId);
      w.setCommitData(data);
      w.commit();
    }
  }

  w.close();
}
项目:search    文件:TestIndexWriter.java   
/**
 * A field whose name is the empty string must still be indexed and its terms
 * must be enumerable in order through the fields API.
 */
public void testEmptyFieldNameTerms() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  Document doc = new Document();
  doc.add(newTextField("", "a b c", Field.Store.NO));
  writer.addDocument(doc);
  writer.close();

  DirectoryReader reader = DirectoryReader.open(dir);
  AtomicReader subreader = getOnlySegmentReader(reader);
  TermsEnum te = subreader.fields().terms("").iterator(null);
  // Exactly the three indexed terms, in term order, then exhaustion.
  for (String expected : new String[] {"a", "b", "c"}) {
    assertEquals(new BytesRef(expected), te.next());
  }
  assertNull(te.next());
  reader.close();
  dir.close();
}
项目:search    文件:BaseCompressingDocValuesFormatTestCase.java   
// Checks that one outlier doc value (Long.MAX_VALUE, needing 63 bits) does not
// force the bits-per-value up for all 20000 other values (which fit in 10 bits):
// the index may grow, but by far less than 53 extra bits per document.
public void testSingleBigValueCompression() throws IOException {
  final Directory dir = new RAMDirectory();
  final IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
  final IndexWriter iwriter = new IndexWriter(dir, iwc);

  // 20000 docs whose "dv" values all fit in 10 bits (0..1023).
  final Document doc = new Document();
  final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
  doc.add(dvf);
  for (int i = 0; i < 20000; ++i) {
    dvf.setLongValue(i & 1023);
    iwriter.addDocument(doc);
  }
  iwriter.forceMerge(1);
  final long size1 = dirSize(dir);
  // Single outlier requiring the full 63 bits.
  dvf.setLongValue(Long.MAX_VALUE);
  iwriter.addDocument(doc);
  iwriter.forceMerge(1);
  final long size2 = dirSize(dir);
  // make sure the new value did not grow the bpv for every other value
  assertTrue(size2 < size1 + (20000 * (63 - 10)) / 8);
  // Fix: the writer and directory were previously leaked — release them so
  // the test does not accumulate open resources.
  iwriter.close();
  dir.close();
}
项目:search    文件:TestIndexWriter.java   
// Flushing with merging disallowed must leave every flushed segment in place:
// 19 docs at maxBufferedDocs=2 plus the final flush yields exactly 10 segments.
public void testFlushWithNoMerging() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(
      dir,
      newIndexWriterConfig(new MockAnalyzer(random()))
          .setMaxBufferedDocs(2)
          .setMergePolicy(newLogMergePolicy(10))
  );
  // Documents carry full term vectors (positions + offsets) on one field.
  Document doc = new Document();
  FieldType customType = new FieldType(TextField.TYPE_STORED);
  customType.setStoreTermVectors(true);
  customType.setStoreTermVectorPositions(true);
  customType.setStoreTermVectorOffsets(true);
  doc.add(newField("field", "aaa", customType));
  for(int i=0;i<19;i++)
    writer.addDocument(doc);
  // Flush without allowing merges (see the assertion below).
  writer.flush(false, true);
  writer.close();
  SegmentInfos sis = new SegmentInfos();
  sis.read(dir);
  // Since we flushed w/o allowing merging we should now
  // have 10 segments
  assertEquals(10, sis.size());
  dir.close();
}
项目:search    文件:TestDocValuesIndexing.java   
// A doc-values type conflict must also be detected when the original field
// definition arrives via addIndexes(Directory...) rather than direct indexing.
public void testTypeChangeViaAddIndexes2() throws Exception {
  // First index: "dv" is NUMERIC doc values.
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);
  Document doc = new Document();
  doc.add(new NumericDocValuesField("dv", 0L));
  writer.addDocument(doc);
  writer.close();

  Directory dir2 = newDirectory();
  conf = newIndexWriterConfig(new MockAnalyzer(random()));
  writer = new IndexWriter(dir2, conf);
  // dir2 inherits "dv" as NUMERIC from dir...
  writer.addIndexes(dir);
  doc = new Document();
  // ...so adding it as SORTED must now be rejected.
  doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
  try {
    writer.addDocument(doc);
    fail("did not hit exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }
  writer.close();
  dir2.close();
  dir.close();
}
项目:search    文件:TestMultiTermConstantScore.java   
// Builds the shared "small" index: 8 rows (two with no "data" field at all),
// whitespace-tokenized, each document also carrying untokenized "id" and
// "all" keyword-style fields.
@BeforeClass
public static void beforeClass() throws Exception {
  String[] data = new String[] { "A 1 2 3 4 5 6", "Z       4 5 6", null,
      "B   2   4 5 6", "Y     3   5 6", null, "C     3     6",
      "X       4 5 6" };

  small = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), small, 
      newIndexWriterConfig(
          new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMergePolicy(newLogMergePolicy()));

  // Untokenized but stored — behaves like the old Field.Keyword.
  FieldType customType = new FieldType(TextField.TYPE_STORED);
  customType.setTokenized(false);
  for (int i = 0; i < data.length; i++) {
    Document doc = new Document();
    doc.add(newField("id", String.valueOf(i), customType));// Field.Keyword("id",String.valueOf(i)));
    doc.add(newField("all", "all", customType));// Field.Keyword("all","all"));
    if (null != data[i]) {
      doc.add(newTextField("data", data[i], Field.Store.YES));// Field.Text("data",data[i]));
    }
    writer.addDocument(doc);
  }

  reader = writer.getReader();
  writer.close();
}
项目:search    文件:SolrTestCaseJ4.java   
/**
 * Publishes the settings of a randomized
 * {@link #newIndexWriterConfig(org.apache.lucene.analysis.Analyzer)} as system
 * properties, so Solr test configs can pick them up and vary their
 * IndexWriter settings accordingly.
 */
public static void newRandomConfig() {
  IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));

  System.setProperty("useCompoundFile", String.valueOf(cfg.getUseCompoundFile()));
  System.setProperty("solr.tests.maxBufferedDocs", String.valueOf(cfg.getMaxBufferedDocs()));
  System.setProperty("solr.tests.ramBufferSizeMB", String.valueOf(cfg.getRAMBufferSizeMB()));
  System.setProperty("solr.tests.mergeScheduler", cfg.getMergeScheduler().getClass().getName());

  // don't ask cfg.getMaxThreadStates(), sometimes newIWC uses
  // RandomDocumentsWriterPerThreadPool and all hell breaks loose
  final int maxIndexingThreads;
  if (rarely(random())) {
    maxIndexingThreads = TestUtil.nextInt(random(), 5, 20); // crazy value
  } else {
    maxIndexingThreads = TestUtil.nextInt(random(), 1, 4); // reasonable value
  }
  System.setProperty("solr.tests.maxIndexingThreads", String.valueOf(maxIndexingThreads));
}
项目:search    文件:TestSpansAdvanced2.java   
/**
 * Initializes the tests by adding documents to the index.
 */
@Override
public void setUp() throws Exception {
  super.setUp();

  // create test index
  // NOTE(review): OpenMode.APPEND implies mDirectory was already seeded —
  // presumably by the base class; confirm there. Stop words are enabled via
  // ENGLISH_STOPSET, and scoring is pinned to DefaultSimilarity.
  final RandomIndexWriter writer = new RandomIndexWriter(random(), mDirectory,
      newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))
          .setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy())
          .setSimilarity(new DefaultSimilarity()));
  addDocument(writer, "A", "Should we, could we, would we?");
  addDocument(writer, "B", "It should.  Should it?");
  addDocument(writer, "C", "It shouldn't.");
  addDocument(writer, "D", "Should we, should we, should we.");
  reader2 = writer.getReader();
  writer.close();

  // re-open the searcher since we added more docs
  searcher2 = newSearcher(reader2);
  searcher2.setSimilarity(new DefaultSimilarity());
}
项目:search    文件:TestLongNormValueSource.java   
// Indexes two small documents in field "text" under the similarity being
// tested, then opens the shared reader and searcher for the suite.
@BeforeClass
public static void beforeClass() throws Exception {
  dir = newDirectory();
  IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
  cfg.setMergePolicy(newLogMergePolicy());
  cfg.setSimilarity(sim);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, cfg);

  for (String text : new String[] {"this is a test test test", "second test"}) {
    Document doc = new Document();
    doc.add(new TextField("text", text, Field.Store.NO));
    writer.addDocument(doc);
  }

  reader = writer.getReader();
  searcher = newSearcher(reader);
  writer.close();
}
项目:search    文件:TestBackwardsCompatibility.java   
// Back-compat: an old-format index (unzipped fixture) must accept numeric and
// binary doc-values updates, and still verify after commit and after a full
// forceMerge.
public void testDocValuesUpdates() throws Exception {
  File oldIndexDir = createTempDir("dvupdates");
  TestUtil.unzip(getDataFile(dvUpdatesIndex), oldIndexDir);
  Directory dir = newFSDirectory(oldIndexDir);

  // Baseline: the fixture must verify before any updates.
  verifyDocValues(dir);

  // update fields and verify index
  IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);
  updateNumeric(writer, "1", "ndv1", "ndv1_c", 300L);
  updateNumeric(writer, "1", "ndv2", "ndv2_c", 300L);
  updateBinary(writer, "1", "bdv1", "bdv1_c", 300L);
  updateBinary(writer, "1", "bdv2", "bdv2_c", 300L);
  writer.commit();
  verifyDocValues(dir);

  // merge all segments
  writer.forceMerge(1);
  writer.commit();
  verifyDocValues(dir);

  writer.close();
  dir.close();
}
项目:search    文件:TestIDVersionPostingsFormat.java   
// Re-adding a previously deleted ID must be accepted under the ID/version
// postings format.
public void testMoreThanOneDocPerIDWithDeletes() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig config = newIndexWriterConfig(new MockAnalyzer(random()));
  config.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);

  Document first = new Document();
  first.add(makeIDField("id", 17));
  writer.addDocument(first);

  // Delete it, then index the same ID again.
  writer.deleteDocuments(new Term("id", "id"));
  Document second = new Document();
  second.add(makeIDField("id", 17));
  writer.addDocument(second);

  writer.commit();
  writer.close();
  dir.close();
}
项目:search    文件:TestAddIndexes.java   
// After addIndexes copies external segments in, the expected doc counts must
// reflect a raw copy with no merge collapsing the copied segments.
public void testNoMergeAfterCopy() throws IOException {
  // main directory
  Directory dir = newDirectory();
  // auxiliary directory
  Directory aux = newDirectory();

  setUpDirs(dir, aux);

  IndexWriter writer = newWriter(
      dir,
      newIndexWriterConfig(new MockAnalyzer(random())).
          setOpenMode(OpenMode.APPEND).
          setMaxBufferedDocs(10).
          setMergePolicy(newLogMergePolicy(4))
  );

  // Add aux twice: once directly, once via a private RAM copy so the same
  // docs can be added again without re-opening the same directory.
  writer.addIndexes(aux, new MockDirectoryWrapper(random(), new RAMDirectory(aux, newIOContext(random()))));
  assertEquals(1060, writer.maxDoc());
  assertEquals(1000, writer.getDocCount(0));
  writer.close();

  // make sure the index is correct
  verifyNumDocs(dir, 1060);
  dir.close();
  aux.close();
}
项目:search    文件:TestTaxonomyFacetCounts.java   
// Facet drill-down fields must never consult the similarity: install a
// per-field wrapper that fails the test if asked for any field but "field".
public void testReallyNoNormsForDrillDown() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
  iwc.setSimilarity(new PerFieldSimilarityWrapper() {
      final Similarity sim = new DefaultSimilarity();

      @Override
      public Similarity get(String name) {
        // Only the regular text field may ever request a similarity.
        assertEquals("field", name);
        return sim;
      }
    });
  TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  FacetsConfig config = new FacetsConfig();

  // One doc with both a text field and a facet field.
  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("a", "path"));
  writer.addDocument(config.build(taxoWriter, doc));
  IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
项目:search    文件:AnalyzingInfixSuggesterTest.java   
// Builds a suggester, closes it, then re-opens a new instance over the same
// directory: the persisted suggestions must survive (count, weight order,
// infix highlighting, and payload).
public void testAfterLoad() throws Exception {
  Input keys[] = new Input[] {
    new Input("lend me your ear", 8, new BytesRef("foobar")),
    new Input("a penny saved is a penny earned", 10, new BytesRef("foobaz")),
  };

  File tempDir = createTempDir("AnalyzingInfixSuggesterTest");

  Analyzer a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
  AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, newFSDirectory(tempDir), a, a, 3);
  suggester.build(new InputArrayIterator(keys));
  assertEquals(2, suggester.getCount());
  suggester.close();

  // Re-open: must load the previously built index from disk, not rebuild.
  suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, newFSDirectory(tempDir), a, a, 3);
  List<LookupResult> results = suggester.lookup(TestUtil.stringToCharSequence("ear", random()), 10, true, true);
  assertEquals(2, results.size());
  // Higher weight (10 for "earned") sorts first.
  assertEquals("a penny saved is a penny earned", results.get(0).key);
  assertEquals("a penny saved is a penny <b>ear</b>ned", results.get(0).highlightKey);
  assertEquals(10, results.get(0).value);
  assertEquals(new BytesRef("foobaz"), results.get(0).payload);
  assertEquals(2, suggester.getCount());
  suggester.close();
}
项目:search    文件:TestDuelingAnalyzers.java   
// MockAnalyzer configured with the JVM's letter character set must tokenize
// identically to a real LetterTokenizer over 1000 random simple strings.
public void testLetterAscii() throws Exception {
  Random random = random();
  Analyzer left = new MockAnalyzer(random, jvmLetter, false);
  Analyzer right = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
      Tokenizer tokenizer = new LetterTokenizer(newAttributeFactory(), reader);
      return new TokenStreamComponents(tokenizer, tokenizer);
    }
  };
  for (int i = 0; i < 1000; i++) {
    String s = TestUtil.randomSimpleString(random);
    // First argument is the assertion message; the two streams are compared.
    assertEquals(s, left.tokenStream("foo", newStringReader(s)), 
                 right.tokenStream("foo", newStringReader(s)));
  }
}
项目:search    文件:TestMultiThreadTermVectors.java   
// Creates the shared index for the term-vector threads: numDocs untokenized,
// stored documents with term vectors, each holding the English spelling of
// its document number in "field".
@Override
public void setUp() throws Exception {
  super.setUp();
  directory = newDirectory();
  IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
  FieldType fieldType = new FieldType(TextField.TYPE_STORED);
  fieldType.setTokenized(false);
  fieldType.setStoreTermVectors(true);
  for (int docNum = 0; docNum < numDocs; docNum++) {
    Document doc = new Document();
    doc.add(newField("field", English.intToEnglish(docNum), fieldType));
    writer.addDocument(doc);
  }
  writer.close();

}
项目:search    文件:TestBinaryDocValuesUpdates.java   
public void testUpdateBinaryDVFieldWithSameNameAsPostingField() throws Exception {
  // this used to fail because FieldInfos.Builder neglected to update
  // globalFieldMaps.docValueTypes map
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);

  // "f" is simultaneously an indexed string field and a binary DV field.
  Document doc = new Document();
  doc.add(new StringField("f", "mock-value", Store.NO));
  doc.add(new BinaryDocValuesField("f", toBytes(5L)));
  writer.addDocument(doc);
  writer.commit();
  // Update the DV, selecting the doc via a term on the same-named posting field.
  writer.updateBinaryDocValue(new Term("f", "mock-value"), "f", toBytes(17L));
  writer.close();

  DirectoryReader r = DirectoryReader.open(dir);
  BinaryDocValues bdv = r.leaves().get(0).reader().getBinaryDocValues("f");
  assertEquals(17, getValue(bdv, 0));
  r.close();

  dir.close();
}
项目:search    文件:TestFieldsReader.java   
// Shared fixture: a one-document index built from DocHelper's standard test
// document, plus the FieldInfos describing its fields. Compound files are
// disabled and FaultyIndexInput's failure flag is reset.
@BeforeClass
public static void beforeClass() throws Exception {
  testDoc = new Document();
  fieldInfos = new FieldInfos.Builder();
  DocHelper.setupDoc(testDoc);
  for (IndexableField field : testDoc) {
    fieldInfos.addOrUpdate(field.name(), field.fieldType());
  }
  dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
                             .setMergePolicy(newLogMergePolicy());
  // NoCFSRatio 0.0 keeps index files separate — presumably so FaultyIndexInput
  // can target individual files; confirm against the tests using this fixture.
  conf.getMergePolicy().setNoCFSRatio(0.0);
  IndexWriter writer = new IndexWriter(dir, conf);
  writer.addDocument(testDoc);
  writer.close();
  FaultyIndexInput.doFail = false;
}
项目:search    文件:TestIndexWriter.java   
/**
 * A stray non-Lucene file placed in the index directory must survive opening
 * and closing another IndexWriter over the same directory.
 */
public void testOtherFiles() throws Throwable {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  writer.addDocument(new Document());
  writer.close();
  try {
    // Drop a foreign file into the directory by hand.
    IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random()));
    out.writeByte((byte) 42);
    out.close();

    // Open and immediately close a fresh writer over the same directory.
    new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).close();

    // The foreign file must not have been deleted.
    assertTrue(slowFileExists(dir, "myrandomfile"));
  } finally {
    dir.close();
  }
}
项目:elasticsearch_my    文件:PlainHighlighterTests.java   
// CustomQueryScorer must highlight every term of a phrase query occurrence.
public void testHighlightPhrase() throws Exception {
    PhraseQuery.Builder phrase = new PhraseQuery.Builder();
    phrase.add(new Term("field", "foo"));
    phrase.add(new Term("field", "bar"));
    Query query = phrase.build();

    QueryScorer queryScorer = new CustomQueryScorer(query);
    org.apache.lucene.search.highlight.Highlighter highlighter = new org.apache.lucene.search.highlight.Highlighter(queryScorer);
    String[] fragments = highlighter.getBestFragments(new MockAnalyzer(random()), "field", "bar foo bar foo", 10);
    assertArrayEquals(new String[] {"bar <B>foo</B> <B>bar</B> foo"}, fragments);
}
项目:elasticsearch_my    文件:CollapseBuilderTests.java   
// Exercises CollapseBuilder.build() against mocked field mappings: collapsing
// requires doc_values, and inner_hits additionally requires the field to be
// indexed. Both numeric and keyword field types must behave the same.
public void testBuild() throws IOException {
    Directory dir = new RAMDirectory();
    // Empty commit so a reader can be opened over the directory.
    try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
        writer.commit();
    }
    SearchContext searchContext = mockSearchContext();
    try (IndexReader reader = DirectoryReader.open(dir)) {
        when(searchContext.getQueryShardContext().getIndexReader()).thenReturn(reader);
        MappedFieldType numberFieldType =
            new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        MappedFieldType keywordFieldType =
            new KeywordFieldMapper.KeywordFieldType();
        for (MappedFieldType fieldType : new MappedFieldType[] {numberFieldType, keywordFieldType}) {
            fieldType.setName("field");
            fieldType.setHasDocValues(true);
            when(searchContext.getQueryShardContext().fieldMapper("field")).thenReturn(fieldType);
            CollapseBuilder builder = new CollapseBuilder("field");
            CollapseContext collapseContext = builder.build(searchContext);
            assertEquals(collapseContext.getFieldType(), fieldType);

            // Still buildable when the field is unindexed but has doc_values.
            fieldType.setIndexOptions(IndexOptions.NONE);
            collapseContext = builder.build(searchContext);
            assertEquals(collapseContext.getFieldType(), fieldType);

            // ...but never without doc_values.
            fieldType.setHasDocValues(false);
            SearchContextException exc = expectThrows(SearchContextException.class, () -> builder.build(searchContext));
            assertEquals(exc.getMessage(), "cannot collapse on field `field` without `doc_values`");

            // inner_hits requires the field to be indexed (index options were
            // set to NONE above).
            fieldType.setHasDocValues(true);
            builder.setInnerHits(new InnerHitBuilder());
            exc = expectThrows(SearchContextException.class, () -> builder.build(searchContext));
            assertEquals(exc.getMessage(),
                "cannot expand `inner_hits` for collapse field `field`, " +
                    "only indexed field can retrieve `inner_hits`");
        }
    }
}
项目:elasticsearch_my    文件:BooleanFieldMapperTests.java   
// A boolean field mapped with defaults must be both indexed (true becomes the
// term "T") and stored as sorted-numeric doc values (true becomes 1).
public void testDefaults() throws IOException {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
            .endObject().endObject().string();

    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));

    // Parse a source document with {"field": true}.
    ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
            .field("field", true)
            .endObject()
            .bytes());

    try (Directory dir = new RAMDirectory();
         IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) {
        w.addDocuments(doc.docs());
        try (DirectoryReader reader = DirectoryReader.open(w)) {
            final LeafReader leaf = reader.leaves().get(0).reader();
            // boolean fields are indexed and have doc values by default
            assertEquals(new BytesRef("T"), leaf.terms("field").iterator().next());
            SortedNumericDocValues values = leaf.getSortedNumericDocValues("field");
            assertNotNull(values);
            values.setDocument(0);
            assertEquals(1, values.count());
            assertEquals(1, values.valueAt(0));
        }
    }
}
项目:elasticsearch_my    文件:StoreTests.java   
// Commit user data written via IndexWriter.setCommitData must be readable
// back from the Store's metadata snapshot (with or without a commit snapshot).
public void testUserDataRead() throws IOException {
    final ShardId shardId = new ShardId("index", "_na_", 1);
    DirectoryService directoryService = new LuceneManagedDirectoryService(random());
    Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
    IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec());
    SnapshotDeletionPolicy deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    config.setIndexDeletionPolicy(deletionPolicy);
    IndexWriter writer = new IndexWriter(store.directory(), config);
    Document doc = new Document();
    doc.add(new TextField("id", "1", Field.Store.NO));
    writer.addDocument(doc);
    // Attach sync-id and translog-generation markers to the commit point.
    Map<String, String> commitData = new HashMap<>(2);
    String syncId = "a sync id";
    String translogId = "a translog id";
    commitData.put(Engine.SYNC_COMMIT_ID, syncId);
    commitData.put(Translog.TRANSLOG_GENERATION_KEY, translogId);
    writer.setCommitData(commitData);
    writer.commit();
    writer.close();
    Store.MetadataSnapshot metadata;
    // Randomly read metadata either from the live directory or from a snapshot.
    metadata = store.getMetadata(randomBoolean() ? null : deletionPolicy.snapshot());
    assertFalse(metadata.asMap().isEmpty());
    // do not check for correct files, we have enough tests for that above
    assertThat(metadata.getCommitUserData().get(Engine.SYNC_COMMIT_ID), equalTo(syncId));
    assertThat(metadata.getCommitUserData().get(Translog.TRANSLOG_GENERATION_KEY), equalTo(translogId));
    TestUtil.checkIndex(store.directory());
    assertDeleteContent(store, directoryService);
    IOUtils.close(store);
}
项目:search    文件:TestBinaryDocValuesUpdates.java   
public void testUpdateDocumentByMultipleTerms() throws Exception {
  // make sure the order of updates is respected, even when multiple terms affect same document
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);

  // Each doc matches both update terms (k1:v1 and k2:v2).
  Document doc = new Document();
  doc.add(new StringField("k1", "v1", Store.NO));
  doc.add(new StringField("k2", "v2", Store.NO));
  doc.add(new BinaryDocValuesField("bdv", toBytes(5L)));
  writer.addDocument(doc); // flushed document
  writer.commit();
  writer.addDocument(doc); // in-memory document

  // The later update (3) must win on every doc, flushed or buffered.
  writer.updateBinaryDocValue(new Term("k1", "v1"), "bdv", toBytes(17L));
  writer.updateBinaryDocValue(new Term("k2", "v2"), "bdv", toBytes(3L));
  writer.close();

  final DirectoryReader reader = DirectoryReader.open(dir);
  final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
  BinaryDocValues bdv = r.getBinaryDocValues("bdv");
  for (int i = 0; i < r.maxDoc(); i++) {
    assertEquals(3, getValue(bdv, i));
  }
  reader.close();
  dir.close();
}
项目:search    文件:AnalyzingSuggesterTest.java   
// With maxSurfaceFormsPerAnalyzedForm = 2, only the two highest-weight
// surface forms sharing an analyzed form may survive the build.
public void testMaxSurfaceFormsPerAnalyzedForm() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  AnalyzingSuggester suggester = new AnalyzingSuggester(analyzer, analyzer, 0, 2, -1, true);
  suggester.build(new InputArrayIterator(shuffle(new Input("a", 40),
      new Input("a ", 50), new Input(" a", 60))));

  List<LookupResult> results = suggester.lookup("a", false, 5);
  assertEquals(2, results.size());
  // Ordered by descending weight: " a" (60), then "a " (50); "a" (40) dropped.
  assertEquals(" a", results.get(0).key);
  assertEquals(60, results.get(0).value);
  assertEquals("a ", results.get(1).key);
  assertEquals(50, results.get(1).value);
}
项目:search    文件:TestIndexWriterDelete.java   
// Mixes committed and buffered adds with a delete-by-term: all ten value=200
// docs are removed (five committed, five still buffered when the delete is
// issued), leaving only the five value=100 docs.
public void testBothDeletes() throws IOException {
  Directory dir = newDirectory();
  IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
      .setMaxBufferedDocs(100)
      .setMaxBufferedDeleteTerms(100));

  int id = 0;
  int value = 100;

  for (int i = 0; i < 5; i++) {
    addDoc(modifier, ++id, value);
  }

  value = 200;
  for (int i = 0; i < 5; i++) {
    addDoc(modifier, ++id, value);
  }
  modifier.commit();

  // These five value=200 docs are only buffered when the delete arrives.
  for (int i = 0; i < 5; i++) {
    addDoc(modifier, ++id, value);
  }
  modifier.deleteDocuments(new Term("value", String.valueOf(value)));

  modifier.commit();

  IndexReader reader = DirectoryReader.open(dir);
  assertEquals(5, reader.numDocs());
  modifier.close();
  reader.close();
  dir.close();
}
项目:search    文件:TestIndexWriter.java   
// updateDocuments with a term that matches nothing must behave like a plain
// add and complete without error.
public void testOnlyUpdateDocuments() throws Exception {
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir,
                                  new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));

  final List<Document> batch = new ArrayList<>();
  batch.add(new Document());
  w.updateDocuments(new Term("foo", "bar"), batch);
  w.close();
  dir.close();
}
项目:search    文件:TestTransactions.java   
// Seeds the given directory with seven documents, each holding the English
// spelling of a random int in the "contents" field.
public void initIndex(Directory dir) throws Throwable {
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  for (int docNum = 0; docNum < 7; docNum++) {
    Document doc = new Document();
    doc.add(newTextField("contents", English.intToEnglish(random().nextInt()), Field.Store.NO));
    writer.addDocument(doc);
  }
  writer.close();
}
项目:search    文件:TestIndexWriterReader.java   
// With reader pooling active, openIfChanged must return null when nothing
// actually changed — but a no-op delete still flips isCurrent() to false on
// the old reader even though reopening yields nothing new.
public void testReopenAfterNoRealChange() throws Exception {
  Directory d = getAssertNoDeletesDirectory(newDirectory());
  IndexWriter w = new IndexWriter(
      d,
      newIndexWriterConfig(new MockAnalyzer(random())));

  DirectoryReader r = w.getReader(); // start pooling readers

  // No changes yet: reopen must report "nothing changed".
  DirectoryReader r2 = DirectoryReader.openIfChanged(r);
  assertNull(r2);

  // A real add produces a new, current reader with a new version.
  w.addDocument(new Document());
  DirectoryReader r3 = DirectoryReader.openIfChanged(r);
  assertNotNull(r3);
  assertTrue(r3.getVersion() != r.getVersion());
  assertTrue(r3.isCurrent());

  // Deletes nothing in reality...:
  w.deleteDocuments(new Term("foo", "bar"));

  // ... but IW marks this as not current:
  assertFalse(r3.isCurrent());
  DirectoryReader r4 = DirectoryReader.openIfChanged(r3);
  assertNull(r4);

  // Deletes nothing in reality...:
  w.deleteDocuments(new Term("foo", "bar"));
  DirectoryReader r5 = DirectoryReader.openIfChanged(r3, w, true);
  assertNull(r5);

  r3.close();

  w.close();
  d.close();
}
项目:search    文件:TestIndexWriterExceptions.java   
// Writes a corrupted next-generation segments_N file (copy of the real one
// with the final byte flipped) and checks that DirectoryReader.open falls
// back to the valid segments_N, and that a new writer cleans the bad file up.
public void testSegmentsChecksumError() throws IOException {
  Directory dir = newDirectory();

  IndexWriter writer = null;

  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));

  // add 100 documents
  for (int i = 0; i < 100; i++) {
    addDoc(writer);
  }

  // close
  writer.close();

  long gen = SegmentInfos.getLastCommitGeneration(dir);
  assertTrue("segment generation should be > 0 but got " + gen, gen > 0);

  // Fabricate segments_(gen+1): identical to the last commit except the
  // final byte is altered, simulating a corrupt newer generation.
  final String segmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(dir);
  IndexInput in = dir.openInput(segmentsFileName, newIOContext(random()));
  IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen), newIOContext(random()));
  out.copyBytes(in, in.length()-1);
  byte b = in.readByte();
  out.writeByte((byte) (1+b));
  out.close();
  in.close();

  IndexReader reader = null;
  try {
    reader = DirectoryReader.open(dir);
  } catch (IOException e) {
    e.printStackTrace(System.out);
    fail("segmentInfos failed to retry fallback to correct segments_N file");
  }
  reader.close();

  // should remove the corrumpted segments_N
  new IndexWriter(dir, newIndexWriterConfig(null)).close();
  dir.close();
}
项目:search    文件:TestIndexWriter.java   
public void testRollbackThenClose() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  writer.addDocument(new Document());
  writer.rollback();
  // Calling close() on an already-rolled-back writer must be a harmless no-op.
  writer.close();
  dir.close();
}
Project: search    File: TestIndexWriter.java
/**
 * Verifies that indexing a document containing a single empty field
 * with term vectors enabled does not raise a NullPointerException.
 */
public void testBadSegment() throws IOException {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));

  // A term-vector-enabled field whose value is the empty string.
  FieldType vectorType = new FieldType(TextField.TYPE_NOT_STORED);
  vectorType.setStoreTermVectors(true);

  Document doc = new Document();
  doc.add(newField("tvtest", "", vectorType));
  writer.addDocument(doc);
  writer.close();
  dir.close();
}
Project: search    File: BaseStoredFieldsFormatTestCase.java
/**
 * A stored binary field created with an explicit (offset, length) slice must
 * round-trip: before indexing the field still references the whole backing
 * array, but a reader must get back exactly the 17 bytes starting at offset 10.
 */
public void testBinaryFieldOffsetLength() throws IOException {
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  byte[] b = new byte[50];
  for(int i=0;i<50;i++)
    b[i] = (byte) (i+77);

  Document doc = new Document();
  Field f = new StoredField("binary", b, 10, 17);
  // Before indexing, the BytesRef still points at the full 50-byte array...
  byte[] bx = f.binaryValue().bytes;
  assertNotNull(bx);
  assertEquals(50, bx.length);
  // ...but records the requested slice.
  assertEquals(10, f.binaryValue().offset);
  assertEquals(17, f.binaryValue().length);
  doc.add(f);
  w.addDocument(doc);
  w.close();

  IndexReader ir = DirectoryReader.open(dir);
  Document doc2 = ir.document(0);
  IndexableField f2 = doc2.getField("binary");
  b = f2.binaryValue().bytes;
  assertNotNull(b);
  // Fixed: the original used assertEquals(17, b.length, 17), which resolves to
  // the (double, double, delta) overload and would accept any length in [0, 34].
  assertEquals(17, b.length);
  // Stored bytes must equal the original slice: b[10+i] == (byte)(10 + i + 77).
  for (int i = 0; i < 17; i++) {
    assertEquals((byte) (10 + i + 77), b[i]);
  }
  ir.close();
  dir.close();
}
Project: search    File: TestIndexWriterReader.java
public void testIsCurrent() throws IOException {
  Directory dir = newDirectory();

  // First commit: a single document, then the writer is closed.
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  Document d = new Document();
  d.add(newTextField("field", "a b c", Field.Store.NO));
  w.addDocument(d);
  w.close();

  // Reopen a writer and grab an NRT reader before any further change.
  w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  d = new Document();
  d.add(newTextField("field", "a b c", Field.Store.NO));
  DirectoryReader nrt = w.getReader();
  assertTrue(nrt.isCurrent());
  w.addDocument(d);
  assertFalse(nrt.isCurrent()); // the uncommitted add makes the NRT reader stale
  w.forceMerge(1); // make sure we don't have a merge going on
  assertFalse(nrt.isCurrent());
  nrt.close();

  DirectoryReader commitReader = DirectoryReader.open(dir);
  nrt = w.getReader();

  assertTrue(commitReader.isCurrent());
  assertTrue(nrt.isCurrent()); // nothing was committed yet so we are still current
  assertEquals(2, nrt.maxDoc()); // the NRT reader sees the uncommitted document
  assertEquals(1, commitReader.maxDoc()); // the commit-point reader does not
  w.close(); // close is actually a commit; both readers should see the change
  assertTrue(nrt.isCurrent());
  assertFalse(commitReader.isCurrent()); // opened before the writer was closed / committed

  commitReader.close();
  nrt.close();
  dir.close();
}
Project: search    File: TestDocValuesIndexing.java
public void testTypeChangeViaAddIndexesIR() throws Exception {
  // Index #1: field "dv" holds NUMERIC doc values.
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  Document d = new Document();
  d.add(new NumericDocValuesField("dv", 0L));
  w.addDocument(d);
  w.close();

  // Index #2: the same field "dv" holds SORTED doc values — incompatible.
  Directory dir2 = newDirectory();
  w = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
  d = new Document();
  d.add(new SortedDocValuesField("dv", new BytesRef("foo")));
  w.addDocument(d);

  // Importing the numeric index through addIndexes(IndexReader...) must be
  // rejected because the doc-values type of "dv" would change.
  IndexReader[] toAdd = new IndexReader[] {DirectoryReader.open(dir)};
  try {
    w.addIndexes(toAdd);
    fail("did not hit exception");
  } catch (IllegalArgumentException expected) {
    // expected: doc-values type mismatch on "dv"
  }
  toAdd[0].close();
  w.close();

  dir.close();
  dir2.close();
}