Java class org.apache.lucene.search.FieldCache usage examples
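The snippets below are grouped by the project they were collected from. Before them, here is a minimal, self-contained sketch of the API they all share (assuming Lucene 4.x; the directory, field name, and value are illustrative and not taken from any of the listed projects): per-field values are un-inverted lazily and cached per atomic (segment-level) reader through the FieldCache.DEFAULT singleton.

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class FieldCacheSketch {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_47, new WhitespaceAnalyzer(Version.LUCENE_47)));
    Document doc = new Document();
    doc.add(new IntField("count", 42, Field.Store.NO));
    w.addDocument(doc);
    w.close();

    DirectoryReader r = DirectoryReader.open(dir);
    // FieldCache operates on atomic (single-segment) readers; wrap the composite reader if needed.
    AtomicReader ar = SlowCompositeReaderWrapper.wrap(r);
    // First access un-inverts the "count" field and caches the values for this reader.
    FieldCache.Ints counts = FieldCache.DEFAULT.getInts(ar, "count", false);
    System.out.println(counts.get(0)); // 42
    r.close();
    dir.close();
  }
}

The per-reader caching is also why several of the tests below end with FieldCache.DEFAULT.purgeAllCaches(): entries live for the lifetime of the cache, and the test framework's FieldCacheSanityChecker flags duplicate or mismatched entries across readers.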

Project: search    File: ReverseOrdFieldSource.java
@Override
public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
  final IndexReader topReader = ReaderUtil.getTopLevelContext(readerContext).reader();
  final AtomicReader r = SlowCompositeReaderWrapper.wrap(topReader);
  final int off = readerContext.docBase;

  final SortedDocValues sindex = FieldCache.DEFAULT.getTermsIndex(r, field);
  final int end = sindex.getValueCount();

  return new IntDocValues(this) {
    @Override
    public int intVal(int doc) {
      return (end - sindex.getOrd(doc+off) - 1);
    }
  };
}
Project: search    File: TestCustomScoreQuery.java
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) throws IOException {
  final FieldCache.Ints values = FieldCache.DEFAULT.getInts(context.reader(), INT_FIELD, false);
  return new CustomScoreProvider(context) {
    @Override
    public float customScore(int doc, float subScore, float valSrcScore) {
      assertTrue(doc <= context.reader().maxDoc());
      return values.get(doc);
    }
  };
}
Project: search    File: TermAllGroupsCollector.java
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
  index = FieldCache.DEFAULT.getTermsIndex(context.reader(), groupField);

  // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
  ordSet.clear();
  for (BytesRef countedGroup : groups) {
    if (countedGroup == null) {
      ordSet.put(-1);
    } else {
      int ord = index.lookupTerm(countedGroup);
      if (ord >= 0) {
        ordSet.put(ord);
      }
    }
  }
}
Project: search    File: TermDistinctValuesCollector.java
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
  groupFieldTermIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), groupField);
  countFieldTermIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), countField);
  ordSet.clear();
  for (GroupCount group : groups) {
    int groupOrd = group.groupValue == null ? -1 : groupFieldTermIndex.lookupTerm(group.groupValue);
    if (group.groupValue != null && groupOrd < 0) {
      continue;
    }

    groupCounts[ordSet.put(groupOrd)] = group;
    group.ords = new int[group.uniqueValues.size()];
    Arrays.fill(group.ords, -2);
    int i = 0;
    for (BytesRef value : group.uniqueValues) {
      int countOrd = value == null ? -1 : countFieldTermIndex.lookupTerm(value);
      if (value == null || countOrd >= 0) {
        group.ords[i++] = countOrd;
      }
    }
  }
}
Project: search    File: TestRuleFieldCacheSanity.java
@Override
public Statement apply(final Statement s, final Description d) {
  return new Statement() {
    @Override
    public void evaluate() throws Throwable {
      s.evaluate();

      Throwable problem = null;
      try {
        LuceneTestCase.assertSaneFieldCaches(d.getDisplayName());
      } catch (Throwable t) {
        problem = t;
      }

      FieldCache.DEFAULT.purgeAllCaches();

      if (problem != null) {
        Rethrow.rethrow(problem);
      }
    }
  };
}
Project: search    File: LuceneTestCase.java
/**
 * Asserts that FieldCacheSanityChecker does not detect any
 * problems with FieldCache.DEFAULT.
 * <p>
 * If any problems are found, they are logged to System.err
 * (along with the msg) when the assertion is thrown.
 * </p>
 * <p>
 * This method is called by tearDown after every test method;
 * however, IndexReaders scoped inside test methods may be garbage
 * collected prior to this method being called, causing errors to
 * be overlooked. Tests are encouraged to keep their IndexReaders
 * scoped at the class level, or to explicitly call this method
 * directly in the same scope as the IndexReader.
 * </p>
 *
 * @see org.apache.lucene.util.FieldCacheSanityChecker
 */
protected static void assertSaneFieldCaches(final String msg) {
  final CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
  Insanity[] insanity = null;
  try {
    try {
      insanity = FieldCacheSanityChecker.checkSanity(entries);
    } catch (RuntimeException e) {
      dumpArray(msg + ": FieldCache", entries, System.err);
      throw e;
    }

    assertEquals(msg + ": Insane FieldCache usage(s) found",
                 0, insanity.length);
    insanity = null;
  } finally {

    // report this in the event of any exception/failure
    // if no failure, then insanity will be null anyway
    if (null != insanity) {
      dumpArray(msg + ": Insane FieldCache usage(s)", insanity, System.err);
    }
  }
}
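Per the caveat above that method-scoped IndexReaders can be garbage collected before the tearDown-time check runs, a hedged sketch of calling assertSaneFieldCaches explicitly in the same scope as the reader (the test class, field name, and value are hypothetical):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class FieldCacheSanityExampleTest extends LuceneTestCase {
  public void testFieldCacheStaysSane() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("price", 42L));
    w.addDocument(doc);
    DirectoryReader r = w.getReader();
    w.close();

    // Exercise code that populates FieldCache.DEFAULT against this reader.
    FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "price", false);

    // Check sanity while the reader is still strongly referenced, rather than
    // relying only on the automatic tearDown-time check.
    assertSaneFieldCaches("testFieldCacheStaysSane");
    FieldCache.DEFAULT.purgeAllCaches();

    r.close();
    dir.close();
  }
}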
Project: search    File: TestDocValuesIndexing.java
public void testMultiValuedDocValuesField() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  Field f = new NumericDocValuesField("field", 17);
  doc.add(f);

  // add the doc
  w.addDocument(doc);

  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  doc.add(f);
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  DirectoryReader r = w.getReader();
  w.close();
  assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
  r.close();
  d.close();
}
Project: search    File: TestDocValuesIndexing.java
public void testDifferentTypedDocValuesField() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  doc.add(new NumericDocValuesField("field", 17));
  w.addDocument(doc);

  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  doc.add(new BinaryDocValuesField("field", new BytesRef("blah")));
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  DirectoryReader r = w.getReader();
  w.close();
  assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
  r.close();
  d.close();
}
Project: search    File: TestDocValuesIndexing.java
public void testDocsWithField() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);
  Document doc = new Document();
  doc.add(new NumericDocValuesField("dv", 0L));
  writer.addDocument(doc);

  doc = new Document();
  doc.add(new TextField("dv", "some text", Field.Store.NO));
  doc.add(new NumericDocValuesField("dv", 0L));
  writer.addDocument(doc);

  DirectoryReader r = writer.getReader();
  writer.close();

  AtomicReader subR = r.leaves().get(0).reader();
  assertEquals(2, subR.numDocs());

  Bits bits = FieldCache.DEFAULT.getDocsWithField(subR, "dv");
  assertTrue(bits.get(0));
  assertTrue(bits.get(1));
  r.close();
  dir.close();
}
Project: search    File: TestFieldCacheSanityChecker.java
public void testSanity() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getDoubles(readerA, "theDouble", false);
  cache.getDoubles(readerA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
  cache.getDoubles(readerAclone, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
  cache.getDoubles(readerB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);

  cache.getInts(readerX, "theInt", false);
  cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);

  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  if (0 < insanity.length)
    dumpArray(getTestClass().getName() + "#" + getTestName() 
        + " INSANITY", insanity, System.err);

  assertEquals("shouldn't be any cache insanity", 0, insanity.length);
  cache.purgeAllCaches();
}
Project: search    File: TestFieldCacheSanityChecker.java
public void testInsanity1() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
  cache.getTerms(readerX, "theInt", false);
  cache.getBytes(readerX, "theByte", false);

  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  assertEquals("wrong number of cache errors", 1, insanity.length);
  assertEquals("wrong type of cache error", 
               InsanityType.VALUEMISMATCH,
               insanity[0].getType());
  assertEquals("wrong number of entries in cache error", 2,
               insanity[0].getCacheEntries().length);

  // we expect bad things, don't let tearDown complain about them
  cache.purgeAllCaches();
}
Project: search    File: TestFieldCacheSanityChecker.java
public void testInsanity2() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getTerms(readerA, "theInt", false);
  cache.getTerms(readerB, "theInt", false);
  cache.getTerms(readerX, "theInt", false);

  cache.getBytes(readerX, "theByte", false);


  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  assertEquals("wrong number of cache errors", 1, insanity.length);
  assertEquals("wrong type of cache error", 
               InsanityType.SUBREADER,
               insanity[0].getType());
  assertEquals("wrong number of entries in cache error", 3,
               insanity[0].getCacheEntries().length);

  // we expect bad things, don't let tearDown complain about them
  cache.purgeAllCaches();
}
Project: search    File: TrieField.java
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
  field.checkFieldCacheSource(qparser);
  switch (type) {
    case INTEGER:
      return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
    case FLOAT:
      return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
    case DATE:
      return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );        
    case LONG:
      return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
    case DOUBLE:
      return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
    default:
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
  }
}
Project: search    File: TestFunctionQuery.java
/**
 * some platforms don't allow quote characters in filenames, so 
 * in addition to testExternalFieldValueSourceParser above, test a field 
 * name with quotes in it that does NOT use ExternalFileField
 * @see #testExternalFieldValueSourceParser
 */
@Test
public void testFieldValueSourceParser() {
  clearIndex();

  String field = "CoMpleX \" fieldName _f";
  String fieldAsFunc = "field(\"CoMpleX \\\" fieldName _f\")";

  float[] ids = {100,-4,0,10,25,5,77,1};

  createIndex(field, ids);

  // test identity (straight field value)
  singleTest(fieldAsFunc, "\0", 
             100,100,  -4,-4,  0,0,  10,10,  25,25,  5,5,  77,77,  1,1);
  singleTest(fieldAsFunc, "sqrt(\0)", 
             100,10,  25,5,  0,0,   1,1);
  singleTest(fieldAsFunc, "log(\0)",  1,0);

  FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity    
}
Project: search    File: TestRandomFaceting.java
@Test
public void testRandomFaceting() throws Exception {
  try {
    Random rand = random();
    int iter = atLeast(100);
    init();
    addMoreDocs(0);

    for (int i=0; i<iter; i++) {
      doFacetTests();

      if (rand.nextInt(100) < 5) {
        init();
      }

      addMoreDocs(rand.nextInt(indexSize) + 1);

      if (rand.nextInt(100) < 50) {
        deleteSomeDocs();
      }
    }
  } finally {
    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
  }
}
Project: search    File: TestRandomDVFaceting.java
@Test
public void testRandomFaceting() throws Exception {
  try {
    Random rand = random();
    int iter = atLeast(100);
    init();
    addMoreDocs(0);

    for (int i=0; i<iter; i++) {
      doFacetTests();

      if (rand.nextInt(100) < 5) {
        init();
      }

      addMoreDocs(rand.nextInt(indexSize) + 1);

      if (rand.nextInt(100) < 50) {
        deleteSomeDocs();
      }
    }
  } finally {
    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
  }
}
Project: NYBC    File: TermAllGroupsCollector.java
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
  index = FieldCache.DEFAULT.getTermsIndex(context.reader(), groupField);

  // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
  ordSet.clear();
  for (BytesRef countedGroup : groups) {
    if (countedGroup == null) {
      ordSet.put(-1);
    } else {
      int ord = index.lookupTerm(countedGroup);
      if (ord >= 0) {
        ordSet.put(ord);
      }
    }
  }
}
Project: read-open-source-code    File: TermDistinctValuesCollector.java
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
  groupFieldTermIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), groupField);
  countFieldTermIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), countField);
  ordSet.clear();
  for (GroupCount group : groups) {
    int groupOrd = group.groupValue == null ? -1 : groupFieldTermIndex.lookupTerm(group.groupValue);
    if (group.groupValue != null && groupOrd < 0) {
      continue;
    }

    groupCounts[ordSet.put(groupOrd)] = group;
    group.ords = new int[group.uniqueValues.size()];
    Arrays.fill(group.ords, -2);
    int i = 0;
    for (BytesRef value : group.uniqueValues) {
      int countOrd = value == null ? -1 : countFieldTermIndex.lookupTerm(value);
      if (value == null || countOrd >= 0) {
        group.ords[i++] = countOrd;
      }
    }
  }
}
Project: NYBC    File: TestDocValuesIndexing.java
public void testMultiValuedDocValuesField() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  Field f = new NumericDocValuesField("field", 17);
  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  doc.add(f);
  doc.add(f);
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  doc = new Document();
  doc.add(f);
  w.addDocument(doc);
  w.forceMerge(1);
  DirectoryReader r = w.getReader();
  w.close();
  assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
  r.close();
  d.close();
}
Project: NYBC    File: TestDocValuesIndexing.java
public void testDifferentTypedDocValuesField() throws Exception {
  Directory d = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), d);
  Document doc = new Document();
  // Index doc values are single-valued so we should not
  // be able to add same field more than once:
  Field f;
  doc.add(f = new NumericDocValuesField("field", 17));
  doc.add(new BinaryDocValuesField("field", new BytesRef("blah")));
  try {
    w.addDocument(doc);
    fail("didn't hit expected exception");
  } catch (IllegalArgumentException iae) {
    // expected
  }

  doc = new Document();
  doc.add(f);
  w.addDocument(doc);
  w.forceMerge(1);
  DirectoryReader r = w.getReader();
  w.close();
  assertEquals(17, FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "field", false).get(0));
  r.close();
  d.close();
}
Project: NYBC    File: TestDocValuesIndexing.java
public void testDocsWithField() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);
  Document doc = new Document();
  doc.add(new NumericDocValuesField("dv", 0L));
  writer.addDocument(doc);

  doc = new Document();
  doc.add(new TextField("dv", "some text", Field.Store.NO));
  doc.add(new NumericDocValuesField("dv", 0L));
  writer.addDocument(doc);

  DirectoryReader r = writer.getReader();
  writer.close();

  AtomicReader subR = r.leaves().get(0).reader();
  assertEquals(2, subR.numDocs());

  Bits bits = FieldCache.DEFAULT.getDocsWithField(subR, "dv");
  assertTrue(bits.get(0));
  assertTrue(bits.get(1));
  r.close();
  dir.close();
}
Project: NYBC    File: TestFieldCacheSanityChecker.java
public void testSanity() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getDoubles(readerA, "theDouble", false);
  cache.getDoubles(readerA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
  cache.getDoubles(readerAclone, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
  cache.getDoubles(readerB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);

  cache.getInts(readerX, "theInt", false);
  cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);

  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  if (0 < insanity.length)
    dumpArray(getTestClass().getName() + "#" + getTestName() 
        + " INSANITY", insanity, System.err);

  assertEquals("shouldn't be any cache insanity", 0, insanity.length);
  cache.purgeAllCaches();
}
Project: NYBC    File: TestFieldCacheSanityChecker.java
public void testInsanity1() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
  cache.getTerms(readerX, "theInt");
  cache.getBytes(readerX, "theByte", false);

  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  assertEquals("wrong number of cache errors", 1, insanity.length);
  assertEquals("wrong type of cache error", 
               InsanityType.VALUEMISMATCH,
               insanity[0].getType());
  assertEquals("wrong number of entries in cache error", 2,
               insanity[0].getCacheEntries().length);

  // we expect bad things, don't let tearDown complain about them
  cache.purgeAllCaches();
}
Project: NYBC    File: TestFieldCacheSanityChecker.java
public void testInsanity2() throws IOException {
  FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();

  cache.getTerms(readerA, "theInt");
  cache.getTerms(readerB, "theInt");
  cache.getTerms(readerX, "theInt");

  cache.getBytes(readerX, "theByte", false);


  // // // 

  Insanity[] insanity = 
    FieldCacheSanityChecker.checkSanity(cache.getCacheEntries());

  assertEquals("wrong number of cache errors", 1, insanity.length);
  assertEquals("wrong type of cache error", 
               InsanityType.SUBREADER,
               insanity[0].getType());
  assertEquals("wrong number of entries in cache error", 3,
               insanity[0].getCacheEntries().length);

  // we expect bad things, don't let tearDown complain about them
  cache.purgeAllCaches();
}
Project: NYBC    File: TrieField.java
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
  field.checkFieldCacheSource(qparser);
  switch (type) {
    case INTEGER:
      return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
    case FLOAT:
      return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
    case DATE:
      return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );        
    case LONG:
      return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
    case DOUBLE:
      return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
    default:
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
  }
}
Project: NYBC    File: TestFunctionQuery.java
/**
 * some platforms don't allow quote characters in filenames, so 
 * in addition to testExternalFieldValueSourceParser above, test a field 
 * name with quotes in it that does NOT use ExternalFileField
 * @see #testExternalFieldValueSourceParser
 */
@Test
public void testFieldValueSourceParser() {
  clearIndex();

  String field = "CoMpleX \" fieldName _f";
  String fieldAsFunc = "field(\"CoMpleX \\\" fieldName _f\")";

  float[] ids = {100,-4,0,10,25,5,77,1};

  createIndex(field, ids);

  // test identity (straight field value)
  singleTest(fieldAsFunc, "\0", 
             100,100,  -4,-4,  0,0,  10,10,  25,25,  5,5,  77,77,  1,1);
  singleTest(fieldAsFunc, "sqrt(\0)", 
             100,10,  25,5,  0,0,   1,1);
  singleTest(fieldAsFunc, "log(\0)",  1,0);

  FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity    
}
Project: NYBC    File: TestRandomFaceting.java
@Test
public void testRandomFaceting() throws Exception {
  try {
    Random rand = random();
    int iter = atLeast(100);
    init();
    addMoreDocs(0);

    for (int i=0; i<iter; i++) {
      doFacetTests();

      if (rand.nextInt(100) < 5) {
        init();
      }

      addMoreDocs(rand.nextInt(indexSize) + 1);

      if (rand.nextInt(100) < 50) {
        deleteSomeDocs();
      }
    }
  } finally {
    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
  }
}
Project: read-open-source-code    File: ReverseOrdFieldSource.java
@Override
public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
  final IndexReader topReader = ReaderUtil.getTopLevelContext(readerContext).reader();
  final AtomicReader r = SlowCompositeReaderWrapper.wrap(topReader);
  final int off = readerContext.docBase;

  final SortedDocValues sindex = FieldCache.DEFAULT.getTermsIndex(r, field);
  final int end = sindex.getValueCount();

  return new IntDocValues(this) {
    @Override
    public int intVal(int doc) {
      return (end - sindex.getOrd(doc+off) - 1);
    }
  };
}
Project: search-core    File: TestRandomFaceting.java
@Test
public void testRandomFaceting() throws Exception {
  try {
    Random rand = random();
    int iter = atLeast(100);
    init();
    addMoreDocs(0);

    for (int i=0; i<iter; i++) {
      doFacetTests();

      if (rand.nextInt(100) < 5) {
        init();
      }

      addMoreDocs(rand.nextInt(indexSize) + 1);

      if (rand.nextInt(100) < 50) {
        deleteSomeDocs();
      }
    }
  } finally {
    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
  }
}
Project: search-core    File: TestRandomDVFaceting.java
@Test
public void testRandomFaceting() throws Exception {
  try {
    Random rand = random();
    int iter = atLeast(100);
    init();
    addMoreDocs(0);

    for (int i=0; i<iter; i++) {
      doFacetTests();

      if (rand.nextInt(100) < 5) {
        init();
      }

      addMoreDocs(rand.nextInt(indexSize) + 1);

      if (rand.nextInt(100) < 50) {
        deleteSomeDocs();
      }
    }
  } finally {
    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
  }
}
Project: read-open-source-code    File: TermAllGroupsCollector.java
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
  index = FieldCache.DEFAULT.getTermsIndex(context.reader(), groupField);

  // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
  ordSet.clear();
  for (BytesRef countedGroup : groups) {
    if (countedGroup == null) {
      ordSet.put(-1);
    } else {
      int ord = index.lookupTerm(countedGroup);
      if (ord >= 0) {
        ordSet.put(ord);
      }
    }
  }
}
Project: read-open-source-code    File: TrieField.java
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
  field.checkFieldCacheSource(qparser);
  switch (type) {
    case INTEGER:
      return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
    case FLOAT:
      return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
    case DATE:
      return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );        
    case LONG:
      return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
    case DOUBLE:
      return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
    default:
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
  }
}
Project: linden    File: LindenUtil.java
private static boolean actualContain(AtomicReader reader, String field, int locDocId) {
  try {
    // index really contains such field of this doc
    return FieldCache.DEFAULT.getDocsWithField(reader, field).get(locDocId);
  } catch (IOException e) {
    return false;
  }
}
Project: query-autofiltering-component    File: QueryAutoFilteringComponent.java
private void buildFieldMap( ResponseBuilder rb ) throws IOException {
  Log.debug( "buildFieldMap" );
  SolrIndexSearcher searcher = rb.req.getSearcher();
  // build a synonym map from the SortedDocValues -
  // for each field value: lower case, stemmed, lookup synonyms from synonyms.txt - map to fieldValue
  SynonymMap.Builder fieldBuilder = new SynonymMap.Builder( true );
  SynonymMap.Builder termBuilder = new SynonymMap.Builder( true );

  ArrayList<String> searchFields = getStringFields( searcher );

  for (String searchField : searchFields ) {
    Log.debug( "adding searchField " + searchField );
    CharsRef fieldChars = new CharsRef( searchField );
    SortedSetDocValues sdv = FieldCache.DEFAULT.getDocTermOrds( searcher.getAtomicReader( ), searchField );
    if (sdv == null) continue;
    Log.debug( "got SortedSetDocValues for " + searchField );
    TermsEnum te = sdv.termsEnum();
    while (te.next() != null) {
      BytesRef term = te.term();
      String fieldValue = term.utf8ToString( );
      addTerm ( fieldChars, fieldValue, fieldBuilder, termBuilder );
    }
  }

  addDistributedTerms( rb, fieldBuilder, termBuilder, searchFields );

  fieldMap = fieldBuilder.build( );
  termMap = termBuilder.build( );
}
Project: search    File: TestPerfTasksLogic.java
public void testDocMakerThreadSafety() throws Exception {
  // 1. alg definition (required in every "logic" test)
  String algLines[] = {
      "# ----- properties ",
      "content.source=org.apache.lucene.benchmark.byTask.feeds.SortableSingleDocSource",
      "doc.term.vector=false",
      "log.step.AddDoc=10000",
      "content.source.forever=true",
      "directory=RAMDirectory",
      "doc.reuse.fields=false",
      "doc.stored=false",
      "doc.tokenized=false",
      "doc.index.props=true",
      "# ----- alg ",
      "CreateIndex",
      "[ { AddDoc > : 250 ] : 4",
      "CloseIndex",
  };

  // 2. we test this value later
  CountingSearchTestTask.numSearches = 0;

  // 3. execute the algorithm  (required in every "logic" test)
  Benchmark benchmark = execBenchmark(algLines);

  DirectoryReader r = DirectoryReader.open(benchmark.getRunData().getDirectory());
  SortedDocValues idx = FieldCache.DEFAULT.getTermsIndex(SlowCompositeReaderWrapper.wrap(r), "country");
  final int maxDoc = r.maxDoc();
  assertEquals(1000, maxDoc);
  for(int i=0;i<1000;i++) {
    assertTrue("doc " + i + " has null country", idx.getOrd(i) != -1);
  }
  r.close();
}
Project: search    File: DocTermsIndexDocValues.java
public DocTermsIndexDocValues(ValueSource vs, AtomicReaderContext context, String field) throws IOException {
  try {
    termsIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), field);
  } catch (RuntimeException e) {
    throw new DocTermsIndexException(field, e);
  }
  this.vs = vs;
}