Java class org.apache.lucene.search.SearcherManager: example source code

Project: elasticsearch_my    File: MockEngineSupport.java
public Engine.Searcher wrapSearcher(String source, Engine.Searcher engineSearcher, IndexSearcher searcher, SearcherManager manager) {
    final AssertingIndexSearcher assertingIndexSearcher = newSearcher(source, searcher, manager);
    assertingIndexSearcher.setSimilarity(searcher.getSimilarity(true));
    // pass the original searcher to the super.newSearcher() method to make sure this is the searcher that will
    // be released later on. If we wrap an index reader here must not pass the wrapped version to the manager
    // on release otherwise the reader will be closed too early. - good news, stuff will fail all over the place if we don't get this right here
    AssertingSearcher assertingSearcher = new AssertingSearcher(assertingIndexSearcher, engineSearcher, shardId, logger) {
        @Override
        public void close() {
            try {
                searcherCloseable.remove(this);
            } finally {
                super.close();
            }
        }
    };
    searcherCloseable.add(assertingSearcher, engineSearcher.source());
    return assertingSearcher;
}
Project: elasticsearch_my    File: Engine.java
/**
 * Read the last segments info from the commit pointed to by the searcher manager
 */
protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException {
    IndexSearcher searcher = sm.acquire();
    try {
        IndexCommit latestCommit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit();
        return Lucene.readSegmentInfos(latestCommit);
    } catch (IOException e) {
        // Fall back to reading from the store if reading from the commit fails
        try {
            return store.readLastCommittedSegmentsInfo();
        } catch (IOException e2) {
            e2.addSuppressed(e);
            throw e2;
        }
    } finally {
        sm.release(searcher);
    }
}
Project: Elasticsearch    File: Engine.java
/**
 * Read the last segments info from the commit pointed to by the searcher manager
 */
protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException {
    IndexSearcher searcher = sm.acquire();
    try {
        IndexCommit latestCommit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit();
        return Lucene.readSegmentInfos(latestCommit);
    } catch (IOException e) {
        // Fall back to reading from the store if reading from the commit fails
        try {
            return store.readLastCommittedSegmentsInfo();
        } catch (IOException e2) {
            e2.addSuppressed(e);
            throw e2;
        }
    } finally {
        sm.release(searcher);
    }
}
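Both Engine.java variants above follow the core SearcherManager contract: every IndexSearcher obtained with acquire() must be handed back with release() in a finally block and must not be used afterwards. A minimal usage sketch of that contract, assuming a pre-8 Lucene where TopDocs.totalHits is numeric; the class, method, field name, and query value below are placeholders rather than code from any project listed on this page:

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

public final class SearcherManagerUsageSketch {

    /** Counts matches for a single term, always releasing the acquired searcher. */
    public static long countMatches(SearcherManager manager, String field, String value) throws IOException {
        manager.maybeRefresh();                      // pick up recent index changes if no other thread is already refreshing
        IndexSearcher searcher = manager.acquire();  // reference-counted searcher
        try {
            TopDocs hits = searcher.search(new TermQuery(new Term(field, value)), 10);
            return hits.totalHits;
        } finally {
            manager.release(searcher);               // never touch the searcher after this point
        }
    }
}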
Project: gitplex-mit    File: DefaultSearchManager.java
@Transactional
@Listen
public void on(EntityRemoved event) {
    if (event.getEntity() instanceof Project) {
        dao.doAfterCommit(new Runnable() {

            @Override
            public void run() {
                synchronized (searcherManagers) {
                    Long projectId = event.getEntity().getId();                     
                    SearcherManager searcherManager = searcherManagers.remove(projectId);
                    if (searcherManager != null) {
                        try {
                            searcherManager.close();
                        } catch (IOException e) {
                            Throwables.propagate(e);
                        }
                    }
                }
            }

        });
    }
}
Project: tripod    File: LuceneService.java
public LuceneService(final SearcherManager searcherManager,
                     final Analyzer analyzer,
                     final LuceneQueryTransformer queryTransformer,
                     final LuceneDocumentTransformer<QR> documentTransformer,
                     final SortTypeFactory sortTypeFactory,
                     final Integer maxSearchResults) {
    this.searcherManager = searcherManager;
    this.analyzer = analyzer;
    this.queryTransformer = queryTransformer;
    this.documentTransformer = documentTransformer;
    this.sortTypeFactory = sortTypeFactory;
    this.maxSearchResults = maxSearchResults;
    this.fieldDocSerializer = new StandardFieldDocSerializer();

    Validate.notNull(this.searcherManager);
    Validate.notNull(this.queryTransformer);
    Validate.notNull(this.documentTransformer);
    Validate.notNull(this.sortTypeFactory);
    Validate.notNull(this.maxSearchResults);
}
Project: Wiab.pro    File: LuceneSearchImpl.java
@Override
public Digest findWave(WaveId waveId, ParticipantId viewer) {
  TermQuery query = new TermQuery(new Term(IndexCondition.Field.WAVE_ID.toString(), waveId.serialise()));
  SearcherManager searcherManager = nrtManager.getSearcherManager(true);
  IndexSearcher indexSearcher = searcherManager.acquire();
  try {
    TopDocs hints = indexSearcher.search(query, 1);
    if (hints.totalHits != 0) {
      ScoreDoc hint = hints.scoreDocs[0];
      return parseDigest(indexSearcher.doc(hint.doc), null);
    }
  } catch (IOException ex) {
    LOG.log(Level.SEVERE, "Search wave " + waveId.serialise() + " failed", ex);
  } finally {
    try {
      // always hand the acquired searcher back to the manager
      searcherManager.release(indexSearcher);
    } catch (IOException e) {
      LOG.log(Level.SEVERE, "Failed to release searcher for wave " + waveId.serialise(), e);
    }
  }
  return null;
}
Project: search    File: AnalyzingInfixSuggester.java
/** Create a new instance, loading from a previously built
 *  AnalyzingInfixSuggester directory, if it exists.  This directory must be
 *  private to the infix suggester (i.e., not an external
 *  Lucene index).  Note that {@link #close}
 *  will also close the provided directory.
 *
 *  @param minPrefixChars Minimum number of leading characters
 *     before PrefixQuery is used (default 4).
 *     Prefixes shorter than this are indexed as character
 *     ngrams (increasing index size but making lookups
 *     faster).
 */
public AnalyzingInfixSuggester(Version matchVersion, Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars) throws IOException {

  if (minPrefixChars < 0) {
    throw new IllegalArgumentException("minPrefixChars must be >= 0; got: " + minPrefixChars);
  }

  this.queryAnalyzer = queryAnalyzer;
  this.indexAnalyzer = indexAnalyzer;
  this.matchVersion = matchVersion;
  this.dir = dir;
  this.minPrefixChars = minPrefixChars;

  if (DirectoryReader.indexExists(dir)) {
    // Already built; open it:
    writer = new IndexWriter(dir,
                             getIndexWriterConfig(matchVersion, getGramAnalyzer(), IndexWriterConfig.OpenMode.APPEND));
    searcherMgr = new SearcherManager(writer, true, null);
  }
}
Project: search    File: SearcherTaxonomyManager.java
@Override
protected SearcherAndTaxonomy refreshIfNeeded(SearcherAndTaxonomy ref) throws IOException {
  // Must re-open searcher first, otherwise we may get a
  // new reader that references ords not yet known to the
  // taxonomy reader:
  final IndexReader r = ref.searcher.getIndexReader();
  final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
  if (newReader == null) {
    return null;
  } else {
    DirectoryTaxonomyReader tr = TaxonomyReader.openIfChanged(ref.taxonomyReader);
    if (tr == null) {
      ref.taxonomyReader.incRef();
      tr = ref.taxonomyReader;
    } else if (taxoWriter != null && taxoWriter.getTaxonomyEpoch() != taxoEpoch) {
      IOUtils.close(newReader, tr);
      throw new IllegalStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
    }

    return new SearcherAndTaxonomy(SearcherManager.getSearcher(searcherFactory, newReader), tr);
  }
}
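The refreshIfNeeded() override above deliberately reopens the index reader before the taxonomy reader, so any facet ordinal visible to the new searcher is already known to the taxonomy. Callers consume the pair through acquire()/release() on the manager; a minimal hedged sketch of that consumer side, where manager is assumed to be a SearcherTaxonomyManager instance and the query is a placeholder:

SearcherTaxonomyManager.SearcherAndTaxonomy ref = manager.acquire();
try {
    // ref.searcher and ref.taxonomyReader form a mutually consistent pair for faceted search
    TopDocs hits = ref.searcher.search(new MatchAllDocsQuery(), 10);
} finally {
    manager.release(ref);
}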
Project: flow    File: LuceneFiler.java
public LuceneFiler(@Nonnull Filer delegate, @Nonnull Config config) throws IOException {
    super(delegate);

    String path = config.getString("index.path");
    maxAge = config.getTime("index.maxAge", "-1");
    double maxMergeMb = config.getDouble("index.maxMergeMb", 4);
    double maxCachedMb = config.getDouble("index.maxCacheMb", 64);
    long targetMaxStale = config.getTime("index.targetMaxStale", "5s");
    long targetMinStale = config.getTime("index.targetMinStale", "1s");

    Directory dir = FSDirectory.open(new File(path).toPath());
    NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, maxMergeMb, maxCachedMb);
    IndexWriterConfig writerConfig = new IndexWriterConfig(null);
    writerConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);

    writer = new TrackingIndexWriter(new IndexWriter(cachingDir, writerConfig));
    manager = new SearcherManager(writer.getIndexWriter(), true, new SearcherFactory());
    thread = new ControlledRealTimeReopenThread<>(writer, manager, targetMaxStale, targetMinStale);
    thread.start();
}
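The constructor above wires a TrackingIndexWriter, a SearcherManager, and a ControlledRealTimeReopenThread together, which is the usual near-real-time setup: the tracking writer hands back a generation for each change, and the reopen thread can be asked to wait until a searcher covering that generation has been published. A hedged sketch of how a write path might use the three fields created above; the method, the "id" term, and the Document argument are illustrative only and are not part of the original LuceneFiler:

// Hypothetical indexing helper, assumed to live inside LuceneFiler.
private void updateAndWait(String id, Document doc) throws IOException, InterruptedException {
    long gen = writer.updateDocument(new Term("id", id), doc); // TrackingIndexWriter returns a generation number
    thread.waitForGeneration(gen);                             // block until the reopen thread has published a covering searcher
    IndexSearcher searcher = manager.acquire();
    try {
        // the document written above is now visible to this searcher
    } finally {
        manager.release(searcher);
    }
}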
Project: read-open-source-code    File: SearcherTaxonomyManager.java
@Override
protected SearcherAndTaxonomy refreshIfNeeded(SearcherAndTaxonomy ref) throws IOException {
  // Must re-open searcher first, otherwise we may get a
  // new reader that references ords not yet known to the
  // taxonomy reader:
  final IndexReader r = ref.searcher.getIndexReader();
  final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
  if (newReader == null) {
    return null;
  } else {
    DirectoryTaxonomyReader tr = TaxonomyReader.openIfChanged(ref.taxonomyReader);
    if (tr == null) {
      ref.taxonomyReader.incRef();
      tr = ref.taxonomyReader;
    } else if (taxoWriter != null && taxoWriter.getTaxonomyEpoch() != taxoEpoch) {
      IOUtils.close(newReader, tr);
      throw new IllegalStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
    }

    return new SearcherAndTaxonomy(SearcherManager.getSearcher(searcherFactory, newReader), tr);
  }
}
Project: read-open-source-code    File: AnalyzingInfixSuggester.java
/** Create a new instance, loading from a previously built
 *  AnalyzingInfixSuggester directory, if it exists.  This directory must be
 *  private to the infix suggester (i.e., not an external
 *  Lucene index).  Note that {@link #close}
 *  will also close the provided directory.
 *
 *  @param minPrefixChars Minimum number of leading characters
 *     before PrefixQuery is used (default 4).
 *     Prefixes shorter than this are indexed as character
 *     ngrams (increasing index size but making lookups
 *     faster).
 */
public AnalyzingInfixSuggester(Version matchVersion, Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars) throws IOException {

  if (minPrefixChars < 0) {
    throw new IllegalArgumentException("minPrefixChars must be >= 0; got: " + minPrefixChars);
  }

  this.queryAnalyzer = queryAnalyzer;
  this.indexAnalyzer = indexAnalyzer;
  this.matchVersion = matchVersion;
  this.dir = dir;
  this.minPrefixChars = minPrefixChars;

  if (DirectoryReader.indexExists(dir)) {
    // Already built; open it:
    writer = new IndexWriter(dir,
                             getIndexWriterConfig(matchVersion, getGramAnalyzer(), IndexWriterConfig.OpenMode.APPEND));
    searcherMgr = new SearcherManager(writer, true, null);
  }
}
Project: DagYo    File: NodeAliasLuceneModule.java
@Override
public void setDAG(DirectedAcyclicGraph directedAcyclicGraph) {
    super.setDAG(directedAcyclicGraph);

    // Connect to the Lucene DB
    try {
        Analyzer analyser = new KeywordAnalyzer();
        IndexWriterConfig config = new IndexWriterConfig(analyser);
        config.setOpenMode(OpenMode.CREATE_OR_APPEND);
        Path path = DAGModule.moduleFile(directedAcyclicGraph.rootDir_,
                INDEX_FOLDER).toPath();
        Directory directory = FSDirectory.open(path);
        // Directory directory = new RAMDirectory();
        writer_ = new IndexWriter(directory, config);

        // Searching
        parser_ = new QueryParser(LOWERCASE_FIELD, analyser);
        manager_ = new SearcherManager(writer_, true, new SearcherFactory());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: Maskana-Gestor-de-Conocimiento    File: SearcherTaxonomyManager.java
@Override
protected SearcherAndTaxonomy refreshIfNeeded(SearcherAndTaxonomy ref) throws IOException {
  // Must re-open searcher first, otherwise we may get a
  // new reader that references ords not yet known to the
  // taxonomy reader:
  final IndexReader r = ref.searcher.getIndexReader();
  final IndexReader newReader = DirectoryReader.openIfChanged((DirectoryReader) r);
  if (newReader == null) {
    return null;
  } else {
    DirectoryTaxonomyReader tr = TaxonomyReader.openIfChanged(ref.taxonomyReader);
    if (tr == null) {
      ref.taxonomyReader.incRef();
      tr = ref.taxonomyReader;
    } else if (taxoWriter.getTaxonomyEpoch() != taxoEpoch) {
      IOUtils.close(newReader, tr);
      throw new IllegalStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
    }

    return new SearcherAndTaxonomy(SearcherManager.getSearcher(searcherFactory, newReader), tr);
  }
}
Project: elasticsearch_my    File: MockEngineSupport.java
public AssertingIndexSearcher newSearcher(String source, IndexSearcher searcher, SearcherManager manager) throws EngineException {
    IndexReader reader = searcher.getIndexReader();
    IndexReader wrappedReader = reader;
    assert reader != null;
    if (reader instanceof DirectoryReader && mockContext.wrapReader) {
        wrappedReader = wrapReader((DirectoryReader) reader);
    }
    // this executes basic query checks and asserts that weights are normalized only once etc.
    final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader);
    assertingIndexSearcher.setSimilarity(searcher.getSimilarity(true));
    assertingIndexSearcher.setQueryCache(filterCache);
    assertingIndexSearcher.setQueryCachingPolicy(filterCachingPolicy);
    return assertingIndexSearcher;
}
Project: elasticsearch_my    File: ShadowEngine.java
public ShadowEngine(EngineConfig engineConfig)  {
    super(engineConfig);
    if (engineConfig.getRefreshListeners() != null) {
        throw new IllegalArgumentException("ShadowEngine doesn't support RefreshListeners");
    }
    SearcherFactory searcherFactory = new EngineSearcherFactory(engineConfig);
    final long nonexistentRetryTime = engineConfig.getIndexSettings().getSettings()
            .getAsTime(NONEXISTENT_INDEX_RETRY_WAIT, DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT)
            .getMillis();
    try {
        DirectoryReader reader = null;
        store.incRef();
        boolean success = false;
        try {
            if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) {
                reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId);
                this.searcherManager = new SearcherManager(reader, searcherFactory);
                this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
                success = true;
            } else {
                throw new IllegalStateException("failed to open a shadow engine after" +
                        nonexistentRetryTime + "ms, " +
                        "directory is not an index");
            }
        } catch (Exception e) {
            logger.warn("failed to create new reader", e);
            throw e;
        } finally {
            if (success == false) {
                IOUtils.closeWhileHandlingException(reader);
                store.decRef();
            }
        }
    } catch (IOException ex) {
        throw new EngineCreationFailureException(shardId, "failed to open index reader", ex);
    }
    logger.trace("created new ShadowEngine");
}
Project: Elasticsearch    File: ShadowEngine.java
public ShadowEngine(EngineConfig engineConfig)  {
    super(engineConfig);
    SearcherFactory searcherFactory = new EngineSearcherFactory(engineConfig);
    final long nonexistentRetryTime = engineConfig.getIndexSettings()
            .getAsTime(NONEXISTENT_INDEX_RETRY_WAIT, DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT)
            .getMillis();
    try {
        DirectoryReader reader = null;
        store.incRef();
        boolean success = false;
        try {
            if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) {
                reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId);
                this.searcherManager = new SearcherManager(reader, searcherFactory);
                this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
                success = true;
            } else {
                throw new IllegalStateException("failed to open a shadow engine after" +
                        nonexistentRetryTime + "ms, " +
                        "directory is not an index");
            }
        } catch (Throwable e) {
            logger.warn("failed to create new reader", e);
            throw e;
        } finally {
            if (success == false) {
                IOUtils.closeWhileHandlingException(reader);
                store.decRef();
            }
        }
    } catch (IOException ex) {
        throw new EngineCreationFailureException(shardId, "failed to open index reader", ex);
    }
    logger.trace("created new ShadowEngine");
}
Project: gitplex-mit    File: DefaultSearchManager.java
@Listen
public void on(SystemStopping event) {
    synchronized (searcherManagers) {
        for (SearcherManager searcherManager: searcherManagers.values()) {
            try {
                searcherManager.close();
            } catch (IOException e) {
                Throwables.propagate(e);
            }
        }
        searcherManagers.clear();
    }
}
Project: tripod    File: LuceneService.java
public LuceneService(final SearcherManager searcherManager,
                     final Analyzer analyzer,
                     final LuceneQueryTransformer queryTransformer,
                     final LuceneDocumentTransformer<QR> documentTransformer,
                     final SortTypeFactory sortTypeFactory) {
    this(searcherManager, analyzer, queryTransformer, documentTransformer,
            sortTypeFactory, DEFAULT_MAX_SEARCH_RESULTS);
}
Project: tripod    File: LuceneRetrievalService.java
public LuceneRetrievalService(final SearcherManager searcherManager,
                              final Analyzer analyzer,
                              final LuceneQueryTransformer queryTransformer,
                              final LuceneDocumentTransformer<E> documentTransformer,
                              final SortTypeFactory sortTypeFactory) {
    this.searcherManager = searcherManager;
    this.analyzer = analyzer;
    this.queryTransformer = queryTransformer;
    this.documentTransformer = documentTransformer;
    this.sortTypeFactory = sortTypeFactory;
    Validate.notNull(this.searcherManager);
    Validate.notNull(this.queryTransformer);
    Validate.notNull(this.documentTransformer);
    Validate.notNull(this.sortTypeFactory);
}
Project: tripod    File: LuceneQueryService.java
public LuceneQueryService(final SearcherManager searcherManager,
                          final Analyzer analyzer,
                          final LuceneQueryTransformer queryTransformer,
                          final LuceneDocumentTransformer<QR> documentTransformer,
                          final SortTypeFactory sortTypeFactory) {
    super(searcherManager, analyzer, queryTransformer, documentTransformer, sortTypeFactory);
}
Project: tripod    File: LuceneQueryService.java
public LuceneQueryService(final SearcherManager searcherManager,
                          final Analyzer analyzer,
                          final LuceneQueryTransformer queryTransformer,
                          final LuceneDocumentTransformer<QR> documentTransformer,
                          final SortTypeFactory sortTypeFactory,
                          final Integer maxSearchResults) {
    super(searcherManager, analyzer, queryTransformer, documentTransformer, sortTypeFactory, maxSearchResults);
}
Project: tripod    File: ExampleRetrievalService.java
public ExampleRetrievalService(final SearcherManager searcherManager,
                               final String defaultField,
                               final Analyzer analyzer) {
    super(searcherManager, analyzer,
            new StandardLuceneQueryTransformer(defaultField, analyzer),
            new ExampleDocumentTransformer(),
            new ExampleFieldSortTypeFactory());
}
Project: tripod    File: ExampleSummaryQueryService.java
public ExampleSummaryQueryService(final SearcherManager searcherManager,
                                  final String defaultField,
                                  final Analyzer analyzer,
                                  final FacetsConfig facetsConfig) {
    super(searcherManager, analyzer,
            new StandardLuceneQueryTransformer(defaultField, analyzer, facetsConfig),
            new ExampleSummaryTransformer(),
            new ExampleFieldSortTypeFactory());
}
Project: DoSeR-Disambiguation    File: AbstractKnowledgeBase.java
AbstractKnowledgeBase(String uri, boolean dynamic, Similarity sim) {
    super();
    this.indexUri = uri;
    this.dynamic = dynamic;

    File indexDir = new File(indexUri);
    Directory dir;
    try {
        dir = FSDirectory.open(indexDir);
        this.manager = new SearcherManager(dir, new SearcherFactory());
    } catch (IOException e) {
        logger.error("IOException in "+AbstractKnowledgeBase.class.getName(), e);
    }
}
Project: sql-layer    File: FullTextCursor.java
public FullTextCursor(QueryContext context, HKeyRowType rowType, 
                      SearcherManager searcherManager, Query query, int limit) {
    this.context = context;
    this.rowType = rowType;
    this.searcherManager = searcherManager;
    this.query = query;
    this.limit = limit;
    //adapter = context.getStore();
    searcher = searcherManager.acquire();
}
Project: che    File: LuceneSearcher.java
private void doInitialize() throws ServerException {
  try {
    luceneIndexWriter = new IndexWriter(makeDirectory(), new IndexWriterConfig(makeAnalyzer()));
    searcherManager = new SearcherManager(luceneIndexWriter, true, true, new SearcherFactory());
    closed = false;
  } catch (IOException e) {
    throw new ServerException(e);
  }
}
Project: LuceneDB    File: LuceneKVSBase.java
/**
 * constructor
 * @param directory Lucene Directory
 * @param file Index File Path
 * @param isVolatile isVolatile, if true, delete file automatically
 * @throws IOException IOException
 */
public LuceneKVSBase(Directory directory, File file, boolean isVolatile) throws IOException {
    this.directory = directory;
    this.file = file;
    Analyzer analyzer = new KeywordAnalyzer();
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    this.writer = new IndexWriter(directory, config);
    // LuceneObjectKVS avoid "no segments* file found in RAMDirectory" Exception
    this.writer.commit();

    this.manager = new SearcherManager(directory, new KVSSearcherFactory());
    this.isAutoCommit = true;
    this.isAsyncReflesh = true;

    this.numDocs = new AtomicInteger(writer.numDocs());
    this.isVolatile = isVolatile;

    @SuppressWarnings("rawtypes")
    final LuceneKVSBase own = this;
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                close(own);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
}
Project: LuceneDB    File: LuceneKVSIterator.java
/**
 * constructor
 * @param manager lucene searcher manager
 * @param parent parent map
 * @throws IOException IOException
 */
public LuceneKVSIterator(SearcherManager manager, LuceneKVSBase<K, V> parent) throws IOException {
    this.manager = manager;
    this.parent = parent;

    manager.maybeRefreshBlocking();

    searcher = manager.acquire();
    reader = searcher.getIndexReader();
    maxDoc = reader.maxDoc();
    size = reader.numDocs();
    docCount = 0;
    entryCount = 0;
}
Project: LuceneDB    File: LuceneValuesDB.java
/**
 * constructor
 * @param directory LuceneDirectory
 * @param indexFile Index File Path
 * @param isVolatile if true, delete file automatically
 * @param analyzer LuceneIndexAnalyzer
 * @throws IOException IOException
 */
public LuceneValuesDB(Directory directory, File indexFile, boolean isVolatile, Analyzer analyzer) throws IOException {
    this.directory = directory;
    this.indexFile = indexFile;

    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    this.writer = new IndexWriter(directory, config);
    this.writer.commit();

    this.manager = new SearcherManager(directory, new SearcherFactory());

    this.isVolatile = isVolatile;

    if (isVolatile) {
        final LuceneValuesDB own = this;
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    close(own);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
}
Project: r01fb    File: LuceneIndex.java
/**
 * Constructor based on an instance of the type responsible of the lucene index persistence
 */
@Inject
public LuceneIndex(final Directory luceneDirectory,
                   final Analyzer analyzer) {
    try {
        // [1]: Create the indexWriter
        _indexWriter = new IndexWriter(luceneDirectory,
                                       new IndexWriterConfig(LuceneConstants.VERSION,
                                                             analyzer));

        // [2a]: Create the TrackingIndexWriter to track changes to the delegated previously created IndexWriter 
        _trackingIndexWriter = new TrackingIndexWriter(_indexWriter);

        // [2b]: Create an IndexSearcher ReferenceManager to safely share IndexSearcher instances across
        //       multiple threads
        _indexSearcherReferenceManager = new SearcherManager(_indexWriter,
                                                             true,
                                                             null);

        // [3]: Create the ControlledRealTimeReopenThread that reopens the index periodically having into 
        //      account the changes made to the index and tracked by the TrackingIndexWriter instance
        //      The index is refreshed every 60sc when nobody is waiting 
        //      and every 100 millis whenever is someone waiting (see search method)
        //      (see http://lucene.apache.org/core/4_3_0/core/org/apache/lucene/search/NRTManagerReopenThread.html)
        _indexSearcherReopenThread = new ControlledRealTimeReopenThread<IndexSearcher>(_trackingIndexWriter,
                                                                                       _indexSearcherReferenceManager,
                                                                                       60.00,   // when there is nobody waiting
                                                                                       0.1);    // when there is someone waiting
        _indexSearcherReopenThread.start(); // start the refresher thread
    } catch (IOException ioEx) {
        throw new IllegalStateException("Lucene index could not be created: " + ioEx.getMessage());
    }
}
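The comments above describe how the ControlledRealTimeReopenThread keeps the SearcherManager fresh while the index is in use; at shutdown the usual order is the reverse of construction: stop the reopen thread, close the reference manager, then close the writer. A hedged sketch of such a close method, which is not part of the original class:

// Hypothetical shutdown counterpart to the constructor above.
public void close() throws IOException {
    _indexSearcherReopenThread.close();        // stop the periodic reopen loop
    _indexSearcherReferenceManager.close();    // drop the last cached IndexSearcher
    _indexWriter.close();                      // commit pending changes and close the index
}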
Project: neo4j-lucene5-index    File: LuceneBatchInserterIndex.java
private static SearcherManager instantiateSearcherManager( LuceneIndexWriter writer )
{
    try
    {
        return writer.createSearcherManager();
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
}
Project: neo4j-lucene5-index    File: AllNodesCollector.java
static List<Long> getAllNodes( DirectoryFactory dirFactory, File indexDir, Object propertyValue ) throws IOException
{
    try ( Directory directory = dirFactory.open( indexDir );
          SearcherManager manager = new SearcherManager( directory, new SearcherFactory() ))
    {
        IndexSearcher searcher = manager.acquire();
        try
        {
            List<Long> nodes = new ArrayList<>();
            LuceneDocumentStructure documentStructure = new LuceneDocumentStructure();
            Query query = documentStructure.newSeekQuery( propertyValue );
            searcher.search( query, new AllNodesCollector( documentStructure, nodes ) );
            return nodes;
        }
        finally
        {
            // release the acquired searcher before the manager is closed
            manager.release( searcher );
        }
    }
}
Project: DoSeR    File: AbstractKnowledgeBase.java
AbstractKnowledgeBase(String uri, boolean dynamic, Similarity sim) {
    super();
    this.indexUri = uri;
    this.dynamic = dynamic;

    File indexDir = new File(indexUri);
    Directory dir;
    try {
        dir = FSDirectory.open(indexDir);
        this.manager = new SearcherManager(dir, new SearcherFactory());
    } catch (IOException e) {
        logger.error("IOException in "+AbstractKnowledgeBase.class.getName(), e);
    }
}
Project: perecoder    File: LuceneIndexManagerTemplate.java
@Override
public SearcherManager createSearcherManager() {
    checkConfiguration();
    try {
        return new SearcherManager(directory, null);
    } catch (IOException ex) {
        throw new RuntimeException(ex.getMessage(), ex);
    }
}
Project: lucene-plugin    File: LuceneIndexBean.java
@PostConstruct
public void init() throws IOException
{
  log.finer(String.format("init("
                          + System.identityHashCode(this)
                          + ") "
                          + _manager));

  if (!_isInitialized.compareAndSet(false, true))
    return;

  String baratineData
    = RootDirectorySystem.getCurrentDataDirectory()
                         .toAbsolutePath()
                         .toString();

  int pod = 0;//getManager().getNode().getNodeIndex();

  _indexDirectory = baratineData
                    + File.separatorChar
                    + "lucene"
                    + File.separatorChar
                    + "node-" + pod;

  initIndexWriter();

  _searcherManager = new SearcherManager(getIndexWriter(), true, this);

  log.finer(String.format("%1$s init()", this));
}
Project: elasticsearch-extension    File: ExtendedEngine.java
@Override
protected SearcherManager getSearcherManager() {
    try {
        return (SearcherManager) getSearcherManagerMethod.invoke(engine);
    } catch (IllegalAccessException | IllegalArgumentException
            | InvocationTargetException e) {
        throw new ExtendedEngineException(
                "Cannot invoke getSearcherManager() of " + engine, e);
    }
}
Project: Pydev    File: IndexApi.java
public void init(boolean applyAllDeletes) throws IOException {
    this.analyzer = new CodeAnalyzer();
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    config.setCommitOnClose(true);
    config.setOpenMode(OpenMode.CREATE_OR_APPEND);
    try {
        writer = new IndexWriter(this.indexDir, config);
    } catch (IOException e) {
        config.setOpenMode(OpenMode.CREATE);
        writer = new IndexWriter(this.indexDir, config);
    }

    searcherFactory = new SearcherFactory();
    searchManager = new SearcherManager(writer, applyAllDeletes, false, searcherFactory);
}
Project: t4f-data    File: AosManagerTest.java
@Test
public void testSearcherManager() throws IOException {
    SearcherManager searcherManager = new SearcherManager(indexWriter.getDirectory(), new SearcherFactory());
    IndexSearcher indexSearcher = null;
    try {
        indexSearcher = searcherManager.acquire();
    }
    finally {
        if (indexSearcher != null) {
            searcherManager.release(indexSearcher);
        }
    }
}