Example source code for the Java class com.intellij.psi.impl.cache.impl.id.LexerBasedIdIndexer

Project: intellij-ce-playground    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  // Reuse and consume a result cached by the other indexer's earlier pass over this FileContent.
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().isInLocalFileSystem(); // same as TodoIndex.getFilter().isAcceptable
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex ? new IdDataConsumer() : null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  // Drive the filter lexer over the whole text; it reports id and TODO occurrences to the consumers as a side effect.
  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry, Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null ? consumer.getResult() : Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap : Collections.<TodoIndexEntry, Integer>emptyMap()
  );
  // Cache the combined result only when both indexes apply, so the second indexer can reuse it.
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}
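
For reference, the only thing a concrete indexer contributes to the loop above is the filter lexer returned by createLexer(OccurrenceConsumer); the scanning itself is done by scanContent. Below is a minimal sketch of such a subclass, assuming LexerBasedIdIndexer leaves createLexer(OccurrenceConsumer) abstract, which is what the calls above rely on. MyLanguageIdIndexer, MyLanguageLexer and MyLanguageFilterLexer are hypothetical placeholder names that a plugin would have to provide, not classes from the projects listed here.

import com.intellij.lexer.Lexer;
import com.intellij.psi.impl.cache.impl.OccurrenceConsumer;
import com.intellij.psi.impl.cache.impl.id.LexerBasedIdIndexer;

// Hypothetical id indexer for a made-up language; only the filter lexer is supplied here.
public class MyLanguageIdIndexer extends LexerBasedIdIndexer {
  @Override
  public Lexer createLexer(final OccurrenceConsumer consumer) {
    // MyLanguageLexer is the language's plain lexer and MyLanguageFilterLexer its
    // filter-lexer wrapper that reports identifiers and TODO occurrences to `consumer`.
    // Both are placeholders to be implemented by the plugin, not real IntelliJ classes.
    return new MyLanguageFilterLexer(new MyLanguageLexer(), consumer);
  }
}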
Project: tools-idea    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().getFileSystem().getProtocol().equals(StandardFileSystems.FILE_PROTOCOL);
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex ? new IdDataConsumer() : null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry, Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null ? consumer.getResult() : Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap : Collections.<TodoIndexEntry, Integer>emptyMap()
  );
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}
Project: consulo    File: BaseFilterLexerUtil.java
public static ScanContent scanContent(FileContent content, IdAndToDoScannerBasedOnFilterLexer indexer) {
  ScanContent data = content.getUserData(scanContentKey);
  if (data != null) {
    content.putUserData(scanContentKey, null);
    return data;
  }

  final boolean needTodo = content.getFile().getFileSystem() instanceof LocalFileSystem;
  final boolean needIdIndex = IdTableBuilding.getFileTypeIndexer(content.getFileType()) instanceof LexerBasedIdIndexer;

  final IdDataConsumer consumer = needIdIndex ? new IdDataConsumer() : null;
  final OccurrenceConsumer todoOccurrenceConsumer = new OccurrenceConsumer(consumer, needTodo);
  final Lexer filterLexer = indexer.createLexer(todoOccurrenceConsumer);
  filterLexer.start(content.getContentAsText());

  while (filterLexer.getTokenType() != null) filterLexer.advance();

  Map<TodoIndexEntry, Integer> todoMap = null;
  if (needTodo) {
    for (IndexPattern indexPattern : IndexPatternUtil.getIndexPatterns()) {
      final int count = todoOccurrenceConsumer.getOccurrenceCount(indexPattern);
      if (count > 0) {
        if (todoMap == null) todoMap = new THashMap<TodoIndexEntry, Integer>();
        todoMap.put(new TodoIndexEntry(indexPattern.getPatternString(), indexPattern.isCaseSensitive()), count);
      }
    }
  }

  data = new ScanContent(
    consumer != null ? consumer.getResult() : Collections.<IdIndexEntry, Integer>emptyMap(),
    todoMap != null ? todoMap : Collections.<TodoIndexEntry, Integer>emptyMap()
  );
  if (needIdIndex && needTodo) content.putUserData(scanContentKey, data);
  return data;
}
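
In all three variants the getUserData/putUserData pair at the start and end acts as a one-shot cache: when a file is accepted by both the id index and the TODO index, whichever indexer runs first stores the combined ScanContent on the FileContent, and the second one picks it up and clears the key instead of lexing the file a second time. The sketch below illustrates that round trip; it assumes ScanContent exposes its two maps as idMap and todoMap fields and that the import paths follow the IntelliJ CE layout, so treat both as assumptions rather than a definitive API reference.

import java.util.Map;

import com.intellij.psi.impl.cache.impl.BaseFilterLexerUtil;
import com.intellij.psi.impl.cache.impl.IdAndToDoScannerBasedOnFilterLexer;
import com.intellij.psi.impl.cache.impl.id.IdIndexEntry;
import com.intellij.psi.impl.cache.impl.todo.TodoIndexEntry;
import com.intellij.util.indexing.FileContent;

// Hypothetical helper illustrating the one-shot cache; not part of the IntelliJ API.
final class ScanContentRoundTrip {
  // First consumer (typically the id index): lexes the file and, when both indexes
  // apply, leaves the combined result cached on the FileContent via putUserData().
  static Map<IdIndexEntry, Integer> idEntries(FileContent content, IdAndToDoScannerBasedOnFilterLexer scanner) {
    return BaseFilterLexerUtil.scanContent(content, scanner).idMap;
  }

  // Second consumer (the TODO index for the same file): getUserData() hits the cached
  // ScanContent, the key is cleared, and the file is not lexed again.
  static Map<TodoIndexEntry, Integer> todoEntries(FileContent content, IdAndToDoScannerBasedOnFilterLexer scanner) {
    return BaseFilterLexerUtil.scanContent(content, scanner).todoMap;
  }
}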