Java class org.apache.lucene.analysis.ValidatingTokenFilter — example source code

Project: search    File: TestRandomChains.java
/**
 * Builds a random chain of token filters on top of {@code tokenizer}, inserting a
 * {@link ValidatingTokenFilter} after the tokenizer and after every filter stage so that
 * attribute/offset violations are caught immediately after the filter that caused them.
 *
 * @param random            source of randomness for picking filters and their arguments
 * @param tokenizer         the tokenizer that feeds the chain
 * @param offsetsAreCorrect whether offsets are still trustworthy entering the chain
 * @return a spec holding the final stream, a textual description of the chain, and the
 *         (possibly downgraded) offsets-correct flag
 */
private TokenFilterSpec newFilterChain(Random random, Tokenizer tokenizer, boolean offsetsAreCorrect) {
  TokenFilterSpec spec = new TokenFilterSpec();
  spec.offsetsAreCorrect = offsetsAreCorrect;
  spec.stream = tokenizer;
  StringBuilder descr = new StringBuilder();
  int numFilters = random.nextInt(5); // 0..4 filter stages
  for (int i = 0; i < numFilters; i++) {

    // Insert ValidatingTF after each stage so we can
    // catch problems right after the TF that "caused"
    // them:
    spec.stream = new ValidatingTokenFilter(spec.stream, "stage " + i, spec.offsetsAreCorrect);

    // Keep drawing random filters until one is successfully constructed:
    while (true) {
      final Constructor<? extends TokenFilter> ctor = tokenfilters.get(random.nextInt(tokenfilters.size()));

      // hack: MockGraph/MockLookahead has assertions that will trip if they follow
      // an offsets violator. so we cant use them after e.g. wikipediatokenizer
      if (!spec.offsetsAreCorrect &&
          (ctor.getDeclaringClass().equals(MockGraphTokenFilter.class)
           || ctor.getDeclaringClass().equals(MockRandomLookaheadTokenFilter.class))) {
        continue;
      }

      // Idiomatic Java array declaration (was C-style "Object args[]"):
      final Object[] args = newFilterArgs(random, spec.stream, ctor.getParameterTypes());
      if (broken(ctor, args)) {
        continue; // known-broken ctor/args combination; re-draw
      }
      final TokenFilter flt = createComponent(ctor, args, descr);
      if (flt != null) {
        // Once a filter with broken offsets enters the chain, offsets stay untrustworthy:
        spec.offsetsAreCorrect &= !brokenOffsets(ctor, args);
        spec.stream = flt;
        break;
      }
    }
  }

  // Insert ValidatingTF after each stage so we can
  // catch problems right after the TF that "caused"
  // them:
  spec.stream = new ValidatingTokenFilter(spec.stream, "last stage", spec.offsetsAreCorrect);

  spec.toString = descr.toString();
  return spec;
}
Project: NYBC    File: TestRandomChains.java
/**
 * Assembles a randomized token-filter chain over {@code tokenizer}. A
 * {@link ValidatingTokenFilter} is wrapped around the stream between every stage (and at
 * the end) so that any attribute violation is reported right after the offending filter.
 *
 * @param random            randomness used to pick filters and their constructor args
 * @param tokenizer         the source tokenizer
 * @param offsetsAreCorrect whether offsets are trustworthy going into the chain
 * @return the assembled spec: final stream, chain description, and offsets flag
 */
private TokenFilterSpec newFilterChain(Random random, Tokenizer tokenizer, boolean offsetsAreCorrect) {
  final StringBuilder description = new StringBuilder();
  final TokenFilterSpec result = new TokenFilterSpec();
  result.offsetsAreCorrect = offsetsAreCorrect;
  result.stream = tokenizer;

  final int stages = random.nextInt(5);
  for (int stage = 0; stage < stages; stage++) {
    // Wrap the current stream so problems surface right after the filter that "caused" them:
    result.stream = new ValidatingTokenFilter(result.stream, "stage " + stage, result.offsetsAreCorrect);

    TokenFilter filter = null;
    while (filter == null) {
      final Constructor<? extends TokenFilter> candidate =
          tokenfilters.get(random.nextInt(tokenfilters.size()));
      final Class<?> declaring = candidate.getDeclaringClass();

      // hack: MockGraph/MockLookahead carry assertions that trip when they follow an
      // offsets violator (e.g. wikipediatokenizer), so skip them in that case.
      final boolean offsetSensitive =
          declaring.equals(MockGraphTokenFilter.class)
              || declaring.equals(MockRandomLookaheadTokenFilter.class);
      if (offsetSensitive && !result.offsetsAreCorrect) {
        continue;
      }

      final Object[] args = newFilterArgs(random, result.stream, candidate.getParameterTypes());
      if (!broken(candidate, args)) {
        filter = createComponent(candidate, args, description);
        if (filter != null) {
          result.offsetsAreCorrect &= !brokenOffsets(candidate, args);
          result.stream = filter;
        }
      }
    }
  }

  // Validate the output of the very last stage as well:
  result.stream = new ValidatingTokenFilter(result.stream, "last stage", result.offsetsAreCorrect);

  result.toString = description.toString();
  return result;
}
Project: Maskana-Gestor-de-Conocimiento    File: TestRandomChains.java
/**
 * Creates a random chain of up to four token filters over {@code tokenizer}. A
 * {@link ValidatingTokenFilter} is spliced in between each stage — and after the last —
 * so a misbehaving filter is identified immediately downstream of where it ran.
 *
 * @param random            randomness driving filter selection and argument generation
 * @param tokenizer         the tokenizer at the head of the chain
 * @param offsetsAreCorrect whether offsets are trustworthy when entering the chain
 * @return a spec carrying the final stream, the chain description, and the offsets flag
 */
private TokenFilterSpec newFilterChain(Random random, Tokenizer tokenizer, boolean offsetsAreCorrect) {
  final TokenFilterSpec chain = new TokenFilterSpec();
  chain.offsetsAreCorrect = offsetsAreCorrect;
  chain.stream = tokenizer;
  final StringBuilder summary = new StringBuilder();

  final int count = random.nextInt(5);
  for (int n = 0; n < count; n++) {
    // A ValidatingTokenFilter between stages pinpoints the filter that "caused" a problem:
    chain.stream = new ValidatingTokenFilter(chain.stream, "stage " + n, chain.offsetsAreCorrect);

    for (;;) {
      final Constructor<? extends TokenFilter> picked =
          tokenfilters.get(random.nextInt(tokenfilters.size()));

      // hack: MockGraph/MockLookahead have assertions that trip when placed after an
      // offsets violator such as wikipediatokenizer, so re-draw in that situation.
      final boolean mustSkip = !chain.offsetsAreCorrect
          && (picked.getDeclaringClass().equals(MockGraphTokenFilter.class)
              || picked.getDeclaringClass().equals(MockRandomLookaheadTokenFilter.class));
      if (mustSkip) {
        continue;
      }

      final Object[] ctorArgs = newFilterArgs(random, chain.stream, picked.getParameterTypes());
      if (broken(picked, ctorArgs)) {
        continue;
      }
      final TokenFilter created = createComponent(picked, ctorArgs, summary);
      if (created != null) {
        // Offsets stay untrustworthy once a broken-offsets filter joins the chain:
        chain.offsetsAreCorrect &= !brokenOffsets(picked, ctorArgs);
        chain.stream = created;
        break;
      }
    }
  }

  // And validate the output of the final stage:
  chain.stream = new ValidatingTokenFilter(chain.stream, "last stage", chain.offsetsAreCorrect);

  chain.toString = summary.toString();
  return chain;
}