Example source code for the Java class org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester
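
These snippets, collected from several open-source projects, show how AnalyzingInfixSuggester is built and queried in practice. As a baseline, here is a minimal, self-contained sketch (not taken from any of the projects below) that builds a suggester from a tiny in-line dictionary and runs an infix lookup; the sample titles and the temporary index directory are illustrative only.

import java.io.IOException;
import java.io.StringReader;
import java.nio.file.Files;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.search.spell.PlainTextDictionary;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class InfixSuggesterSketch {
    public static void main(String[] args) throws IOException {
        Directory dir = FSDirectory.open(Files.createTempDirectory("infix-suggest"));
        StandardAnalyzer analyzer = new StandardAnalyzer();
        AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(dir, analyzer);

        // Build the suggestion index from one entry per line (all entries get the default weight).
        suggester.build(new PlainTextDictionary(
                new StringReader("Electric Guitar\nAcoustic Guitar\nElectric Train\n")));

        // "gui" matches because infix suggestion matches a prefix of any term, not just the first one.
        // Third argument: all terms must match; fourth argument: no highlighting.
        for (Lookup.LookupResult result : suggester.lookup("gui", 5, true, false)) {
            System.out.println(result.key + " (weight=" + result.value + ")");
        }

        suggester.close();
        analyzer.close();
        dir.close();
    }
}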

Project: newblog    File: BlogServiceImpl.java
    /**
     * Look up suggestions for the given keyword.
     *
     * @param suggester the suggester to query
     * @param keyword   the partial input typed by the user
     * @throws IOException
     */
    private static List<String> lookup(AnalyzingInfixSuggester suggester, String keyword) throws IOException {
        //Return at most 20 suggestions ranked by weight.
        //The third argument requires every term to match; the fourth enables keyword highlighting.
        List<LookupResult> results = suggester.lookup(keyword, 20, true, true);
        List<String> list = new ArrayList<>();
        for (LookupResult result : results) {
            list.add(result.key.toString());
            //Deserialize the Blog object from the payload (kept for reference):
//            BytesRef bytesRef = result.payload;
//            InputStream is = Tools.bytes2InputStream(bytesRef.bytes);
//            Blog blog = (Blog) Tools.deSerialize(is);
//            System.out.println("blog-Name:" + blog.getTitle());
//            System.out.println("blog-Content:" + blog.getContent());
//            System.out.println("blog-image:" + blog.getImageurl());
//            System.out.println("blog-numberSold:" + blog.getHits());
        }
        return list;
    }
Project: newblog    File: BlogServiceImpl.java
/**
 * Build the autocomplete index (used by the AJAX suggestion endpoint)
 */
@Override
public void ajaxbuild() {
    try {
        FileUtils.deleteDirectory(new File(AUTOCOMPLETEPATH));
        logger.info("delete autocomplete file success");
        Directory dir = FSDirectory.open(Paths.get(AUTOCOMPLETEPATH));
        SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
        AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(dir, analyzer);
        //Fetch all blogs and feed them to the suggester
        List<Blog> blogs = blogMapper.getAllBlog();
        suggester.build(new BlogIterator(blogs.iterator()));
    } catch (IOException e) {
        logger.error("failed to build the autocomplete index", e);
    }
}
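
The BlogIterator passed to suggester.build(...) above is not included in this listing. A minimal sketch of such an InputIterator over blog titles might look like the following; the Blog getters (getTitle(), getHits()) are assumptions based on the commented-out payload code in the lookup method, and payloads/contexts are simply omitted.

import java.util.Iterator;
import java.util.Set;

import org.apache.lucene.search.suggest.InputIterator;
import org.apache.lucene.util.BytesRef;

public class BlogIterator implements InputIterator {

    private final Iterator<Blog> blogs;    // Blog is the project's entity class
    private Blog current;

    public BlogIterator(Iterator<Blog> blogs) {
        this.blogs = blogs;
    }

    @Override
    public BytesRef next() {
        if (!blogs.hasNext()) {
            return null;                   // signals the end of the input
        }
        current = blogs.next();
        return new BytesRef(current.getTitle());
    }

    @Override
    public long weight() {
        return current.getHits();          // more popular blogs rank higher
    }

    @Override
    public BytesRef payload() {
        return null;                       // payloads omitted in this sketch
    }

    @Override
    public boolean hasPayloads() {
        return false;
    }

    @Override
    public Set<BytesRef> contexts() {
        return null;                       // no context filtering
    }

    @Override
    public boolean hasContexts() {
        return false;
    }
}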
Project: newblog    File: BlogServiceImpl.java
/**
 * Look up suggestions for the given keyword.
 *
 * @param keyword
 * @return
 */
@Override
public Set<String> ajaxsearch(String keyword) {
    try {
        Directory dir = FSDirectory.open(Paths.get(AUTOCOMPLETEPATH));
        SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
        AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(dir, analyzer);
        List<String> list = lookup(suggester, keyword);
        //Sort suggestions by length, shortest first (Integer.compare keeps the comparator contract).
        list.sort((o1, o2) -> Integer.compare(o1.length(), o2.length()));
        Set<String> set = new LinkedHashSet<>(list);
        ssubSet(set, 7);
        return set;
    } catch (IOException e) {
        logger.error("autocomplete lookup failed for keyword: " + keyword, e);
        return null;
    }
}
Project: newblog    File: SuggesterTest.java
private static void lookup(AnalyzingInfixSuggester suggester, String keyword,
                           String region) throws IOException {
    HashSet<BytesRef> contexts = new HashSet<BytesRef>();
    contexts.add(new BytesRef(region.getBytes("UTF8")));
    //Filter by contexts (region) first, then match the keyword, and return the top 2 results by weight.
    //The third boolean requires every term to match; the fourth controls keyword highlighting.
    List<Lookup.LookupResult> results = suggester.lookup(keyword, contexts, 2, true, false);
    System.out.println("-- \"" + keyword + "\" (" + region + "):");
    for (Lookup.LookupResult result : results) {
        System.out.println(result.key);
        //Deserialize the Product object from the payload
        BytesRef bytesRef = result.payload;
        InputStream is = Tools.bytes2InputStream(bytesRef.bytes);
        Product product = (Product) Tools.deSerialize(is);
        System.out.println("product-Name:" + product.getName());
        System.out.println("product-regions:" + product.getRegions());
        System.out.println("product-image:" + product.getImage());
        System.out.println("product-numberSold:" + product.getNumberSold());
    }
    System.out.println();
}
Project: lucenestudy    File: Suggester.java
/**
 * Rebuild a suggestion index from the document index.
 *
 * This method iterates through the entire document index and makes sure that only unique titles
 * are indexed.
 *
 * @param indexRoot The parent directory inside which both the document index and the suggestion
 *                  index live.
 * @throws IOException
 */
public static void rebuild(String indexRoot) throws IOException {
  Path indexRootPath = Paths.get(indexRoot);
  Path suggestionPath = getSuggestionIndexPath(indexRootPath);

  // Delete the suggestion index if it exists.
  if (Files.exists(suggestionPath)) {
    Util.deletePath(suggestionPath);
  }

  // Create the suggestion index.
  Analyzer analyzer = Indexer.getAnalyzer();
  Directory suggestionDir = FSDirectory.open(getSuggestionIndexPath(indexRootPath));
  AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(suggestionDir, analyzer);

  // Open the document index.
  Directory indexDir = FSDirectory.open(Indexer.getMainIndexPath(indexRootPath));
  IndexReader reader = DirectoryReader.open(indexDir);

  // Get a document iterator.
  DocumentDictionary docDict = new DocumentDictionary(reader, Indexer.TITLE_FIELD_NAME, null);
  InputIterator iterator = docDict.getEntryIterator();
  Set<BytesRef> titleSet = new HashSet<>();
  BytesRef next;
  while ((next = iterator.next()) != null) {
    if (titleSet.contains(next)) {
      continue;
    }

    titleSet.add(next);
    suggester.add(next, null, 0, null);
  }

  reader.close();

  suggester.commit();
  suggester.close();
}
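
A typical way to wire this class together (the index root path below is a placeholder): rebuild the suggestion index once after the document index changes, then open it for lookups through the constructor shown further down.

Suggester.rebuild("/path/to/indexRoot");                       // recreate the suggestion index
Suggester suggester = new Suggester("/path/to/indexRoot");     // open it for lookups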
Project: newblog    File: BlogServiceImpl.java
public static void main(String[] args) {
        try {
            Directory dir = FSDirectory.open(Paths.get(AUTOCOMPLETEPATH));
            SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer();
            AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(dir, analyzer);
            lookup(suggester, "jav");
//            new BlogServiceImpl().ajaxsearch("北京");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
Project: newblog    File: SuggesterTest.java
public static void main(String[] args) {
        try {
            RAMDirectory indexDir = new RAMDirectory();
            StandardAnalyzer analyzer = new StandardAnalyzer();
            AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(indexDir, analyzer);

            //Create Product test data
            ArrayList<Product> products = new ArrayList<Product>();
            products.add(new Product("Electric Guitar",
                    "http://images.example/electric-guitar.jpg", new String[]{
                    "US", "CA"}, 100));
            products.add(new Product("Electric Train",
                    "http://images.example/train.jpg", new String[]{"US",
                    "CA"}, 100));
            products.add(new Product("Acoustic Guitar",
                    "http://images.example/acoustic-guitar.jpg", new String[]{
                    "US", "ZA"}, 80));
            products.add(new Product("Guarana Soda",
                    "http://images.example/soda.jpg",
                    new String[]{"ZA", "IE"}, 130));

            // Build the suggestion index
            suggester.build(new ProductIterator(products.iterator()));

            // Run some lookups
            lookup(suggester, "Gu", "US");
//            lookup(suggester, "Gu", "ZA");
//            lookup(suggester, "Gui", "CA");
//            lookup(suggester, "Electric guit", "US");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
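
The ProductIterator consumed by suggester.build(...) above is not part of this listing. A hypothetical sketch is shown below: the entry text is the product name, the weight is numberSold, the contexts are the region codes (matching the region filter used in the lookup method earlier), and the payload is the Java-serialized Product. The Product getters are assumptions inferred from the lookup code.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.lucene.search.suggest.InputIterator;
import org.apache.lucene.util.BytesRef;

public class ProductIterator implements InputIterator {

    private final Iterator<Product> products;
    private Product current;

    public ProductIterator(Iterator<Product> products) {
        this.products = products;
    }

    @Override
    public BytesRef next() {
        if (!products.hasNext()) {
            return null;                   // end of input
        }
        current = products.next();
        return new BytesRef(current.getName());
    }

    @Override
    public long weight() {
        return current.getNumberSold();    // better-selling products rank higher
    }

    @Override
    public BytesRef payload() {
        try {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
                oos.writeObject(current);  // Product is assumed to implement Serializable
            }
            return new BytesRef(bos.toByteArray());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public boolean hasPayloads() {
        return true;
    }

    @Override
    public Set<BytesRef> contexts() {
        Set<BytesRef> regions = new HashSet<>();
        for (String region : current.getRegions()) {
            regions.add(new BytesRef(region.getBytes(StandardCharsets.UTF_8)));
        }
        return regions;
    }

    @Override
    public boolean hasContexts() {
        return true;
    }
}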
Project: lucenestudy    File: Suggester.java
/**
 * Open a suggestion index.
 * @param indexRoot The parent directory inside which the suggestion index lives.
 * @throws IOException
 */
public Suggester(String indexRoot) throws IOException {
  indexRootPath = Paths.get(indexRoot);
  Analyzer analyzer = Indexer.getAnalyzer();
  Directory suggestionDir = FSDirectory.open(getSuggestionIndexPath(indexRootPath));
  suggester = new AnalyzingInfixSuggester(suggestionDir, analyzer);
  suggestionCount = DEFAULT_SUGGESTION_COUNT;
}
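
The lookup side of this class is not shown here; a hypothetical helper using the suggester and suggestionCount fields above might look like this:

public List<String> suggest(String query) throws IOException {
  List<String> suggestions = new ArrayList<>();
  // allTermsRequired = true, doHighlight = false
  for (Lookup.LookupResult result : suggester.lookup(query, suggestionCount, true, false)) {
    suggestions.add(result.key.toString());
  }
  return suggestions;
}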
Project: ifpress-solr-plugin    File: SafeInfixLookupFactory.java
@Override
public Lookup create(@SuppressWarnings("rawtypes") NamedList params, SolrCore core) {
    // mandatory parameter
    Object fieldTypeName = params.get(QUERY_ANALYZER);
    if (fieldTypeName == null) {
        throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
    }
    FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
    if (ft == null) {
        throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
    }
    Analyzer indexAnalyzer = ft.getIndexAnalyzer();
    Analyzer queryAnalyzer = ft.getQueryAnalyzer();

    // optional parameters

    String indexPath = params.get(INDEX_PATH) != null ?
            params.get(INDEX_PATH).toString() :
                    DEFAULT_INDEX_PATH;

    int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
            ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
                    : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

    Boolean highlight = params.getBooleanArg(HIGHLIGHT);
    if (highlight == null) {
        highlight = DEFAULT_HIGHLIGHT;
    }

    try {
        return new SafariInfixSuggester(core.getSolrConfig().luceneMatchVersion,
                                        FSDirectory.open(new File(indexPath)), indexAnalyzer,
                                        queryAnalyzer, minPrefixChars, highlight);
    } catch (IOException e) {
        throw new SolrException(ErrorCode.SERVER_ERROR, e);
    }
}
Project: ifpress-solr-plugin    File: MultiSuggester.java
@Override
public void reload(SolrCore coreParam, SolrIndexSearcher searcher) throws IOException {
  if (lookup instanceof AnalyzingInfixSuggester) {
    // AnalyzingInfixSuggester maintains its own index and sees updates,
    // so we don't need to rebuild it every time the core starts or is reloaded.
    AnalyzingInfixSuggester ais = (AnalyzingInfixSuggester) lookup;
    if (ais.getCount() > 0) {
      LOG.info("load existing suggestion index");
      return;
    }
  }
  build(core, searcher);
}
Project: lucenelab    File: ContextSuggestDemo.java
public ContextSuggestDemo() throws IOException {
    indexDir = new RAMDirectory();
    suggestDir = new RAMDirectory();
    analyzer = new SimpleAnalyzer();
    suggester = new AnalyzingInfixSuggester(suggestDir, analyzer, analyzer, 1, true);
    buildSearchIndex();
    buildSuggesterIndex();
}
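
buildSearchIndex() and buildSuggesterIndex() are not included in this snippet. A minimal sketch of what buildSuggesterIndex() might do — adding entries tagged with a context and making them visible to lookups — could look like this; the entry texts and context values are illustrative, and the usual java.util and org.apache.lucene.util.BytesRef imports are assumed:

    private void buildSuggesterIndex() throws IOException {
        Set<BytesRef> fruitContext = Collections.singleton(new BytesRef("fruit"));
        Set<BytesRef> drinkContext = Collections.singleton(new BytesRef("drink"));
        suggester.add(new BytesRef("apple pie"), fruitContext, 1, null);
        suggester.add(new BytesRef("apple juice"), drinkContext, 2, null);
        suggester.refresh();   // make the added entries searchable
    }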
Project: read-open-source-code    File: BlendedInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  BlenderType blenderType = getBlenderType(params.get(BLENDER_TYPE));

  int numFactor = params.get(NUM_FACTOR) != null
  ? Integer.parseInt(params.get(NUM_FACTOR).toString())
  : BlendedInfixSuggester.DEFAULT_NUM_FACTOR;

  try {
    return new BlendedInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
        new File(indexPath), indexAnalyzer, queryAnalyzer, minPrefixChars, blenderType, numFactor);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Project: read-open-source-code    File: AnalyzingInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  try {
    return new AnalyzingInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
        new File(indexPath), indexAnalyzer, queryAnalyzer, minPrefixChars);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Project: search    File: BlendedInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getIndexAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;
  if (new File(indexPath).isAbsolute() == false) {
    indexPath = core.getDataDir() + File.separator + indexPath;
  }

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  BlenderType blenderType = getBlenderType(params.get(BLENDER_TYPE));

  int numFactor = params.get(NUM_FACTOR) != null
  ? Integer.parseInt(params.get(NUM_FACTOR).toString())
  : BlendedInfixSuggester.DEFAULT_NUM_FACTOR;

  try {
    return new BlendedInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
                                     FSDirectory.open(new File(indexPath)),
                                     indexAnalyzer, queryAnalyzer, minPrefixChars,
                                     blenderType, numFactor) {
      @Override
      public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {
        List<LookupResult> res = super.lookup(key, contexts, num, allTermsRequired, doHighlight);
        if (doHighlight) {
          List<LookupResult> res2 = new ArrayList<>();
          for(LookupResult hit : res) {
            res2.add(new LookupResult(hit.highlightKey.toString(),
                                      hit.highlightKey,
                                      hit.value,
                                      hit.payload,
                                      hit.contexts));
          }
          res = res2;
        }

        return res;
      }
    };
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Project: search    File: AnalyzingInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getIndexAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;
  if (new File(indexPath).isAbsolute() == false) {
    indexPath = core.getDataDir() + File.separator + indexPath;
  }

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  try {
    return new AnalyzingInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
                                       FSDirectory.open(new File(indexPath)), indexAnalyzer,
                                       queryAnalyzer, minPrefixChars) {
      @Override
      public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {
        List<LookupResult> res = super.lookup(key, contexts, num, allTermsRequired, doHighlight);
        if (doHighlight) {
          List<LookupResult> res2 = new ArrayList<>();
          for(LookupResult hit : res) {
            res2.add(new LookupResult(hit.highlightKey.toString(),
                                      hit.highlightKey,
                                      hit.value,
                                      hit.payload,
                                      hit.contexts));
          }
          res = res2;
        }

        return res;
      }
    };
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Project: mytourbook    File: FTSearchManager.java
private static Lookup setupSuggester_AnalyzingInfix() {

        setupIndexReader();

        final Lookup[] suggester = new AnalyzingInfixSuggester[1];

        Display.getDefault().syncExec(new Runnable() {
            @Override
            public void run() {

                BusyIndicator.showWhile(Display.getDefault(), new Runnable() {

                    @Override
                    public void run() {

                        try {

                            final DocumentInputIterator inputIterator = new DocumentInputIterator(_indexReader);
                            final Analyzer indexAnalyzer = new StandardAnalyzer(new CharArraySet(0, true));
                            final Analyzer queryAnalyzer = new WhitespaceAnalyzer();

                            _infixStore = openStore("AnalyzingInfixSuggesterSTORE"); //$NON-NLS-1$

                            suggester[0] = new AnalyzingInfixSuggester(
                                    LUCENE_VERSION,
                                    _infixStore,
                                    indexAnalyzer,
                                    queryAnalyzer,
                                    2);

                            suggester[0].build(inputIterator);

                        } catch (final Exception e) {
                            StatusUtil.showStatus(e);
                        }
                    }
                });
            }
        });

        return suggester[0];
    }
Project: read-open-source-code    File: BlendedInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getIndexAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;
  if (new File(indexPath).isAbsolute() == false) {
    indexPath = core.getDataDir() + File.separator + indexPath;
  }

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  BlenderType blenderType = getBlenderType(params.get(BLENDER_TYPE));

  int numFactor = params.get(NUM_FACTOR) != null
  ? Integer.parseInt(params.get(NUM_FACTOR).toString())
  : BlendedInfixSuggester.DEFAULT_NUM_FACTOR;

  try {
    return new BlendedInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
                                     FSDirectory.open(new File(indexPath)),
                                     indexAnalyzer, queryAnalyzer, minPrefixChars,
                                     blenderType, numFactor) {
      @Override
      public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {
        List<LookupResult> res = super.lookup(key, contexts, num, allTermsRequired, doHighlight);
        if (doHighlight) {
          List<LookupResult> res2 = new ArrayList<>();
          for(LookupResult hit : res) {
            res2.add(new LookupResult(hit.highlightKey.toString(),
                                      hit.highlightKey,
                                      hit.value,
                                      hit.payload,
                                      hit.contexts));
          }
          res = res2;
        }

        return res;
      }
    };
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Project: read-open-source-code    File: AnalyzingInfixLookupFactory.java
@Override
public Lookup create(NamedList params, SolrCore core) {
  // mandatory parameter
  Object fieldTypeName = params.get(QUERY_ANALYZER);
  if (fieldTypeName == null) {
    throw new IllegalArgumentException("Error in configuration: " + QUERY_ANALYZER + " parameter is mandatory");
  }
  FieldType ft = core.getLatestSchema().getFieldTypeByName(fieldTypeName.toString());
  if (ft == null) {
    throw new IllegalArgumentException("Error in configuration: " + fieldTypeName.toString() + " is not defined in the schema");
  }
  Analyzer indexAnalyzer = ft.getIndexAnalyzer();
  Analyzer queryAnalyzer = ft.getQueryAnalyzer();

  // optional parameters

  String indexPath = params.get(INDEX_PATH) != null
  ? params.get(INDEX_PATH).toString()
  : DEFAULT_INDEX_PATH;
  if (new File(indexPath).isAbsolute() == false) {
    indexPath = core.getDataDir() + File.separator + indexPath;
  }

  int minPrefixChars = params.get(MIN_PREFIX_CHARS) != null
  ? Integer.parseInt(params.get(MIN_PREFIX_CHARS).toString())
  : AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS;

  try {
    return new AnalyzingInfixSuggester(core.getSolrConfig().luceneMatchVersion, 
                                       FSDirectory.open(new File(indexPath)), indexAnalyzer,
                                       queryAnalyzer, minPrefixChars) {
      @Override
      public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {
        List<LookupResult> res = super.lookup(key, contexts, num, allTermsRequired, doHighlight);
        if (doHighlight) {
          List<LookupResult> res2 = new ArrayList<>();
          for(LookupResult hit : res) {
            res2.add(new LookupResult(hit.highlightKey.toString(),
                                      hit.highlightKey,
                                      hit.value,
                                      hit.payload,
                                      hit.contexts));
          }
          res = res2;
        }

        return res;
      }
    };
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}