Example #1
  /**
   * Query analysis for a single contiguous string (no whitespace) against a single field.
   *
   * @param field the name of the field to search
   * @param query the raw query string to analyze
   * @return the parsed Lucene Query
   */
  public static Query parse(String field, String query) {
    if (field == null) {
      throw new IllegalArgumentException("parameter \"field\" is null");
    }

    if (query == null || "".equals(query.trim())) {
      return new TermQuery(new Term(field));
    }

    // Fetch the TokenBranch previously built for this query from the cache
    TokenBranch root = getCachedTokenBranch(query);
    if (root != null) {
      return optimizeQueries(root.toQueries(field));
    } else {
      root = new TokenBranch(null);
      // Tokenize the trimmed query string
      StringReader input = new StringReader(query.trim());
      IKSegmentation ikSeg = new IKSegmentation(input, isMaxWordLength);
      try {
        for (Lexeme lexeme = ikSeg.next(); lexeme != null; lexeme = ikSeg.next()) {
          // Process the lexeme into the token-branch tree
          root.accept(lexeme);
        }
      } catch (IOException e) {
        e.printStackTrace();
      }
      // Cache the token-branch tree built from this query
      cachedTokenBranch(query, root);
      return optimizeQueries(root.toQueries(field));
    }
  }
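
A minimal usage sketch for this method, assuming it belongs to IK Analyzer's IKQueryParser class (the class name is not shown above) and that an already-open Lucene IndexSearcher named searcher exists; field name and query text are only illustrative:

  // Caller sketch (assumptions: class name IKQueryParser, existing IndexSearcher "searcher")
  Query q = IKQueryParser.parse("title", "中华人民共和国");
  TopDocs hits = searcher.search(q, 10);
  System.out.println("matched " + hits.totalHits + " documents");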
Example #2
  public static List<String> parser(String content) {

    System.out.println("--------------------- parser ----------- : " + content);
    List<String> words = Lists.newArrayList();
    // The boolean flag corresponds to isMaxWordLength (see Example #1)
    IKSegmentation ikSeg = new IKSegmentation(new StringReader(content), true);

    Lexeme l = null;
    try {
      while ((l = ikSeg.next()) != null) {
        String word = l.getLexemeText();
        int wordType = l.getLexemeType();
        System.out.println(wordType + "->" + word);
        // Keep only the lexeme types this caller is interested in (types 0 and 1)
        if (wordType == 0 || wordType == 1) {
          words.add(word);
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
    }

    System.out.println("--------------------- parser ----------- : " + words);
    return words;
  }
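
A short caller sketch for this helper; the input sentence below is only an illustrative example:

  // Caller sketch: segment a sentence and print the words that were kept
  List<String> tokens = parser("IK Analyzer 是一个开源的中文分词工具包");
  for (String t : tokens) {
    System.out.println(t);
  }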