Example 1
  public void testAdded() throws Exception {
    prepareTestData();
    queryParser = createQueryParser("blurb");

    luceneQuery = queryParser.parse("eats");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);
    List<Object> found = cacheQuery.list();

    AssertJUnit.assertEquals(2, found.size());
    assert found.contains(person2);
    assert found.contains(person3);
    assert !found.contains(person4) : "This should not contain object person4";

    person4 = new Person();
    person4.setName("Mighty Goat");
    person4.setBlurb("Also eats grass");

    cache1.put("mighty", person4);

    luceneQuery = queryParser.parse("eats");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);
    found = cacheQuery.list();

    AssertJUnit.assertEquals(3, found.size());
    assert found.contains(person2);
    assert found.contains(person3);
    assert found.contains(person4) : "This should now contain object person4";
  }
Example 2
 /**
  * Given the search text, searcher object, and query analyzer generate an appropriate Lucene
  * search query.
  */
 protected Query createSearchQuery(
     IndexSearcher searcher, StandardAnalyzer analyzer, String text, List<Integer> namespaces)
     throws IOException, ParseException {
   BooleanQuery fullQuery = new BooleanQuery();
   QueryParser qp;
   // build the namespace portion of the query
   if (namespaces != null && !namespaces.isEmpty()) {
     qp = new QueryParser(USE_LUCENE_VERSION, FIELD_TOPIC_NAMESPACE, analyzer);
     StringBuilder namespaceText = new StringBuilder();
     for (Integer namespaceId : namespaces) {
       if (namespaceText.length() != 0) {
         namespaceText.append(" ").append(QueryParser.Operator.OR).append(" ");
       }
       namespaceText.append(namespaceId);
     }
     fullQuery.add(qp.parse(namespaceText.toString()), Occur.MUST);
   }
   // create a sub-query for topic name & topic text
   BooleanQuery nameAndContentQuery = new BooleanQuery();
   // topic name
   qp = new QueryParser(USE_LUCENE_VERSION, FIELD_TOPIC_NAME_ANALYZED, analyzer);
   nameAndContentQuery.add(qp.parse(text), Occur.SHOULD);
   // topic content
   qp = new QueryParser(USE_LUCENE_VERSION, FIELD_TOPIC_CONTENT, analyzer);
   nameAndContentQuery.add(qp.parse(text), Occur.SHOULD);
   // rewrite the sub-query to expand it - required for wildcards to work with highlighter
   Query subQuery = searcher.rewrite(nameAndContentQuery);
   // add the sub-query to the main query
   fullQuery.add(subQuery, Occur.MUST);
   return fullQuery;
 }
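A minimal, self-contained sketch of the same pattern, shown here for reference: per-field QueryParser sub-queries combined into a single BooleanQuery, with the namespace clause required (MUST) and the name/content clauses optional (SHOULD), against the Lucene 3.x API used above. The field names, the namespace ids, and the LUCENE_36 constant are illustrative assumptions, not part of the original code.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

public class FieldedQueryExample {

  /** Builds "must match a namespace" AND "should match name or content", as in the method above. */
  static Query buildQuery(String text) throws ParseException {
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_36);
    BooleanQuery fullQuery = new BooleanQuery();

    // restrict to a set of namespaces (hypothetical ids 0 and 1)
    QueryParser namespaceParser = new QueryParser(Version.LUCENE_36, "namespace", analyzer);
    fullQuery.add(namespaceParser.parse("0 OR 1"), Occur.MUST);

    // match either the topic name or the topic content
    BooleanQuery nameOrContent = new BooleanQuery();
    nameOrContent.add(new QueryParser(Version.LUCENE_36, "name", analyzer).parse(text), Occur.SHOULD);
    nameOrContent.add(new QueryParser(Version.LUCENE_36, "content", analyzer).parse(text), Occur.SHOULD);
    fullQuery.add(nameOrContent, Occur.MUST);

    return fullQuery;
  }

  public static void main(String[] args) throws ParseException {
    System.out.println(buildQuery("wiki"));
  }
}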
Example 3
  public void testModified() throws Exception {
    prepareTestData();
    assertQueryInterceptorPresent(cache2);

    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("playing");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);

    List<Object> found = cacheQuery.list();

    assert found.size() == 1 : "Expected list of size 1, was of size " + found.size();
    assert found.get(0).equals(person1);

    person1.setBlurb("Likes pizza");
    cache1.put("Navin", person1);

    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("pizza");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);

    found = cacheQuery.list();

    assert found.size() == 1;
    assert found.get(0).equals(person1);
  }
Example 4
  public TopDocCollectorSearchResult searchByScore(
      String queryStr, int startFrom, String operator) {

    try {
      queryStr = queryStr.trim();
      QueryParser parser = new QueryParser("contents", analyzer);
      if (QueryParser.AND_OPERATOR.toString().equalsIgnoreCase(operator)) {
        parser.setDefaultOperator(QueryParser.AND_OPERATOR);
      } else {
        parser.setDefaultOperator(QueryParser.OR_OPERATOR);
      }
      Query query;
      query = parser.parse(queryStr);
      TopDocCollector collector = doPagingSearch(query, startFrom);
      TopDocCollectorSearchResult result = new TopDocCollectorSearchResult(collector, searcher);
      return result;
    } catch (ParseException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (IOException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }

    return null;

    // System.out.println("Searching for: " + query.toString("contents"));

    // doPagingSearch(in, searcher, query, hitsPerPage, raw, queries ==
    // null);

    // }
    // reader.close();
  }
Example 5
  private Query buildLuceneQuery(Long categoryId, Integer page, String q) throws ParseException {
    StringBuilder query = new StringBuilder();

    QueryParser queryParser =
        new org.apache.lucene.queryParser.QueryParser(
            Version.LUCENE_31, "title", new StandardAnalyzer((Version.LUCENE_31)));
    if (StringUtils.isNotBlank(q)) {
      query.append(buildQuery(q));
    }

    if (categoryId != null) {
      List<Long> childCategories =
          DAOFactory.getInstance().getCategoryDAO().childCategories(categoryId);
      if (StringUtils.isNotBlank(q)) {
        query.append("AND ( " + buildQuery(childCategories) + ")");
      } else {
        query.append(buildQuery(childCategories));
      }

      buildQuery(childCategories);
    }
    if (!StringUtils.isNotBlank(query)) {
      query.append("id:*");
    }
    queryParser.setAllowLeadingWildcard(true);
    return queryParser.parse(query.toString());
  }
Example 6
  public WOActionResults searchAction() {
    String searchString = (String) this.request().formValueForKey("searchString");

    String searchTerm = null;
    if (null != searchString && 1 <= searchString.length()) {
      // append a fuzzy operator unless the user already ended the term with "~",
      // so searchTerm is never null for a non-empty searchString (parse(null) would fail below)
      searchTerm =
          searchString.lastIndexOf("~") != searchString.length() - 1
              ? searchString + "~"
              : searchString;
    }

    ScoreDoc[] foundScoreDocs = null;
    ERIndex indicatorIndex = ERIndex.indexNamed(Indicator.ENTITY_NAME);
    if (null != searchString && 0 < searchString.length()) {
      try {
        QueryParser contentQueryParser =
            new QueryParser(
                Version.LUCENE_29, "community", new StandardAnalyzer(Version.LUCENE_29));
        Query q = contentQueryParser.parse(searchTerm);
        foundScoreDocs = indicatorIndex.findScoreDocs(q, 10);
      } catch (Exception e) {
        ERXApplication.log.error(
            "Failed to query lucene with " + searchString + " " + e.getMessage());
        e.printStackTrace();
      }
    }

    Search searchPage = (Search) pageWithName(Search.class.getName());
    searchPage.setSearchString(searchString);
    searchPage.setFoundScoreDocs(foundScoreDocs);
    searchPage.setIndicatorIndex(indicatorIndex);
    return searchPage;
  }
Example 7
  public void testDemo() throws IOException, ParseException {

    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);

    // Store the index in memory:
    Directory directory = new RAMDirectory();
    // To store an index on disk, use this instead:
    // Directory directory = FSDirectory.open("/tmp/testindex");
    IndexWriter iwriter =
        new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
    Document doc = new Document();
    String text = "This is the text to be indexed.";
    doc.add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
    iwriter.addDocument(doc);
    iwriter.close();

    // Now search the index:
    IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
    // Parse a simple query that searches for "text":
    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fieldname", analyzer);
    Query query = parser.parse("text");
    ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs;
    assertEquals(1, hits.length);
    // Iterate through the results:
    for (int i = 0; i < hits.length; i++) {
      Document hitDoc = isearcher.doc(hits[i].doc);
      assertEquals("This is the text to be indexed.", hitDoc.get("fieldname"));
    }
    isearcher.close();
    directory.close();
  }
Example 8
 private final Table values(final String s) throws IOException, ParseException {
   final QueryParser parser = new QueryParser(s, new StandardAnalyzer());
   parser.setAllowLeadingWildcard(true);
   final Query query = parser.parse("*");
   final Set<String> values = new TreeSet<String>();
   try {
     state.search(
         query,
         new HitCollector() {
           @Override
           public final void collect(int doc, float score) {
             try {
               final Document d = state.doc(doc);
               final Field[] fs = d.getFields(s);
               for (int i = 0; i < fs.length; ++i) values.add(fs[i].stringValue());
             } catch (final IOException e) {
               throw new WrapException(e);
             }
           }
         });
   } catch (final WrapException e) {
     throw (IOException) e.getCause();
   }
   return new ArrayTable(
       TableType.FIELD_VALUE, "value", values.toArray(new String[values.size()]));
 }
Example 9
  public void startSearch(String searchString) throws IOException {

    /*analyze(searchString);*/

    try {
      Directory directory = FSDirectory.open(new File(".//Index")); // index location
      IndexSearcher is = new IndexSearcher(directory); // searcher object
      QueryParser parser =
          new QueryParser(
              Version.LUCENE_31,
              "name",
              new RussianAnalyzer(Version.LUCENE_31)); // search field + analyzer
      /* String str1 = "фотоаппарат";
      String str2 = "телевизор";
      String str3 = "SONY";
      String total = "(" + str1 + " OR " + str2 + ")" + " AND " + str3;
      System.out.println(total);*/
      Query query = parser.parse(searchString); // what we are searching for
      TopDocs results =
          is.search(
              query, null,
              10); // run the search, limited to 10 documents; results contains ...
      System.out.println(
          "getMaxScore()="
              + results.getMaxScore()
              + " totalHits="
              + results
                  .totalHits); // MaxScore - best score (relevance), totalHits - number of
      // documents found

      /*proposalController.getProposalList().clear();*/

      for (ScoreDoc hits : results.scoreDocs) { // collect the suggestions
        Document doc = is.doc(hits.doc); // fetch the document by its doc reference

        for (Proposal proposal :
            proposalFacade.findPropolsalsByProduct(Long.valueOf(doc.get("recid")))) {

          proposalController.getProposalList().add(proposal);
          _log.info(
              "Предложение найдено:"
                  + proposal.getRecid().toString()
                  + ",Товар: "
                  + doc.get("recid")
                  + ", "
                  + doc.get("name"));
        }

        /*System.out.println("doc="+hits.doc+" score="+hits.score);//print the doc reference + score
        addMessage(doc.get("id") + " | " + doc.get("recid") + " | " + doc.get("name"));//print the fields of the found document*/
      }

      directory.close();
    } catch (ParseException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }
    addMessage("Поиск выполнен");
  }
Example 10
  public TopFieldDocsSearchResult searchBySession(String queryStr, int startFrom, String operator) {

    try {
      queryStr = queryStr.trim();
      QueryParser parser = new QueryParser("contents", analyzer);
      if (QueryParser.AND_OPERATOR.toString().equalsIgnoreCase(operator)) {
        parser.setDefaultOperator(QueryParser.AND_OPERATOR);
      } else {
        parser.setDefaultOperator(QueryParser.OR_OPERATOR);
      }

      Query query;
      query = parser.parse(queryStr);
      Sort sort = new Sort("summary", true);

      TopFieldDocs tfd = searcher.search(query, null, startFrom + 10, sort);
      TopFieldDocsSearchResult result = new TopFieldDocsSearchResult(tfd, searcher);
      return result;
    } catch (ParseException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (IOException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }

    return null;
  }
Example 11
 public static CacheQuery createCacheQuery(Cache m_cache, String fieldName, String searchString)
     throws ParseException {
   QueryParser qp = createQueryParser(fieldName);
   Query parsedQuery = qp.parse(searchString);
   SearchManager queryFactory = Search.getSearchManager(m_cache);
   CacheQuery cacheQuery = queryFactory.getQuery(parsedQuery);
   return cacheQuery;
 }
Example 12
 public List<Item> search(String query, String indexDir) throws Exception {
   open(indexDir);
   String[] fields = {"title", "content"};
   QueryParser qp = new MultiFieldQueryParser(Version.LUCENE_36, fields, analyzer);
   Query q = qp.parse(query);
   List<Item> result = search(q, 100);
   close();
   return result;
 }
Example 13
  public void testRemoved() throws Exception {
    prepareTestData();
    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("eats");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);
    List<Object> found = cacheQuery.list();

    assert found.size() == 2;
    assert found.contains(person2);
    assert found.contains(person3) : "This should still contain object person3";

    cache1.remove(key3);

    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("eats");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);
    found = cacheQuery.list();

    assert found.size() == 1 : "Expected 1 result after removal, got " + found.size();
    assert !found.contains(person3) : "This should no longer contain object person3";
  }
Example 14
  public void testGetResultSize() throws Exception {
    prepareTestData();
    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("playing");
    cacheQuery = Search.getSearchManager(cache2).getQuery(luceneQuery);
    List<Object> found = cacheQuery.list();

    AssertJUnit.assertEquals(1, found.size());
  }
Example 15
 /* (non-Javadoc)
  * @see org.apache.lucene.benchmark.quality.QualityQueryParser#parse(org.apache.lucene.benchmark.quality.QualityQuery)
  */
 public Query parse(QualityQuery qq) throws ParseException {
   QueryParser qp = queryParser.get();
   if (qp == null) {
     qp =
         new QueryParser(
             Version.LUCENE_CURRENT, indexField, new StandardAnalyzer(Version.LUCENE_CURRENT));
     queryParser.set(qp);
   }
   return qp.parse(qq.getValue(qqName));
 }
Example 16
  protected Hits query(String db, String defaultField, String queryString)
      throws IOException, CorruptIndexException, ParseException {
    Directory directory = FSDirectory.getDirectory(indexPath(db));
    IndexReader reader = IndexReader.open(directory);

    Searcher searcher = new IndexSearcher(reader);
    Analyzer analyzer = new StandardAnalyzer();
    QueryParser qp = new QueryParser(defaultField, analyzer);
    Query query = qp.parse(queryString);
    return searcher.search(query);
  }
Example 17
  public void testBatchSize() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    final int loop = 14;
    s.doWork(
        new Work() {
          @Override
          public void execute(Connection connection) throws SQLException {
            for (int i = 0; i < loop; i++) {
              Statement statmt = connection.createStatement();
              statmt.executeUpdate(
                  "insert into Domain(id, name) values( + " + (i + 1) + ", 'sponge" + i + "')");
              statmt.executeUpdate(
                  "insert into Email(id, title, body, header, domain_id) values( + "
                      + (i + 1)
                      + ", 'Bob Sponge', 'Meet the guys who create the software', 'nope', "
                      + (i + 1)
                      + ")");
              statmt.close();
            }
          }
        });
    tx.commit();
    s.close();

    // check that the entities inserted via plain JDBC (outside Hibernate) do get found
    s = Search.getFullTextSession(openSession());
    tx = s.beginTransaction();
    ScrollableResults results = s.createCriteria(Email.class).scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    while (results.next()) {
      index++;
      s.index(results.get(0));
      if (index % 5 == 0) {
        s.clear();
      }
    }
    tx
        .commit(); // if you get a LazyInitializationException, that's because we clear() the
                   // session in the loop.. it only works with a batch size of 5 (the point of the
                   // test)
    s.clear();
    tx = s.beginTransaction();
    QueryParser parser =
        new QueryParser(TestConstants.getTargetLuceneVersion(), "id", TestConstants.stopAnalyzer);
    List result = s.createFullTextQuery(parser.parse("body:create")).list();
    assertEquals(14, result.size());
    for (Object object : result) {
      s.delete(object);
    }
    tx.commit();
    s.close();
  }
Example 18
  public void testBoostedFieldDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedFieldIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser =
        new QueryParser(getTargetLuceneVersion(), "author", SearchTestCase.standardAnalyzer);
    QueryParser descParser =
        new QueryParser(getTargetLuceneVersion(), "description", SearchTestCase.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(query, BoostedFieldDescriptionLibrary.class);
    List results = hibQuery.list();

    assertTrue(
        "incorrect document boost",
        ((BoostedFieldDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    // cleanup
    for (Object element :
        fullTextSession
            .createQuery("from " + BoostedFieldDescriptionLibrary.class.getName())
            .list()) {
      fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
  }
Example 19
  private int doSearch(String searchPhrase, int maxNumberOfHits, Version luceneVersion)
      throws NullPointerException, ParseException, IOException {
    LOG.trace("*** Search Phrase: {} ***", searchPhrase);

    QueryParser parser = new QueryParser(luceneVersion, "contents", analyzer);
    Query query = parser.parse(searchPhrase);
    TopScoreDocCollector collector = TopScoreDocCollector.create(maxNumberOfHits, true);
    indexSearcher.search(query, collector);
    hits = collector.topDocs().scoreDocs;

    LOG.trace("*** Search generated {} hits ***", hits.length);
    return hits.length;
  }
Example 20
  public static void wildTest() throws Exception {
    Directory dir = FSDirectory.open(new File(tempDir));
    IndexSearcher searcher = new IndexSearcher(dir); // used to search the index

    // WildcardQuery query = new WildcardQuery(new Term("path", ("www." + "si\u003fna" + "*" )));
    QueryParser p = new QueryParser(Version.LUCENE_36, "aa", new KeywordAnalyzer());
    Query query = p.parse("path:www." + "s*i\\?na.com");
    System.out.println(query.toString() + "=" + query.getClass());
    TopDocs top = searcher.search(query, 1000);
    ScoreDoc[] sdoc = top.scoreDocs;
    System.out.println(sdoc.length);
    printResult(sdoc, "path", searcher);
  }
Example 21
  @SuppressWarnings("unchecked")
  public SearchResult<Subject> searchSubjectsBasic(int resultsPerPage, int page, String text) {

    int firstResult = page * resultsPerPage;

    StringBuilder queryBuilder = new StringBuilder();

    if (!StringUtils.isBlank(text)) {
      queryBuilder.append("+(");
      addTokenizedSearchCriteria(queryBuilder, "name", text, false);
      addTokenizedSearchCriteria(queryBuilder, "educationType.name", text, false);
      queryBuilder.append(")");
    }

    EntityManager entityManager = getEntityManager();
    FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);

    try {
      String queryString = queryBuilder.toString();
      Query luceneQuery;
      QueryParser parser =
          new QueryParser(Version.LUCENE_29, "", new StandardAnalyzer(Version.LUCENE_29));
      if (StringUtils.isBlank(queryString)) {
        luceneQuery = new MatchAllDocsQuery();
      } else {
        luceneQuery = parser.parse(queryString);
      }

      FullTextQuery query =
          (FullTextQuery)
              fullTextEntityManager
                  .createFullTextQuery(luceneQuery, Subject.class)
                  .setFirstResult(firstResult)
                  .setMaxResults(resultsPerPage);
      query.enableFullTextFilter("ArchivedSubject").setParameter("archived", Boolean.FALSE);

      int hits = query.getResultSize();
      int pages = hits / resultsPerPage;
      if (hits % resultsPerPage > 0) {
        pages++;
      }

      int lastResult = Math.min(firstResult + resultsPerPage, hits) - 1;

      return new SearchResult<Subject>(
          page, pages, hits, firstResult, lastResult, query.getResultList());

    } catch (ParseException e) {
      throw new PersistenceException(e);
    }
  }
Example 22
  @Override
  public QueryResponse query(SearchQuery searchQuery) {
    try {
      if (logger.isDebugEnabled()) {
        logger.debug("searching query...");
      }
      long start = System.currentTimeMillis();
      QueryParser queryParser =
          new QueryParser(
              LuceneConfig.LUCENE_VERSION, schema.getDefaultSearchField(), schema.getAnalyzer());
      Query query = queryParser.parse(searchQuery.getQuery());
      int pageNo = searchQuery.getPageNo();
      int pageSize = searchQuery.getPageSize();
      int fullPageCount = pageNo * pageSize;
      int pageStartIndex = pageNo < 1 ? 0 : ((pageNo - 1) * pageSize);
      Sort sort = getSort(searchQuery);
      Filter filter = getFilter(searchQuery);
      TopFieldDocs topFieldDocs = indexSearcher.search(query, filter, fullPageCount, sort);
      ScoreDoc[] scoreDocs = topFieldDocs.scoreDocs;
      int scoreDocsLength = scoreDocs.length;
      List<OutputDocument> outputDocuments;

      if (scoreDocsLength <= pageStartIndex) {
        // the requested page has no data
        outputDocuments = CollectionUtil.newArrayList(0);
      } else {
        // only collect the documents that belong to the requested (last) page
        outputDocuments = CollectionUtil.newArrayList(scoreDocs.length - pageStartIndex);
        for (int i = pageStartIndex; i < scoreDocs.length; ++i) {
          Document doc = indexSearcher.doc(scoreDocs[i].doc);
          OutputDocument outputDocument = DocumentTransformUtil.toOutputDocument(doc, schema);
          outputDocuments.add(outputDocument);
        }
      }
      QueryResponse queryResponse = new QueryResponse();

      queryResponse.setOutputDocuments(outputDocuments);
      queryResponse.setTotalHits(topFieldDocs.totalHits);

      if (logger.isDebugEnabled()) {
        logger.debug("search query finish.");
      }
      long end = System.currentTimeMillis();
      long timeEscape = end - start;
      queryResponse.setTimeEscape(timeEscape);
      return queryResponse;
    } catch (Exception e) {
      logger.error("search query error", e);
      return new QueryResponse(e.getMessage(), ResultCodes.COMMON_ERROR);
    }
  }
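To make the paging arithmetic above concrete, a short hedged sketch: the searcher is asked for pageNo * pageSize hits in total, and only the slice starting at (pageNo - 1) * pageSize is turned into output documents. The numbers below are purely illustrative.

public class PagingArithmeticExample {
  public static void main(String[] args) {
    int pageNo = 3, pageSize = 10, hitsReturned = 27;

    int fullPageCount = pageNo * pageSize;                           // ask the searcher for 30 hits
    int pageStartIndex = pageNo < 1 ? 0 : (pageNo - 1) * pageSize;   // keep hits 20..29

    if (hitsReturned <= pageStartIndex) {
      System.out.println("page " + pageNo + " is empty");            // fewer hits than the page offset
    } else {
      System.out.println("page " + pageNo + " holds hits " + pageStartIndex
          + ".." + (hitsReturned - 1) + " of the " + fullPageCount + " requested");
    }
  }
}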
Example 23
  @Override
  public void addTerm(
      BooleanQuery booleanQuery,
      String field,
      String value,
      boolean like,
      BooleanClauseOccur booleanClauseOccur) {

    if (Validator.isNull(value)) {
      return;
    }

    Analyzer analyzer = getAnalyzer();

    if (analyzer instanceof PerFieldAnalyzer) {
      PerFieldAnalyzer perFieldAnalyzer = (PerFieldAnalyzer) analyzer;

      Analyzer fieldAnalyzer = perFieldAnalyzer.getAnalyzer(field);

      if (fieldAnalyzer instanceof LikeKeywordAnalyzer) {
        like = true;
      }
    }

    if (like) {
      value = StringUtil.replace(value, StringPool.PERCENT, StringPool.BLANK);
    }

    try {
      QueryParser queryParser = new QueryParser(getVersion(), field, analyzer);

      Query query = queryParser.parse(value);

      BooleanClause.Occur occur = null;

      if (booleanClauseOccur.equals(BooleanClauseOccur.MUST)) {
        occur = BooleanClause.Occur.MUST;
      } else if (booleanClauseOccur.equals(BooleanClauseOccur.MUST_NOT)) {
        occur = BooleanClause.Occur.MUST_NOT;
      } else {
        occur = BooleanClause.Occur.SHOULD;
      }

      _includeIfUnique(booleanQuery, like, queryParser, query, occur);
    } catch (Exception e) {
      if (_log.isWarnEnabled()) {
        _log.warn(e, e);
      }
    }
  }
Example 24
  /* (non-Javadoc)
   * @see org.apache.lucene.benchmark.quality.QualityQueryParser#parse(org.apache.lucene.benchmark.quality.QualityQuery)
   */
  public Query parse(QualityQuery qq) throws ParseException {
    QueryParser qp = queryParser.get();
    if (qp == null) {
      qp =
          new QueryParser(
              Version.LUCENE_CURRENT, indexField, new StandardAnalyzer(Version.LUCENE_CURRENT));
      queryParser.set(qp);
    }
    BooleanQuery bq = new BooleanQuery();
    for (int i = 0; i < qqNames.length; i++)
      bq.add(qp.parse(QueryParser.escape(qq.getValue(qqNames[i]))), BooleanClause.Occur.SHOULD);

    return bq;
  }
Example 25
  public void testClear() throws Exception {
    prepareTestData();
    queryParser = createQueryParser("blurb");
    luceneQuery = queryParser.parse("eats");
    cacheQuery = Search.getSearchManager(cache1).getQuery(luceneQuery);

    Query[] queries = new Query[2];
    queries[0] = luceneQuery;

    luceneQuery = queryParser.parse("playing");
    queries[1] = luceneQuery;

    Query luceneQuery = queries[0].combine(queries);
    CacheQuery cacheQuery = Search.getSearchManager(cache1).getQuery(luceneQuery);
    AssertJUnit.assertEquals(3, cacheQuery.getResultSize());

    cache2.clear();

    AssertJUnit.assertEquals(3, cacheQuery.getResultSize());
    cacheQuery = Search.getSearchManager(cache1).getQuery(luceneQuery);

    AssertJUnit.assertEquals(0, cacheQuery.getResultSize());
  }
Example 26
 /**
  * Builds a full-text query object.
  *
  * @param q the query keyword
  * @param fields the fields to query
  * @return the combined full-text query
  */
 public BooleanQuery getFullTextQuery(String q, String... fields) {
   Analyzer analyzer = new IKAnalyzer();
   BooleanQuery query = new BooleanQuery();
   try {
     if (StringUtils.isNotBlank(q)) {
       for (String field : fields) {
         QueryParser parser = new QueryParser(Version.LUCENE_36, field, analyzer);
         query.add(parser.parse(q), Occur.SHOULD);
       }
     }
   } catch (ParseException e) {
     e.printStackTrace();
   }
   return query;
 }
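A self-contained usage sketch for a helper like getFullTextQuery above: OR the keyword across several fields, then hand the combined query to an IndexSearcher. It substitutes StandardAnalyzer for IKAnalyzer and assumes a hypothetical index location and field names, so it illustrates the pattern rather than reproducing the original code.

import java.io.File;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;

public class FullTextQueryUsage {
  public static void main(String[] args) throws Exception {
    // OR the keyword across both fields, as getFullTextQuery() does above
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_36); // IKAnalyzer in the original
    BooleanQuery query = new BooleanQuery();
    for (String field : new String[] {"title", "content"}) {             // assumed field names
      query.add(new QueryParser(Version.LUCENE_36, field, analyzer).parse("lucene"), Occur.SHOULD);
    }

    Directory dir = FSDirectory.open(new File("/tmp/index"));            // assumed index location
    IndexSearcher searcher = new IndexSearcher(IndexReader.open(dir));
    TopDocs top = searcher.search(query, 10);
    for (ScoreDoc sd : top.scoreDocs) {
      Document doc = searcher.doc(sd.doc);
      System.out.println(doc.get("title"));
    }
    searcher.close();
    dir.close();
  }
}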
Example 27
  private int executeQuery(String field, String queryString, Directory dir)
      throws CorruptIndexException, IOException, ParseException {

    IndexReader reader = IndexReader.open(dir, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    QueryParser parser =
        new QueryParser(Version.LUCENE_35, field, new StandardWithACIIFoldingFilter());
    Query q1 = parser.parse(queryString);
    TopDocs hits = searcher.search(q1, 1000);
    int hitCount = hits.totalHits;

    searcher.close();

    return hitCount;
  }
Example 28
  /**
   * Creates a new instance; Passes the query directly on to the Lucene parser.
   *
   * @param values
   * @param query
   * @param analyzer
   */
  public FullText(SearchValues values, String query, Class<? extends Analyzer> analyzer) {
    super(values);
    Assert.notNull(analyzer, "Analyzer required");
    this.analyzer = analyzer;

    if (values.onlyTypes == null || values.onlyTypes.size() != 1) {
      throw new ApiUsageException(
          "Searches by full text are currently limited to a single type.\n"
              + "Plese use Search.onlyType()");
    }

    if (query == null || query.length() < 1) {
      throw new IllegalArgumentException("Query string must be non-empty");
    }

    if ((query.startsWith("*") || query.startsWith("?")) && !values.leadingWildcard) {
      throw new ApiUsageException(
          "Searches starting with a leading "
              + "wildcard (*,?) can be slow.\nPlease use "
              + "setAllowLeadingWildcard() to permit this usage.");
    }

    if (query.equals("*")) {
      throw new ApiUsageException(
          "Wildcard searches (*) must contain more than a single wildcard. ");
    }

    this.queryStr = query;
    try {
      final Analyzer a = analyzer.newInstance();
      final QueryParser parser = new /*Analyzing*/ QueryParser("combined_fields", a);
      parser.setAllowLeadingWildcard(values.leadingWildcard);
      q = parser.parse(queryStr);
    } catch (ParseException pe) {
      final String msg = queryStr + " caused a parse exception: " + pe.getMessage();
      // No longer logging these, since it's a simple user error
      ApiUsageException aue = new ApiUsageException(msg);
      throw aue;
    } catch (InstantiationException e) {
      ApiUsageException aue =
          new ApiUsageException(analyzer.getName() + " cannot be instantiated.");
      throw aue;
    } catch (IllegalAccessException e) {
      ApiUsageException aue =
          new ApiUsageException(analyzer.getName() + " cannot be instantiated.");
      throw aue;
    }
  }
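The constructor above rejects leading-wildcard queries unless the caller opted in, and forwards that choice to the parser. Below is a minimal sketch of the same guard in isolation, assuming plain Lucene types, a placeholder field name, and StandardAnalyzer instead of the configurable analyzer class used in the original.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

public class LeadingWildcardGuard {

  static Query parseUserQuery(String queryStr, boolean allowLeadingWildcard) throws ParseException {
    if ((queryStr.startsWith("*") || queryStr.startsWith("?")) && !allowLeadingWildcard) {
      // Mirrors the check above: leading wildcards can be slow, so require an explicit opt-in.
      throw new IllegalArgumentException("Leading wildcard queries are disabled: " + queryStr);
    }
    QueryParser parser =
        new QueryParser(Version.LUCENE_36, "combined_fields", new StandardAnalyzer(Version.LUCENE_36));
    parser.setAllowLeadingWildcard(allowLeadingWildcard);
    return parser.parse(queryStr);
  }

  public static void main(String[] args) throws ParseException {
    System.out.println(parseUserQuery("mouse~", false));  // plain query parses fine
    System.out.println(parseUserQuery("*ouse", true));    // leading wildcard only when opted in
  }
}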
Example 29
 private void search(Analyzer analyzer, Directory directory, String queryStr)
     throws CorruptIndexException, IOException, ParseException {
   IndexSearcher isearcher;
   // query the index
   isearcher = new IndexSearcher(directory);
   QueryParser tq = new QueryParser(Version.LUCENE_32, "text", ansjHeightAnalyzer);
   Query query = tq.parse(queryStr);
   System.out.println(query);
   TopDocs hits = isearcher.search(query, 5);
   System.out.println(queryStr + ":共找到" + hits.totalHits + "条记录!");
   for (int i = 0; i < hits.scoreDocs.length; i++) {
     int docId = hits.scoreDocs[i].doc;
     Document document = isearcher.doc(docId);
     System.out.println(toHighlighter(ansjHeightAnalyzer, query, document));
   }
 }
Example 30
  @Transactional(propagation = Propagation.REQUIRED)
  public int searchCount(String[] f, String luquery, Class<?>... entities) throws Exception {

    // create FullTextEntityManager ----------------------------
    FullTextEntityManager fullTextEntityManager =
        org.hibernate.search.jpa.Search.getFullTextEntityManager(entityManager);
    // ---------------------------------------------------------

    QueryParser parser =
        new MultiFieldQueryParser(Version.LUCENE_31, f, new StandardAnalyzer(Version.LUCENE_31));

    org.apache.lucene.search.Query query = parser.parse(luquery.trim());

    // wrap Lucene query in a javax.persistence.Query
    return fullTextEntityManager.createFullTextQuery(query, entities).getResultSize();
  }