@Test
  public void testMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("" + address.getAddressId());
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("documentId does not work properly", 1, query.getResultSize());

    luceneQuery = parser.parse("street1:peachtree");
    query = s.createFullTextQuery(luceneQuery).setProjection("idx_street2", FullTextQuery.THIS);
    assertEquals("Not properly indexed", 1, query.getResultSize());
    Object[] firstResult = (Object[]) query.list().get(0);
    assertEquals("@Field.store not respected", "JBoss", firstResult[0]);

    // Verify that AddressClassBridge was applied as well:
    luceneQuery = parser.parse("AddressClassBridge:Applied\\!");
    assertEquals(1, s.createFullTextQuery(luceneQuery).getResultSize());

    s.delete(firstResult[1]);
    tx.commit();
    s.close();
  }
  @Test
  public void testClassBridgeInstanceMapping() throws Exception {
    OrderLine orderLine = new OrderLine();
    orderLine.setName("Sequoia");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(orderLine);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("orderLineName:Sequoia");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineName_ngram:quo");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer configuration not applied", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineNameViaParam:Sequoia");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Parameter configuration not applied", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
  @Test
  public void testBridgeMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:peac");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("PrefixQuery should not be on", 0, query.getResultSize());

    luceneQuery = parser.parse("street1_abridged:peac");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
  // TODO: Move to QueryParserTestBase once standard flexible parser gets this capability
  public void testMultiWordSynonyms() throws Exception {
    QueryParser dumb = new QueryParser("field", new Analyzer1());
    dumb.setSplitOnWhitespace(false);

    // A multi-word synonym source will form a synonym query for the same-starting-position tokens
    BooleanQuery.Builder multiWordExpandedBqBuilder = new BooleanQuery.Builder();
    Query multiWordSynonymQuery =
        new SynonymQuery(new Term("field", "guinea"), new Term("field", "cavy"));
    multiWordExpandedBqBuilder.add(multiWordSynonymQuery, BooleanClause.Occur.SHOULD);
    multiWordExpandedBqBuilder.add(
        new TermQuery(new Term("field", "pig")), BooleanClause.Occur.SHOULD);
    Query multiWordExpandedBq = multiWordExpandedBqBuilder.build();
    assertEquals(multiWordExpandedBq, dumb.parse("guinea pig"));

    // With the phrase operator, a multi-word synonym source will form a multiphrase query.
    // When the number of expanded terms differs from the number of original terms, the term
    // positions no longer line up, which is undesirable.
    MultiPhraseQuery.Builder multiWordExpandedMpqBuilder = new MultiPhraseQuery.Builder();
    multiWordExpandedMpqBuilder.add(
        new Term[] {new Term("field", "guinea"), new Term("field", "cavy")});
    multiWordExpandedMpqBuilder.add(new Term("field", "pig"));
    Query multiWordExpandedMPQ = multiWordExpandedMpqBuilder.build();
    assertEquals(multiWordExpandedMPQ, dumb.parse("\"guinea pig\""));

    // custom behavior, the synonyms are expanded, unless you use quote operator
    QueryParser smart = new SmartQueryParser();
    smart.setSplitOnWhitespace(false);
    assertEquals(multiWordExpandedBq, smart.parse("guinea pig"));

    PhraseQuery.Builder multiWordUnexpandedPqBuilder = new PhraseQuery.Builder();
    multiWordUnexpandedPqBuilder.add(new Term("field", "guinea"));
    multiWordUnexpandedPqBuilder.add(new Term("field", "pig"));
    Query multiWordUnexpandedPq = multiWordUnexpandedPqBuilder.build();
    assertEquals(multiWordUnexpandedPq, smart.parse("\"guinea pig\""));
  }
 public void testPrefixQuery() throws ParseException {
   Analyzer a = new ASCIIAnalyzer();
   QueryParser parser = new QueryParser(FIELD, a);
   assertEquals("ubersetzung ubersetz*", parser.parse("übersetzung übersetz*").toString(FIELD));
   assertEquals("motley crue motl* cru*", parser.parse("Mötley Crüe Mötl* crü*").toString(FIELD));
   assertEquals("rene? zellw*", parser.parse("René? Zellw*").toString(FIELD));
 }
  public void testFuzzySlopeExtendability() throws ParseException {
    QueryParser qp =
        new QueryParser("a", new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {

          @Override
          Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage)
              throws ParseException {

            if (fuzzySlop.image.endsWith("€")) {
              float fms = fuzzyMinSim;
              try {
                fms =
                    Float.valueOf(fuzzySlop.image.substring(1, fuzzySlop.image.length() - 1))
                        .floatValue();
              } catch (Exception ignored) {
              }
              float value = Float.parseFloat(termImage);
              return getRangeQuery(
                  qfield,
                  Float.toString(value - fms / 2.f),
                  Float.toString(value + fms / 2.f),
                  true,
                  true);
            }
            return super.handleBareFuzzy(qfield, fuzzySlop, termImage);
          }
        };
    assertEquals(qp.parse("a:[11.95 TO 12.95]"), qp.parse("12.45~1€"));
  }
  /**
   * @param query the query string entered by the user
   * @param virtualWikiNames comma separated list of virtual wiki names
   * @param languages comma separated list of language codes to search in, may be null to search
   *     all languages
   * @return the combined query, restricted to the given virtual wikis and languages
   */
  private Query buildQuery(String query, String virtualWikiNames, String languages)
      throws ParseException {
    // build a query like this: <user query string> AND <wikiNamesQuery> AND <languageQuery>
    BooleanQuery bQuery = new BooleanQuery();
    Query parsedQuery = null;

    // for object search
    if (query.startsWith("PROP ")) {
      String property = query.substring(0, query.indexOf(":"));
      query = query.substring(query.indexOf(":") + 1, query.length());
      QueryParser qp = new QueryParser(Version.LUCENE_36, property, this.analyzer);
      parsedQuery = qp.parse(query);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    } else if (query.startsWith("MULTI ")) {
      // for fulltext search
      List<String> fieldList = IndexUpdater.fields;
      String[] fields = fieldList.toArray(new String[fieldList.size()]);
      BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
      for (int i = 0; i < flags.length; i++) {
        flags[i] = BooleanClause.Occur.SHOULD;
      }
      parsedQuery =
          MultiFieldQueryParser.parse(Version.LUCENE_36, query, fields, flags, this.analyzer);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    } else {
      String[] fields =
          new String[] {
            IndexFields.FULLTEXT,
            IndexFields.DOCUMENT_TITLE,
            IndexFields.DOCUMENT_NAME,
            IndexFields.FILENAME
          };
      BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
      for (int i = 0; i < flags.length; i++) {
        flags[i] = BooleanClause.Occur.SHOULD;
      }
      QueryParser parser = new MultiFieldQueryParser(Version.LUCENE_36, fields, this.analyzer);
      parsedQuery = parser.parse(query);
      // Since the sub-queries are OR-ed, each sub-query score is normally divided by the number
      // of sub-queries, which would cause extra-small scores whenever there is a hit on only one
      // sub-query; compensate for this by boosting the whole outer query.
      parsedQuery.setBoost(fields.length);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    }

    if (virtualWikiNames != null && virtualWikiNames.length() > 0) {
      bQuery.add(
          buildOredTermQuery(virtualWikiNames, IndexFields.DOCUMENT_WIKI),
          BooleanClause.Occur.MUST);
    }
    if (languages != null && languages.length() > 0) {
      bQuery.add(
          buildOredTermQuery(languages, IndexFields.DOCUMENT_LANGUAGE), BooleanClause.Occur.MUST);
    }

    return bQuery;
  }
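  // A minimal usage sketch (hypothetical call site within this class; the searcher variable and
  // the argument values are assumptions, not taken from the original code):
  //   Query q = buildQuery("lucene rocks", "xwiki", "en,fr");
  //   TopDocs hits = searcher.search(q, 20);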
 /** simple CJK synonym test */
 public void testCJKSynonym() throws Exception {
   Query expected = new SynonymQuery(new Term("field", "国"), new Term("field", "國"));
   QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
   assertEquals(expected, qp.parse("国"));
   qp.setDefaultOperator(Operator.AND);
   assertEquals(expected, qp.parse("国"));
   expected = new BoostQuery(expected, 2f);
   assertEquals(expected, qp.parse("国^2"));
 }
  public void testWildcardDoesNotNormalizeEscapedChars() throws Exception {
    Analyzer asciiAnalyzer = new ASCIIAnalyzer();
    Analyzer keywordAnalyzer = new MockAnalyzer(random());
    QueryParser parser = new QueryParser(FIELD, asciiAnalyzer);

    assertTrue(isAHit(parser.parse("e*e"), "étude", asciiAnalyzer));
    assertTrue(isAHit(parser.parse("é*e"), "etude", asciiAnalyzer));
    assertFalse(isAHit(parser.parse("\\é*e"), "etude", asciiAnalyzer));
    assertTrue(isAHit(parser.parse("\\é*e"), "étude", keywordAnalyzer));
  }
 /** synonyms with default OR operator */
 public void testCJKSynonymsOR() throws Exception {
   BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
   expectedB.add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.SHOULD);
   Query inner = new SynonymQuery(new Term("field", "国"), new Term("field", "國"));
   expectedB.add(inner, BooleanClause.Occur.SHOULD);
   Query expected = expectedB.build();
   QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
   assertEquals(expected, qp.parse("中国"));
   expected = new BoostQuery(expected, 2f);
   assertEquals(expected, qp.parse("中国^2"));
 }
  // LUCENE-4176
  public void testByteTerms() throws Exception {
    String s = "เข";
    Analyzer analyzer = new MockBytesAnalyzer();
    QueryParser qp = new QueryParser(FIELD, analyzer);

    assertTrue(isAHit(qp.parse("[เข TO เข]"), s, analyzer));
    assertTrue(isAHit(qp.parse("เข~1"), s, analyzer));
    assertTrue(isAHit(qp.parse("เข*"), s, analyzer));
    assertTrue(isAHit(qp.parse("เ*"), s, analyzer));
    assertTrue(isAHit(qp.parse("เ??"), s, analyzer));
  }
 /** synonyms with default AND operator */
 public void testCJKSynonymsAND() throws Exception {
   BooleanQuery.Builder expectedB = new BooleanQuery.Builder();
   expectedB.add(new TermQuery(new Term(FIELD, "中")), BooleanClause.Occur.MUST);
   Query inner = new SynonymQuery(new Term(FIELD, "国"), new Term(FIELD, "國"));
   expectedB.add(inner, BooleanClause.Occur.MUST);
   Query expected = expectedB.build();
   QueryParser qp = new QueryParser(FIELD, new MockCJKSynonymAnalyzer());
   qp.setDefaultOperator(Operator.AND);
   assertEquals(expected, qp.parse("中国"));
   expected = new BoostQuery(expected, 2f);
   assertEquals(expected, qp.parse("中国^2"));
 }
 public void testFuzzyQuery() throws ParseException {
   Analyzer a = new ASCIIAnalyzer();
   QueryParser parser = new QueryParser(FIELD, a);
   assertEquals(
       "ubersetzung ubersetzung~1", parser.parse("Übersetzung Übersetzung~0.9").toString(FIELD));
   assertEquals(
       "motley crue motley~1 crue~2",
       parser.parse("Mötley Crüe Mötley~0.75 Crüe~0.5").toString(FIELD));
   assertEquals(
       "renee zellweger renee~0 zellweger~2",
       parser.parse("Renée Zellweger Renée~0.9 Zellweger~").toString(FIELD));
 }
 public void testWildCardQuery() throws ParseException {
   Analyzer a = new ASCIIAnalyzer();
   QueryParser parser = new QueryParser(FIELD, a);
   parser.setAllowLeadingWildcard(true);
   assertEquals("*bersetzung uber*ung", parser.parse("*bersetzung über*ung").toString(FIELD));
   parser.setAllowLeadingWildcard(false);
   assertEquals(
       "motley crue motl?* cru?", parser.parse("Mötley Cr\u00fce Mötl?* Crü?").toString(FIELD));
   assertEquals(
       "renee zellweger ren?? zellw?ger",
       parser.parse("Renée Zellweger Ren?? Zellw?ger").toString(FIELD));
 }
 /** forms multiphrase query */
 public void testCJKSynonymsPhrase() throws Exception {
   MultiPhraseQuery.Builder expectedQBuilder = new MultiPhraseQuery.Builder();
   expectedQBuilder.add(new Term("field", "中"));
   expectedQBuilder.add(new Term[] {new Term("field", "国"), new Term("field", "國")});
   QueryParser qp = new QueryParser("field", new MockCJKSynonymAnalyzer());
   qp.setDefaultOperator(Operator.AND);
   assertEquals(expectedQBuilder.build(), qp.parse("\"中国\""));
   Query expected = new BoostQuery(expectedQBuilder.build(), 2f);
   assertEquals(expected, qp.parse("\"中国\"^2"));
   expectedQBuilder.setSlop(3);
   expected = new BoostQuery(expectedQBuilder.build(), 2f);
   assertEquals(expected, qp.parse("\"中国\"~3^2"));
 }
  public void testWildcardAlone() throws ParseException {
    // seems like crazy edge case, but can be useful in concordance
    QueryParser parser = new QueryParser(FIELD, new ASCIIAnalyzer());
    parser.setAllowLeadingWildcard(false);
    expectThrows(
        ParseException.class,
        () -> {
          parser.parse("*");
        });

    QueryParser parser2 = new QueryParser("*", new ASCIIAnalyzer());
    parser2.setAllowLeadingWildcard(false);
    assertEquals(new MatchAllDocsQuery(), parser2.parse("*"));
  }
 /** forms multiphrase query */
 public void testSynonymsPhrase() throws Exception {
   MultiPhraseQuery.Builder expectedQBuilder = new MultiPhraseQuery.Builder();
   expectedQBuilder.add(new Term(FIELD, "old"));
   expectedQBuilder.add(new Term[] {new Term(FIELD, "dogs"), new Term(FIELD, "dog")});
   QueryParser qp = new QueryParser(FIELD, new MockSynonymAnalyzer());
   assertEquals(expectedQBuilder.build(), qp.parse("\"old dogs\""));
   qp.setDefaultOperator(Operator.AND);
   assertEquals(expectedQBuilder.build(), qp.parse("\"old dogs\""));
   BoostQuery expected = new BoostQuery(expectedQBuilder.build(), 2f);
   assertEquals(expected, qp.parse("\"old dogs\"^2"));
   expectedQBuilder.setSlop(3);
   expected = new BoostQuery(expectedQBuilder.build(), 2f);
   assertEquals(expected, qp.parse("\"old dogs\"~3^2"));
 }
  public void testWildCardEscapes() throws ParseException, IOException {
    Analyzer a = new ASCIIAnalyzer();
    QueryParser parser = new QueryParser(FIELD, a);
    assertTrue(isAHit(parser.parse("mö*tley"), "moatley", a));
    // need to have at least one genuine wildcard to trigger the wildcard analysis
    // hence the * before the y
    assertTrue(isAHit(parser.parse("mö\\*tl*y"), "mo*tley", a));
    // escaped backslash then true wildcard
    assertTrue(isAHit(parser.parse("mö\\\\*tley"), "mo\\atley", a));
    // escaped wildcard then true wildcard
    assertTrue(isAHit(parser.parse("mö\\??ley"), "mo?tley", a));

    // the first is an escaped * which should yield a miss
    assertFalse(isAHit(parser.parse("mö\\*tl*y"), "moatley", a));
  }
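  // Persists two Animal instances (indexed across shards) plus a non-sharded Furniture, then
  // verifies that updates and queries spanning sharded and non-sharded indexes behave correctly.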
  @Test
  // there is a mismatch of the index name as handled by IndexManagerHolder and the ES-IM: Animal.0
  // vs. Animal00
  @Category(ElasticsearchSupportInProgress.class)
  public void testBehavior() throws Exception {
    Session s = openSession();
    Transaction tx = s.beginTransaction();
    Animal a = new Animal();
    a.setId(1);
    a.setName("Elephant");
    s.persist(a);
    a = new Animal();
    a.setId(2);
    a.setName("Bear");
    s.persist(a);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();
    a = s.get(Animal.class, 1);
    a.setName("Mouse");
    Furniture fur = new Furniture();
    fur.setColor("dark blue");
    s.persist(fur);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();
    FullTextSession fts = Search.getFullTextSession(s);
    QueryParser parser = new QueryParser("id", TestConstants.stopAnalyzer);

    List results = fts.createFullTextQuery(parser.parse("name:mouse OR name:bear")).list();
    assertEquals(
        "Either double insert, single update, or query fails with shards", 2, results.size());

    results = fts.createFullTextQuery(parser.parse("name:mouse OR name:bear OR color:blue")).list();
    assertEquals("Mixing shared and non sharded properties fails", 3, results.size());
    results = fts.createFullTextQuery(parser.parse("name:mouse OR name:bear OR color:blue")).list();
    assertEquals(
        "Mixing shared and non sharded properties fails with indexreader reuse", 3, results.size());
    for (Object o : results) {
      s.delete(o);
    }
    tx.commit();
    s.close();
  }
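 // Adapts the shared query-parser test harness: casts the supplied configuration to the classic
 // QueryParser and delegates parsing to it.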
 @Override
 public Query getQuery(String query, CommonQueryParserConfiguration cqpC) throws Exception {
   assert cqpC != null : "Parameter must not be null";
   assert (cqpC instanceof QueryParser) : "Parameter must be instance of QueryParser";
   QueryParser qp = (QueryParser) cqpC;
   return qp.parse(query);
 }
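  /**
   * Parses the query string against the given field with a StandardAnalyzer, collects up to 50
   * hits, and copies the fullpath, filename and teaser of each hit into the returned SearchResult.
   */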
  public SearchResult search(String field, String query) {
    SearchResult searchResult = new SearchResult();
    try {
      Analyzer analyzer = new StandardAnalyzer();
      QueryParser queryParser = new QueryParser(field, analyzer);
      Query q = queryParser.parse(query);
      long start = System.currentTimeMillis();
      IndexSearcher searcher = getSearcher();
      TopDocs hits = searcher.search(q, 50);
      searchResult.setTotalHits(hits.totalHits);
      long end = System.currentTimeMillis();
      searchResult.setTime(end - start);
      System.err.println(
          "Found "
              + hits.totalHits
              + " document(s) (in "
              + (end - start)
              + " milliseconds) that matched query '"
              + q
              + "':");
      for (ScoreDoc scoreDoc : hits.scoreDocs) {
        Document doc = searcher.doc(scoreDoc.doc);
        ResultDocument document = new ResultDocument();
        document.setFullpath("\"" + doc.get("fullpath").replace("\"", "") + "\"");
        document.setFilename("\"" + doc.get("filename").replace("\"", "") + "\"");
        document.setTeaser("\"" + doc.get("teaser") + "\"");
        searchResult.addDocumnent(document);
      }
      close();
    } catch (Exception e) {
      e.printStackTrace();
    }

    return searchResult;
  }
  @Test
  public void testAnalyzerDef() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1_ngram:pea");

    final FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer inoperant", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
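  // Standalone demo: parses "java in action" against the "contents" field, boosts recently
  // published documents via RecencyBoostCustomScoreQuery, and sorts hits by score, then by "title2".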
  public static void main(String[] args) throws IOException, ParseException {
    String indexDir = "C:/lucenedir";
    Directory directory = FSDirectory.open(Paths.get(indexDir));
    IndexReader reader = DirectoryReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(reader);

    int day = (int) (new Date().getTime() / Constans.DAY_MILLIS);
    QueryParser parser = new QueryParser("contents", new StandardAnalyzer());
    Query query = parser.parse("java in action");
    Query customScoreQuery =
        new RecencyBoostCustomScoreQuery(query, 2.0, day, 6 * 365, "pubmonthAsDay");
    Sort sort =
        new Sort(
            new SortField[] {
              SortField.FIELD_SCORE, new SortField("title2", SortField.Type.STRING)
            });
    TopDocs hits = searcher.search(customScoreQuery, null, Integer.MAX_VALUE, sort, true, false);

    for (int i = 0; i < hits.scoreDocs.length; i++) {
      // Either way of fetching the Document works; searcher.doc internally delegates to reader.document
      // Document doc = reader.document(hits.scoreDocs[i].doc);
      Document doc = searcher.doc(hits.scoreDocs[i].doc);
      System.out.println(
          (1 + i)
              + ": "
              + doc.get("title")
              + ": pubmonth="
              + doc.get("pubmonth")
              + " score="
              + hits.scoreDocs[i].score);
    }
    reader.close();
    directory.close();
  }
  @Override
  public void testNewFieldQuery() throws Exception {
    /** ordinary behavior, synonyms form uncoordinated boolean query */
    QueryParser dumb = new QueryParser("field", new Analyzer1());
    Query expanded = new SynonymQuery(new Term("field", "dogs"), new Term("field", "dog"));
    assertEquals(expanded, dumb.parse("\"dogs\""));
    /** even with the phrase operator the behavior is the same */
    assertEquals(expanded, dumb.parse("dogs"));

    /** custom behavior, the synonyms are expanded, unless you use quote operator */
    QueryParser smart = new SmartQueryParser();
    assertEquals(expanded, smart.parse("dogs"));

    Query unexpanded = new TermQuery(new Term("field", "dogs"));
    assertEquals(unexpanded, smart.parse("\"dogs\""));
  }
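 /** Parses the given token against the given field and returns the number of matching documents. */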
 private int nbrOfMatchingResults(String field, String token, FullTextSession s)
     throws ParseException {
   QueryParser parser = new QueryParser(field, TestConstants.standardAnalyzer);
   org.apache.lucene.search.Query luceneQuery = parser.parse(token);
   FullTextQuery query = s.createFullTextQuery(luceneQuery);
   return query.getResultSize();
 }
  /** This method is only intended for testing search. */
  public static List<String> searchQuery(
      String indexDir, String queryString, int numResults, CharArraySet stopwords) {
    String field = "contents";
    List<String> hitPaths = new ArrayList<String>();

    try {
      IndexReader reader = DirectoryReader.open(FSDirectory.open(new File(indexDir)));
      IndexSearcher searcher = new IndexSearcher(reader);

      Analyzer analyzer = new MyAnalyzer(Version.LUCENE_44, stopwords);

      QueryParser parser = new QueryParser(Version.LUCENE_44, field, analyzer);
      Query query;
      query = parser.parse(QueryParser.escape(queryString));

      TopDocs results = searcher.search(query, null, numResults);
      for (ScoreDoc hit : results.scoreDocs) {
        String path = searcher.doc(hit.doc).get("path");
        hitPaths.add(path.substring(0, path.length() - 4)); // chop off the file extension (".txt")
      }
    } catch (IOException e) {
      System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    } catch (ParseException e) {
      System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    }

    return hitPaths;
  }
  /**
   * Parse the strings containing Lucene queries.
   *
   * @param qs array of strings containing query expressions
   * @param a analyzer to use when parsing queries
   * @return array of Lucene queries
   */
  private static Query[] createQueries(List<Object> qs, Analyzer a) {
    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, DocMaker.BODY_FIELD, a);
    List<Object> queries = new ArrayList<>();
    for (int i = 0; i < qs.size(); i++) {
      try {

        Object query = qs.get(i);
        Query q = null;
        if (query instanceof String) {
          q = qp.parse((String) query);

        } else if (query instanceof Query) {
          q = (Query) query;

        } else {
          System.err.println("Unsupported Query Type: " + query);
        }

        if (q != null) {
          queries.add(q);
        }

      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    return queries.toArray(new Query[0]);
  }
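  // Builds a QueryExpression for the request: instance requests become an "<idProperty>:<id>"
  // lookup, any other non-empty query string is parsed with a KeywordAnalyzer (leading wildcards
  // allowed), and an empty query falls back to an AlwaysQueryExpression.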
  private QueryExpression create(Request request, ResourceDefinition resourceDefinition)
      throws InvalidQueryException {
    String queryString;
    if (request.getCardinality() == Request.Cardinality.INSTANCE) {
      String idPropertyName = resourceDefinition.getIdPropertyName();
      queryString =
          String.format("%s:%s", idPropertyName, request.<String>getProperty(idPropertyName));
    } else {
      queryString = request.getQueryString();
    }

    QueryExpression queryExpression;
    if (queryString != null && !queryString.isEmpty()) {
      QueryParser queryParser = new QueryParser(Version.LUCENE_48, "name", new KeywordAnalyzer());
      queryParser.setLowercaseExpandedTerms(false);
      queryParser.setAllowLeadingWildcard(true);
      Query query;
      try {
        query = queryParser.parse((String) escape(queryString));
      } catch (ParseException e) {
        throw new InvalidQueryException(e.getMessage());
      }
      LOG.info("LuceneQuery: " + query);
      queryExpression = create(query, resourceDefinition);
    } else {
      queryExpression = new AlwaysQueryExpression();
    }
    // add query properties to request so that they are returned
    request.addAdditionalSelectProperties(queryExpression.getProperties());
    return queryExpression;
  }
  /**
   * give the id list of sentences, from Lucene index
   *
   * @param input input word
   * @param catalogName catalog (domain) name which we'd like to search in
   * @param limit how many hits are needed (0 means all)
   */
  public List<String> query(String input, String catalogName, int limit) {

    List<String> res = new ArrayList<String>();
    try {

      catalog c = catalogs.get(catalogName);
      IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(c.indexPath)));
      IndexSearcher searcher = new IndexSearcher(reader);

      QueryParser parser = new QueryParser("contents", analyzer);
      Query query = parser.parse(QueryParser.escape(input));

      int n = limit > 0 ? limit : searcher.count(query);
      if (n == 0) n = 1;
      TopDocs results = searcher.search(query, n);

      int endPos = limit;
      if (limit != 0) endPos = Math.min(results.totalHits, limit); // 1st n hits
      else endPos = results.totalHits; // all hits

      for (int i = 0; i < endPos; i++) {
        int id = results.scoreDocs[i].doc;
        Document doc = searcher.doc(id);
        res.add(doc.get("filename"));
      }
      reader.close();
      return res;

    } catch (ParseException e) {
      log(e.getMessage());
    } catch (IOException e) {
      log(e.getMessage());
    }
    return res;
  }
  /**
   * Builds the Lucene query for the configured query string and query type, restricted to the
   * configured document type.
   *
   * @return the assembled boolean query
   * @throws Exception if the query string cannot be parsed
   */
  public Query getQuery() throws Exception {
    PaodingAnalyzer analyzer = new PaodingAnalyzer();
    String field = "contents";
    Query query = null;

    BooleanQuery booleanQuery = new BooleanQuery();

    if ("term".equals(this.qtype)) {
      QueryParser parser = new QueryParser(Version.LUCENE_44, field, analyzer);
      query = parser.parse(queryStr);

    } else if ("fuzz".equals(this.qtype)) {
      Term term = new Term(field, queryStr);
      query = new FuzzyQuery(term);
    } else {
      Term term = new Term(field, queryStr);
      query = new PrefixQuery(term);
    }
    if (!"all".equals(this.docType)) {
      Term typeTerm = new Term("type", this.docType);
      TermQuery typeQuery = new TermQuery(typeTerm);
      booleanQuery.add(typeQuery, BooleanClause.Occur.MUST);
    }

    // System.out.println("--this.docType---"+this.docType);
    booleanQuery.add(query, BooleanClause.Occur.MUST);

    return booleanQuery;
  }