// Verifies that a full-text query created without target entity classes fails
// fast with a descriptive SearchException when no entities are mapped for
// search, instead of silently returning an empty result list.
public void testQueryOnAllEntities() throws Exception {

    FullTextSession s = Search.getFullTextSession(openSession());

    Transaction tx = s.beginTransaction();
    Person person = new Person();
    person.setName("Jon Doe");
    s.save(person);
    tx.commit();

    tx = s.beginTransaction();
    QueryParser parser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "name", TestConstants.standardAnalyzer);
    Query query = parser.parse("name:foo");
    // No entity classes passed: the query targets "all mapped entities".
    FullTextQuery hibQuery = s.createFullTextQuery(query);
    try {
      hibQuery.list();
      fail();
    } catch (SearchException e) {
      // The expected failure message documents that nothing is searchable.
      assertTrue("Wrong message", e.getMessage().startsWith("There are no mapped entities"));
    }

    tx.rollback();
    s.close();
  }
Example #2
0
  /**
   * Runs {@code query} (parsed against {@code field}) on the current index and
   * returns up to 50 hits wrapped in a {@link SearchResult}.
   *
   * <p>Best-effort contract: any failure is logged and the (possibly empty or
   * partial) result object is still returned.
   *
   * @param field default field the query string is parsed against
   * @param query Lucene query string
   * @return populated search result, never {@code null}
   */
  public SearchResult search(String field, String query) {
    SearchResult searchResult = new SearchResult();
    try {
      Analyzer analyzer = new StandardAnalyzer();
      QueryParser queryParser = new QueryParser(field, analyzer);
      Query q = queryParser.parse(query);
      long start = System.currentTimeMillis();
      IndexSearcher searcher = getSearcher();
      TopDocs hits = searcher.search(q, 50);
      searchResult.setTotalHits(hits.totalHits);
      long end = System.currentTimeMillis();
      searchResult.setTime(end - start);
      System.err.println(
          "Found "
              + hits.totalHits
              + " document(s) (in "
              + (end - start)
              + " milliseconds) that matched query '"
              + q
              + "':");
      for (ScoreDoc scoreDoc : hits.scoreDocs) {
        Document doc = searcher.doc(scoreDoc.doc);
        ResultDocument document = new ResultDocument();
        // NOTE(review): doc.get(...) returns null for unstored fields, which
        // would NPE in replace(); currently swallowed by the broad catch below.
        document.setFullpath("\"" + doc.get("fullpath").replace("\"", "") + "\"");
        document.setFilename("\"" + doc.get("filename").replace("\"", "") + "\"");
        document.setTeaser("\"" + doc.get("teaser") + "\"");
        searchResult.addDocumnent(document);
      }
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      // Fix: the original only called close() on the success path, leaking the
      // underlying searcher/reader whenever parsing or searching threw.
      try {
        close();
      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    return searchResult;
  }
 /**
  * Returns the ESA concept vector for {@code phrase}, restricted by
  * {@code validIds} (semantics delegated to {@code esaHelper.getWpIdFilter}).
  * Results are memoized in {@code phraseCache}.
  *
  * @param phrase free-text phrase to look up
  * @param validIds id set used to build the filter; passed through as-is
  * @return concept-id -&gt; score map, or {@code null} if the phrase cannot be
  *     parsed as a Lucene query
  * @throws IOException if the index search fails
  */
 public TIntDoubleHashMap getConceptVector(String phrase, TIntSet validIds) throws IOException {
   synchronized (phraseCache) {
     if (phraseCache.containsKey(phrase)) {
       return phraseCache.get(phrase);
     }
   }
   QueryParser parser = new QueryParser(Version.LUCENE_42, "text", analyzer);
   TopDocs docs = null;
   try {
     docs = searcher.search(parser.parse(phrase), esaHelper.getWpIdFilter(validIds), 5000);
   } catch (org.apache.lucene.queryparser.classic.ParseException e) {
     LOG.log(Level.WARNING, "parsing of phrase " + phrase + " failed", e);
     return null;
   }
   pruneSimilar(docs);
   TIntDoubleHashMap result = expandScores(docs.scoreDocs);
   // Two threads may compute the same phrase concurrently (the cache lock is
   // released between the lookup above and this put); last writer wins, which
   // is harmless as long as the computation is deterministic.
   synchronized (phraseCache) {
     phraseCache.put(phrase, result);
   }
   return result;
 }
 /** Counts the hits for {@code token} parsed against {@code field} via Hibernate Search. */
 private int nbrOfMatchingResults(String field, String token, FullTextSession s)
     throws ParseException {
   org.apache.lucene.search.Query parsed =
       new QueryParser(field, TestConstants.standardAnalyzer).parse(token);
   return s.createFullTextQuery(parsed).getResultSize();
 }
  // Verifies programmatic/annotation mapping of Address: document id querying,
  // field indexing and storage, projection, and the class-level bridge.
  @Test
  public void testMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    // Query by the entity's id to exercise the document-id mapping.
    org.apache.lucene.search.Query luceneQuery = parser.parse("" + address.getAddressId());
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("documentId does not work properly", 1, query.getResultSize());

    // street1 must be indexed; idx_street2 must be stored so it can be projected.
    luceneQuery = parser.parse("street1:peachtree");
    query = s.createFullTextQuery(luceneQuery).setProjection("idx_street2", FullTextQuery.THIS);
    assertEquals("Not properly indexed", 1, query.getResultSize());
    Object[] firstResult = (Object[]) query.list().get(0);
    assertEquals("@Field.store not respected", "JBoss", firstResult[0]);

    // Verify that AddressClassBridge was applied as well:
    luceneQuery = parser.parse("AddressClassBridge:Applied\\!");
    assertEquals(1, s.createFullTextQuery(luceneQuery).getResultSize());

    // firstResult[1] is the THIS projection (the managed entity) — clean it up.
    s.delete(firstResult[1]);
    tx.commit();
    s.close();
  }
  // Verifies that a class-bridge *instance* mapping on OrderLine produces the
  // expected fields, honors the analyzer configuration (ngram sub-field), and
  // applies bridge parameters.
  @Test
  public void testClassBridgeInstanceMapping() throws Exception {
    OrderLine orderLine = new OrderLine();
    orderLine.setName("Sequoia");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(orderLine);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    // Field produced by the bridge itself.
    org.apache.lucene.search.Query luceneQuery = parser.parse("orderLineName:Sequoia");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    // ngram sub-field only matches if the configured analyzer was applied.
    luceneQuery = parser.parse("orderLineName_ngram:quo");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer configuration not applied", 1, query.getResultSize());

    // Field whose name comes from a bridge parameter.
    luceneQuery = parser.parse("orderLineNameViaParam:Sequoia");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Parameter configuration not applied", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
  // Verifies that the @AnalyzerDef-configured ngram analyzer is applied to the
  // street1_ngram field: a 3-gram prefix of "Peachtree" must match.
  @Test
  public void testAnalyzerDef() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    // "pea" only matches if the ngram analyzer tokenized "peachtree".
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1_ngram:pea");

    final FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer inoperant", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
  // Verifies field-bridge mapping: the plain street1 field must NOT behave
  // like a prefix query, while the bridged street1_abridged field must match
  // the truncated form produced by the bridge.
  @Test
  public void testBridgeMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    // "peac" is not a full token of street1 — must not match.
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:peac");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("PrefixQuery should not be on", 0, query.getResultSize());

    // The bridge writes an abridged form, so "peac" matches here.
    luceneQuery = parser.parse("street1_abridged:peac");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }
  /**
   * Demonstrates recency-boosted scoring: parses a query against "contents",
   * wraps it in a RecencyBoostCustomScoreQuery and prints the hits sorted by
   * score then title.
   */
  public static void main(String[] args) throws IOException, ParseException {
    String indexDir = "C:/lucenedir";
    // try-with-resources: the original leaked reader/directory when parsing
    // or searching threw before the explicit close() calls.
    try (Directory directory = FSDirectory.open(Paths.get(indexDir));
        IndexReader reader = DirectoryReader.open(directory)) {
      IndexSearcher searcher = new IndexSearcher(reader);

      int day = (int) (new Date().getTime() / Constans.DAY_MILLIS);
      QueryParser parser = new QueryParser("contents", new StandardAnalyzer());
      Query query = parser.parse("java in action");
      Query customScoreQuery =
          new RecencyBoostCustomScoreQuery(query, 2.0, day, 6 * 365, "pubmonthAsDay");
      Sort sort =
          new Sort(
              new SortField[] {
                SortField.FIELD_SCORE, new SortField("title2", SortField.Type.STRING)
              });
      TopDocs hits = searcher.search(customScoreQuery, null, Integer.MAX_VALUE, sort, true, false);

      for (int i = 0; i < hits.scoreDocs.length; i++) {
        // Either reader.document or searcher.doc works here; searcher.doc
        // simply delegates to reader.document internally.
        Document doc = searcher.doc(hits.scoreDocs[i].doc);
        System.out.println(
            (1 + i)
                + ": "
                + doc.get("title")
                + ": pubmonth="
                + doc.get("pubmonth")
                + " score="
                + hits.scoreDocs[i].score);
      }
    }
  }
  /**
   * Builds the effective search query: the user query — parsed, fuzzy, or
   * prefix depending on {@code qtype} — AND an optional document-type filter
   * (applied unless {@code docType} is "all").
   *
   * @return boolean query combining the user query and the type restriction
   * @throws Exception if the query string cannot be parsed
   */
  public Query getQuery() throws Exception {
    PaodingAnalyzer analyzer = new PaodingAnalyzer();
    String field = "contents";
    Query query = null;

    BooleanQuery booleanQuery = new BooleanQuery();

    // Constant-first equals so a null qtype falls through to the prefix branch
    // instead of throwing NPE (now consistent with the "fuzz" check below).
    if ("term".equals(this.qtype)) {
      QueryParser parser = new QueryParser(Version.LUCENE_44, field, analyzer);
      query = parser.parse(queryStr);

    } else if ("fuzz".equals(this.qtype)) {
      Term term = new Term(field, queryStr);
      query = new FuzzyQuery(term);
    } else {
      Term term = new Term(field, queryStr);
      query = new PrefixQuery(term);
    }
    if (!"all".equals(this.docType)) {
      Term typeTerm = new Term("type", this.docType);
      TermQuery typeQuery = new TermQuery(typeTerm);
      booleanQuery.add(typeQuery, BooleanClause.Occur.MUST);
    }

    booleanQuery.add(query, BooleanClause.Occur.MUST);

    return booleanQuery;
  }
  /**
   * Helper method for testing the entity manager before and after serialization:
   * indexes a Bretzel, runs a non-matching and a matching query, asserts result
   * list/single-result/size retrieval, then removes the entity.
   *
   * @param em Entity manager used for indexing and searching
   * @throws Exception if parsing or persistence fails
   */
  private static void indexSearchAssert(FullTextEntityManager em) throws Exception {
    // index a Bretzel
    em.getTransaction().begin();
    Bretzel bretzel = new Bretzel(23, 34);
    em.persist(bretzel);
    em.getTransaction().commit();
    em.clear();
    em.getTransaction().begin();

    // execute a non matching query
    QueryParser parser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "title", TestConstants.stopAnalyzer);
    Query query = parser.parse("saltQty:noword");
    assertEquals(0, em.createFullTextQuery(query).getResultList().size());

    // execute a matching query (saltQty was indexed as "23" above)
    query = new TermQuery(new Term("saltQty", "23"));
    assertEquals("getResultList", 1, em.createFullTextQuery(query).getResultList().size());
    assertEquals(
        "getSingleResult and object retrieval",
        23,
        ((Bretzel) em.createFullTextQuery(query).getSingleResult()).getSaltQty());
    assertEquals(1, em.createFullTextQuery(query).getResultSize());
    em.getTransaction().commit();

    em.clear();

    // Clean up the indexed entity so repeated calls start from a blank state.
    em.getTransaction().begin();
    em.remove(em.find(Bretzel.class, bretzel.getId()));
    em.getTransaction().commit();
  }
  // Verifies QueryParser.handleBareFuzzy is overridable: a fuzzy slop ending
  // in "€" is reinterpreted as a numeric +/- range query instead of a fuzzy
  // query, so "12.45~1€" parses identically to "a:[11.95 TO 12.95]".
  public void testFuzzySlopeExtendability() throws ParseException {
    QueryParser qp =
        new QueryParser("a", new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {

          @Override
          Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage)
              throws ParseException {

            if (fuzzySlop.image.endsWith("€")) {
              // Width of the range; falls back to fuzzyMinSim if the slop
              // carries no parsable number between '~' and '€'.
              float fms = fuzzyMinSim;
              try {
                fms =
                    Float.valueOf(fuzzySlop.image.substring(1, fuzzySlop.image.length() - 1))
                        .floatValue();
              } catch (Exception ignored) {
                // non-numeric slop: keep the default width
              }
              float value = Float.parseFloat(termImage);
              // Inclusive range [value - fms/2, value + fms/2].
              return getRangeQuery(
                  qfield,
                  Float.toString(value - fms / 2.f),
                  Float.toString(value + fms / 2.f),
                  true,
                  true);
            }
            return super.handleBareFuzzy(qfield, fuzzySlop, termImage);
          }
        };
    assertEquals(qp.parse("a:[11.95 TO 12.95]"), qp.parse("12.45~1€"));
  }
Example #13
0
  /**
   * This function is only for test search: runs {@code queryString} (escaped)
   * against the "contents" field of the index at {@code indexDir} and returns
   * the matching "path" values with their ".txt" extension removed.
   *
   * @param indexDir directory containing the Lucene index
   * @param queryString raw query text; special characters are escaped
   * @param numResults maximum number of hits to return
   * @param stopwords stop set handed to the analyzer
   * @return hit paths, possibly empty; errors are logged to stdout
   */
  public static List<String> searchQuery(
      String indexDir, String queryString, int numResults, CharArraySet stopwords) {
    String field = "contents";
    List<String> hitPaths = new ArrayList<String>();

    // try-with-resources: the original never closed the reader at all.
    try (IndexReader reader = DirectoryReader.open(FSDirectory.open(new File(indexDir)))) {
      IndexSearcher searcher = new IndexSearcher(reader);

      Analyzer analyzer = new MyAnalyzer(Version.LUCENE_44, stopwords);

      QueryParser parser = new QueryParser(Version.LUCENE_44, field, analyzer);
      Query query = parser.parse(QueryParser.escape(queryString));

      TopDocs results = searcher.search(query, null, numResults);
      for (ScoreDoc hit : results.scoreDocs) {
        String path = searcher.doc(hit.doc).get("path");
        hitPaths.add(path.substring(0, path.length() - 4)); // chop off the file extension (".txt")
      }
    } catch (IOException | ParseException e) {
      // Identical duplicate handlers collapsed into one multi-catch.
      System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    }

    return hitPaths;
  }
 /** Applies a per-field date resolution to the classic QueryParser under test. */
 @Override
 public void setDateResolution(
     CommonQueryParserConfiguration cqpC, CharSequence field, Resolution value) {
   assert (cqpC instanceof QueryParser);
   ((QueryParser) cqpC).setDateResolution(field.toString(), value);
 }
  /**
   * Parse the strings containing Lucene queries.
   *
   * @param qs list of query expressions: Strings to be parsed, or prebuilt
   *     Query objects passed through; anything else is reported and skipped
   * @param a analyzer to use when parsing queries
   * @return array of Lucene queries (entries that failed to parse are omitted)
   */
  private static Query[] createQueries(List<Object> qs, Analyzer a) {
    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, DocMaker.BODY_FIELD, a);
    // Typed as List<Query>: the original used List<Object> and relied on the
    // toArray copy for type safety.
    List<Query> queries = new ArrayList<>();
    for (Object query : qs) {
      try {
        Query q = null;
        if (query instanceof String) {
          q = qp.parse((String) query);

        } else if (query instanceof Query) {
          q = (Query) query;

        } else {
          System.err.println("Unsupported Query Type: " + query);
        }

        if (q != null) {
          queries.add(q);
        }

      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    return queries.toArray(new Query[0]);
  }
  /**
   * Builds a QueryExpression for the request: an id-equality query for
   * INSTANCE cardinality, otherwise the request's own query string; falls
   * back to an AlwaysQueryExpression when no query string is present.
   *
   * @param request incoming resource request
   * @param resourceDefinition definition describing the target resource
   * @return the query expression, never null
   * @throws InvalidQueryException if the query string cannot be parsed
   */
  private QueryExpression create(Request request, ResourceDefinition resourceDefinition)
      throws InvalidQueryException {
    String queryString;
    if (request.getCardinality() == Request.Cardinality.INSTANCE) {
      String idPropertyName = resourceDefinition.getIdPropertyName();
      queryString =
          String.format("%s:%s", idPropertyName, request.<String>getProperty(idPropertyName));
    } else {
      queryString = request.getQueryString();
    }

    QueryExpression queryExpression;
    if (queryString != null && !queryString.isEmpty()) {
      // KeywordAnalyzer keeps values as single exact tokens; case folding is
      // disabled and leading wildcards are allowed for flexible matching.
      QueryParser queryParser = new QueryParser(Version.LUCENE_48, "name", new KeywordAnalyzer());
      queryParser.setLowercaseExpandedTerms(false);
      queryParser.setAllowLeadingWildcard(true);
      Query query;
      try {
        query = queryParser.parse((String) escape(queryString));
      } catch (ParseException e) {
        throw new InvalidQueryException(e.getMessage());
      }
      LOG.info("LuceneQuery: " + query);
      queryExpression = create(query, resourceDefinition);
    } else {
      queryExpression = new AlwaysQueryExpression();
    }
    // add query properties to request so that they are returned
    request.addAdditionalSelectProperties(queryExpression.getProperties());
    return queryExpression;
  }
 /** Prefix terms must be ASCII-folded by the analyzer the same way plain terms are. */
 public void testPrefixQuery() throws ParseException {
   QueryParser parser = new QueryParser(FIELD, new ASCIIAnalyzer());
   assertEquals("ubersetzung ubersetz*", parser.parse("übersetzung übersetz*").toString(FIELD));
   assertEquals("motley crue motl* cru*", parser.parse("Mötley Crüe Mötl* crü*").toString(FIELD));
   assertEquals("rene? zellw*", parser.parse("René? Zellw*").toString(FIELD));
 }
 /** Builds the parser under test; falls back to a simple MockAnalyzer when none is given. */
 public QueryParser getParser(Analyzer a) throws Exception {
   Analyzer analyzer = (a != null) ? a : new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
   QueryParser parser = new QueryParser(getDefaultField(), analyzer);
   parser.setDefaultOperator(QueryParserBase.OR_OPERATOR);
   parser.setSplitOnWhitespace(splitOnWhitespace);
   return parser;
 }
 /** Parses {@code query} using the supplied classic QueryParser configuration. */
 @Override
 public Query getQuery(String query, CommonQueryParserConfiguration cqpC) throws Exception {
   assert cqpC != null : "Parameter must not be null";
   assert (cqpC instanceof QueryParser) : "Parameter must be instance of QueryParser";
   return ((QueryParser) cqpC).parse(query);
 }
Example #20
0
  /**
   * give the id list of sentences, from Lucene index
   *
   * @param input input word
   * @param catalogName catalog (domain) name which we'd like to search in
   * @param limit how many hits are needed (0 means all)
   * @return matching "filename" field values; empty on parse or I/O errors
   */
  public List<String> query(String input, String catalogName, int limit) {

    List<String> res = new ArrayList<String>();
    try {
      catalog c = catalogs.get(catalogName);
      // try-with-resources: the original leaked the reader whenever parsing
      // or searching threw before the explicit reader.close().
      try (IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(c.indexPath)))) {
        IndexSearcher searcher = new IndexSearcher(reader);

        QueryParser parser = new QueryParser("contents", analyzer);
        Query query = parser.parse(QueryParser.escape(input));

        // With no limit, size the search by the total hit count; minimum 1
        // because searcher.search rejects n == 0.
        int n = limit > 0 ? limit : searcher.count(query);
        if (n == 0) n = 1;
        TopDocs results = searcher.search(query, n);

        int endPos = limit;
        if (limit != 0) endPos = Math.min(results.totalHits, limit); // 1st n hits
        else endPos = results.totalHits; // all hits

        for (int i = 0; i < endPos; i++) {
          int id = results.scoreDocs[i].doc;
          Document doc = searcher.doc(id);
          res.add(doc.get("filename"));
        }
      }
      return res;

    } catch (ParseException e) {
      log(e.getMessage());
    } catch (IOException e) {
      log(e.getMessage());
    }
    return res;
  }
Example #21
0
  /**
   * Batch-deletes the documents whose primary-key field matches any of
   * {@code ids} from the index of {@code clazz}.
   *
   * @param ids primary-key values of the documents to delete
   * @param clazz entity class whose index is modified
   * @return "error" when the class has no Lucene fields or ids is empty,
   *     otherwise null
   * @throws Exception if index access or query parsing fails
   */
  public static String deleteDocument(List<String> ids, Class clazz) throws Exception {
    List<String> luceneFields = ClassUtils.getLuceneFields(clazz);
    if (CollectionUtils.isEmpty(luceneFields)) {
      return "error";
    }
    if (CollectionUtils.isEmpty(ids)) {
      return "error";
    }
    String pkName = ClassUtils.getEntityInfoByClass(clazz).getPkName();
    // Index-writer configuration.
    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
    // Locate the index directory for this entity class.
    Directory directory = getDirectory(clazz);
    if (directory == null) {
      return null;
    }
    // try-with-resources guarantees writer and directory are closed even when
    // a delete throws; an unclosed writer never syncs deletes to the index
    // files (the original leaked both on any exception).
    try (Directory dir = directory;
        IndexWriter indexWriter = new IndexWriter(dir, indexWriterConfig)) {
      for (String t : ids) {
        // Delete by parsed query on the primary-key field; equivalent to
        // deleting by Term but tolerant of analyzed pk fields.
        QueryParser parser = new MultiFieldQueryParser(new String[] {pkName}, analyzer);
        Query query = parser.parse(t);
        indexWriter.deleteDocuments(query);
      }
      indexWriter.commit();
    }
    return null;
  }
Example #22
0
  /**
   * Parses {@code q} against the "contents" field of the index at
   * {@code indexDir} and prints the hit count, elapsed time, and the
   * "fullpath" of every matching document.
   *
   * @param indexDir filesystem path of the Lucene index
   * @param q raw query string
   */
  public static void search(String indexDir, String q) throws IOException, ParseException {
    // Open index; try-with-resources closes the reader even when parsing or
    // searching throws (the original leaked it on that path).
    try (IndexReader reader = DirectoryReader.open(FSDirectory.open(new File(indexDir)))) {
      IndexSearcher is = new IndexSearcher(reader);

      // Parse query
      QueryParser parser = new QueryParser(VER, "contents", new StandardAnalyzer(VER));
      Query query = parser.parse(q);

      // Search index
      long start = System.currentTimeMillis();
      TopDocs hits = is.search(query, 10);
      long end = System.currentTimeMillis();

      // Write search stats
      System.err.println(
          "Found "
              + hits.totalHits
              + " document(s) (in "
              + (end - start)
              + " milliseconds) that matched query '"
              + q
              + "':");

      // Retrieve matching docs
      for (ScoreDoc scoreDoc : hits.scoreDocs) {
        Document doc = is.doc(scoreDoc.doc);
        System.out.println(doc.get("fullpath"));
      }
    }
  }
Example #23
0
  /**
   * Builds the effective Lucene query for a search request.
   *
   * @param query raw user query; a "PROP " prefix triggers a single-property
   *     object search, a "MULTI " prefix a multi-field fulltext search,
   *     anything else searches the standard document fields
   * @param virtualWikiNames comma separated list of virtual wiki names
   * @param languages comma separated list of language codes to search in, may be null to search all
   *     languages
   * @return boolean query ANDing the parsed user query with the wiki and
   *     language restrictions
   * @throws ParseException if the user query cannot be parsed
   */
  private Query buildQuery(String query, String virtualWikiNames, String languages)
      throws ParseException {
    // build a query like this: <user query string> AND <wikiNamesQuery> AND
    // <languageQuery>
    BooleanQuery bQuery = new BooleanQuery();
    Query parsedQuery = null;

    // for object search
    if (query.startsWith("PROP ")) {
      // Split "PROP <property>:<value>" into the property (parse field) and
      // the remaining value text.
      String property = query.substring(0, query.indexOf(":"));
      query = query.substring(query.indexOf(":") + 1, query.length());
      QueryParser qp = new QueryParser(Version.LUCENE_36, property, this.analyzer);
      parsedQuery = qp.parse(query);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    } else if (query.startsWith("MULTI ")) {
      // for fulltext search
      List<String> fieldList = IndexUpdater.fields;
      String[] fields = fieldList.toArray(new String[fieldList.size()]);
      // SHOULD across all fields: a hit in any one field qualifies.
      BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
      for (int i = 0; i < flags.length; i++) {
        flags[i] = BooleanClause.Occur.SHOULD;
      }
      parsedQuery =
          MultiFieldQueryParser.parse(Version.LUCENE_36, query, fields, flags, this.analyzer);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    } else {
      // Default: search the standard document fields.
      String[] fields =
          new String[] {
            IndexFields.FULLTEXT,
            IndexFields.DOCUMENT_TITLE,
            IndexFields.DOCUMENT_NAME,
            IndexFields.FILENAME
          };
      BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
      for (int i = 0; i < flags.length; i++) {
        flags[i] = BooleanClause.Occur.SHOULD;
      }
      QueryParser parser = new MultiFieldQueryParser(Version.LUCENE_36, fields, this.analyzer);
      parsedQuery = parser.parse(query);
      // Since the sub-queries are OR-ed, each sub-query score is normally divided by the number of
      // sub-queries,
      // which would cause extra-small scores whenever there's a hit on only one sub-query;
      // compensate this by boosting the whole outer query
      parsedQuery.setBoost(fields.length);
      bQuery.add(parsedQuery, BooleanClause.Occur.MUST);
    }

    // Restrict to the requested wikis/languages when given.
    if (virtualWikiNames != null && virtualWikiNames.length() > 0) {
      bQuery.add(
          buildOredTermQuery(virtualWikiNames, IndexFields.DOCUMENT_WIKI),
          BooleanClause.Occur.MUST);
    }
    if (languages != null && languages.length() > 0) {
      bQuery.add(
          buildOredTermQuery(languages, IndexFields.DOCUMENT_LANGUAGE), BooleanClause.Occur.MUST);
    }

    return bQuery;
  }
 /**
  * Applies the queued attribute modifications to the index: for each primary
  * key in {@code attributes}, finds the existing document, copies its fields
  * into a fresh Document (pk as StringField, everything else as TextField),
  * applies the per-field action (override / update related labels / update
  * occurrences), and replaces the stored document.
  *
  * @throws ModifyKnowledgeBaseException if the document or update field is
  *     missing, or if searching/parsing fails
  */
 @Override
 public void modifyIndex(final IndexWriter writer, final IndexSearcher searcher)
     throws ModifyKnowledgeBaseException {
   for (final Map.Entry<String, HashMap<String, String>> entry : this.attributes.entrySet()) {
     final String key = entry.getKey();
     final HashMap<String, String> hash = entry.getValue();
     final QueryParser qp = new QueryParser(this.docPrimaryKey, new DoserIDAnalyzer());
     try {
       // Look up the single document whose primary key matches (escaped to
       // neutralize query syntax inside the key).
       final TopDocs top = searcher.search(qp.parse(QueryParserBase.escape(key)), 1);
       final ScoreDoc[] scores = top.scoreDocs;
       if (scores.length > 0) {
         final Document doc = new Document();
         final Document currentDoc = searcher.getIndexReader().document(scores[0].doc);
         // Bug fix: create a new Document and copy the fields over (stored
         // docs cannot simply be re-added as-is).
         final List<IndexableField> fields = currentDoc.getFields();
         for (final IndexableField field : fields) {
           if (field.stringValue() != null) {
             if (field.name().equalsIgnoreCase(docPrimaryKey)) {
               doc.add(new StringField(field.name(), field.stringValue(), Field.Store.YES));
             } else {
               doc.add(new TextField(field.name(), field.stringValue(), Field.Store.YES));
             }
           }
         }
         final List<Document> docListToAdd = new LinkedList<Document>();
         docListToAdd.add(doc);
         // Apply each requested field modification according to the action.
         for (final Map.Entry<String, String> subentry : hash.entrySet()) {
           final IndexableField field = doc.getField(subentry.getKey());
           if (field == null) {
             throw new ModifyKnowledgeBaseException("UpdateField no found", null);
           }
           if (this.action.equals(KBModifications.OVERRIDEFIELD)) {
             // Replace the field entirely with the separated new values.
             doc.removeFields(subentry.getKey());
             String[] newentries = generateSeperatedFieldStrings(subentry.getValue());
             for (int i = 0; i < newentries.length; i++) {
               doc.add(new TextField(subentry.getKey(), newentries[i], Field.Store.YES));
             }
           } else if (this.action.equals(KBModifications.UPDATERELATEDLABELS)) {
             doc.removeFields(subentry.getKey());
             doc.add(updateOccurrences(subentry.getValue(), field, "surroundinglabels"));
           } else if (this.action.equals(KBModifications.UPDATEOCCURRENCES)) {
             doc.removeFields(subentry.getKey());
             IndexableField f = updateOccurrences(subentry.getValue(), field, "occurrences");
             doc.add(f);
           }
         }
         // Atomically replace the old document(s) with the rebuilt one.
         writer.updateDocuments(new Term(this.docPrimaryKey, key), docListToAdd);
       } else {
         throw new ModifyKnowledgeBaseException("Document not found", null);
       }
     } catch (final IOException e) {
       throw new ModifyKnowledgeBaseException("IOException in IndexSearcher", e);
     } catch (ParseException e) {
       throw new ModifyKnowledgeBaseException("Queryparser Exception", e);
     }
   }
 }
 /** LUCENE-6677: make sure wildcard query respects maxDeterminizedStates. */
 public void testWildcardMaxDeterminizedStates() throws Exception {
   QueryParser parser = new QueryParser("field", new MockAnalyzer(random()));
   parser.setMaxDeterminizedStates(10);
   // A wildcard with many trailing characters exceeds the 10-state budget.
   expectThrows(TooComplexToDeterminizeException.class, () -> parser.parse("a*aaaaaaa"));
 }
 /** simple CJK synonym test */
 public void testCJKSynonym() throws Exception {
   QueryParser parser = new QueryParser("field", new MockCJKSynonymAnalyzer());
   Query synonyms = new SynonymQuery(new Term("field", "国"), new Term("field", "國"));
   // A single token expands to the same synonym query under OR and AND.
   assertEquals(synonyms, parser.parse("国"));
   parser.setDefaultOperator(Operator.AND);
   assertEquals(synonyms, parser.parse("国"));
   // A boost wraps the synonym query rather than altering it.
   assertEquals(new BoostQuery(synonyms, 2f), parser.parse("国^2"));
 }
  /** An escaped character must be exempt from analyzer normalization inside wildcards. */
  public void testWildcardDoesNotNormalizeEscapedChars() throws Exception {
    Analyzer folding = new ASCIIAnalyzer();
    Analyzer keyword = new MockAnalyzer(random());
    QueryParser parser = new QueryParser(FIELD, folding);

    // Unescaped accents are folded on both the query and the document side.
    assertTrue(isAHit(parser.parse("e*e"), "étude", folding));
    assertTrue(isAHit(parser.parse("é*e"), "etude", folding));
    // An escaped accent stays literal: no match against the folded document...
    assertFalse(isAHit(parser.parse("\\é*e"), "etude", folding));
    // ...but it does match the unfolded document under a keyword analyzer.
    assertTrue(isAHit(parser.parse("\\é*e"), "étude", keyword));
  }
 /** The default operator starts as OR and tracks the setter round-trip. */
 @Override
 public void testDefaultOperator() throws Exception {
   QueryParser parser = getParser(new MockAnalyzer(random()));
   // OR must be the initial default.
   assertEquals(QueryParserBase.OR_OPERATOR, parser.getDefaultOperator());
   setDefaultOperatorAND(parser);
   assertEquals(QueryParserBase.AND_OPERATOR, parser.getDefaultOperator());
   setDefaultOperatorOR(parser);
   assertEquals(QueryParserBase.OR_OPERATOR, parser.getDefaultOperator());
 }
Example #29
0
  //	private Analyzer analyzer = new StandardAnalyzer(matchVersion);
  /**
   * Indexes a single document into a RAM directory and searches for a term
   * that does not occur; prints any (unexpected) matches.
   */
  @Test
  public void test01() {
    Directory directory = null;
    IndexWriter indexWriter = null;
    IndexReader indexReader = null;
    IndexWriterConfig config = new IndexWriterConfig(matchVersion, analyzer);
    try {
      directory = new RAMDirectory();
      indexWriter = new IndexWriter(directory, config);

      Document document = new Document();
      document.add(new TextField("content", "or good", Store.YES));

      indexWriter.addDocument(document);

      indexWriter.commit();

      indexReader = DirectoryReader.open(directory);
      IndexSearcher indexSearcher = new IndexSearcher(indexReader);
      QueryParser parser = new QueryParser(matchVersion, "content", analyzer);
      Query query = parser.parse("excellent");

      TopDocs topDocs = indexSearcher.search(query, 100);
      for (ScoreDoc match : topDocs.scoreDocs) {
        Document matchDoc = indexSearcher.doc(match.doc);
        System.out.println("result: " + matchDoc.get("content"));
      }
    } catch (IOException | ParseException e) {
      e.printStackTrace();
    } finally {
      // Fix: close in reverse-acquisition order (reader, writer, directory).
      // The original closed the directory first and would NPE whenever setup
      // failed before a resource was assigned; null guards cover that path.
      try {
        if (indexReader != null) indexReader.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
      try {
        if (indexWriter != null) indexWriter.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
      try {
        if (directory != null) directory.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }
 /** synonyms with default OR operator */
 public void testCJKSynonymsOR() throws Exception {
   // Expected: (中 SHOULD) OR (synonym(国, 國) SHOULD)
   Query synonyms = new SynonymQuery(new Term("field", "国"), new Term("field", "國"));
   BooleanQuery.Builder builder = new BooleanQuery.Builder();
   builder.add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.SHOULD);
   builder.add(synonyms, BooleanClause.Occur.SHOULD);
   Query expected = builder.build();

   QueryParser parser = new QueryParser("field", new MockCJKSynonymAnalyzer());
   assertEquals(expected, parser.parse("中国"));
   // The boost wraps the entire boolean query.
   assertEquals(new BoostQuery(expected, 2f), parser.parse("中国^2"));
 }