/**
   * Generates an HTextFlowTarget query matching the HTextFlow contents and, when useTargetIndex is
   * set, the HTextFlowTarget locale, excluding targets in the New, NeedReview and Rejected states.
   *
   * @param query the TransMemoryQuery carrying the requested search type
   * @param sourceLocale locale of the source content, used to pick the analyzer
   * @param targetLocale locale of the translation target
   * @param queryText query string for single-content searches
   * @param multiQueryText one query string per plural form, for FUZZY_PLURAL searches
   * @param contentFields names of the indexed content fields to search
   * @param useTargetIndex whether to add the locale and content-state clauses
   * @return the assembled Lucene query
   * @throws ParseException if the query text cannot be parsed
   */
  private org.apache.lucene.search.Query generateQuery(
      TransMemoryQuery query,
      LocaleId sourceLocale,
      LocaleId targetLocale,
      String queryText,
      String[] multiQueryText,
      String[] contentFields,
      boolean useTargetIndex)
      throws ParseException {
    org.apache.lucene.search.Query contentQuery;
    // Analyzer determined by the language
    String analyzerDefName =
        TextContainerAnalyzerDiscriminator.getAnalyzerDefinitionName(sourceLocale.getId());
    Analyzer analyzer = entityManager.getSearchFactory().getAnalyzer(analyzerDefName);

    if (query.getSearchType() == SearchType.FUZZY_PLURAL) {
      int queriesSize = multiQueryText.length;
      if (queriesSize > contentFields.length) {
        log.warn(
            "query contains {} fields, but we only index {}", queriesSize, contentFields.length);
      }
      // The parser requires the queries and fields arrays to have the same length, so cap both
      // at the number of indexed fields instead of overrunning contentFields.
      int searchSize = Math.min(queriesSize, contentFields.length);
      String[] searchFields = new String[searchSize];
      System.arraycopy(contentFields, 0, searchFields, 0, searchSize);
      String[] searchQueries = new String[searchSize];
      System.arraycopy(multiQueryText, 0, searchQueries, 0, searchSize);

      contentQuery =
          MultiFieldQueryParser.parse(LUCENE_VERSION, searchQueries, searchFields, analyzer);
    } else {
      MultiFieldQueryParser parser =
          new MultiFieldQueryParser(LUCENE_VERSION, contentFields, analyzer);
      contentQuery = parser.parse(queryText);
    }

    if (useTargetIndex) {
      TermQuery localeQuery =
          new TermQuery(new Term(IndexFieldLabels.LOCALE_ID_FIELD, targetLocale.getId()));

      TermQuery newStateQuery =
          new TermQuery(
              new Term(IndexFieldLabels.CONTENT_STATE_FIELD, ContentState.New.toString()));
      TermQuery needReviewStateQuery =
          new TermQuery(
              new Term(IndexFieldLabels.CONTENT_STATE_FIELD, ContentState.NeedReview.toString()));
      TermQuery rejectedReviewStateQuery =
          new TermQuery(
              new Term(IndexFieldLabels.CONTENT_STATE_FIELD, ContentState.Rejected.toString()));

      BooleanQuery targetQuery = new BooleanQuery();
      targetQuery.add(contentQuery, Occur.MUST);
      targetQuery.add(localeQuery, Occur.MUST);

      targetQuery.add(newStateQuery, Occur.MUST_NOT);
      targetQuery.add(needReviewStateQuery, Occur.MUST_NOT);
      targetQuery.add(rejectedReviewStateQuery, Occur.MUST_NOT);

      return targetQuery;
    } else {
      return contentQuery;
    }
  }
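
A hedged sketch of how the query returned above might be executed. HTextFlowTarget comes from the
Javadoc; the ftQuery and matches names, the result limit, and the contentFields variable are
illustrative assumptions, and entityManager is taken to be the same FullTextEntityManager whose
getSearchFactory() is used above:

  // contentFields: whatever array of indexed content field names the caller normally passes in
  org.apache.lucene.search.Query luceneQuery =
      generateQuery(query, sourceLocale, targetLocale, queryText, multiQueryText,
          contentFields, true);
  org.hibernate.search.jpa.FullTextQuery ftQuery =
      entityManager.createFullTextQuery(luceneQuery, HTextFlowTarget.class);
  ftQuery.setMaxResults(10); // illustrative limit
  List<HTextFlowTarget> matches = ftQuery.getResultList(); // unchecked: JPA returns a raw List
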
Code example #2
  /** {@inheritDoc} */
  @SuppressWarnings("unchecked")
  @Override
  public List<AbstractPermissionsOwner> search(
      String queryString, boolean withUsers, boolean withGroups) {
    List<AbstractPermissionsOwner> results = new ArrayList<AbstractPermissionsOwner>();
    // No query should be performed while resources are being re-indexed.
    if (!inhibitSearch) {
      // Get the Hibernate Search object used to perform queries.
      FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);

      // Parse the queryString.
      MultiFieldQueryParser parser =
          new MultiFieldQueryParser(
              Version.LUCENE_30,
              new String[] {"name", "firstName", "lastName", "email", "login"},
              new StandardAnalyzer(Version.LUCENE_31));
      parser.setDefaultOperator(Operator.OR);

      try {
        Query luceneQuery = parser.parse(queryString);

        FullTextQuery query = null;
        // Because of the poor design of the Hibernate Search API and its use of varargs, we need
        // this if-else chain. TODO refactor with reflection (see the sketch after this method).
        if (withUsers && withGroups) {
          query = fullTextEntityManager.createFullTextQuery(luceneQuery, User.class, Group.class);
        } else if (withUsers) {
          query = fullTextEntityManager.createFullTextQuery(luceneQuery, User.class);
        } else if (withGroups) {
          query = fullTextEntityManager.createFullTextQuery(luceneQuery, Group.class);
        }
        // Finally execute the query.
        if (query != null) {
          List<AbstractPermissionsOwner> found = query.getResultList();
          // Keeps only distinct results.
          for (AbstractPermissionsOwner foundObject : found) {
            if (!results.contains(foundObject)) {
              // TODO Remove this Hibernate-specific block.
              // Hibernate Search sometimes returns Javassist proxies, which cannot be properly
              // deserialized by Jackson.
              if (foundObject instanceof HibernateProxy) {
                HibernateProxy h = (HibernateProxy) foundObject;
                foundObject =
                    (AbstractPermissionsOwner) h.getHibernateLazyInitializer().getImplementation();
              }
              results.add(foundObject);
            }
          }
        }
      } catch (ParseException exc) {
        // Handle parsing failure
        String error = "Malformed queryString '" + queryString + "': " + exc.getMessage();
        logger.debug("[search] " + error);
        throw new IllegalArgumentException(error, exc);
      }
    }
    return results;
  } // search().
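
The TODO above suggests reflection, but the if-else can also be collapsed by collecting the
requested entity classes and passing them to createFullTextQuery as varargs. A minimal sketch using
only the types already present in the method; the helper name buildFullTextQuery is an assumption:

  private FullTextQuery buildFullTextQuery(
      FullTextEntityManager fullTextEntityManager,
      Query luceneQuery,
      boolean withUsers,
      boolean withGroups) {
    List<Class<?>> targets = new ArrayList<Class<?>>();
    if (withUsers) {
      targets.add(User.class);
    }
    if (withGroups) {
      targets.add(Group.class);
    }
    if (targets.isEmpty()) {
      // Preserve the original behaviour: no entity type requested, no query executed.
      return null;
    }
    return fullTextEntityManager.createFullTextQuery(
        luceneQuery, targets.toArray(new Class<?>[targets.size()]));
  }
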
Code example #3
File: SearchService.java  Project: politrend/vta
  /**
   * Search method.
   *
   * @param f names of the indexed fields to search
   * @param luquery Lucene query string
   * @param sortFields optional sort fields; may be null or empty
   * @param firstResult index of the first result, or a negative value to skip paging
   * @param maxResults maximum number of results, applied only when paging is enabled
   * @param entities entity classes to search
   * @return the matching entities
   * @throws Exception if the query cannot be parsed or executed
   */
  @Transactional(propagation = Propagation.REQUIRED)
  public List<Object> search(
      String[] f,
      String luquery,
      SortField[] sortFields,
      int firstResult,
      int maxResults,
      Class<?>... entities)
      throws Exception {

    // create FullTextEntityManager ----------------------------
    FullTextEntityManager fullTextEntityManager =
        org.hibernate.search.jpa.Search.getFullTextEntityManager(entityManager);

    // ---------------------------------------------------------

    MultiFieldQueryParser parser =
        new MultiFieldQueryParser(Version.LUCENE_31, f, new StandardAnalyzer(Version.LUCENE_31));

    org.apache.lucene.search.Query query = parser.parse(luquery.trim());

    System.out.println(luquery + " --> QUERY: " + query + " entities size: " + entities.length);

    // wrap Lucene query in a javax.persistence.Query
    // javax.persistence.Query persistenceQuery = fullTextEntityManager
    // .createFullTextQuery(query, entities);

    org.hibernate.search.jpa.FullTextQuery persistenceQuery =
        fullTextEntityManager.createFullTextQuery(query, entities);

    // org.apache.lucene.search.Sort sort = new Sort(
    // new SortField("title", SortField.STRING));

    if (sortFields != null && sortFields.length > 0) {
      persistenceQuery.setSort(new Sort(sortFields));
      System.out.println("Sort setted");
    }

    if (firstResult >= 0) {
      persistenceQuery.setFirstResult(firstResult);
      persistenceQuery.setMaxResults(maxResults);
    }

    // execute search
    @SuppressWarnings("unchecked")
    List<Object> result = persistenceQuery.getResultList();

    return result;
  }
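
A hedged usage sketch for the method above; the Article entity, the field names, the query string
and the searchService instance are assumptions, not part of the original example:

  SortField[] byTitle = {new SortField("title", SortField.STRING)};
  List<Object> firstPage =
      searchService.search(
          new String[] {"title", "description"}, "lucene", byTitle, 0, 20, Article.class);
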
Code example #4
  public void searchFiles(String queryStr) throws Exception {
    String indexDir = "Folder_Index";
    String[] field = {
      "contents",
      "title",
      "Exacttitle",
      "Bold",
      "BoldAndItalic",
      "Summary",
      "Italic",
      "Category_Links"
    };

    IndexReader reader = IndexReader.open(FSDirectory.open(new File(indexDir)));
    IndexSearcher searcher = new IndexSearcher(reader);

    // Analyzer analyzer = new SnowballAnalyzer(Version.LUCENE_35,"English");
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_35);
    // Analyzer analyzer = new PatternAnalyzer(Version.LUCENE_35);
    // QueryParser parser = new QueryParser(Version.LUCENE_35, field, analyzer);

    MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_35, field, analyzer);
    // System.out.println("Query being searched : "+queryStr);
    StringBuffer query_text = new StringBuffer(queryStr);

    int posn = query_text.indexOf(" ");

    if (posn > 0) {
      query_text.insert(posn, "^6 ");
      int posn2 = query_text.indexOf(" ", posn + 4);
      if (posn2 > 0) {
        query_text.insert(posn2, "^4 ");
        int posn3 = query_text.indexOf(" ", posn2 + 4);
        if (posn3 > 0) query_text.insert(posn3, "^2 ");
      }
    }
    Query query = parser.parse(query_text.toString());
    TopDocs hits = searcher.search(query, 30);

    // System.out.println("\nFound "+hits.totalHits +" documents :\n");

    ScoreDoc[] results = hits.scoreDocs;

    for (int i = 0; i < results.length; i++) {
      Document doc = searcher.doc(results[i].doc);
      System.out.println(doc.get("Exacttitle"));
    }
    searcher.close();
    reader.close();
  }
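
The StringBuffer surgery above boosts the first three terms with ^6, ^4 and ^2 and never boosts the
last term. A hedged alternative sketch that builds the same boosted string by splitting the input
on whitespace (variable names are illustrative):

  String[] terms = queryStr.trim().split("\\s+");
  int[] boosts = {6, 4, 2};
  StringBuilder boosted = new StringBuilder();
  for (int i = 0; i < terms.length; i++) {
    boosted.append(terms[i]);
    // Boost only the first three terms, and never the last one, matching the code above.
    if (i < boosts.length && i < terms.length - 1) {
      boosted.append('^').append(boosts[i]);
    }
    boosted.append(' ');
  }
  Query boostedQuery = parser.parse(boosted.toString().trim());
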
Code example #5
File: SearchDaoHibImpl.java  Project: shred/cilla
  @Override
  public Query parseQuery(String query, Locale locale) throws ParseException {
    FullTextSession fullTextSession = getFullTextSession();

    MultiFieldQueryParser parser =
        new MultiFieldQueryParser(
            Version.LUCENE_36,
            new String[] {"text", "title"},
            fullTextSession.getSearchFactory().getAnalyzer("content"));

    if (locale != null) {
      parser.setLocale(locale);
    }

    return parser.parse(query);
  }
Code example #6
  /*
   * Compound query combining conditions with and / or / not: BooleanClause.Occur.MUST means AND,
   * BooleanClause.Occur.MUST_NOT means NOT, and BooleanClause.Occur.SHOULD means OR.
   */
  public static void searchQuery(String[] queries, String[] fields) throws Exception {

    IndexSearcher searcher = LuceneUtils.createIndexSearcher();

    // String[] queries = { "南城","网吧"};
    // String[] fields = { "name","city"};
    BooleanClause.Occur[] clauses = {BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD};
    Query query =
        MultiFieldQueryParser.parse(
            Version.LUCENE_36, queries, fields, clauses, new StandardAnalyzer(Version.LUCENE_36));

    TopDocs results = searcher.search(query, null, 100);
    System.out.println("总符合: " + results.totalHits + "条数!");

    // Display the matching records
    for (ScoreDoc sr : results.scoreDocs) {
      // Document number
      int docID = sr.doc;
      // The actual stored document
      Document doc = searcher.doc(docID);

      System.out.println("name = " + doc.get("name"));
      System.out.println("address = " + doc.get("address"));
      System.out.println("city = " + doc.get("city"));
      System.out.println("lnglat = " + doc.get("lnglat"));
    }
  }
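
Note that the clauses array must be the same length as queries and fields, or parse() will reject
it. A hedged sketch of building an all-OR clause array for an arbitrary number of fields (orClauses
is an illustrative name):

  BooleanClause.Occur[] orClauses = new BooleanClause.Occur[fields.length];
  java.util.Arrays.fill(orClauses, BooleanClause.Occur.SHOULD);
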
Code example #7
  public SearchResult search(
      SearchCriteria criteria, List<MusicFolder> musicFolders, IndexType indexType) {
    SearchResult result = new SearchResult();
    int offset = criteria.getOffset();
    int count = criteria.getCount();
    result.setOffset(offset);

    IndexReader reader = null;
    try {
      reader = createIndexReader(indexType);
      Searcher searcher = new IndexSearcher(reader);
      Analyzer analyzer = new SubsonicAnalyzer();

      MultiFieldQueryParser queryParser =
          new MultiFieldQueryParser(
              LUCENE_VERSION, indexType.getFields(), analyzer, indexType.getBoosts());

      BooleanQuery query = new BooleanQuery();
      query.add(queryParser.parse(analyzeQuery(criteria.getQuery())), BooleanClause.Occur.MUST);

      List<SpanTermQuery> musicFolderQueries = new ArrayList<SpanTermQuery>();
      for (MusicFolder musicFolder : musicFolders) {
        if (indexType == ALBUM_ID3 || indexType == ARTIST_ID3) {
          musicFolderQueries.add(
              new SpanTermQuery(
                  new Term(FIELD_FOLDER_ID, NumericUtils.intToPrefixCoded(musicFolder.getId()))));
        } else {
          musicFolderQueries.add(
              new SpanTermQuery(new Term(FIELD_FOLDER, musicFolder.getPath().getPath())));
        }
      }
      query.add(
          new SpanOrQuery(musicFolderQueries.toArray(new SpanQuery[musicFolderQueries.size()])),
          BooleanClause.Occur.MUST);

      TopDocs topDocs = searcher.search(query, null, offset + count);
      result.setTotalHits(topDocs.totalHits);

      int start = Math.min(offset, topDocs.totalHits);
      int end = Math.min(start + count, topDocs.totalHits);
      for (int i = start; i < end; i++) {
        Document doc = searcher.doc(topDocs.scoreDocs[i].doc);
        switch (indexType) {
          case SONG:
          case ARTIST:
          case ALBUM:
            MediaFile mediaFile = mediaFileService.getMediaFile(Integer.valueOf(doc.get(FIELD_ID)));
            addIfNotNull(mediaFile, result.getMediaFiles());
            break;
          case ARTIST_ID3:
            Artist artist = artistDao.getArtist(Integer.valueOf(doc.get(FIELD_ID)));
            addIfNotNull(artist, result.getArtists());
            break;
          case ALBUM_ID3:
            Album album = albumDao.getAlbum(Integer.valueOf(doc.get(FIELD_ID)));
            addIfNotNull(album, result.getAlbums());
            break;
          default:
            break;
        }
      }

    } catch (Throwable x) {
      LOG.error("Failed to execute Lucene search.", x);
    } finally {
      FileUtil.closeQuietly(reader);
    }
    return result;
  }
Code example #8
File: HibernateSearchDao.java  Project: TheCorp/mob
  public List<Opportunity> findOpportunities(
      String position,
      String location,
      String skills,
      String additionalKeyword,
      String additionalSkillNames) {

    List OpportunityList = new ArrayList();
    SessionFactory sessionFactory = getHibernateTemplate().getSessionFactory();
    Session session = sessionFactory.openSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    Transaction tx1 = fullTextSession.beginTransaction();
    List<Opportunity> opportunities = session.createQuery("from Opportunity").list();
    for (Opportunity opportunity1 : opportunities) {
      fullTextSession.index(opportunity1);
    }

    MultiFieldQueryParser parser =
        new MultiFieldQueryParser(
            new String[] {"title", "skillsNeeded", "str_location"}, new StandardAnalyzer());

    org.apache.lucene.search.Query q = null;

    if (!position.equals("")
        && !skills.equals("")
        && !location.equals("")) { // when all the values are present
      try {
        q =
            parser.parse(
                new String[] {position, location, skills},
                new String[] {"title", "str_location", "skillsNeeded"},
                new StandardAnalyzer());
      } catch (ParseException e) {
        e.printStackTrace();
      }
      org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
      List result = hibQuery.list();
      OpportunityList = result;
    } else if (!position.equals("")
        && !location.equals("")
        && skills.equals("")) { // when only skill value is not present

      try {
        q =
            parser.parse(
                new String[] {position, location},
                new String[] {"title", "str_location"},
                new StandardAnalyzer());

      } catch (ParseException e) {
        e.printStackTrace();
      }

      org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
      List result = hibQuery.list();
      OpportunityList = result;

    } else if (!position.equals("") && location.equals("") && !skills.equals("")) {
      // search wrt position and skills

      try {
        q =
            parser.parse(
                new String[] {position, skills},
                new String[] {"title", "skillsNeeded"},
                new StandardAnalyzer());

      } catch (ParseException e) {
        e.printStackTrace();
      }
      org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
      List result = hibQuery.list();
      OpportunityList = result;
    } else if (position.equals("")
        && !location.equals("")
        && !skills.equals("")) { // search wrt location and skills

      try {
        q =
            parser.parse(
                new String[] {location, skills},
                new String[] {"str_location", "skillsNeeded"},
                new StandardAnalyzer());

      } catch (ParseException e) {
        e.printStackTrace();
      }
      org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
      List result = hibQuery.list();
      OpportunityList = result;

    } else if (!position.equals("")
        && location.equals("")
        && skills.equals("")) { // search for position

      try {
        // q = parser.parse(new String[] { position },new String[] { "title" }, new
        // StandardAnalyzer());
        String titles = position + "*";
        q = parser.parse("title:" + titles);

      } catch (ParseException e) {
        e.printStackTrace();
      }
      if (additionalKeyword.equals("")) {
        org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
        List result = hibQuery.list();
        OpportunityList = result;
      } else {

        try {
          FullTextQuery hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
          List list = hibQuery.list();

          hibQuery
              .enableFullTextFilter("keyword")
              .setParameter("advancedKeyword", additionalKeyword);
          List filterList = hibQuery.list();

          if (list != null) {
            list.removeAll(filterList);
            filterList.addAll(list);
            list = filterList;
          } else {
            list = filterList;
          }

          OpportunityList = list;

        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    } else if (position.equals("")
        && !location.equals("")
        && skills.equals("")) { // search for location

      try {
        q =
            parser.parse(
                new String[] {location}, new String[] {"str_location"}, new StandardAnalyzer());

      } catch (ParseException e) {
        e.printStackTrace();
      }
      org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
      List result = hibQuery.list();
      OpportunityList = result;

    } else if (position.equals("") && location.equals("") && !skills.equals("")) {

      List result = new ArrayList();
      String strSkill = null;

      if (skills.contains(",")) {

        strSkill = skills.replaceAll(",", " ");

      } else {
        strSkill = skills;
      }

      try {
        q = parser.parse("skillsNeeded:" + strSkill);

      } catch (ParseException e) {

        e.printStackTrace();
      }

      try {

        if (additionalSkillNames.equals("")) {

          FullTextQuery hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
          List list = hibQuery.list();
          result.addAll(list);
        } else {
          FullTextQuery hibQuery = fullTextSession.createFullTextQuery(q, Opportunity.class);
          List list = hibQuery.list();
          hibQuery
              .enableFullTextFilter("skills")
              .setParameter("advancedSkills", additionalSkillNames);
          List filterList = hibQuery.list();
          if (list != null) {
            list.removeAll(filterList);
            filterList.addAll(list);
            list = filterList;
          } else {
            list = filterList;
          }

          result.addAll(list);
        }

        OpportunityList = result;
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
    tx1.commit();
    session.close();

    return OpportunityList;
  }
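
The branches above differ mainly in which (value, field) pairs they hand to the parser. A hedged
sketch of how the query assembly could be consolidated; the helper name buildOpportunityQuery is an
assumption, and the wildcard title search and the additional keyword/skill filters are left out for
brevity:

  private org.apache.lucene.search.Query buildOpportunityQuery(
      String position, String location, String skills) throws ParseException {
    List<String> values = new ArrayList<String>();
    List<String> fields = new ArrayList<String>();
    if (!position.equals("")) {
      values.add(position);
      fields.add("title");
    }
    if (!location.equals("")) {
      values.add(location);
      fields.add("str_location");
    }
    if (!skills.equals("")) {
      values.add(skills.replaceAll(",", " "));
      fields.add("skillsNeeded");
    }
    // Callers should skip the search entirely when all three values are empty, as the
    // original method effectively does.
    return MultiFieldQueryParser.parse(
        values.toArray(new String[values.size()]),
        fields.toArray(new String[fields.size()]),
        new StandardAnalyzer());
  }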