@Test
public void testSortableField() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Item item1 = new Item();
    item1.setId(3);
    item1.setPrice((short) 3454);
    s.persist(item1);

    Item item2 = new Item();
    item2.setId(2);
    item2.setPrice((short) 3354);
    s.persist(item2);

    Item item3 = new Item();
    item3.setId(1);
    item3.setPrice((short) 3554);
    s.persist(item3);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();

    Query q = s.getSearchFactory().buildQueryBuilder().forEntity(Item.class).get().all().createQuery();
    FullTextQuery query = s.createFullTextQuery(q, Item.class);
    query.setSort(new Sort(new SortField("price", SortField.Type.INT)));

    List<?> results = query.list();
    assertThat(results)
            .onProperty("price")
            .describedAs("Sortable field via programmatic config")
            .containsExactly((short) 3354, (short) 3454, (short) 3554);

    query.setSort(new Sort(new SortField("id", SortField.Type.STRING)));
    results = query.list();
    assertThat(results)
            .onProperty("id")
            .describedAs("Sortable field via programmatic config")
            .containsExactly(1, 2, 3);

    s.delete(results.get(0));
    s.delete(results.get(1));
    s.delete(results.get(2));

    tx.commit();
    s.close();
}
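/*
 * For context: a minimal sketch of how sortable fields can be declared through
 * Hibernate Search's programmatic mapping API, which the assertion messages above
 * refer to as "programmatic config". This is an assumption about the mapping, not
 * this test's actual configuration source, and the exact fluent method names may
 * differ between versions:
 *
 *   SearchMapping mapping = new SearchMapping();
 *   mapping.entity(Item.class).indexed()
 *           .property("id", ElementType.FIELD).documentId().sortableField()
 *           .property("price", ElementType.FIELD).field().sortableField();
 *   // the mapping object is then registered via the
 *   // "hibernate.search.model_mapping" configuration property
 */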
@Test
public void testAnalyzerDef() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1_ngram:pea");

    final FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer not applied", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
}
@Test
public void testMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("" + address.getAddressId());
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("documentId does not work properly", 1, query.getResultSize());

    luceneQuery = parser.parse("street1:peachtree");
    query = s.createFullTextQuery(luceneQuery).setProjection("idx_street2", FullTextQuery.THIS);
    assertEquals("Not properly indexed", 1, query.getResultSize());
    Object[] firstResult = (Object[]) query.list().get(0);
    assertEquals("@Field.store not respected", "JBoss", firstResult[0]);

    // Verify that AddressClassBridge was applied as well:
    luceneQuery = parser.parse("AddressClassBridge:Applied\\!");
    assertEquals(1, s.createFullTextQuery(luceneQuery).getResultSize());

    s.delete(firstResult[1]);
    tx.commit();
    s.close();
}
@Test(groups = "ch07")
public void testPrefixQuery() throws Exception {
    FullTextSession session = Search.getFullTextSession(openSession());
    Transaction tx = session.beginTransaction();
    buildIndex(session, tx);

    String userInput = "sea";

    tx = session.beginTransaction();
    PrefixQuery query = new PrefixQuery(new Term("title", userInput));
    System.out.println(query.toString());

    org.hibernate.search.FullTextQuery hibQuery = session.createFullTextQuery(query, Dvd.class);
    List<Dvd> results = hibQuery.list();
    assert results.size() == 4 : "incorrect hit count";
    for (Dvd dvd : results) {
        assert dvd.getTitle().indexOf("Sea") >= 0;
        System.out.println(dvd.getTitle());
    }

    for (Object element : session.createQuery("from " + Dvd.class.getName()).list()) {
        session.delete(element);
    }
    tx.commit();
    session.close();
}
@Test
public void testClassBridgeInstanceMapping() throws Exception {
    OrderLine orderLine = new OrderLine();
    orderLine.setName("Sequoia");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(orderLine);
    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);

    org.apache.lucene.search.Query luceneQuery = parser.parse("orderLineName:Sequoia");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineName_ngram:quo");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer configuration not applied", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineNameViaParam:Sequoia");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Parameter configuration not applied", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
}
@Test(groups = "ch04")
public void testEmbedded() throws Exception {
    Session session = factory.openSession();
    Item item = new Item();
    item.setDescription("Great DVD");
    item.setEan("123456789012");
    item.setTitle("Great DVD");
    item.setRating(new Rating());
    item.getRating().setOverall(5);
    item.getRating().setPicture(4);
    item.getRating().setScenario(5);
    item.getRating().setSoundtrack(3);

    Transaction tx = session.beginTransaction();
    session.save(item);
    tx.commit();
    session.clear();

    tx = session.beginTransaction();
    FullTextSession fts = Search.getFullTextSession(session);
    List results = fts.createFullTextQuery(new TermQuery(new Term("rating.overall", "5")), Item.class).list();
    assert results.size() == 1;
    fts.delete(results.get(0));
    tx.commit();
    fts.close();
}
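/*
 * The "rating.overall" field queried above comes from embedding the Rating component
 * into the Item index. A hedged sketch of the kind of mapping this assumes (annotation
 * placement is an assumption, not the actual entity source):
 *
 *   @Indexed
 *   public class Item {
 *       @IndexedEmbedded
 *       private Rating rating;   // prefixes the embedded fields with "rating."
 *   }
 *
 *   public class Rating {
 *       @Field                   // the integer is indexed as a string token, so
 *       private Integer overall; // TermQuery(new Term("rating.overall", "5")) matches
 *   }
 */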
@Test
public void testBridgeMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);

    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:peac");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("The default field should not be abridged", 0, query.getResultSize());

    luceneQuery = parser.parse("street1_abridged:peac");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
}
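/*
 * The "street1_abridged" field above implies a custom string bridge that indexes only a
 * shortened form of the street name. A hypothetical bridge of that shape (the class name
 * and truncation length are assumptions for illustration; the real test may use a
 * different bridge implementation):
 *
 *   public class AbridgingStringBridge implements org.hibernate.search.bridge.StringBridge {
 *       @Override
 *       public String objectToString(Object object) {
 *           if (object == null) {
 *               return null;
 *           }
 *           String value = (String) object;
 *           // keep only the first four characters, so "Peachtree" becomes "Peac"
 *           // ("peac" once the analyzer lowercases it)
 *           return value.substring(0, Math.min(4, value.length()));
 *       }
 *   }
 */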
@Test(groups = "ch12")
public void vectorTest() throws Exception {
    FullTextSession session = Search.getFullTextSession(openSession());
    Transaction tx = session.beginTransaction();
    buildIndex(session, tx);

    try {
        tx = session.beginTransaction();
        Query query = new TermQuery(new Term("content", "properties"));
        System.out.println(query.toString());

        FullTextQuery hibQuery = session.createFullTextQuery(query, ElectricalProperties.class);
        hibQuery.setProjection(FullTextQuery.DOCUMENT, FullTextQuery.DOCUMENT_ID, FullTextQuery.SCORE);

        reader = getReader(session);
        List<Object[]> results = hibQuery.list();
        assert results.size() > 0 : "no results returned";

        for (int x = 0; x < results.size(); x++) {
            Integer docId = (Integer) results.get(x)[1];
            TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector(docId, "content");
            String[] terms = vector.getTerms();
            int[] f = vector.getTermFrequencies();
            System.out.println(results.get(x)[2]);

            for (int y = 0; y < vector.size(); y++) {
                System.out.print("docID# =>" + docId);
                System.out.print(" term => " + terms[y]);
                System.out.print(" freq => " + f[y]);

                int[] positions = vector.getTermPositions(y);
                TermVectorOffsetInfo[] offsets = vector.getOffsets(y);
                for (int z = 0; z < positions.length; z++) {
                    System.out.print(" position => " + positions[z]);
                    System.out.print(" starting offset => " + offsets[z].getStartOffset());
                    System.out.println(" ending offset => " + offsets[z].getEndOffset());
                }
                System.out.println("---------------");
            }
        }

        for (Object element : session.createQuery("from " + ElectricalProperties.class.getName()).list()) {
            session.delete(element);
        }
        tx.commit();
    } finally {
        session.close();
        if (provider != null) {
            provider.closeReader(reader);
        }
    }
}
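/*
 * Reading term frequencies, positions and offsets as done above only works if the
 * "content" field was indexed with full term vectors. A hedged sketch of the mapping
 * this assumes on ElectricalProperties (an assumption, not the actual entity source):
 *
 *   @Field(termVector = TermVector.WITH_POSITION_OFFSETS)
 *   private String content;
 */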
public void testBatchSize() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    final int loop = 14;
    s.doWork(new Work() {
        @Override
        public void execute(Connection connection) throws SQLException {
            for (int i = 0; i < loop; i++) {
                Statement statmt = connection.createStatement();
                statmt.executeUpdate(
                        "insert into Domain(id, name) values(" + (i + 1) + ", 'sponge" + i + "')");
                statmt.executeUpdate(
                        "insert into Email(id, title, body, header, domain_id) values("
                                + (i + 1)
                                + ", 'Bob Sponge', 'Meet the guys who create the software', 'nope', "
                                + (i + 1) + ")");
                statmt.close();
            }
        }
    });
    tx.commit();
    s.close();

    // check that objects inserted via plain SQL (and therefore never auto-indexed)
    // do get found once they are indexed manually below
    s = Search.getFullTextSession(openSession());
    tx = s.beginTransaction();
    ScrollableResults results = s.createCriteria(Email.class).scroll(ScrollMode.FORWARD_ONLY);
    int index = 0;
    while (results.next()) {
        index++;
        s.index(results.get(0));
        if (index % 5 == 0) {
            s.clear();
        }
    }
    // if you get a LazyInitializationException here, it is because the session is clear()ed
    // inside the loop; this only works with a batch size of 5, which is the point of the test
    tx.commit();

    s.clear();
    tx = s.beginTransaction();
    QueryParser parser = new QueryParser(TestConstants.getTargetLuceneVersion(), "id", TestConstants.stopAnalyzer);
    List result = s.createFullTextQuery(parser.parse("body:create")).list();
    assertEquals(14, result.size());
    for (Object object : result) {
        s.delete(object);
    }
    tx.commit();
    s.close();
}
@After
public void deleteTestData() {
    Session s = openSession();
    FullTextSession session = Search.getFullTextSession(s);
    Transaction tx = s.beginTransaction();

    QueryDescriptor query = ElasticsearchQueries.fromJson("{ 'query': { 'match_all' : {} } }");
    List<?> result = session.createFullTextQuery(query).list();
    for (Object entity : result) {
        session.delete(entity);
    }

    tx.commit();
    s.close();
}
@Test
public void testFullTextFilterDefAtMappingLevel() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    address.setOwner("test");
    Calendar c = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    c.set(2009, Calendar.NOVEMBER, 15);
    address.setLastUpdated(c);
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setLastUpdated(c);
    address.setOwner("testowner");
    s.persist(address);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:Peachtnot");
    FullTextQuery query = s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    query.enableFullTextFilter("security").setParameter("ownerName", "testowner");
    assertEquals("expecting 1 result", 1, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();
    for (Object[] result : results) {
        s.delete(result[0]);
    }
    tx.commit();
    s.close();
}
@Test
public void testCalendarBridgeMapping() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    Calendar calendar = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    calendar.set(2009, Calendar.NOVEMBER, 15);
    address.setLastUpdated(calendar);
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setLastUpdated(calendar);
    s.persist(address);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    long searchTimeStamp = DateTools.round(calendar.getTime().getTime(), DateTools.Resolution.DAY);
    org.apache.lucene.search.Query luceneQuery = NumericRangeQuery.newLongRange(
            "last-updated", searchTimeStamp, searchTimeStamp, true, true);

    FullTextQuery query = s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 2 results", 2, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();
    for (Object[] result : results) {
        s.delete(result[0]);
    }
    tx.commit();
    s.close();
}
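/*
 * The numeric range query on "last-updated" above assumes the Calendar property is
 * indexed numerically with day resolution. A hedged sketch of the annotation-based
 * equivalent (the actual test may configure this programmatically instead; names and
 * attributes are assumptions):
 *
 *   @Field(name = "last-updated")
 *   @CalendarBridge(resolution = Resolution.DAY)   // numeric encoding is the default in recent versions
 *   private Calendar lastUpdated;
 */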
@After
public void deleteTestData() {
    Session s = openSession();
    FullTextSession session = Search.getFullTextSession(s);
    Transaction tx = s.beginTransaction();

    // TODO verify this is no longer needed after we implement the delete operations
    QueryDescriptor query = ElasticsearchQueries.fromJson("{ 'query': { 'match_all' : {} } }");
    List<?> result = session.createFullTextQuery(query).list();
    for (Object entity : result) {
        session.delete(entity);
    }

    tx.commit();
    s.close();
}
@Test(groups = "ch04")
public void testEntityAssociations() throws Exception {
    Session session = factory.openSession();
    Transaction tx = session.beginTransaction();

    Item item = new Item();
    item.setDescription("Great DVD");
    item.setEan("123456789012");
    item.setTitle("Great DVD");

    Director director = new Director();
    director.setName("Emmanuel");
    director.getItems().add(item);
    item.setDirector(director);

    Actor actor = new Actor();
    actor.setName("John");
    session.save(actor);
    item.getActors().add(actor);
    actor.getItems().add(item);

    session.save(item);
    session.save(director);
    tx.commit();
    session.clear();

    tx = session.beginTransaction();
    actor = (Actor) session.get(Actor.class, actor.getId());
    actor.setName("John Griffin");
    director = (Director) session.get(Director.class, director.getId());
    director.setName("emmanuel Bernard");
    tx.commit();
    session.clear();

    tx = session.beginTransaction();
    FullTextSession fts = Search.getFullTextSession(session);
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("actors.name", "griffin")), BooleanClause.Occur.MUST);
    query.add(new TermQuery(new Term("director.name", "bernard")), BooleanClause.Occur.MUST);
    List results = fts.createFullTextQuery(query, Item.class).list();
    assert results.size() == 1;
    fts.delete(results.get(0));
    tx.commit();
    fts.close();
}
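/*
 * The dotted fields "actors.name" and "director.name" above, and the fact that renaming
 * the Actor and Director after the initial save still refreshes the Item index, rely on
 * the associations being embedded and marked as containing the owning entity. A hedged
 * sketch of the assumed mapping (annotation placement is an assumption, not the actual
 * entity source):
 *
 *   @Indexed
 *   public class Item {
 *       @IndexedEmbedded
 *       private Set<Actor> actors;
 *
 *       @IndexedEmbedded
 *       private Director director;
 *   }
 *
 *   public class Actor {
 *       @Field
 *       private String name;
 *
 *       @ContainedIn           // changes to an Actor trigger reindexing of its Items
 *       private Set<Item> items;
 *   }
 */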
public void testBoostedFieldDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedFieldIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser = new QueryParser(
            TestConstants.getTargetLuceneVersion(), "author", TestConstants.standardAnalyzer);
    QueryParser descParser = new QueryParser(
            TestConstants.getTargetLuceneVersion(), "description", TestConstants.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
            fullTextSession.createFullTextQuery(query, BoostedFieldDescriptionLibrary.class);
    List results = hibQuery.list();

    assertTrue(
            "incorrect document boost",
            ((BoostedFieldDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    // cleanup
    for (Object element :
            fullTextSession.createQuery("from " + BoostedFieldDescriptionLibrary.class.getName()).list()) {
        fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
}
@Test
public void testBoost() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    s.persist(address);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery =
            parser.parse("street1:peachtree OR idx_street2:peachtree");
    FullTextQuery query = s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting two results", 2, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    assertTrue(
            "first result should score strictly higher",
            (Float) results.get(0)[1] > (Float) results.get(1)[1] * 1.9f);
    assertEquals(
            "Wrong result order", address.getStreet1(), ((Address) results.get(0)[0]).getStreet1());

    for (Object[] result : results) {
        s.delete(result[0]);
    }
    tx.commit();
    s.close();
}
/**
 * Tests that the token filters applied to <code>Team</code> are successfully created and used.
 * Refer to <code>Team</code> to see the exact definitions.
 *
 * @throws Exception in case the test fails
 */
@Test
public void testAnalyzerDef() throws Exception {
    // create the test instance
    Team team = new Team();
    // \u00E0 == à; ISOLatin1AccentFilterFactory should strip the diacritic
    team.setDescription("This is a D\u00E0scription");
    team.setLocation("Atlanta");
    team.setName("ATL team");

    // persist and index the test object
    FullTextSession fts = Search.getFullTextSession(openSession());
    Transaction tx = fts.beginTransaction();
    fts.persist(team);
    tx.commit();
    fts.clear();

    // execute several searches to show that the right tokenizers were applied
    tx = fts.beginTransaction();

    TermQuery query = new TermQuery(new Term("description", "D\u00E0scription"));
    assertEquals(
            "ISO Latin-1 accent filter should work: \u00E0 should have become a",
            0,
            fts.createFullTextQuery(query).list().size());

    query = new TermQuery(new Term("description", "is"));
    assertEquals(
            "stop word filter should work: 'is' should be removed",
            0,
            fts.createFullTextQuery(query).list().size());

    query = new TermQuery(new Term("description", "dascript"));
    assertEquals(
            "snowball stemmer should work: 'dascription' should be stemmed to 'dascript'",
            1,
            fts.createFullTextQuery(query).list().size());

    // cleanup
    fts.delete(fts.createFullTextQuery(query).list().get(0));
    tx.commit();
    fts.close();
}
@Test
@Category(ElasticsearchSupportInProgress.class)
// HSEARCH-2428 Provide an alternative to org.hibernate.search.analyzer.Discriminator for Elasticsearch?
public void testAnalyzerDiscriminator() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    BlogEntry deEntry = new BlogEntry();
    deEntry.setTitle("aufeinanderschl\u00FCgen");
    deEntry.setDescription("aufeinanderschl\u00FCgen");
    deEntry.setLanguage("de");
    s.persist(deEntry);

    BlogEntry enEntry = new BlogEntry();
    enEntry.setTitle("acknowledgment");
    enEntry.setDescription("acknowledgment");
    enEntry.setLanguage("en");
    s.persist(enEntry);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();

    // at query time we use a standard analyzer. We explicitly search for tokens which can only
    // be found if the right language-specific stemmer was used at index time
    assertEquals(1, nbrOfMatchingResults("description", "aufeinanderschlug", s));
    assertEquals(1, nbrOfMatchingResults("description", "acknowledg", s));
    assertEquals(0, nbrOfMatchingResults("title", "aufeinanderschlug", s));
    assertEquals(1, nbrOfMatchingResults("title", "acknowledgment", s));

    for (Object result : s.createQuery("from " + BlogEntry.class.getName()).list()) {
        s.delete(result);
    }
    tx.commit();
    s.close();
}
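/*
 * The language-specific stemming verified above is typically driven by an analyzer
 * discriminator that picks an analyzer definition per entity based on the "language"
 * property. A hedged sketch of that wiring (the class name and analyzer definition
 * names are assumptions, not BlogEntry's actual source):
 *
 *   @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
 *   private String language;
 *
 *   public class LanguageDiscriminator implements Discriminator {
 *       @Override
 *       public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
 *           // return the @AnalyzerDef name matching the language, e.g. "de" or "en"
 *           return value == null ? null : value.toString();
 *       }
 *   }
 */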
@Test
public void testMessageSending() throws Exception {
    TShirt shirt = createObjectWithSQL();
    List<LuceneWork> queue = createDocumentAndWorkQueue(shirt);

    registerMessageListener();
    sendMessage(queue);

    // need to sleep to give JMS processing and indexing time
    Thread.sleep(1000);

    FullTextSession ftSess = Search.getFullTextSession(openSession());
    ftSess.getTransaction().begin();
    QueryParser parser = new QueryParser(TestConstants.getTargetLuceneVersion(), "id", TestConstants.stopAnalyzer);
    Query luceneQuery = parser.parse("logo:jboss");
    org.hibernate.Query query = ftSess.createFullTextQuery(luceneQuery);
    List result = query.list();
    assertEquals(1, result.size());

    ftSess.delete(result.get(0));
    ftSess.getTransaction().commit();
    ftSess.close();
}
@Test
public void testIndexEmbedded() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    ProductCatalog productCatalog = new ProductCatalog();
    productCatalog.setName("Cars");

    Item item = new Item();
    item.setId(1);
    item.setDescription("Ferrari");
    item.setProductCatalog(productCatalog);
    productCatalog.addItem(item);

    s.persist(item);
    s.persist(productCatalog);
    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("items.description:Ferrari");
    FullTextQuery query = s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 1 result", 1, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();
    for (Object[] result : results) {
        s.delete(result[0]);
    }
    tx.commit();
    s.close();
}
@Test
public void testDateBridgeMapping() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    Calendar c = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    c.set(2009, Calendar.NOVEMBER, 15);
    Date date = new Date(c.getTimeInMillis());
    address.setDateCreated(date);
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setDateCreated(date);
    s.persist(address);

    BlogEntry enEntry = new BlogEntry();
    enEntry.setTitle("acknowledgment");
    enEntry.setDescription("acknowledgment");
    enEntry.setLanguage("en");
    enEntry.setDateCreated(date);
    s.persist(enEntry);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();
    long searchTimeStamp = DateTools.round(date.getTime(), DateTools.Resolution.DAY);
    BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
    booleanQueryBuilder.add(
            NumericRangeQuery.newLongRange("date-created", searchTimeStamp, searchTimeStamp, true, true),
            BooleanClause.Occur.SHOULD);
    booleanQueryBuilder.add(
            NumericRangeQuery.newLongRange("blog-entry-created", searchTimeStamp, searchTimeStamp, true, true),
            BooleanClause.Occur.SHOULD);

    FullTextQuery query = s.createFullTextQuery(booleanQueryBuilder.build())
            .setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 3 results", 3, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();
    for (Object[] result : results) {
        s.delete(result[0]);
    }
    tx.commit();
    s.close();
}
@Test
public void testSpatial() {
    org.hibernate.Session s = openSession();
    Transaction tx = s.beginTransaction();
    MemberLevelTestPoI memberLevelTestPoI = new MemberLevelTestPoI("test", 24.0, 32.0d);
    s.persist(memberLevelTestPoI);
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    FullTextSession session = Search.getFullTextSession(s);
    QueryBuilder builder =
            session.getSearchFactory().buildQueryBuilder().forEntity(MemberLevelTestPoI.class).get();

    double centerLatitude = 24;
    double centerLongitude = 31.5;

    org.apache.lucene.search.Query luceneQuery = builder
            .spatial()
            .onField("location")
            .within(50, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();
    org.hibernate.Query hibQuery = session.createFullTextQuery(luceneQuery, MemberLevelTestPoI.class);
    List<?> results = hibQuery.list();
    assertEquals(0, results.size());

    org.apache.lucene.search.Query luceneQuery2 = builder
            .spatial()
            .onField("location")
            .within(51, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();
    org.hibernate.Query hibQuery2 = session.createFullTextQuery(luceneQuery2, MemberLevelTestPoI.class);
    List<?> results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    List<?> testPoIs = session.createQuery("from " + MemberLevelTestPoI.class.getName()).list();
    for (Object entity : testPoIs) {
        session.delete(entity);
    }
    tx.commit();
    session.close();

    s = openSession();
    tx = s.beginTransaction();
    ClassLevelTestPoI classLevelTestPoI = new ClassLevelTestPoI("test", 24.0, 32.0d);
    s.persist(classLevelTestPoI);
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    session = Search.getFullTextSession(s);
    builder = session.getSearchFactory().buildQueryBuilder().forEntity(ClassLevelTestPoI.class).get();

    centerLatitude = 24;
    centerLongitude = 31.5;

    luceneQuery = SpatialQueryBuilder.buildSpatialQueryByHash(centerLatitude, centerLongitude, 50, "location");
    hibQuery = session.createFullTextQuery(luceneQuery, ClassLevelTestPoI.class);
    results = hibQuery.list();
    assertEquals(0, results.size());

    luceneQuery2 = SpatialQueryBuilder.buildSpatialQueryByHash(centerLatitude, centerLongitude, 51, "location");
    hibQuery2 = session.createFullTextQuery(luceneQuery2, ClassLevelTestPoI.class);
    results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    testPoIs = session.createQuery("from " + ClassLevelTestPoI.class.getName()).list();
    for (Object entity : testPoIs) {
        session.delete(entity);
    }
    tx.commit();
    session.close();

    s = openSession();
    tx = s.beginTransaction();
    LatLongAnnTestPoi latLongAnnTestPoi = new LatLongAnnTestPoi("test", 24.0, 32.0d);
    s.persist(latLongAnnTestPoi);
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    session = Search.getFullTextSession(s);
    builder = session.getSearchFactory().buildQueryBuilder().forEntity(LatLongAnnTestPoi.class).get();

    centerLatitude = 24;
    centerLongitude = 31.5;

    luceneQuery = builder
            .spatial()
            .onField("location")
            .within(50, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();
    hibQuery = session.createFullTextQuery(luceneQuery, LatLongAnnTestPoi.class);
    results = hibQuery.list();
    assertEquals(0, results.size());

    luceneQuery2 = builder
            .spatial()
            .onField("location")
            .within(51, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();
    hibQuery2 = session.createFullTextQuery(luceneQuery2, LatLongAnnTestPoi.class);
    results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    testPoIs = session.createQuery("from " + LatLongAnnTestPoi.class.getName()).list();
    for (Object entity : testPoIs) {
        session.delete(entity);
    }
    tx.commit();
    session.close();
}
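/*
 * The "location" field targeted by the spatial queries above assumes the entities expose
 * their coordinates to Hibernate Search Spatial. A hedged sketch of the latitude/longitude
 * annotation variant suggested by the LatLongAnnTestPoi name (field and spatial names are
 * assumptions, not the actual entity source):
 *
 *   @Entity
 *   @Indexed
 *   @Spatial(name = "location")
 *   public class LatLongAnnTestPoi {
 *       @Latitude(of = "location")
 *       private Double latitude;
 *
 *       @Longitude(of = "location")
 *       private Double longitude;
 *   }
 *
 * MemberLevelTestPoI and ClassLevelTestPoI presumably place @Spatial on a Coordinates-returning
 * getter and on the class, respectively, which is why both query styles above resolve the same
 * "location" field.
 */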