@Override
 public String toString(String field) {
   StringBuilder sb = new StringBuilder();
   sb.append("boost(").append(q.toString(field)).append(',').append(boostVal).append(')');
   sb.append(ToStringUtils.boost(getBoost()));
   return sb.toString();
 }
  // Skip encoding for updating the index
  void createIndex2(int nDocs, String... fields) throws IOException {
    Set<String> fieldSet = new HashSet<String>(Arrays.asList(fields));

    SolrQueryRequest req = lrf.makeRequest();
    SolrQueryResponse rsp = new SolrQueryResponse();
    UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessingChain(null);
    UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);

    boolean foomany_s = fieldSet.contains("foomany_s");
    boolean foo1_s = fieldSet.contains("foo1_s");
    boolean foo2_s = fieldSet.contains("foo2_s");
    boolean foo4_s = fieldSet.contains("foo4_s");
    boolean foo8_s = fieldSet.contains("foo8_s");
    boolean t10_100_ws = fieldSet.contains("t10_100_ws");

    for (int i = 0; i < nDocs; i++) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", Float.toString(i));
      if (foomany_s) {
        doc.addField("foomany_s", t(r.nextInt(nDocs * 10)));
      }
      if (foo1_s) {
        doc.addField("foo1_s", t(0));
      }
      if (foo2_s) {
        doc.addField("foo2_s", r.nextInt(2));
      }
      if (foo4_s) {
        doc.addField("foo4_s", r.nextInt(4));
      }
      if (foo8_s) {
        doc.addField("foo8_s", r.nextInt(8));
      }
      if (t10_100_ws) {
        StringBuilder sb = new StringBuilder(9 * 100);
        for (int j = 0; j < 100; j++) {
          sb.append(' ');
          sb.append(t(r.nextInt(10)));
        }
        doc.addField("t10_100_ws", sb.toString());
      }

      AddUpdateCommand cmd = new AddUpdateCommand();
      cmd.solrDoc = doc;
      processor.processAdd(cmd);
    }
    processor.finish();
    req.close();

    assertU(commit());

    req = lrf.makeRequest();
    assertEquals(nDocs, req.getSearcher().maxDoc());
    req.close();
  }
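For context, a hedged usage sketch (assuming this helper lives in a SolrTestCaseJ4-style test where lrf, r, t() and the listed field names exist in the test schema; the test name and field choices below are illustrative only):

  public void testIndexAndCount() throws IOException {
    // Index 1000 documents, populating two of the optional random fields.
    createIndex2(1000, "foo2_s", "t10_100_ws");
    // The helper already commits and checks maxDoc(); a follow-up query
    // confirms the documents are searchable.
    assertQ(req("q", "*:*"), "//result[@numFound='1000']");
  }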
 @Override
 public String toString(String field) {
   StringBuilder sb = new StringBuilder();
   sb.append("ParentQuery[")
       .append(parentType)
       .append("](")
       .append(originalParentQuery.toString(field))
       .append(')')
       .append(ToStringUtils.boost(getBoost()));
   return sb.toString();
 }
 @Override
 public String toString(String field) {
   StringBuilder sb = new StringBuilder();
   sb.append("score_child[")
       .append(childType)
       .append("/")
       .append(parentType)
       .append("](")
       .append(originalChildQuery.toString(field))
       .append(')');
   sb.append(ToStringUtils.boost(getBoost()));
   return sb.toString();
 }
  public double testIndexing() throws IOException, IllegalAccessException, InstantiationException {
    LocalitySensitiveHashing.generateHashFunctions();
    LocalitySensitiveHashing.readHashFunctions();
    ChainedDocumentBuilder builder = new ChainedDocumentBuilder();
    builder.addBuilder(DocumentBuilderFactory.getCEDDDocumentBuilder());

    //        System.out.println("-< Getting files to index >--------------");
    ArrayList<String> images = FileUtils.getAllImages(new File(testExtensive), true);
    //        System.out.println("-< Indexing " + images.size() + " files >--------------");

    IndexWriter iw =
        LuceneUtils.createIndexWriter(indexPath, true, LuceneUtils.AnalyzerType.WhitespaceAnalyzer);
    int count = 0;
    long time = System.currentTimeMillis();
    for (String identifier : images) {
      CEDD cedd = new CEDD();
      cedd.extract(ImageIO.read(new FileInputStream(identifier)));
      Document doc = new Document();
      doc.add(new Field(DocumentBuilder.FIELD_NAME_CEDD, cedd.getByteArrayRepresentation()));
      doc.add(
          new Field(
              DocumentBuilder.FIELD_NAME_IDENTIFIER,
              identifier,
              Field.Store.YES,
              Field.Index.NOT_ANALYZED));
      int[] hashes = LocalitySensitiveHashing.generateHashes(cedd.getDoubleHistogram());
      StringBuilder hash = new StringBuilder(512);
      for (int i = 0; i < hashes.length; i++) {
        hash.append(hashes[i]);
        hash.append(' ');
      }
      //            System.out.println("hash = " + hash);
      doc.add(new Field("hash", hash.toString(), Field.Store.YES, Field.Index.ANALYZED));
      iw.addDocument(doc);
      count++;
      //            if (count % 100 == 0) System.out.println(count + " files indexed.");
    }
    long timeTaken = (System.currentTimeMillis() - time);
    float sec = ((float) timeTaken) / 1000f;

    //        System.out.println(sec + " seconds taken, " + (timeTaken / count) + " ms per image.");
    iw.close();

    return testSearch();
  }
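A hedged sketch of the retrieval side (assuming the same Lucene 3.x-era API used above; the method name and queryFile parameter are illustrative): the query image is hashed with the same LSH functions, and the hash terms are OR-ed so that any document sharing at least one bucket with the query becomes a candidate.

  public TopDocs searchSketch(String queryFile) throws IOException {
    IndexReader reader = IndexReader.open(FSDirectory.open(new File(indexPath)));
    IndexSearcher searcher = new IndexSearcher(reader);
    CEDD queryFeature = new CEDD();
    queryFeature.extract(ImageIO.read(new File(queryFile)));
    int[] queryHashes = LocalitySensitiveHashing.generateHashes(queryFeature.getDoubleHistogram());
    BooleanQuery query = new BooleanQuery();
    for (int h : queryHashes) {
      // Any shared hash bucket is enough to surface a document as a candidate.
      query.add(new TermQuery(new Term("hash", Integer.toString(h))), BooleanClause.Occur.SHOULD);
    }
    return searcher.search(query, 50);
  }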
Example #6
 /** {@inheritDoc} */
 @Override
 public String toString() {
   StringBuilder builder = new StringBuilder();
   builder.append("Search [query=");
   builder.append(queryCondition);
   builder.append(", filter=");
   builder.append(filterCondition);
   builder.append("]");
   return builder.toString();
 }
 /**
  * Create the trigger.
  *
  * @param conn the database connection
  * @param schema the schema name
  * @param table the table name
  */
 protected static void createTrigger(Connection conn, String schema, String table)
     throws SQLException {
   Statement stat = conn.createStatement();
   String trigger =
       StringUtils.quoteIdentifier(schema)
           + "."
           + StringUtils.quoteIdentifier(TRIGGER_PREFIX + table);
   stat.execute("DROP TRIGGER IF EXISTS " + trigger);
   StringBuilder buff = new StringBuilder("CREATE TRIGGER IF NOT EXISTS ");
   // the trigger is also called on rollback because transaction rollback
   // will not undo the changes in the Lucene index
   buff.append(trigger)
       .append(" AFTER INSERT, UPDATE, DELETE, ROLLBACK ON ")
       .append(StringUtils.quoteIdentifier(schema))
       .append('.')
       .append(StringUtils.quoteIdentifier(table))
       .append(" FOR EACH ROW CALL \"")
       .append(FullTextLucene2.FullTextTrigger.class.getName())
       .append('\"');
   stat.execute(buff.toString());
 }
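A hedged usage sketch (assuming an in-memory H2 database, and that the call is made from the declaring class or a subclass since the method is protected; the method and table names are illustrative):

  void installTriggerSketch() throws SQLException {
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:fulltext")) {
      conn.createStatement().execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
      // Installs an AFTER INSERT/UPDATE/DELETE/ROLLBACK trigger that keeps the
      // Lucene index in step with row-level changes on PUBLIC.TEST.
      createTrigger(conn, "PUBLIC", "TEST");
    }
  }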
 static String reason(FixedBitSet actual, FixedBitSet expected, IndexSearcher indexSearcher)
     throws IOException {
   StringBuilder builder = new StringBuilder();
   builder.append("expected cardinality:").append(expected.cardinality()).append('\n');
   DocIdSetIterator iterator = expected.iterator();
   for (int doc = iterator.nextDoc();
       doc != DocIdSetIterator.NO_MORE_DOCS;
       doc = iterator.nextDoc()) {
     builder
         .append("Expected doc[")
         .append(doc)
         .append("] with id value ")
         .append(indexSearcher.doc(doc).get(UidFieldMapper.NAME))
         .append('\n');
   }
   builder.append("actual cardinality: ").append(actual.cardinality()).append('\n');
   iterator = actual.iterator();
   for (int doc = iterator.nextDoc();
       doc != DocIdSetIterator.NO_MORE_DOCS;
       doc = iterator.nextDoc()) {
     builder
         .append("Actual doc[")
         .append(doc)
         .append("] with id value ")
         .append(indexSearcher.doc(doc).get(UidFieldMapper.NAME))
         .append('\n');
   }
   return builder.toString();
 }
 /** Describe the parameters that control how the "more like this" query is formed. */
 public String describeParams() {
   StringBuilder sb = new StringBuilder();
   sb.append("\t").append("maxQueryTerms  : ").append(maxQueryTerms).append("\n");
   sb.append("\t").append("minWordLen     : ").append(minWordLen).append("\n");
   sb.append("\t").append("maxWordLen     : ").append(maxWordLen).append("\n");
   sb.append("\t").append("fieldNames     : ");
   String delim = "";
   for (String fieldName : fieldNames) {
     sb.append(delim).append(fieldName);
     delim = ", ";
   }
   sb.append("\n");
   sb.append("\t").append("boost          : ").append(boost).append("\n");
   sb.append("\t").append("minTermFreq    : ").append(minTermFreq).append("\n");
   sb.append("\t").append("minDocFreq     : ").append(minDocFreq).append("\n");
   return sb.toString();
 }
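A hedged usage sketch (assuming this is Lucene's MoreLikeThis helper, or a close variant with the same setters, and an already-open IndexReader; the method name and field names are illustrative):

  public Query describeAndBuildSketch(IndexReader reader) throws IOException {
    MoreLikeThis mlt = new MoreLikeThis(reader);
    mlt.setFieldNames(new String[] {"title", "body"});
    mlt.setMinTermFreq(2);
    mlt.setMinDocFreq(5);
    // Dump the effective parameters in the tab-indented format built above.
    System.out.println(mlt.describeParams());
    // Build a "more like this" query against an existing document id.
    return mlt.like(42);
  }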
 private String msg(String left, String right) {
   int size = Math.min(left.length(), right.length());
   StringBuilder builder = new StringBuilder("size: " + left.length() + " vs. " + right.length());
   builder.append(" content: <<");
   for (int i = 0; i < size; i++) {
     if (left.charAt(i) == right.charAt(i)) {
       builder.append(left.charAt(i));
     } else {
       builder
           .append(">> ")
           .append("until offset: ")
           .append(i)
           .append(" [")
           .append(left.charAt(i))
           .append(" vs.")
           .append(right.charAt(i))
           .append("] [")
           .append((int) left.charAt(i))
           .append(" vs.")
           .append((int) right.charAt(i))
           .append(']');
       return builder.toString();
     }
   }
   if (left.length() != right.length()) {
     int leftEnd = Math.max(size, left.length()) - 1;
     int rightEnd = Math.max(size, right.length()) - 1;
     builder
         .append(">> ")
         .append("until offset: ")
         .append(size)
         .append(" [")
         .append(left.charAt(leftEnd))
         .append(" vs.")
         .append(right.charAt(rightEnd))
         .append("] [")
         .append((int) left.charAt(leftEnd))
         .append(" vs.")
         .append((int) right.charAt(rightEnd))
         .append(']');
     return builder.toString();
   }
   return "";
 }
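A hedged usage sketch (illustrative strings and test name only): the helper is typically handed to an assertion so that a long, mostly-equal pair of strings fails with the first differing offset and character codes.

  public void testMsgSketch() {
    String expected = "the quick brown fox";
    String actual = "the quick brawn fox";
    // Fails with a message like:
    // size: 19 vs. 19 content: <<the quick br>> until offset: 12 [o vs. a] ...
    assertEquals(msg(expected, actual), expected, actual);
  }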
Example #11
  // Test scores with one field with Term Freqs and one without, otherwise with equal content
  public void testBasic() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random());
    IndexWriter writer =
        new IndexWriter(
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
                .setMaxBufferedDocs(2)
                .setSimilarity(new SimpleSimilarity())
                .setMergePolicy(newLogMergePolicy(2)));

    StringBuilder sb = new StringBuilder(265);
    String term = "term";
    for (int i = 0; i < 30; i++) {
      Document d = new Document();
      sb.append(term).append(" ");
      String content = sb.toString();
      Field noTf = newField("noTf", content + (i % 2 == 0 ? "" : " notf"), omitType);
      d.add(noTf);

      Field tf = newField("tf", content + (i % 2 == 0 ? " tf" : ""), normalType);
      d.add(tf);

      writer.addDocument(d);
      // System.out.println(d);
    }

    writer.forceMerge(1);
    // flush
    writer.close();

    /*
     * Verify the index
     */
    IndexReader reader = DirectoryReader.open(dir);
    IndexSearcher searcher = newSearcher(reader);
    searcher.setSimilarity(new SimpleSimilarity());

    Term a = new Term("noTf", term);
    Term b = new Term("tf", term);
    Term c = new Term("noTf", "notf");
    Term d = new Term("tf", "tf");
    TermQuery q1 = new TermQuery(a);
    TermQuery q2 = new TermQuery(b);
    TermQuery q3 = new TermQuery(c);
    TermQuery q4 = new TermQuery(d);

    PhraseQuery pq = new PhraseQuery();
    pq.add(a);
    pq.add(c);
    try {
      searcher.search(pq, 10);
      fail("did not hit expected exception");
    } catch (Exception e) {
      Throwable cause = e;
      // If the searcher uses an executor service, the IllegalStateException is wrapped into other exceptions
      while (cause.getCause() != null) {
        cause = cause.getCause();
      }
      assertTrue("Expected an IAE, got " + cause, cause instanceof IllegalStateException);
    }

    searcher.search(
        q1,
        new CountingHitCollector() {
          private Scorer scorer;

          @Override
          public final void setScorer(Scorer scorer) {
            this.scorer = scorer;
          }

          @Override
          public final void collect(int doc) throws IOException {
            // System.out.println("Q1: Doc=" + doc + " score=" + score);
            float score = scorer.score();
            assertTrue("got score=" + score, score == 1.0f);
            super.collect(doc);
          }
        });
    // System.out.println(CountingHitCollector.getCount());

    searcher.search(
        q2,
        new CountingHitCollector() {
          private Scorer scorer;

          @Override
          public final void setScorer(Scorer scorer) {
            this.scorer = scorer;
          }

          @Override
          public final void collect(int doc) throws IOException {
            // System.out.println("Q2: Doc=" + doc + " score=" + score);
            float score = scorer.score();
            assertEquals(1.0f + doc, score, 0.00001f);
            super.collect(doc);
          }
        });
    // System.out.println(CountingHitCollector.getCount());

    searcher.search(
        q3,
        new CountingHitCollector() {
          private Scorer scorer;

          @Override
          public final void setScorer(Scorer scorer) {
            this.scorer = scorer;
          }

          @Override
          public final void collect(int doc) throws IOException {
            // System.out.println("Q1: Doc=" + doc + " score=" + score);
            float score = scorer.score();
            assertTrue(score == 1.0f);
            assertFalse(doc % 2 == 0);
            super.collect(doc);
          }
        });
    // System.out.println(CountingHitCollector.getCount());

    searcher.search(
        q4,
        new CountingHitCollector() {
          private Scorer scorer;

          @Override
          public final void setScorer(Scorer scorer) {
            this.scorer = scorer;
          }

          @Override
          public final void collect(int doc) throws IOException {
            float score = scorer.score();
            // System.out.println("Q1: Doc=" + doc + " score=" + score);
            assertTrue(score == 1.0f);
            assertTrue(doc % 2 == 0);
            super.collect(doc);
          }
        });
    // System.out.println(CountingHitCollector.getCount());

    BooleanQuery bq = new BooleanQuery();
    bq.add(q1, Occur.MUST);
    bq.add(q4, Occur.MUST);

    searcher.search(
        bq,
        new CountingHitCollector() {
          @Override
          public final void collect(int doc) throws IOException {
            // System.out.println("BQ: Doc=" + doc + " score=" + score);
            super.collect(doc);
          }
        });
    assertEquals(15, CountingHitCollector.getCount());

    reader.close();
    dir.close();
  }
 @Override
 public StorageResults visit(Select select) {
   // TMDM-4654: Checks if entity has a composite PK.
   Set<ComplexTypeMetadata> compositeKeyTypes = new HashSet<ComplexTypeMetadata>();
   // TMDM-7496: Search should include references to reused types
   Collection<ComplexTypeMetadata> types =
       new HashSet<ComplexTypeMetadata>(select.accept(new SearchTransitiveClosure()));
   for (ComplexTypeMetadata type : types) {
     if (type.getKeyFields().size() > 1) {
       compositeKeyTypes.add(type);
     }
   }
   if (!compositeKeyTypes.isEmpty()) {
     StringBuilder message = new StringBuilder();
      Iterator<ComplexTypeMetadata> it = compositeKeyTypes.iterator();
      while (it.hasNext()) {
        ComplexTypeMetadata compositeKeyType = it.next();
       message.append(compositeKeyType.getName());
       if (it.hasNext()) {
         message.append(',');
       }
     }
     throw new FullTextQueryCompositeKeyException(message.toString());
   }
   // Removes Joins and joined fields.
   List<Join> joins = select.getJoins();
   if (!joins.isEmpty()) {
     Set<ComplexTypeMetadata> joinedTypes = new HashSet<ComplexTypeMetadata>();
     for (Join join : joins) {
       joinedTypes.add(join.getRightField().getFieldMetadata().getContainingType());
     }
     for (ComplexTypeMetadata joinedType : joinedTypes) {
       types.remove(joinedType);
     }
     List<TypedExpression> filteredFields = new LinkedList<TypedExpression>();
     for (TypedExpression expression : select.getSelectedFields()) {
       if (expression instanceof Field) {
         FieldMetadata fieldMetadata = ((Field) expression).getFieldMetadata();
         if (joinedTypes.contains(fieldMetadata.getContainingType())) {
           TypeMapping mapping =
               mappings.getMappingFromDatabase(fieldMetadata.getContainingType());
           filteredFields.add(
               new Alias(
                   new StringConstant(StringUtils.EMPTY),
                   mapping.getUser(fieldMetadata).getName()));
         } else {
           filteredFields.add(expression);
         }
       } else {
         filteredFields.add(expression);
       }
     }
     selectedFields.clear();
     selectedFields.addAll(filteredFields);
   }
   // Handle condition
   Condition condition = select.getCondition();
   if (condition == null) {
     throw new IllegalArgumentException("Expected a condition in select clause but got 0.");
   }
   // Create Lucene query (concatenates all sub queries together).
   FullTextSession fullTextSession = Search.getFullTextSession(session);
   Query parsedQuery = select.getCondition().accept(new LuceneQueryGenerator(types));
   // Create Hibernate Search query
   Set<Class> classes = new HashSet<Class>();
   for (ComplexTypeMetadata type : types) {
     String className = ClassCreator.getClassName(type.getName());
     try {
       ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
       classes.add(contextClassLoader.loadClass(className));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException("Could not find class '" + className + "'.", e);
     }
   }
   FullTextQuery fullTextQuery =
       fullTextSession.createFullTextQuery(
           parsedQuery, classes.toArray(new Class<?>[classes.size()]));
    // Very important to leave this null: setting a criteria query would disable the ability to search across different types.
   fullTextQuery.setCriteriaQuery(null);
   fullTextQuery.setSort(Sort.RELEVANCE); // Default sort (if no order by specified).
   query =
       EntityFinder.wrap(
           fullTextQuery,
           (HibernateStorage) storage,
           session); // ensures only MDM entity objects are returned.
   // Order by
   for (OrderBy current : select.getOrderBy()) {
     current.accept(this);
   }
   // Paging
   Paging paging = select.getPaging();
   paging.accept(this);
   pageSize = paging.getLimit();
   boolean hasPaging = pageSize < Integer.MAX_VALUE;
   if (!hasPaging) {
     return createResults(query.scroll(ScrollMode.FORWARD_ONLY));
   } else {
     return createResults(query.list());
   }
 }
 @Override
 public String toString(String field) {
   StringBuilder buffer = new StringBuilder();
   buffer.append(hashFieldName);
   buffer.append(",");
   buffer.append(Arrays.toString(hashes));
   buffer.append(",");
   buffer.append(maxResult);
   buffer.append(",");
   buffer.append(luceneFieldName);
   buffer.append(",");
   buffer.append(lireFeature.getClass().getSimpleName());
   buffer.append(ToStringUtils.boost(getBoost()));
   return buffer.toString();
 }