Example #1
  @Override
  public void setUp() throws Exception {
    super.setUp();
    numIterations = atLeast(50);
    dir = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            dir,
            newIndexWriterConfig(
                    TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
                .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));
    Document doc = new Document();
    Field field = newStringField("field", "", Field.Store.YES);
    doc.add(field);
    terms = new TreeSet<BytesRef>();

    int num = atLeast(200);
    for (int i = 0; i < num; i++) {
      String s = _TestUtil.randomUnicodeString(random());
      field.setStringValue(s);
      terms.add(new BytesRef(s));
      writer.addDocument(doc);
    }

    termsAutomaton = BasicAutomata.makeStringUnion(terms);

    reader = writer.getReader();
    searcher = newSearcher(reader);
    writer.close();
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   // set the default codec, so adding test cases to this isn't fragile
   savedCodec = Codec.getDefault();
   Codec.setDefault(getCodec());
 }
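The codec saved above implies a matching restore once the test finishes. A minimal sketch of such a tearDown, assuming the same savedCodec field and Lucene's Codec.setDefault API (the restore is not shown in the original snippet):

  @Override
  public void tearDown() throws Exception {
    // restore the codec captured in setUp so later tests see the original default again
    Codec.setDefault(savedCodec);
    super.tearDown();
  }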
  @Override
  public void setUp() throws Exception {
    super.setUp();
    directory = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            directory,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                .setMergePolicy(newLogMergePolicy()));

    // Add a series of docs with filterable fields: url, text and date
    addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
    addDoc(writer, "http://lucene.apache.org", "New release pending", "20040102");
    addDoc(writer, "http://lucene.apache.org", "Lucene 1.9 out now", "20050101");
    addDoc(writer, "http://www.bar.com", "Local man bites dog", "20040101");
    addDoc(writer, "http://www.bar.com", "Dog bites local man", "20040102");
    addDoc(writer, "http://www.bar.com", "Dog uses Lucene", "20050101");
    addDoc(writer, "http://lucene.apache.org", "Lucene 2.0 out", "20050101");
    addDoc(writer, "http://lucene.apache.org", "Oops. Lucene 2.1 out", "20050102");

    // Until we fix LUCENE-2348, the index must
    // have only 1 segment:
    writer.forceMerge(1);

    reader = writer.getReader();
    writer.close();
    searcher = newSearcher(reader);
  }
  // setup the index
  @Override
  public void setUp() throws Exception {
    super.setUp();
    indexDir = _TestUtil.getTempDir("RAMDirIndex");

    Directory dir = newFSDirectory(indexDir);
    IndexWriter writer =
        new IndexWriter(
            dir,
            new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
                .setOpenMode(OpenMode.CREATE));
    // add some documents
    for (int i = 0; i < docsToAdd; i++) {
      Document doc = new Document();
      doc.add(
          newField(
              "content",
              English.intToEnglish(i).trim(),
              Field.Store.YES,
              Field.Index.NOT_ANALYZED));
      writer.addDocument(doc);
    }
    assertEquals(docsToAdd, writer.maxDoc());
    writer.close();
    dir.close();
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   // set the theoretical maximum term count for 8bit (see docs for the number)
   // super.tearDown will restore the default
   BooleanQuery.setMaxClauseCount(7 * 255 * 2 + 255);
 }
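For reference, 7 * 255 * 2 + 255 works out to 3825. Assuming the comment refers to a 64-bit value indexed with an 8-bit precision step, that appears to be the worst-case term count of a numeric range query, so the clause limit is raised just high enough for such a query to rewrite into a BooleanQuery. A sketch of the arithmetic (the precision-step reading is an assumption, not stated in the snippet):

  // hypothetical derivation of the limit used above
  int bits = 64;                                 // assumed width of the indexed value
  int precisionStep = 8;                         // "8bit" from the comment
  int termsPerLevel = (1 << precisionStep) - 1;  // 255
  int maxClauses = (bits / precisionStep - 1) * termsPerLevel * 2 + termsPerLevel;
  // maxClauses == 7 * 255 * 2 + 255 == 3825, the value passed to setMaxClauseCount above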
 @Override
 public void setUp() throws Exception {
   super.setUp();
   PayloadHelper helper = new PayloadHelper();
   searcher = helper.setUp(random(), similarity, 1000);
   indexReader = searcher.getIndexReader();
 }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   File file = _TestUtil.getTempDir("testIndex");
   // use a simple FSDir here, to be sure to have SimpleFSInputs
   dir = new SimpleFSDirectory(file, null);
 }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));

    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.setOmitNorms(true);
    Field field = newField("field", "", customType);
    doc.add(field);

    NumberFormat df = new DecimalFormat("000", new DecimalFormatSymbols(Locale.ROOT));
    for (int i = 0; i < 1000; i++) {
      field.setStringValue(df.format(i));
      writer.addDocument(doc);
    }

    reader = writer.getReader();
    writer.close();
    searcher = newSearcher(reader);
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   analyzerW = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
   analyzerB = new BigramAnalyzer();
   analyzerK = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
   dir = newDirectory();
 }
Example #10
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();

    directory = newDirectory();
    indexWriter = new RandomIndexWriter(random(), directory);
  }
 @Override
 protected void setUp() throws Exception {
   super.setUp();
   analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
   analyzerB = new BigramAnalyzer();
   analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
   paW = new QueryParser(TEST_VERSION_CURRENT, F, analyzerW);
   paB = new QueryParser(TEST_VERSION_CURRENT, F, analyzerB);
   dir = newDirectory(newRandom());
 }
  public void setUp() throws Exception {
    super.setUp();

    FSTCompletionBuilder builder = new FSTCompletionBuilder();
    for (TermFreq tf : evalKeys()) {
      builder.add(tf.term, (int) tf.v);
    }
    completion = builder.build();
    completionAlphabetical = new FSTCompletion(completion.getFST(), false, true);
  }
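A brief usage sketch for the two suggesters built above, assuming the suggest module's FSTCompletion.lookup(CharSequence, int) method; the prefix string is only an illustration:

  // completion was built with weights, so higher-weighted suggestions come back first
  List<FSTCompletion.Completion> byWeight = completion.lookup("a", 10);

  // completionAlphabetical wraps the same FST but returns matches in alphabetical order
  List<FSTCompletion.Completion> alphabetical = completionAlphabetical.lookup("a", 10);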
Example #13
 @Override
 @Before
 public void setUp() throws Exception {
   super.setUp();
   similarity = Mockito.mock(Similarity.class);
   simWeight = Mockito.mock(Similarity.SimWeight.class);
   Mockito.when(
           similarity.computeWeight(
               any(CollectionStatistics.class), Matchers.<TermStatistics>anyVararg()))
       .thenReturn(simWeight);
 }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   directory = newDirectory();
   analyzer = new MockAnalyzer(random());
   indexWriter =
       new RandomIndexWriter(
           random(), directory, LuceneTestCase.newIndexWriterConfig(random(), analyzer));
   indexReader = indexWriter.getReader();
   indexSearcher = newSearcher(indexReader);
 }
  @Override
  public void setUp() throws Exception {
    super.setUp();

    String[] data =
        new String[] {
          "A 1 2 3 4 5 6",
          "Z       4 5 6",
          null,
          "B   2   4 5 6",
          "Y     3   5 6",
          null,
          "C     3     6",
          "X       4 5 6"
        };

    index = new RAMDirectory();
    IndexWriter writer =
        new IndexWriter(index, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

    for (int i = 0; i < data.length; i++) {
      Document doc = new Document();
      doc.add(
          new Field(
              "id",
              String.valueOf(i),
              Field.Store.YES,
              Field.Index.NOT_ANALYZED)); // Field.Keyword("id",String.valueOf(i)));
      doc.add(
          new Field(
              "all",
              "all",
              Field.Store.YES,
              Field.Index.NOT_ANALYZED)); // Field.Keyword("all","all"));
      if (null != data[i]) {
        doc.add(
            new Field(
                "data",
                data[i],
                Field.Store.YES,
                Field.Index.ANALYZED)); // Field.Text("data",data[i]));
      }
      writer.addDocument(doc);
    }

    writer.optimize();
    writer.close();

    r = IndexReader.open(index, true);
    s = new IndexSearcher(r);

    // System.out.println("Set up " + getName());
  }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    baseListModel = new BaseListModel(DataStore.getRestaurants());
    listSearcher = new ListSearcher(baseListModel);

    infoToAdd1 = new RestaurantInfo();
    infoToAdd1.setName("Pino's");

    infoToAdd2 = new RestaurantInfo();
    infoToAdd2.setName("Pino's");
    infoToAdd2.setType("Italian");
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   mergedDir = newDirectory();
   merge1Dir = newDirectory();
   merge2Dir = newDirectory();
   DocHelper.setupDoc(doc1);
   SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
   DocHelper.setupDoc(doc2);
   SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
   reader1 = new SegmentReader(info1, newIOContext(random()));
   reader2 = new SegmentReader(info2, newIOContext(random()));
 }
  @Override
  protected void setUp() throws Exception {
    super.setUp();

    RAMDirectory dirA = new RAMDirectory();
    RAMDirectory dirB = new RAMDirectory();

    IndexWriter wA =
        new IndexWriter(dirA, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    IndexWriter wB =
        new IndexWriter(dirB, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

    long theLong = Long.MAX_VALUE;
    double theDouble = Double.MAX_VALUE;
    byte theByte = Byte.MAX_VALUE;
    short theShort = Short.MAX_VALUE;
    int theInt = Integer.MAX_VALUE;
    float theFloat = Float.MAX_VALUE;
    for (int i = 0; i < NUM_DOCS; i++) {
      Document doc = new Document();
      doc.add(
          new Field(
              "theLong", String.valueOf(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      doc.add(
          new Field(
              "theDouble", String.valueOf(theDouble--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      doc.add(
          new Field(
              "theByte", String.valueOf(theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      doc.add(
          new Field(
              "theShort", String.valueOf(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      doc.add(
          new Field("theInt", String.valueOf(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      doc.add(
          new Field(
              "theFloat", String.valueOf(theFloat--), Field.Store.NO, Field.Index.NOT_ANALYZED));
      if (0 == i % 3) {
        wA.addDocument(doc);
      } else {
        wB.addDocument(doc);
      }
    }
    wA.close();
    wB.close();
    readerA = IndexReader.open(dirA, true);
    readerB = IndexReader.open(dirB, true);
    readerX = new MultiReader(new IndexReader[] {readerA, readerB});
  }
 @Override
 public void setUp() throws Exception {
   super.setUp();
   RAMDirectory directory = new RAMDirectory();
   IndexWriter writer =
       new IndexWriter(
           directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
   for (int i = 0; i < docFields.length; i++) {
     Document doc = new Document();
     doc.add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
     writer.addDocument(doc);
   }
   writer.close();
   searcher = new IndexSearcher(directory, true);
 }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory);

    // Add series of docs with specific information for MoreLikeThis
    addDoc(writer, "lucene");
    addDoc(writer, "lucene release");
    addDoc(writer, "apache");
    addDoc(writer, "apache lucene");

    reader = writer.getReader();
    writer.close();
    searcher = newSearcher(reader);
  }
Example #21
  @Override
  public void setUp() throws Exception {
    super.setUp();
    directory = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(), directory, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));

    // Add a series of docs with filterable fields: access rights, prices, dates and "in-stock" flags
    addDoc(writer, "admin guest", "010", "20040101", "Y");
    addDoc(writer, "guest", "020", "20040101", "Y");
    addDoc(writer, "guest", "020", "20050101", "Y");
    addDoc(writer, "admin", "020", "20050101", "Maybe");
    addDoc(writer, "admin guest", "030", "20050101", "N");
    reader = SlowCompositeReaderWrapper.wrap(writer.getReader());
    writer.close();
  }
  /*
   * @see TestCase#setUp()
   */
  @Override
  public void setUp() throws Exception {
    super.setUp();

    analyzer = new MockAnalyzer(random());
    // Create an index
    dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
    for (String docFieldValue : docFieldValues) {
      w.addDocument(getDocumentFromString(docFieldValue));
    }
    w.forceMerge(1);
    w.close();
    reader = DirectoryReader.open(dir);
    searcher = newSearcher(reader);

    // initialize the parser
    builder = new CorePlusExtensionsParser("artist", analyzer);
  }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dirA = newDirectory();
    dirB = newDirectory();

    IndexWriter wA =
        new IndexWriter(
            dirA, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    IndexWriter wB =
        new IndexWriter(
            dirB, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));

    long theLong = Long.MAX_VALUE;
    double theDouble = Double.MAX_VALUE;
    byte theByte = Byte.MAX_VALUE;
    short theShort = Short.MAX_VALUE;
    int theInt = Integer.MAX_VALUE;
    float theFloat = Float.MAX_VALUE;
    for (int i = 0; i < NUM_DOCS; i++) {
      Document doc = new Document();
      doc.add(newStringField("theLong", String.valueOf(theLong--), Field.Store.NO));
      doc.add(newStringField("theDouble", String.valueOf(theDouble--), Field.Store.NO));
      doc.add(newStringField("theByte", String.valueOf(theByte--), Field.Store.NO));
      doc.add(newStringField("theShort", String.valueOf(theShort--), Field.Store.NO));
      doc.add(newStringField("theInt", String.valueOf(theInt--), Field.Store.NO));
      doc.add(newStringField("theFloat", String.valueOf(theFloat--), Field.Store.NO));
      if (0 == i % 3) {
        wA.addDocument(doc);
      } else {
        wB.addDocument(doc);
      }
    }
    wA.close();
    wB.close();
    DirectoryReader rA = DirectoryReader.open(dirA);
    readerA = SlowCompositeReaderWrapper.wrap(rA);
    readerAclone = SlowCompositeReaderWrapper.wrap(rA);
    readerA = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dirA));
    readerB = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dirB));
    readerX = SlowCompositeReaderWrapper.wrap(new MultiReader(readerA, readerB));
  }
Example #24
  /**
   * Sets up the test case. This test case needs a few text files created in the current working
   * directory.
   */
  @Override
  public void setUp() throws Exception {
    super.setUp();
    if (VERBOSE) {
      System.out.println("TEST: setUp");
    }
    workDir = createTempDir("TestDoc");
    workDir.mkdirs();

    indexDir = createTempDir("testIndex");
    indexDir.mkdirs();

    Directory directory = newFSDirectory(indexDir);
    directory.close();

    files = new LinkedList<>();
    files.add(createOutput("test.txt", "This is the first test file"));

    files.add(createOutput("test2.txt", "This is the second test file"));
  }
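The createOutput helper called above is not part of this snippet. A hypothetical sketch of what such a helper could look like, given that its return value is collected into files and that the javadoc asks for small text files under the working directory (name, signature, and details assumed):

  // hypothetical helper: write the given text into workDir/name and return the File
  private File createOutput(String name, String text) throws IOException {
    File f = new File(workDir, name);
    try (PrintWriter pw = new PrintWriter(
        new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8))) {
      pw.println(text);
    }
    return f;
  }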
 @Override
 public void setUp() throws Exception {
   super.setUp();
   dir = newDirectory();
   iw = new RandomIndexWriter(random(), dir);
   Document doc = new Document();
   Field idField = new StringField("id", "", Field.Store.NO);
   doc.add(idField);
   // add 500 docs with id 0..499
   for (int i = 0; i < 500; i++) {
     idField.setStringValue(Integer.toString(i));
     iw.addDocument(doc);
   }
   // delete 20 of them
   for (int i = 0; i < 20; i++) {
     iw.deleteDocuments(new Term("id", Integer.toString(random().nextInt(iw.maxDoc()))));
   }
   ir = iw.getReader();
   is = newSearcher(ir);
 }
  /**
   * Initializes things your test might need
   *
   * <ul>
   *   <li>Creates a dataDir in the "java.io.tmpdir"
   *   <li>initializes the TestHarness h using this data directory, and getSchemaPath()
   *   <li>initializes the LocalRequestFactory lrf using sensible defaults.
   * </ul>
   */
  @Override
  public void setUp() throws Exception {
    super.setUp();
    log.info("####SETUP_START " + getTestName());
    ignoreException("ignore_exception");
    factoryProp = System.getProperty("solr.directoryFactory");
    if (factoryProp == null) {
      System.setProperty("solr.directoryFactory", "solr.RAMDirectoryFactory");
    }
    dataDir = new File(TEMP_DIR, getClass().getName() + "-" + System.currentTimeMillis());
    dataDir.mkdirs();
    String configFile = getSolrConfigFile();
    System.setProperty("solr.solr.home", getSolrHome());
    if (configFile != null) {

      solrConfig = TestHarness.createConfig(getSolrConfigFile());
      h = new TestHarness(dataDir.getAbsolutePath(), solrConfig, getSchemaFile());
      lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
    }
    log.info("####SETUP_END " + getTestName());
  }
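The harness and request factory initialized above are typically exercised later in the tests. A hedged sketch of such a call; the makeRequest and query signatures are assumptions about the old TestHarness API and may differ across Solr versions:

  // assumed usage: build a local request via the factory and run it through the harness
  SolrQueryRequest req = lrf.makeRequest("*:*");
  String responseXml = h.query(req);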
 @Override
 public void setUp() throws Exception {
   super.setUp();
   dir = newDirectory();
   RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
   int numDocs = TestUtil.nextInt(random(), 2049, 4000);
   for (int i = 0; i < numDocs; i++) {
     Document document = new Document();
     document.add(newTextField("english", English.intToEnglish(i), Field.Store.NO));
     document.add(newTextField("oddeven", (i % 2 == 0) ? "even" : "odd", Field.Store.NO));
     document.add(new NumericDocValuesField("int", random().nextInt()));
     document.add(new NumericDocValuesField("long", random().nextLong()));
     document.add(new FloatDocValuesField("float", random().nextFloat()));
     document.add(new DoubleDocValuesField("double", random().nextDouble()));
     if (i == 545) document.add(new DoubleDocValuesField("onefield", 45.72));
     iw.addDocument(document);
   }
   reader = iw.getReader();
   iw.close();
   searcher = newSearcher(reader);
 }
Example #28
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    fieldName =
        random().nextBoolean() ? "field" : ""; // sometimes use an empty string as field name
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            dir,
            newIndexWriterConfig(
                    TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
                .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));
    Document doc = new Document();
    Field field = newStringField(fieldName, "", Field.Store.NO);
    doc.add(field);
    List<String> terms = new ArrayList<String>();
    int num = atLeast(200);
    for (int i = 0; i < num; i++) {
      String s = _TestUtil.randomUnicodeString(random());
      field.setStringValue(s);
      terms.add(s);
      writer.addDocument(doc);
    }

    if (VERBOSE) {
      // utf16 order
      Collections.sort(terms);
      System.out.println("UTF16 order:");
      for (String s : terms) {
        System.out.println("  " + UnicodeUtil.toHexString(s));
      }
    }

    reader = writer.getReader();
    searcher1 = newSearcher(reader);
    searcher2 = newSearcher(reader);
    writer.close();
  }
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random,
            dir,
            newIndexWriterConfig(
                    TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.KEYWORD, false))
                .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000)));

    Document doc = new Document();
    Field field = newField("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
    doc.add(field);
    List<String> terms = new ArrayList<String>();
    int num = 2000 * RANDOM_MULTIPLIER;
    for (int i = 0; i < num; i++) {
      String s = _TestUtil.randomUnicodeString(random);
      field.setValue(s);
      terms.add(s);
      writer.addDocument(doc);
    }

    if (VERBOSE) {
      // utf16 order
      Collections.sort(terms);
      System.out.println("UTF16 order:");
      for (String s : terms) {
        System.out.println("  " + UnicodeUtil.toHexString(s));
      }
    }

    reader = writer.getReader();
    searcher = new IndexSearcher(reader);
    writer.close();
  }
 protected void setUp() throws Exception {
   super.setUp();
   tokens = new String[] {"here", "is", "some", "text", "to", "test", "extra"};
   thePositions = new int[tokens.length][];
   offsets = new TermVectorOffsetInfo[tokens.length][];
   numPositions = 0;
   // save off the last one so we can add it with the same positions as some of the others, but in
   // a predictable way
   for (int i = 0; i < tokens.length - 1; i++) {
     thePositions[i] = new int[2 * i + 1]; // give 'em all some positions
     for (int j = 0; j < thePositions[i].length; j++) {
       thePositions[i][j] = numPositions++;
     }
     offsets[i] = new TermVectorOffsetInfo[thePositions[i].length];
     for (int j = 0; j < offsets[i].length; j++) {
       offsets[i][j] =
           new TermVectorOffsetInfo(j, j + 1); // the actual value here doesn't much matter
     }
   }
   thePositions[tokens.length - 1] = new int[1];
   thePositions[tokens.length - 1][0] = 0; // put this at the same position as "here"
   offsets[tokens.length - 1] = new TermVectorOffsetInfo[1];
   offsets[tokens.length - 1][0] = new TermVectorOffsetInfo(0, 1);
 }