Example #1
 @Override
 public void setUp() throws Exception {
   super.setUp();
   directory = newDirectory();
   analyzer = new MockAnalyzer(random());
   indexWriter =
       new RandomIndexWriter(
           random(), directory, LuceneTestCase.newIndexWriterConfig(random(), analyzer));
   indexReader = indexWriter.getReader();
   indexSearcher = newSearcher(indexReader);
 }
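The setUp() above opens a directory, writer, reader, and searcher, but the snippet stops before releasing them. A minimal tearDown() counterpart, sketched here under the assumption that the fields are the ones declared above (indexReader, indexWriter, directory, analyzer), might look like:

 @Override
 public void tearDown() throws Exception {
   // hedged sketch, not part of the original example: release everything setUp() created
   IOUtils.close(indexReader, indexWriter, directory, analyzer); // org.apache.lucene.util.IOUtils
   super.tearDown();
 }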
Example #2
  public void testOneDictionary() throws Exception {
    Path tmp = LuceneTestCase.createTempDir();

    String toTest = "zu_ZA.zip";
    for (int i = 0; i < tests.length; i++) {
      if (tests[i].equals(toTest)) {
        Path f = DICTIONARY_HOME.resolve(tests[i]);
        assert Files.exists(f);

        IOUtils.rm(tmp);
        Files.createDirectory(tmp);

        try (InputStream in = Files.newInputStream(f)) {
          TestUtil.unzip(in, tmp);
          Path dicEntry = tmp.resolve(tests[i + 1]);
          Path affEntry = tmp.resolve(tests[i + 2]);

          try (InputStream dictionary = Files.newInputStream(dicEntry);
              InputStream affix = Files.newInputStream(affEntry);
              Directory tempDir = getDirectory()) {
            new Dictionary(tempDir, "dictionary", affix, dictionary);
          }
        }
      }
    }
  }
Example #3
 @Override
 public void setUp() throws Exception {
   super.setUp();
   // set the default codec, so adding test cases to this isn't fragile
   savedCodec = Codec.getDefault();
   Codec.setDefault(getCodec());
 }
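Because this setUp() replaces the default codec, the matching tearDown() has to put the saved one back; a minimal sketch, assuming the savedCodec field captured above:

 @Override
 public void tearDown() throws Exception {
   // restore the codec saved in setUp() so later tests see the original default
   Codec.setDefault(savedCodec);
   super.tearDown();
 }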
Example #4
 @Override
 public void tearDown() throws Exception {
   reader.close();
   searcher.close();
   dir.close();
   super.tearDown();
 }
Example #5
  @Override
  public void setUp() throws Exception {
    super.setUp();
    numIterations = atLeast(50);
    dir = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            dir,
            newIndexWriterConfig(
                    TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
                .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));
    Document doc = new Document();
    Field field = newStringField("field", "", Field.Store.YES);
    doc.add(field);
    terms = new TreeSet<BytesRef>();

    int num = atLeast(200);
    for (int i = 0; i < num; i++) {
      String s = _TestUtil.randomUnicodeString(random());
      field.setStringValue(s);
      terms.add(new BytesRef(s));
      writer.addDocument(doc);
    }

    termsAutomaton = BasicAutomata.makeStringUnion(terms);

    reader = writer.getReader();
    searcher = newSearcher(reader);
    writer.close();
  }
Example #6
 @Override
 protected void tearDown() throws Exception {
   s.close();
   r.close();
   index.close();
   super.tearDown();
 }
Example #7
  /**
   * Shuts down the test harness, and makes the best attempt possible to delete dataDir, unless the
   * system property "solr.test.leavedatadir" is set.
   */
  @Override
  public void tearDown() throws Exception {
    log.info("####TEARDOWN_START " + getTestName());
    if (factoryProp == null) {
      System.clearProperty("solr.directoryFactory");
    }

    if (h != null) {
      h.close();
    }
    String skip = System.getProperty("solr.test.leavedatadir");
    if (null != skip && 0 != skip.trim().length()) {
      System.err.println(
          "NOTE: per solr.test.leavedatadir, dataDir will not be removed: "
              + dataDir.getAbsolutePath());
    } else {
      if (!recurseDelete(dataDir)) {
        System.err.println(
            "!!!! WARNING: best effort to remove " + dataDir.getAbsolutePath() + " FAILED !!!!!");
      }
    }

    resetExceptionIgnores();
    super.tearDown();
  }
Example #8
 @Override
 public void tearDown() throws Exception {
   super.tearDown();
   readerA.close();
   readerB.close();
   readerX.close();
 }
Example #9
  // TODO: randomize
  public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
    Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
    PayloadAnalyzer analyzer = new PayloadAnalyzer();
    IndexWriter writer =
        new IndexWriter(
            directory,
            new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setSimilarity(similarity));
    // writer.infoStream = System.out;
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      doc.add(new Field(FIELD, English.intToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
      doc.add(
          new Field(
              MULTI_FIELD,
              English.intToEnglish(i) + "  " + English.intToEnglish(i),
              Field.Store.YES,
              Field.Index.ANALYZED));
      doc.add(
          new Field(
              NO_PAYLOAD_FIELD, English.intToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }
    reader = IndexReader.open(writer, true);
    writer.close();

    IndexSearcher searcher = LuceneTestCase.newSearcher(reader);
    searcher.setSimilarity(similarity);
    return searcher;
  }
Example #10
  @Override
  public void setUp() throws Exception {
    super.setUp();
    dir = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                .setMaxBufferedDocs(_TestUtil.nextInt(random(), 50, 1000)));

    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.setOmitNorms(true);
    Field field = newField("field", "", customType);
    doc.add(field);

    NumberFormat df = new DecimalFormat("000", new DecimalFormatSymbols(Locale.ROOT));
    for (int i = 0; i < 1000; i++) {
      field.setStringValue(df.format(i));
      writer.addDocument(doc);
    }

    reader = writer.getReader();
    writer.close();
    searcher = newSearcher(reader);
  }
Example #11
 @Override
 public void setUp() throws Exception {
   super.setUp();
   PayloadHelper helper = new PayloadHelper();
   searcher = helper.setUp(random(), similarity, 1000);
   indexReader = searcher.getIndexReader();
 }
Example #12
  public static void indexSerial(Random random, Map<String, Document> docs, Directory dir)
      throws IOException {
    IndexWriter w =
        new IndexWriter(
            dir,
            LuceneTestCase.newIndexWriterConfig(
                    random, TEST_VERSION_CURRENT, new MockAnalyzer(random))
                .setMergePolicy(newLogMergePolicy()));

    // index all docs in a single thread
    Iterator<Document> iter = docs.values().iterator();
    while (iter.hasNext()) {
      Document d = iter.next();
      ArrayList<Field> fields = new ArrayList<>();
      fields.addAll(d.getFields());
      // put fields in same order each time
      Collections.sort(fields, fieldNameComparator);

      Document d1 = new Document();
      for (int i = 0; i < fields.size(); i++) {
        d1.add(fields.get(i));
      }
      w.addDocument(d1);
      // System.out.println("indexing "+d1);
    }

    w.close();
  }
Example #13
 /** Create a RandomIndexWriter with a random config; uses TEST_VERSION_CURRENT and MockAnalyzer. */
 public RandomIndexWriter(Random r, Directory dir) throws IOException {
   this(
       r,
       dir,
       LuceneTestCase.newIndexWriterConfig(
           r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r)));
 }
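For illustration only, a test could lean on this convenience constructor instead of assembling an IndexWriterConfig by hand; the field name and value in the sketch below are hypothetical:

 Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random(), dir); // random config chosen internally
 Document doc = new Document();
 doc.add(newStringField("field", "value", Field.Store.NO)); // hypothetical field
 writer.addDocument(doc);
 IndexReader reader = writer.getReader();
 writer.close();
 // ... assertions against reader ...
 reader.close();
 dir.close();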
Example #14
  @Override
  public void setUp() throws Exception {
    super.setUp();
    directory = newDirectory();
    RandomIndexWriter writer =
        new RandomIndexWriter(
            random(),
            directory,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                .setMergePolicy(newLogMergePolicy()));

    // Add a series of docs with filterable fields: url, text and date
    addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
    addDoc(writer, "http://lucene.apache.org", "New release pending", "20040102");
    addDoc(writer, "http://lucene.apache.org", "Lucene 1.9 out now", "20050101");
    addDoc(writer, "http://www.bar.com", "Local man bites dog", "20040101");
    addDoc(writer, "http://www.bar.com", "Dog bites local man", "20040102");
    addDoc(writer, "http://www.bar.com", "Dog uses Lucene", "20050101");
    addDoc(writer, "http://lucene.apache.org", "Lucene 2.0 out", "20050101");
    addDoc(writer, "http://lucene.apache.org", "Oops. Lucene 2.1 out", "20050102");

    // Until we fix LUCENE-2348, the index must
    // have only 1 segment:
    writer.forceMerge(1);

    reader = writer.getReader();
    writer.close();
    searcher = newSearcher(reader);
  }
Example #15
 @Override
 public void setUp() throws Exception {
   super.setUp();
   // set the theoretical maximum term count for 8bit (see docs for the number)
   // super.tearDown will restore the default
   BooleanQuery.setMaxClauseCount(7 * 255 * 2 + 255);
 }
Example #16
  // setup the index
  @Override
  public void setUp() throws Exception {
    super.setUp();
    indexDir = _TestUtil.getTempDir("RAMDirIndex");

    Directory dir = newFSDirectory(indexDir);
    IndexWriter writer =
        new IndexWriter(
            dir,
            new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
                .setOpenMode(OpenMode.CREATE));
    // add some documents
    Document doc = null;
    for (int i = 0; i < docsToAdd; i++) {
      doc = new Document();
      doc.add(
          newField(
              "content",
              English.intToEnglish(i).trim(),
              Field.Store.YES,
              Field.Index.NOT_ANALYZED));
      writer.addDocument(doc);
    }
    assertEquals(docsToAdd, writer.maxDoc());
    writer.close();
    dir.close();
  }
Example #17
  @Override
  public void run() {
    Random random = LuceneTestCase.random();
    int numSearches = 0;

    while (!stop) {
      numSearches++;
      try {
        // to come to the aid of their country.
        cloudClient.query(new SolrQuery(QUERIES[random.nextInt(QUERIES.length)]));
      } catch (Exception e) {
        System.err.println("QUERY REQUEST FAILED:");
        e.printStackTrace();
        if (e instanceof SolrServerException) {
          System.err.println("ROOT CAUSE:");
          ((SolrServerException) e).getRootCause().printStackTrace();
        }
        queryFails.incrementAndGet();
      }
      try {
        Thread.sleep(random.nextInt(4000) + 300);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    }

    log.info("num searches done:" + numSearches + " with " + queryFails + " fails");
  }
Example #18
 @Override
 public void setUp() throws Exception {
   super.setUp();
   File file = _TestUtil.getTempDir("testIndex");
   // use a simple FSDir here, to be sure to have SimpleFSInputs
   dir = new SimpleFSDirectory(file, null);
 }
Example #19
 @Override
 public void tearDown() throws Exception {
   // cleanup
   if (indexDir != null && indexDir.exists()) {
     rmDir(indexDir);
   }
   super.tearDown();
 }
Example #20
 @Override
 public void setUp() throws Exception {
   super.setUp();
   analyzerW = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
   analyzerB = new BigramAnalyzer();
   analyzerK = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
   dir = newDirectory();
 }
Example #21
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();

    directory = newDirectory();
    indexWriter = new RandomIndexWriter(random(), directory);
  }
Example #22
 @Override
 public void tearDown() throws Exception {
   if (reader != null) {
     reader.close();
     reader = null;
   }
   dir.close();
   super.tearDown();
 }
Example #23
 @Override
 public void tearDown() throws Exception {
   reader1.close();
   reader2.close();
   mergedDir.close();
   merge1Dir.close();
   merge2Dir.close();
   super.tearDown();
 }
Example #24
    @SuppressWarnings("deprecation")
    public void testDummy() throws Exception {
      file = createTempDir("leftover").resolve("child.locked");
      openFile =
          Files.newByteChannel(
              file, StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE);

      parent = LuceneTestCase.getBaseTempDirForTestClass();
    }
Example #25
 @Override
 protected void setUp() throws Exception {
   super.setUp();
   analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
   analyzerB = new BigramAnalyzer();
   analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
   paW = new QueryParser(TEST_VERSION_CURRENT, F, analyzerW);
   paB = new QueryParser(TEST_VERSION_CURRENT, F, analyzerB);
   dir = newDirectory(newRandom());
 }
Example #26
  public void setUp() throws Exception {
    super.setUp();

    FSTCompletionBuilder builder = new FSTCompletionBuilder();
    for (TermFreq tf : evalKeys()) {
      builder.add(tf.term, (int) tf.v);
    }
    completion = builder.build();
    completionAlphabetical = new FSTCompletion(completion.getFST(), false, true);
  }
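A hedged follow-up, assuming FSTCompletion's lookup(CharSequence, int) API and a hypothetical prefix, showing how the two completion objects built above could be queried:

    // suggestions ordered by weight bucket (default build) vs. alphabetically (second instance)
    List<FSTCompletion.Completion> byWeight = completion.lookup("foo", 5);
    List<FSTCompletion.Completion> alphabetical = completionAlphabetical.lookup("foo", 5);
    for (FSTCompletion.Completion c : byWeight) {
      System.out.println(c.utf8.utf8ToString() + " bucket=" + c.bucket);
    }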
Example #27
 @Override
 @Before
 public void setUp() throws Exception {
   super.setUp();
   similarity = Mockito.mock(Similarity.class);
   simWeight = Mockito.mock(Similarity.SimWeight.class);
   Mockito.when(
           similarity.computeWeight(
               any(CollectionStatistics.class), Matchers.<TermStatistics>anyVararg()))
       .thenReturn(simWeight);
 }
Example #28
  @Test
  public void testDeadlock() throws Exception {
    LuceneTestCase.assumeFalse(
        "This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/LUCENE-6036)",
        new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));

    // pick random codec names for stress test in separate process:
    final Random rnd = RandomizedContext.current().getRandom();
    Set<String> avail;
    final String codecName =
        new ArrayList<>(avail = Codec.availableCodecs()).get(rnd.nextInt(avail.size()));
    final String pfName =
        new ArrayList<>(avail = PostingsFormat.availablePostingsFormats())
            .get(rnd.nextInt(avail.size()));
    final String dvfName =
        new ArrayList<>(avail = DocValuesFormat.availableDocValuesFormats())
            .get(rnd.nextInt(avail.size()));

    // spawn separate JVM:
    final Process p =
        new ProcessBuilder(
                Paths.get(System.getProperty("java.home"), "bin", "java").toString(),
                "-cp",
                System.getProperty("java.class.path"),
                getClass().getName(),
                codecName,
                pfName,
                dvfName)
            .inheritIO()
            .start();
    final ScheduledExecutorService scheduler =
        Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("processKiller"));
    final ScheduledFuture<?> f =
        scheduler.schedule(
            new Runnable() {
              @Override
              public void run() {
                p.destroy();
              }
            },
            30,
            TimeUnit.SECONDS);
    try {
      final int exitCode = p.waitFor();
      if (f.cancel(false)) {
        assertEquals("Process died abnormally", 0, exitCode);
      } else {
        fail("Process did not exit after 30 secs -> classloader deadlock?");
      }
    } finally {
      scheduler.shutdown();
      while (!scheduler.awaitTermination(1, TimeUnit.MINUTES)) ;
    }
  }
Example #29
  @Override
  public void setUp() throws Exception {
    super.setUp();

    String[] data =
        new String[] {
          "A 1 2 3 4 5 6",
          "Z       4 5 6",
          null,
          "B   2   4 5 6",
          "Y     3   5 6",
          null,
          "C     3     6",
          "X       4 5 6"
        };

    index = new RAMDirectory();
    IndexWriter writer =
        new IndexWriter(index, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

    for (int i = 0; i < data.length; i++) {
      Document doc = new Document();
      doc.add(
          new Field(
              "id",
              String.valueOf(i),
              Field.Store.YES,
              Field.Index.NOT_ANALYZED)); // Field.Keyword("id",String.valueOf(i)));
      doc.add(
          new Field(
              "all",
              "all",
              Field.Store.YES,
              Field.Index.NOT_ANALYZED)); // Field.Keyword("all","all"));
      if (null != data[i]) {
        doc.add(
            new Field(
                "data",
                data[i],
                Field.Store.YES,
                Field.Index.ANALYZED)); // Field.Text("data",data[i]));
      }
      writer.addDocument(doc);
    }

    writer.optimize();
    writer.close();

    r = IndexReader.open(index, true);
    s = new IndexSearcher(r);

    // System.out.println("Set up " + getName());
  }
Example #30
 @Override
 public void setUp() throws Exception {
   super.setUp();
   mergedDir = newDirectory();
   merge1Dir = newDirectory();
   merge2Dir = newDirectory();
   DocHelper.setupDoc(doc1);
   SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
   DocHelper.setupDoc(doc2);
   SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
   reader1 = new SegmentReader(info1, newIOContext(random()));
   reader2 = new SegmentReader(info2, newIOContext(random()));
 }