Example #1
File: Search.java Project: srnsw/xena
  // Returns true if the document's index data is multi-group.
  private boolean openDocumentIndex(int docID) throws Exception {
    // The data covers only this document id, so the base is set to zero.
    _data = _env.getPositions(docID);
    _base = 0;
    _startingIndex = 0;
    int kk = _data[_base] & 0xFF, k2;
    switch (kk >> 6) // get type
    {
      case 0: // single group, no extents
        k2 = _data[_base + 1];
        _firstGenerator.init(_data, _base += 2, k2);
        // decode concept table
        _nConcepts = _firstGenerator.decodeConcepts(kk & 0x3F, 0, _concepts);
        return false;

      case 2: // multi group, no extents
        _kTable.clear();
        _offsets.clear();
        _maxConcepts.clear();
        ByteArrayDecompressor compr = new ByteArrayDecompressor(_data, _base + 1);
        compr.decode(kk & 0x3F, _kTable);
        compr.ascDecode(_kTable.popLast(), _offsets);
        compr.ascDecode(_kTable.popLast(), _maxConcepts);
        _base += 1 + compr.bytesRead();
        _limit = _maxConcepts.cardinality();
        return true;

      case 1: // single group, extents
      case 3: // multi group, extents
        throw new Exception("extents not yet implemented\n");
    }
    return false;
  }
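
For orientation: judging from the dispatch above, the first byte of the position data packs the entry type into its top two bits (the multi-group and extent flags handled by the switch) and a 6-bit parameter into the remainder. A minimal standalone sketch of that decode, using hypothetical names rather than the project's code:

// Minimal sketch of the header-byte decode seen above (hypothetical names).
public class HeaderByteDemo {
  public static void main(String[] args) {
    int header = 0x85;                // example byte: binary 1000 0101
    int type = (header & 0xFF) >> 6;  // top two bits: 0..3, as in the switch above
    int param = header & 0x3F;        // low six bits: per-type parameter (kk & 0x3F)
    System.out.println("type=" + type + " param=" + param);  // prints: type=2 param=5
  }
}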
Example #2
File: Search.java Project: srnsw/xena
 public void addTerm(int col, int concept, double score, int query) {
   if (_env.occursInText(concept)) {
     if (_free2 == _size2) {
       ConceptData[] newArray = new ConceptData[_size2 *= 2];
       System.arraycopy(_conceptData, 0, newArray, 0, _free2);
       _conceptData = newArray;
     }
     _conceptData[_free2++] =
         new ConceptData(concept, col, score, query, _query[query].getNColumns());
   }
 }
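
addTerm grows _conceptData by doubling the backing array when it fills. A self-contained sketch of that amortized-doubling pattern (illustrative only, not the project's class):

import java.util.Arrays;

// Standalone illustration of the array-doubling growth used in addTerm above.
public class GrowableBuffer {
  private Object[] data = new Object[4];
  private int free = 0;

  public void add(Object value) {
    if (free == data.length) {
      // Capacity exhausted: allocate a twice-as-large array and copy, as addTerm does.
      data = Arrays.copyOf(data, data.length * 2);
    }
    data[free++] = value;
  }

  public int size() {
    return free;
  }

  public static void main(String[] args) {
    GrowableBuffer buf = new GrowableBuffer();
    for (int i = 0; i < 10; i++) buf.add(i);
    System.out.println("size=" + buf.size());  // prints: size=10
  }
}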
Example #3
File: Search.java Project: srnsw/xena
 public void startSearch(SearchQuery searchQuery) {
   //  fprintf(stderr, "startSearch: setup\n");
   int i, j;
   // set up ConceptData lists
   // order search terms
   quicksort(0, _free2 - 1);
   // remove duplicates: null out entries that are crqEquals-equal to the previous survivor
   for (i = 0; i < _free2 - 1; i = j)
     for (j = i + 1; j < _free2; j++)
       if (_conceptData[i].crqEquals(_conceptData[j])) _conceptData[j] = null;
       else i = j;
   // create lists: chain remaining entries with the same concept (cEquals) onto the first occurrence
   for (i = 0; i < _free2 - 1; i = j)
     for (j = i + 1; j < _free2; j++)
       if (_conceptData[j] != null)
         if (_conceptData[i].cEquals(_conceptData[j])) {
           _conceptData[i].addLast(_conceptData[j]);
           _conceptData[j] = null;
         } else i = j;
   // densify: move the surviving entries forward to fill the null gaps
   for (i = 0; i < _free2 - 1; i++)
     if (_conceptData[i] == null)
       for (j = i + 1; j < _free2; j++)
         if (_conceptData[j] != null) {
           _conceptData[i] = _conceptData[j];
           _conceptData[j] = null;
           break;
         }
   // set up new document generators
   _nextDocGenHeap.reset();
   for (i = 0; i < _free2 && _conceptData[i] != null; i++) {
     NextDocGenerator gen = new NextDocGenerator(_conceptData[i], _env);
     try {
       gen.first();
       if (gen.getDocument() != NonnegativeIntegerGenerator.END) {
         _conceptData[i].setConceptLength(_env.getConceptLength(_conceptData[i].getConcept()));
         _nextDocGenHeap.addGenerator(gen);
       }
     } catch (Exception e) {
       e.printStackTrace();
     }
   }
   _nextDocGenHeap.start();
   searchDocument();
   if (searchQuery == null) {
     printResults(_maxHitsToShow);
   } else {
     _query[0].makeEvent(_maxHitsToShow, searchQuery);
   }
 }
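
The passes in startSearch (sort, null out duplicates, densify) are easiest to see on a plain array. The simplified sketch below reproduces just the duplicate-removal and densify steps on strings; the real code additionally chains entries with equal concepts into lists before densifying:

import java.util.Arrays;

// Simplified illustration of the sort / remove-duplicates / densify passes in startSearch.
public class DedupDensifyDemo {
  public static void main(String[] args) {
    String[] terms = {"cat", "dog", "cat", "bird", "dog"};
    int free = terms.length;
    Arrays.sort(terms, 0, free);                   // order the terms (quicksort in the original)
    for (int i = 0, j = 0; i < free - 1; i = j) {  // null out entries equal to the last survivor
      for (j = i + 1; j < free; j++) {
        if (terms[i].equals(terms[j])) terms[j] = null;
        else i = j;
      }
    }
    int out = 0;                                   // densify: pull survivors forward over the nulls
    for (int k = 0; k < free; k++) {
      if (terms[k] != null) terms[out++] = terms[k];
    }
    System.out.println(Arrays.toString(Arrays.copyOf(terms, out)));  // [bird, cat, dog]
  }
}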