Code Example #1
  /**
   * Used by the indexing thread to flush incoming data events in batches. A flush is carried out
   * when the batch has reached a significant size, a client has requested a flush, or a significant
   * amount of time has passed since the last flush. The data is flushed to the underlying data
   * loader, which is a DataConsumer.
   */
  protected void processBatch() {
    List<DataEvent<ZoieIndexable>> tmpList = null;
    long now = System.currentTimeMillis();
    long duration = now - _lastFlushTime;

    synchronized (this) {
      while (_batchList.size() < _batchSize && !_stop && !_flush && duration < _delay) {
        try {
          this.wait(_delay - duration);
        } catch (InterruptedException e) {
          log.warn(e.getMessage());
        }
        now = System.currentTimeMillis();
        duration = now - _lastFlushTime;
      }
      _flush = false;
      _lastFlushTime = now;

      if (_batchList.size() > 0) {
        // change the status and get the batch list
        // this has to be done in the block synchronized on BatchIndexDataLoader
        _idxMgr.setDiskIndexerStatus(SearchIndexManager.Status.Working);
        tmpList = getBatchList();
      }
    }

    if (tmpList != null) {
      long t1 = System.currentTimeMillis();
      int eventCount = tmpList.size();
      try {
        _dataLoader.consume(tmpList);
      } catch (ZoieException e) {
        log.error(e.getMessage(), e);
      } finally {
        long t2 = System.currentTimeMillis();
        synchronized (this) {
          _eventCount -= eventCount;
          log.info(
              this
                  + " flushed batch of "
                  + eventCount
                  + " events to disk indexer, took: "
                  + (t2 - t1)
                  + " current event count: "
                  + _eventCount);
          IndexUpdatedEvent evt = new IndexUpdatedEvent(eventCount, t1, t2, _eventCount);
          fireIndexingEvent(evt);
          this.notifyAll();
        }
      }
    } else {
      log.debug("batch size is 0");
    }
  }
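
The batch assembled above is handed to _dataLoader.consume(tmpList), which the doc comment describes as a DataConsumer. As a rough illustration of the consumer side, here is a minimal sketch; the interface shape is only inferred from the call in the example (a consume method taking a collection of DataEvent objects and throwing ZoieException), and CountingConsumer is a hypothetical helper, not part of Zoie. DataEvent and ZoieException are assumed to be the Zoie types used above.

import java.util.Collection;
import java.util.concurrent.atomic.AtomicLong;

// Sketch of the consumer contract that processBatch() flushes into. The shape is
// inferred from the call above; the real Zoie DataConsumer interface may declare
// additional methods (e.g. for version tracking).
interface DataConsumer<V> {
  void consume(Collection<DataEvent<V>> events) throws ZoieException;
}

// Hypothetical consumer that just counts the events it receives, handy for
// exercising the batching/flush path without a real disk indexer. Not part of Zoie.
class CountingConsumer<V> implements DataConsumer<V> {
  private final AtomicLong _consumed = new AtomicLong();

  @Override
  public void consume(Collection<DataEvent<V>> events) throws ZoieException {
    if (events == null || events.isEmpty()) {
      return;
    }
    _consumed.addAndGet(events.size());
  }

  public long getConsumedCount() {
    return _consumed.get();
  }
}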
Code Example #2
    private void flush() {
      // FLUSH: swap out the accumulated batch and start a fresh one, so new
      // events can keep arriving while the old batch is handed to the consumer
      Collection<DataEvent<V>> tmp = _batch;
      _batch = new LinkedList<DataEvent<V>>();

      try {
        if (_dataProvider._consumer != null) {
          _eventCount.getAndAdd(tmp.size());
          _dataProvider._consumer.consume(tmp);
        }
      } catch (ZoieException e) {
        log.error(e.getMessage(), e);
      }
    }
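
The core of flush() is a swap-and-consume pattern: the accumulated batch is replaced with a fresh list, and the old list is then passed to the consumer. As a rough, hypothetical illustration of the same pattern in isolation (the names EventBuffer and _maxBatchSize are illustrative, not from Zoie, and the DataConsumer sketch above plus Zoie's DataEvent and ZoieException are assumed to be available):

import java.util.LinkedList;
import java.util.List;

// Hypothetical buffer showing the same swap-and-consume pattern as flush() above:
// events accumulate in a list, and a flush swaps in a fresh list before the old
// one is handed to the consumer.
class EventBuffer<V> {
  private final DataConsumer<V> _consumer;
  private final int _maxBatchSize;
  private List<DataEvent<V>> _batch = new LinkedList<DataEvent<V>>();

  EventBuffer(DataConsumer<V> consumer, int maxBatchSize) {
    _consumer = consumer;
    _maxBatchSize = maxBatchSize;
  }

  // Add an event and flush automatically once the batch is large enough.
  public synchronized void add(DataEvent<V> event) throws ZoieException {
    _batch.add(event);
    if (_batch.size() >= _maxBatchSize) {
      flush();
    }
  }

  public synchronized void flush() throws ZoieException {
    if (_batch.isEmpty()) {
      return;
    }
    List<DataEvent<V>> tmp = _batch;          // take the full batch...
    _batch = new LinkedList<DataEvent<V>>();  // ...and start an empty one
    _consumer.consume(tmp);                   // hand the old batch downstream
  }
}

Unlike the Zoie flush() above, this sketch rethrows ZoieException and holds the lock while consume() runs; the original instead logs the exception and keeps the provider loop going, which is the more forgiving choice for a long-running indexing thread.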