Example #1
  @Override
  public void write(int b) throws IOException {
    _written += 1;
    boolean complete = _channel.getResponse().isAllContentWritten(_written);

    // Async or Blocking ?
    while (true) {
      switch (_state.get()) {
        case OPEN:
          if (_aggregate == null)
            _aggregate = _channel.getByteBufferPool().acquire(getBufferSize(), false);
          BufferUtil.append(_aggregate, (byte) b);

          // Check if all written or full
          if (complete || BufferUtil.isFull(_aggregate)) {
            BlockingCallback callback = _channel.getWriteBlockingCallback();
            _channel.write(_aggregate, complete, callback);
            callback.block();
            if (complete) closed();
          }
          break;

        case ASYNC:
          throw new IllegalStateException("isReady() not called");

        case READY:
          if (!_state.compareAndSet(State.READY, State.PENDING)) continue;

          if (_aggregate == null)
            _aggregate = _channel.getByteBufferPool().acquire(getBufferSize(), false);
          BufferUtil.append(_aggregate, (byte) b);

          // Check if all written or full
          if (!complete && !BufferUtil.isFull(_aggregate)) {
            if (!_state.compareAndSet(State.PENDING, State.ASYNC))
              throw new IllegalStateException();
            return;
          }

          // Do the asynchronous writing from the callback
          new AsyncFlush().process();
          return;

        case PENDING:
        case UNREADY:
          throw new WritePendingException();

        case CLOSED:
          throw new EofException("Closed");
      }
      break;
    }
  }
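In the OPEN case above, each byte is aggregated into a pooled buffer and, once the buffer is full or the response content is complete, the calling thread blocks on a BlockingCallback until the channel has flushed the aggregate. The sketch below shows, under simplifying assumptions, how such a blocking adapter over a succeeded()/failed() completion can be built with a CountDownLatch; the LatchCallback name and contract are illustrative placeholders, not Jetty's actual BlockingCallback.

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.concurrent.CountDownLatch;

// Illustrative sketch only: adapts an asynchronous succeeded()/failed()
// completion into a blocking call, as callback.block() does above.
class LatchCallback {
  private final CountDownLatch _latch = new CountDownLatch(1);
  private volatile Throwable _failure;

  public void succeeded() {
    _latch.countDown();
  }

  public void failed(Throwable x) {
    _failure = x;
    _latch.countDown();
  }

  // Blocks the writing thread until the channel signals completion,
  // rethrowing any failure as an IOException.
  public void block() throws IOException {
    try {
      _latch.await();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new InterruptedIOException();
    }
    if (_failure != null) throw new IOException(_failure);
  }
}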
Example #2
  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    _written += len;
    boolean complete = _channel.getResponse().isAllContentWritten(_written);

    // Async or Blocking ?
    while (true) {
      switch (_state.get()) {
        case OPEN:
          // process blocking below
          break;

        case ASYNC:
          throw new IllegalStateException("isReady() not called");

        case READY:
          if (!_state.compareAndSet(OutputState.READY, OutputState.PENDING)) continue;

          // Should we aggregate?
          if (!complete && len <= _commitSize) {
            if (_aggregate == null)
              _aggregate =
                  _channel
                      .getByteBufferPool()
                      .acquire(getBufferSize(), _interceptor.isOptimizedForDirectBuffers());

            // YES - fill the aggregate with content from the buffer
            int filled = BufferUtil.fill(_aggregate, b, off, len);

            // return if we are not complete, not full and filled all the content
            if (filled == len && !BufferUtil.isFull(_aggregate)) {
              if (!_state.compareAndSet(OutputState.PENDING, OutputState.ASYNC))
                throw new IllegalStateException();
              return;
            }

            // adjust offset/length
            off += filled;
            len -= filled;
          }

          // Do the asynchronous writing from the callback
          new AsyncWrite(b, off, len, complete).iterate();
          return;

        case PENDING:
        case UNREADY:
          throw new WritePendingException();

        case ERROR:
          throw new EofException(_onError);

        case CLOSED:
          throw new EofException("Closed");

        default:
          throw new IllegalStateException();
      }
      break;
    }

    // handle blocking write

    // Should we aggregate?
    int capacity = getBufferSize();
    if (!complete && len <= _commitSize) {
      if (_aggregate == null)
        _aggregate =
            _channel
                .getByteBufferPool()
                .acquire(capacity, _interceptor.isOptimizedForDirectBuffers());

      // YES - fill the aggregate with content from the buffer
      int filled = BufferUtil.fill(_aggregate, b, off, len);

      // return if we are not complete, not full and filled all the content
      if (filled == len && !BufferUtil.isFull(_aggregate)) return;

      // adjust offset/length
      off += filled;
      len -= filled;
    }

    // flush any content from the aggregate
    if (BufferUtil.hasContent(_aggregate)) {
      write(_aggregate, complete && len == 0);

      // should we fill aggregate again from the buffer?
      if (len > 0 && !complete && len <= _commitSize && len <= BufferUtil.space(_aggregate)) {
        BufferUtil.append(_aggregate, b, off, len);
        return;
      }
    }

    // write any remaining content in the buffer directly
    if (len > 0) {
      ByteBuffer wrap = ByteBuffer.wrap(b, off, len);
      ByteBuffer view = wrap.duplicate();

      // write a buffer capacity at a time to avoid JVM pooling large direct buffers
      // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6210541
      while (len > getBufferSize()) {
        int p = view.position();
        int l = p + getBufferSize();
        view.limit(p + getBufferSize());
        write(view, false);
        len -= getBufferSize();
        view.limit(l + Math.min(len, getBufferSize()));
        view.position(l);
      }
      write(view, complete);
    } else if (complete) {
      write(BufferUtil.EMPTY_BUFFER, true);
    }

    if (complete) closed();
  }
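The blocking path above never hands the whole user array to the channel in one go: once the aggregate is flushed, the remaining content is written one getBufferSize() slice at a time through a duplicated ByteBuffer view whose position and limit are advanced per chunk, so the JVM does not cache very large temporary direct buffers (the linked JDK bug). Below is a self-contained sketch of that slicing technique; the chunkSize parameter and the consume() sink are illustrative stand-ins for getBufferSize() and the channel write, not the real API.

import java.nio.ByteBuffer;

// Illustrative sketch only: splits a large byte[] into chunk-sized slices
// by moving the position/limit of a duplicated ByteBuffer view.
class ChunkedWriteSketch {
  static void consume(ByteBuffer slice) {
    // stand-in for the real channel write; just drains the slice
    slice.position(slice.limit());
  }

  static void writeChunked(byte[] b, int off, int len, int chunkSize) {
    ByteBuffer view = ByteBuffer.wrap(b, off, len).duplicate();
    while (len > chunkSize) {
      int p = view.position();
      int l = p + chunkSize;
      view.limit(l);
      consume(view);           // write one chunk
      len -= chunkSize;
      view.limit(l + Math.min(len, chunkSize));
      view.position(l);
    }
    consume(view);             // write the final (possibly short) chunk
  }

  public static void main(String[] args) {
    byte[] data = new byte[10_000];
    writeChunked(data, 0, data.length, 4096);
  }
}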
Example #3
  protected void commit(ByteBuffer content, boolean complete, Callback callback) {
    // Are we excluding because of status?
    Response response = _channel.getResponse();
    int sc = response.getStatus();
    if (sc > 0 && (sc < 200 || sc == 204 || sc == 205 || sc >= 300)) {
      LOG.debug("{} exclude by status {}", this, sc);
      noCompression();

      if (sc == 304) {
        String request_etags =
            (String) _channel.getRequest().getAttribute("o.e.j.s.h.gzip.GzipHandler.etag");
        String response_etag = response.getHttpFields().get(HttpHeader.ETAG);
        if (request_etags != null && response_etag != null) {
          String response_etag_gzip = etagGzip(response_etag);
          if (request_etags.contains(response_etag_gzip))
            response.getHttpFields().put(HttpHeader.ETAG, response_etag_gzip);
        }
      }

      _interceptor.write(content, complete, callback);
      return;
    }

    // Are we excluding because of mime-type?
    String ct = response.getContentType();
    if (ct != null) {
      ct = MimeTypes.getContentTypeWithoutCharset(ct);
      if (!_factory.isMimeTypeGzipable(StringUtil.asciiToLowerCase(ct))) {
        LOG.debug("{} exclude by mimeType {}", this, ct);
        noCompression();
        _interceptor.write(content, complete, callback);
        return;
      }
    }

    // Has the Content-Encoding header already been set?
    HttpFields fields = response.getHttpFields();
    String ce = fields.get(HttpHeader.CONTENT_ENCODING);
    if (ce != null) {
      LOG.debug("{} exclude by content-encoding {}", this, ce);
      noCompression();
      _interceptor.write(content, complete, callback);
      return;
    }

    // Are we the thread that commits?
    if (_state.compareAndSet(GZState.MIGHT_COMPRESS, GZState.COMMITTING)) {
      // We are varying the response due to accept encoding header.
      if (_vary != null) {
        if (fields.contains(HttpHeader.VARY)) fields.addCSV(HttpHeader.VARY, _vary.getValues());
        else fields.add(_vary);
      }

      long content_length = response.getContentLength();
      if (content_length < 0 && complete) content_length = content.remaining();

      _deflater = _factory.getDeflater(_channel.getRequest(), content_length);

      if (_deflater == null) {
        LOG.debug("{} exclude no deflater", this);
        _state.set(GZState.NOT_COMPRESSING);
        _interceptor.write(content, complete, callback);
        return;
      }

      fields.put(GZIP._contentEncoding);
      _crc.reset();
      _buffer = _channel.getByteBufferPool().acquire(_bufferSize, false);
      BufferUtil.fill(_buffer, GZIP_HEADER, 0, GZIP_HEADER.length);

      // Adjust headers
      response.setContentLength(-1);
      String etag = fields.get(HttpHeader.ETAG);
      if (etag != null) fields.put(HttpHeader.ETAG, etagGzip(etag));

      LOG.debug("{} compressing {}", this, _deflater);
      _state.set(GZState.COMPRESSING);

      gzip(content, complete, callback);
    } else callback.failed(new WritePendingException());
  }
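When commit() decides to compress, it acquires a pooled buffer, fills it with the fixed GZIP_HEADER bytes and resets the CRC; the Deflater then produces a raw deflate body, and the gzip trailer (CRC32 plus uncompressed size) follows when the stream finishes. The self-contained sketch below shows that framing end to end under simplifying assumptions; GzipFramingSketch and gzipCompress() are illustrative names, not the interceptor's actual code.

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.zip.CRC32;
import java.util.zip.Deflater;

// Illustrative sketch only: hand-built gzip framing around raw deflate data.
class GzipFramingSketch {
  // 0x1f 0x8b magic, deflate method, no flags, no mtime, no extra flags, unknown OS
  static final byte[] GZIP_HEADER =
      new byte[] {(byte) 0x1f, (byte) 0x8b, (byte) Deflater.DEFLATED, 0, 0, 0, 0, 0, 0, 0};

  static byte[] gzipCompress(byte[] input) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(GZIP_HEADER, 0, GZIP_HEADER.length);

    CRC32 crc = new CRC32();
    crc.update(input, 0, input.length);

    // nowrap=true produces raw deflate data, as required inside a gzip stream
    Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
    deflater.setInput(input);
    deflater.finish();
    byte[] buf = new byte[4096];
    while (!deflater.finished()) {
      int n = deflater.deflate(buf);
      out.write(buf, 0, n);
    }
    deflater.end();

    // Trailer: CRC32 and uncompressed size, both little-endian
    ByteBuffer trailer = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
    trailer.putInt((int) crc.getValue());
    trailer.putInt(input.length);
    out.write(trailer.array(), 0, 8);

    return out.toByteArray();
  }
}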
Example #4
  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    _written += len;
    boolean complete = _channel.getResponse().isAllContentWritten(_written);

    // Async or Blocking ?
    while (true) {
      switch (_state.get()) {
        case OPEN:
          // process blocking below
          break;

        case ASYNC:
          throw new IllegalStateException("isReady() not called");

        case READY:
          if (!_state.compareAndSet(State.READY, State.PENDING)) continue;

          // Should we aggregate?
          int capacity = getBufferSize();
          if (!complete && len <= _commitSize) {
            if (_aggregate == null)
              _aggregate = _channel.getByteBufferPool().acquire(capacity, false);

            // YES - fill the aggregate with content from the buffer
            int filled = BufferUtil.fill(_aggregate, b, off, len);

            // return if we are not complete, not full and filled all the content
            if (filled == len && !BufferUtil.isFull(_aggregate)) {
              if (!_state.compareAndSet(State.PENDING, State.ASYNC))
                throw new IllegalStateException();
              return;
            }

            // adjust offset/length
            off += filled;
            len -= filled;
          }

          // Do the asynchronous writing from the callback
          new AsyncWrite(b, off, len, complete).process();
          return;

        case PENDING:
        case UNREADY:
          throw new WritePendingException();

        case CLOSED:
          throw new EofException("Closed");
      }
      break;
    }

    // handle blocking write

    // Should we aggregate?
    int capacity = getBufferSize();
    if (!complete && len <= _commitSize) {
      if (_aggregate == null) _aggregate = _channel.getByteBufferPool().acquire(capacity, false);

      // YES - fill the aggregate with content from the buffer
      int filled = BufferUtil.fill(_aggregate, b, off, len);

      // return if we are not complete, not full and filled all the content
      if (filled == len && !BufferUtil.isFull(_aggregate)) return;

      // adjust offset/length
      off += filled;
      len -= filled;
    }

    // flush any content from the aggregate
    if (BufferUtil.hasContent(_aggregate)) {
      _channel.write(_aggregate, complete && len == 0);

      // should we fill aggregate again from the buffer?
      if (len > 0 && !complete && len <= _commitSize) {
        BufferUtil.append(_aggregate, b, off, len);
        return;
      }
    }

    // write any remaining content in the buffer directly
    if (len > 0)
      // pass as readonly to avoid space stealing optimisation in HttpConnection
      _channel.write(ByteBuffer.wrap(b, off, len).asReadOnlyBuffer(), complete);
    else if (complete) _channel.write(BufferUtil.EMPTY_BUFFER, complete);

    if (complete) {
      closed();
    }
  }
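Both asynchronous branches (AsyncWrite.iterate() in Example #2, AsyncWrite.process() here) follow the same iterating-callback idea: each step schedules at most one write, and the completion callback re-enters the loop for the next chunk until the content and the final flag have been flushed. The sketch below captures that shape under simplifying assumptions; AsyncChannel, writeAsync() and the chunk parameter are illustrative placeholders, not Jetty's HttpChannel or IteratingCallback API.

import java.nio.ByteBuffer;

// Illustrative sketch only: callback-driven chunked write loop.
class AsyncWriteSketch {
  interface AsyncChannel {
    // invokes onDone.run() once the buffer has been fully written
    void writeAsync(ByteBuffer buffer, boolean last, Runnable onDone);
  }

  private final AsyncChannel _channel;
  private final ByteBuffer _content;
  private final boolean _last;
  private final int _chunk;

  AsyncWriteSketch(AsyncChannel channel, byte[] b, int off, int len, boolean last, int chunk) {
    _channel = channel;
    _content = ByteBuffer.wrap(b, off, len);
    _last = last;
    _chunk = chunk;
  }

  // Schedules at most one write per step; the completion callback drives the next step.
  void process() {
    if (!_content.hasRemaining()) {
      // no content at all: still signal the end of the response if required
      if (_last) _channel.writeAsync(ByteBuffer.allocate(0), true, () -> {});
      return;
    }
    ByteBuffer slice = _content.duplicate();
    slice.limit(Math.min(_content.limit(), _content.position() + _chunk));
    _content.position(slice.limit());
    boolean last = _last && !_content.hasRemaining();
    Runnable next = _content.hasRemaining() ? this::process : () -> {};
    _channel.writeAsync(slice, last, next);
  }
}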