@Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent e) throws Exception { if (!chunked) { final HttpRequest request = (HttpRequest) e.getMessage(); final ChannelBuffer buffer = request.getContent(); receivedData.write(buffer.array()); // System.out.println("received "+buffer.array() ); // System.out.println(buffer.array().length); if (!request.isChunked()) { processRequest(e); } else { chunked = true; } // final boolean keepAlive = isKeepAlive(request); } else { final HttpChunk chunk = (HttpChunk) e.getMessage(); final ChannelBuffer buffer = chunk.getContent(); receivedData.write(buffer.array()); if (chunk.isLast()) { processRequest(e); } } }
@Override public void messageReceived(ChannelHandlerContext ctx, MessageEvent event) throws Exception { if (event.getMessage() instanceof MappingHttpRequest) { MappingHttpRequest httpRequest = (MappingHttpRequest) event.getMessage(); if (httpRequest.getMessage() instanceof ObjectStorageDataRequestType) { if (httpRequest.isChunked()) { // Chunked request, and beginning, setup map etc. initializeNewPut(ctx, (ObjectStorageDataRequestType) httpRequest.getMessage()); } } } else if (event.getMessage() instanceof HttpChunk) { // Add the chunk to the current streams channel buffer. HttpChunk chunk = (HttpChunk) event.getMessage(); appendChunk(chunk.getContent(), ctx.getChannel()); if (chunk.isLast()) { // Remove from the map Logs.extreme() .debug( "Removing data map due to last chunk processed event for channel: " + ctx.getChannel().getId()); dataMap.remove(ctx.getChannel()); } } // Always pass it on ctx.sendUpstream(event); }
/** * Initialized the internals from a new chunk * * @param chunk the new received chunk * @throws ErrorDataDecoderException if there is a problem with the charset decoding or other * errors */ public void offer(HttpChunk chunk) throws ErrorDataDecoderException { ChannelBuffer chunked = chunk.getContent(); if (undecodedChunk == null) { undecodedChunk = chunked; } else { // undecodedChunk = ChannelBuffers.wrappedBuffer(undecodedChunk, chunk.getContent()); // less memory usage undecodedChunk = ChannelBuffers.wrappedBuffer(undecodedChunk, chunked); } if (chunk.isLast()) { isLastChunk = true; } parseBody(); }
/**
 * Delivers one HTTP chunk to the active {@code BodyConsumer}.
 *
 * <p>On the last chunk the consumer is finished and released. On any error while
 * consuming a non-last chunk, the consumer is notified via {@code handleError},
 * released, and a 500 {@code HandlerException} is thrown.
 *
 * <p>Fix: {@code bodyConsumer} is now cleared in a {@code finally} when
 * {@code finished} throws, so a failed finish cannot leave a stale consumer
 * behind (previously it was only cleared on success).
 *
 * @param chunk the received chunk; must not arrive without an active consumer
 * @throws Exception if finishing fails, or wrapped as {@code HandlerException}
 *     when consuming a chunk fails
 */
void chunk(HttpChunk chunk) throws Exception {
  Preconditions.checkState(
      bodyConsumer != null, "Received chunked content without BodyConsumer.");
  if (chunk.isLast()) {
    try {
      bodyConsumer.finished(responder);
    } finally {
      // Release the consumer even if finished() throws.
      bodyConsumer = null;
    }
  } else {
    try {
      bodyConsumer.chunk(chunk.getContent(), responder);
    } catch (Throwable t) {
      bodyConsumer.handleError(t);
      bodyConsumer = null;
      throw new HandlerException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "", t);
    }
  }
}
/**
 * Forwards an HTTP chunk's content to the child channel and, on the last chunk,
 * completes the child channel's read side.
 *
 * <p>NOTE(review): the local {@code httpChildChannel} deliberately shadows the
 * field — the field is captured, cleared, and the remaining events are fired on
 * the captured reference. The order (readState → clear field → input shutdown →
 * close events) appears intentional; do not reorder without confirming.
 */
@Override
protected void httpMessageReceived(
    ChannelHandlerContext ctx, MessageEvent e, HttpChunk httpMessage) throws Exception {
  ChannelBuffer content = httpMessage.getContent();
  if (content.readable()) {
    // Only propagate chunks that actually carry bytes.
    fireMessageReceived(httpChildChannel, content);
  }
  boolean last = httpMessage.isLast();
  if (last) {
    // Capture the field, then detach it before firing lifecycle events.
    HttpChildChannel httpChildChannel = this.httpChildChannel;
    httpChildChannel.readState(HttpReadState.CONTENT_COMPLETE);
    this.httpChildChannel = null;
    fireInputShutdown(httpChildChannel);
    if (httpChildChannel.setReadClosed()) {
      // Read side fully closed: emit the full disconnect/unbind/close sequence.
      fireChannelDisconnected(httpChildChannel);
      fireChannelUnbound(httpChildChannel);
      fireChannelClosed(httpChildChannel);
    }
  }
}
/**
 * Handles an inbound request, deferring the write until the body is complete.
 *
 * <p>A non-chunked request is written immediately; a chunked request flips
 * {@code readingChunks} and only the terminal chunk triggers the write.
 * (Intermediate chunks are intentionally not buffered here — presumably
 * {@code writeObject} only needs the request/last chunk; confirm with callers.)
 *
 * @param ctx the handler context
 * @param e the upstream message event
 * @throws Exception if writing fails
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
  if (readingChunks) {
    final HttpChunk chunk = (HttpChunk) e.getMessage();
    if (chunk.isLast()) {
      readingChunks = false;
      writeObject(e, chunk);
    }
    return;
  }
  request = (HttpRequest) e.getMessage();
  if (is100ContinueExpected(request)) {
    send100Continue(e);
  }
  if (request.isChunked()) {
    readingChunks = true;
  } else {
    writeObject(e, request);
  }
}
/**
 * Dispatches inbound client-side events to the owning {@code ClientConnection}.
 *
 * <p>{@code HttpResponse} starts a response (its body, if readable, is delivered
 * as the first chunk; a non-chunked response also ends immediately).
 * {@code HttpChunk} delivers body data, ending the response on the last chunk
 * (with trailers when present). {@code WebSocketFrame} is handed to the frame
 * handler. Anything else is a protocol error.
 *
 * @param ctx the handler context
 * @param e the upstream message event
 * @throws Exception on handler failure
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
  final Channel ch = e.getChannel();
  final ClientConnection conn = connectionMap.get(ch);
  final Object msg = e.getMessage();
  if (msg instanceof HttpResponse) {
    final HttpResponse response = (HttpResponse) msg;
    conn.handleResponse(response);
    final ChannelBuffer body = response.getContent();
    if (body.readable()) {
      conn.handleResponseChunk(new Buffer(body));
    }
    if (!response.isChunked()) {
      conn.handleResponseEnd();
    }
  } else if (msg instanceof HttpChunk) {
    final HttpChunk chunk = (HttpChunk) msg;
    final ChannelBuffer body = chunk.getContent();
    if (body.readable()) {
      conn.handleResponseChunk(new Buffer(body));
    }
    if (chunk.isLast()) {
      if (chunk instanceof HttpChunkTrailer) {
        conn.handleResponseEnd((HttpChunkTrailer) chunk);
      } else {
        conn.handleResponseEnd();
      }
    }
  } else if (msg instanceof WebSocketFrame) {
    conn.handleWsFrame((WebSocketFrame) msg);
  } else {
    throw new IllegalStateException("Invalid object " + e.getMessage());
  }
}
/**
 * Replaying-decoder state machine for an HTTP message: initial line, headers,
 * then one of several body strategies (fixed-length, variable-length, or
 * chunked transfer encoding), each with an "as chunks" variant that emits
 * {@code HttpChunk}s when the body exceeds {@code maxChunkSize} or a 100-continue
 * is expected.
 *
 * <p>NOTE(review): several {@code case}s deliberately fall through to the next
 * state; {@code checkpoint(...)} records progress so a replay error resumes at
 * the right state. Do not add {@code break}s without tracing the state flow.
 */
@Override
protected Object decode(
    ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer, State state)
    throws Exception {
  switch (state) {
    case SKIP_CONTROL_CHARS:
      {
        try {
          skipControlCharacters(buffer);
          checkpoint(State.READ_INITIAL);
        } finally {
          // Commit the reader index even on replay, so skipped chars aren't re-read.
          checkpoint();
        }
      }
      // fall through
    case READ_INITIAL:
      {
        final String[] initialLine = splitInitialLine(readLine(buffer, maxInitialLineLength));
        if (initialLine.length < 3) {
          // Invalid initial line - ignore.
          checkpoint(State.SKIP_CONTROL_CHARS);
          return null;
        }
        message = createMessage(initialLine);
        checkpoint(State.READ_HEADER);
      }
      // fall through
    case READ_HEADER:
      {
        State nextState = readHeaders(buffer);
        checkpoint(nextState);
        if (nextState == State.SKIP_CONTROL_CHARS) {
          // No content is expected; the message is complete.
          return message;
        } else {
          long contentLength = getContentLength(message, -1);
          if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
            // Empty body (or a request with no length): finish immediately.
            content = ChannelBuffers.EMPTY_BUFFER;
            return reset();
          }
          switch (nextState) {
            case READ_FIXED_LENGTH_CONTENT:
              if (contentLength > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
                // Generate HttpMessage first. HttpChunks will follow.
                checkpoint(State.READ_FIXED_LENGTH_CONTENT_AS_CHUNKS);
                message.setChunked(true);
                // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT_AS_CHUNKS
                // state reads data chunk by chunk.
                chunkSize = getContentLength(message, -1);
                return message;
              }
              break;
            case READ_VARIABLE_LENGTH_CONTENT:
              if (buffer.readableBytes() > maxChunkSize
                  || HttpHeaders.is100ContinueExpected(message)) {
                // Generate HttpMessage first. HttpChunks will follow.
                checkpoint(State.READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS);
                message.setChunked(true);
                return message;
              }
              break;
            default:
              throw new IllegalStateException("Unexpected state: " + nextState);
          }
        }
        // We return null here, this forces decode to be called again where we will decode the
        // content
        return null;
      }
    case READ_VARIABLE_LENGTH_CONTENT:
      {
        if (content == null) {
          content = ChannelBuffers.dynamicBuffer(channel.getConfig().getBufferFactory());
        }
        // this will cause a replay error until the channel is closed where this will read what's
        // left in the buffer
        content.writeBytes(buffer.readBytes(buffer.readableBytes()));
        return reset();
      }
    case READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS:
      {
        // Keep reading data as a chunk until the end of connection is reached.
        int chunkSize = Math.min(maxChunkSize, buffer.readableBytes());
        HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));
        if (!buffer.readable()) {
          // Reached to the end of the connection.
          reset();
          if (!chunk.isLast()) {
            // Append the last chunk.
            return new Object[] {chunk, HttpChunk.LAST_CHUNK};
          }
        }
        return chunk;
      }
    case READ_FIXED_LENGTH_CONTENT:
      {
        // we have a content-length so we just read the correct number of bytes
        readFixedLengthContent(buffer);
        return reset();
      }
    case READ_FIXED_LENGTH_CONTENT_AS_CHUNKS:
      {
        // Emit the remaining body as chunks of at most maxChunkSize bytes.
        long chunkSize = this.chunkSize;
        HttpChunk chunk;
        if (chunkSize > maxChunkSize) {
          chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
          chunkSize -= maxChunkSize;
        } else {
          assert chunkSize <= Integer.MAX_VALUE;
          chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
          chunkSize = 0;
        }
        this.chunkSize = chunkSize;
        if (chunkSize == 0) {
          // Read all content.
          reset();
          if (!chunk.isLast()) {
            // Append the last chunk.
            return new Object[] {chunk, HttpChunk.LAST_CHUNK};
          }
        }
        return chunk;
      }
      /*
       * everything else after this point takes care of reading chunked content. basically, read
       * chunk size, read chunk, read and ignore the CRLF and repeat until 0
       */
    case READ_CHUNK_SIZE:
      {
        String line = readLine(buffer, maxInitialLineLength);
        int chunkSize = getChunkSize(line);
        this.chunkSize = chunkSize;
        if (chunkSize == 0) {
          // Terminal zero-size chunk: read the trailer next.
          checkpoint(State.READ_CHUNK_FOOTER);
          return null;
        } else if (chunkSize > maxChunkSize) {
          // A chunk is too large. Split them into multiple chunks again.
          checkpoint(State.READ_CHUNKED_CONTENT_AS_CHUNKS);
        } else {
          checkpoint(State.READ_CHUNKED_CONTENT);
        }
      }
      // fall through
    case READ_CHUNKED_CONTENT:
      {
        assert chunkSize <= Integer.MAX_VALUE;
        HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
        checkpoint(State.READ_CHUNK_DELIMITER);
        return chunk;
      }
    case READ_CHUNKED_CONTENT_AS_CHUNKS:
      {
        // Split an oversized wire chunk into pieces of at most maxChunkSize.
        long chunkSize = this.chunkSize;
        HttpChunk chunk;
        if (chunkSize > maxChunkSize) {
          chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
          chunkSize -= maxChunkSize;
        } else {
          assert chunkSize <= Integer.MAX_VALUE;
          chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
          chunkSize = 0;
        }
        this.chunkSize = chunkSize;
        if (chunkSize == 0) {
          // Read all content.
          checkpoint(State.READ_CHUNK_DELIMITER);
        }
        if (!chunk.isLast()) {
          return chunk;
        }
      }
      // fall through (only reached when the emitted chunk was a last chunk)
    case READ_CHUNK_DELIMITER:
      {
        // Consume bytes until the chunk-terminating CRLF (or bare LF) is seen.
        for (; ; ) {
          byte next = buffer.readByte();
          if (next == HttpCodecUtil.CR) {
            if (buffer.readByte() == HttpCodecUtil.LF) {
              checkpoint(State.READ_CHUNK_SIZE);
              return null;
            }
          } else if (next == HttpCodecUtil.LF) {
            checkpoint(State.READ_CHUNK_SIZE);
            return null;
          }
        }
      }
    case READ_CHUNK_FOOTER:
      {
        HttpChunkTrailer trailer = readTrailingHeaders(buffer);
        if (maxChunkSize == 0) {
          // Chunked encoding disabled.
          return reset();
        } else {
          reset();
          // The last chunk, which is empty
          return trailer;
        }
      }
    default:
      {
        throw new Error("Shouldn't reach here.");
      }
  }
}
/**
 * Receives an HTTP response and streams its body into {@code file}.
 *
 * <p>Reads {@code Content-Length} for a completeness check, opens the target
 * file, then writes either the whole body (non-chunked) or each chunk until the
 * last one. Resources are closed when the body is complete.
 *
 * <p>Fixes: the non-chunked path previously left {@code raf}/{@code fc} open
 * (resource leak); the {@code Content-Length} header is now matched
 * case-insensitively (HTTP header names are case-insensitive); the status
 * comparison uses {@code equals} instead of reference identity.
 *
 * @param ctx the handler context
 * @param e the upstream message event (an {@code HttpResponse} or {@code HttpChunk})
 * @throws Exception if file I/O fails
 */
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
  if (!readingChunks) {
    HttpResponse response = (HttpResponse) e.getMessage();
    StringBuilder sb = new StringBuilder();
    if (LOG.isDebugEnabled()) {
      sb.append("STATUS: ")
          .append(response.getStatus())
          .append(", VERSION: ")
          .append(response.getProtocolVersion())
          .append(", HEADER: ");
    }
    if (!response.getHeaderNames().isEmpty()) {
      for (String name : response.getHeaderNames()) {
        for (String value : response.getHeaders(name)) {
          if (LOG.isDebugEnabled()) {
            sb.append(name + " = " + value);
          }
          // Header names are case-insensitive per the HTTP spec.
          if (this.length == -1 && name.equalsIgnoreCase("Content-Length")) {
            this.length = Long.valueOf(value);
          }
        }
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug(sb.toString());
    }
    if (HttpResponseStatus.NO_CONTENT.equals(response.getStatus())) {
      LOG.info("There are no data corresponding to the request");
      return;
    }
    this.raf = new RandomAccessFile(file, "rw");
    this.fc = raf.getChannel();
    if (response.isChunked()) {
      readingChunks = true;
    } else {
      ChannelBuffer content = response.getContent();
      if (content.readable()) {
        fc.write(content.toByteBuffer());
      }
      // The whole body arrived in one message: close the file now.
      finishFetch();
    }
  } else {
    HttpChunk chunk = (HttpChunk) e.getMessage();
    if (chunk.isLast()) {
      readingChunks = false;
      finishFetch();
    } else {
      fc.write(chunk.getContent().toByteBuffer());
    }
  }
}

/** Closes the file resources and logs whether all advertised bytes arrived. */
private void finishFetch() throws Exception {
  long fileLength = fc.position();
  fc.close();
  raf.close();
  if (fileLength == length) {
    LOG.info("Data fetch is done (total received bytes: " + fileLength + ")");
  } else {
    LOG.info(
        "Data fetch is done, but cannot get all data "
            + "(received/total: "
            + fileLength
            + "/"
            + length
            + ")");
  }
}