@Override
public void readFrame(Buffer buffer)
{
    // example data:
    // --ssBoundary8345
    // Content-Type: image/jpeg
    // Content-Length: 114587
    try
    {
        String line;

        // eat leading blank lines
        while (true)
        {
            line = readLine(MAX_LINE_LENGTH);
            if (line == null)
            {
                buffer.setEOM(true);
                buffer.setLength(0);
                return;
            }

            if (!line.trim().equals(""))
                break; // first non-blank line: the boundary
        }

        if (boundary == null)
        {
            boundary = line.trim();
            // TODO: we should be able to get this from the content type, but
            // the content type has this stripped out. So we'll just take the
            // first nonblank line to be the boundary.
            // System.out.println("boundary: " + boundary);
        }
        else
        {
            if (!line.trim().equals(boundary))
            {
                // throw new IOException("Expected boundary: " + toPrintable(line));
                // TODO: why do we seem to get these when playing back mmr
                // files recorded using FmjTranscode?
                logger.warning("Expected boundary (frame " + framesRead + "): "
                        + toPrintable(line));

                // handle streams that are truncated in the middle of a frame:
                final int eatResult = eatUntil(boundary); // TODO: no need to store the data
                logger.info("Ignored bytes (eom after=" + (eatResult < 0) + "): "
                        + (eatResult < 0 ? (-1 * eatResult - 1) : eatResult));
                if (eatResult < 0)
                {
                    buffer.setEOM(true);
                    buffer.setLength(0);
                    return;
                }

                // now read boundary
                line = readLine(MAX_LINE_LENGTH);
                if (!line.trim().equals(boundary))
                {
                    throw new RuntimeException(
                            "No boundary found after eatUntil(boundary)"); // should never happen
                }
            }
        }

        final Properties properties = new Properties();

        while (true)
        {
            line = readLine(MAX_LINE_LENGTH);
            if (line == null)
            {
                buffer.setEOM(true);
                buffer.setLength(0);
                return;
            }

            if (line.trim().equals(""))
                break; // end of header

            if (!parseProperty(line, properties))
                throw new IOException("Expected property: " + toPrintable(line));
        }

        final String contentType
                = properties.getProperty("Content-Type".toUpperCase());
        if (contentType == null)
        {
            logger.warning("Header properties: " + properties);
            throw new IOException("Expected Content-Type in header");
        }

        // check supported content types:
        if (!isSupportedFrameContentType(contentType))
        {
            throw new IOException("Unsupported Content-Type: " + contentType);
        }

        if (frameContentType == null)
        {
            frameContentType = contentType;
        }
        else
        {
            if (!contentType.equals(frameContentType))
                throw new IOException("Content type changed during stream from "
                        + frameContentType + " to " + contentType);
        }

        // TODO: check that size doesn't change throughout

        final byte[] data;
        final String contentLenStr
                = properties.getProperty("Content-Length".toUpperCase());
        if (contentLenStr != null)
        {
            // if we know the content length, use it
            final int contentLen;
            try
            {
                contentLen = Integer.parseInt(contentLenStr);
            }
            catch (NumberFormatException e)
            {
                throw new IOException("Invalid content length: " + contentLenStr);
            }

            // now, read the content-length bytes
            data = readFully(contentLen); // TODO: don't realloc each time
        }
        else
        {
            // if we don't know the content length, just read until we find
            // the boundary. Some IP cameras don't specify it, like
            // http://webcam-1.duesseldorf.it-on.net/cgi-bin/nph-update.cgi
            data = readUntil(boundary);
        }

        // ext: optional timestamp header
        final String timestampStr
                = properties.getProperty(TIMESTAMP_KEY.toUpperCase());
        if (timestampStr != null)
        {
            try
            {
                final long timestamp = Long.parseLong(timestampStr);
                buffer.setTimeStamp(timestamp);
            }
            catch (NumberFormatException e)
            {
                logger.log(Level.WARNING, "" + e, e);
            }
        }

        if (data == null)
        {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
        }

        buffer.setData(data);
        buffer.setOffset(0);
        buffer.setLength(data.length);

        ++framesRead;
    }
    catch (IOException e)
    {
        throw new RuntimeException(e);
    }
}
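// readFrame() above calls several helpers (readLine, eatUntil, readUntil,
// readFully, parseProperty) that are not shown in this excerpt. As an
// illustration only, here is a minimal sketch of what parseProperty could look
// like, assuming "Name: value" header lines and upper-cased keys to match the
// getProperty("Content-Type".toUpperCase()) lookups above; the original
// implementation may differ.
private boolean parseProperty(String line, Properties properties)
{
    // A header property looks like "Content-Type: image/jpeg".
    final int colon = line.indexOf(':');
    if (colon < 0)
        return false; // not a property line

    final String key = line.substring(0, colon).trim();
    final String value = line.substring(colon + 1).trim();
    if (key.isEmpty())
        return false;

    // store keys upper-cased, since lookups use "...".toUpperCase()
    properties.setProperty(key.toUpperCase(), value);
    return true;
}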
/**
 * {@inheritDoc}
 */
@Override
protected int doProcess(Buffer inBuffer, Buffer outBuffer)
{
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset))
    {
        logger.warn("Invalid RTP/VP8 packet discarded.");
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame
            = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty
            && lastSentSeq != -1
            && seqNumComparator.compare(inSeq, lastSentSeq) != 1)
    {
        if (logger.isInfoEnabled())
            logger.info("Discarding old packet (while empty) " + inSeq);
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }

    if (!empty)
    {
        // if the incoming packet has a different PictureID or timestamp than
        // those of the current frame, then it belongs to a different frame.
        if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
                || (timestamp != -1 && inRtpTimestamp != -1
                        && inRtpTimestamp != timestamp))
        {
            if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
            {
                // the packet belongs to a previous frame. discard it
                if (logger.isInfoEnabled())
                    logger.info("Discarding old packet " + inSeq);
                outBuffer.setDiscard(true);
                return BUFFER_PROCESSED_OK;
            }
            else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
            {
                // the packet belongs to a subsequent frame (to the one
                // currently being held). Drop the current frame.
                if (logger.isInfoEnabled())
                    logger.info("Discarding saved packets on arrival of"
                            + " a packet for a subsequent frame: " + inSeq);

                // TODO: this would be the place to complain about the
                // not-well-received PictureID by sending a RTCP SLI or NACK.
                reinit();
            }
        }
    }

    // a whole frame in a single packet. avoid the extra copy to this.data
    // and output it immediately.
    if (empty && inMarker && inIsStartOfFrame)
    {
        byte[] outData
                = validateByteArraySize(outBuffer, inPayloadLength, false);
        System.arraycopy(inData, inOffset + inPdSize, outData, 0,
                inPayloadLength);
        outBuffer.setOffset(0);
        outBuffer.setLength(inPayloadLength);
        outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

        if (TRACE)
            logger.trace("Out PictureID=" + inPictureId);

        lastSentSeq = inSeq;

        return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null)
        container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
        container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null)
    {
        if (logger.isInfoEnabled())
            logger.info("(Probable) duplicate packet detected, discarding "
                    + inSeq);
        outBuffer.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0,
            inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1))
        firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1))
        lastSeq = inSeq;

    if (empty)
    {
        // the first received packet for the current frame was just added
        empty = false;
        timestamp = inRtpTimestamp;
        pictureId = inPictureId;
    }

    if (inMarker)
        haveEnd = true;
    if (inIsStartOfFrame)
        haveStart = true;

    // check if we have a full frame
    if (frameComplete())
    {
        byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
        int ptr = 0;
        Container b;
        for (Map.Entry<Long, Container> entry : data.entrySet())
        {
            b = entry.getValue();
            System.arraycopy(b.buf, 0, outData, ptr, b.len);
            ptr += b.len;
        }

        outBuffer.setOffset(0);
        outBuffer.setLength(frameLength);
        outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

        if (TRACE)
            logger.trace("Out PictureID=" + inPictureId);

        lastSentSeq = lastSeq;

        // prepare for the next frame
        reinit();

        return BUFFER_PROCESSED_OK;
    }
    else
    {
        // frame not complete yet
        outBuffer.setDiscard(true);
        return OUTPUT_BUFFER_NOT_FILLED;
    }
}
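// doProcess() above relies on a frameComplete() helper that is not part of
// this excerpt. A minimal sketch under the assumption that a frame is complete
// once the start-of-frame packet and the marker packet have been seen and no
// sequence number between firstSeq and lastSeq is missing from 'data'
// (haveMissing() below is illustrative, not necessarily the original helper):
private boolean frameComplete()
{
    return haveStart && haveEnd && !haveMissing();
}

// Walks the 16-bit RTP sequence number space from firstSeq to lastSeq and
// reports whether any packet in between has not yet been stored in 'data'.
private boolean haveMissing()
{
    long seq = firstSeq;
    while (seq != lastSeq)
    {
        if (!data.containsKey(seq))
            return true;
        seq = (seq + 1) % (1 << 16);
    }
    return false;
}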