Example #1
  public static VideoCodecMeta createCodecMeta(
      VirtualTrack src, H264Encoder encoder, int thumbWidth, int thumbHeight) {
    // Reuse the pixel aspect ratio from the source track's metadata
    VideoCodecMeta codecMeta = (VideoCodecMeta) src.getCodecMeta();

    // Build the avcC box from the encoder's SPS/PPS, with 4-byte NAL length prefixes
    AvcCBox avcC =
        H264Utils.createAvcC(
            encoder.initSPS(new Size(thumbWidth, thumbHeight)), encoder.initPPS(), 4);
    return VideoCodecMeta.createVideoCodecMeta(
        "avc1",
        H264Utils.getAvcCData(avcC),
        new Size(thumbWidth, thumbHeight),
        codecMeta.getPasp());
  }
Example #2
  private static void mux(FramesMP4MuxerTrack track, File f) throws IOException {
    MappedH264ES es = new MappedH264ES(NIOUtils.map(f));

    ArrayList<ByteBuffer> spsList = new ArrayList<ByteBuffer>();
    ArrayList<ByteBuffer> ppsList = new ArrayList<ByteBuffer>();
    Packet frame = null;
    while ((frame = es.nextFrame()) != null) {
      ByteBuffer data = frame.getData();
      // Strip in-stream SPS/PPS NAL units (collecting them into the lists) and
      // convert Annex B start codes to MP4-style length prefixes
      H264Utils.wipePS(data, spsList, ppsList);
      H264Utils.encodeMOVPacket(data);
      MP4Packet pkt = new MP4Packet(new Packet(frame, data), frame.getPts(), 0);
      System.out.println(pkt.getFrameNo()); // debug: log the frame number
      track.addFrame(pkt);
    }
    // Record the parameter sets in the track's sample entry (avcC)
    addSampleEntry(track, es.getSps(), es.getPps());
  }
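For context, a hedged sketch of how a caller might drive mux above with a jcodec 0.1.9-era muxer; the file names, timescale, and writeHeader/close ordering are assumptions, not part of the original example:

  // A minimal sketch, assuming the jcodec 0.1.9-era MP4Muxer API
  SeekableByteChannel out = NIOUtils.writableFileChannel(new File("out.mp4"));
  MP4Muxer muxer = new MP4Muxer(out);
  FramesMP4MuxerTrack track = muxer.addTrack(TrackType.VIDEO, 25); // hypothetical timescale
  mux(track, new File("in.h264"));
  muxer.writeHeader(); // finalizes the moov box
  out.close();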
Example #3
    public ByteBuffer transcodeFrame(ByteBuffer src, ByteBuffer dst) throws IOException {
      if (src == null) return null;
      Picture8Bit decoded = decoder.decodeFrame8Bit(src, pic0.getData());
      // Lazily allocate the intermediate picture and color transform once the
      // decoded frame's dimensions and color space are known
      if (pic1 == null) {
        pic1 =
            Picture8Bit.create(
                decoded.getWidth(), decoded.getHeight(), encoder.getSupportedColorSpaces()[0]);
        transform =
            ColorUtil.getTransform8Bit(decoded.getColor(), encoder.getSupportedColorSpaces()[0]);
      }
      transform.transform(decoded, pic1);
      pic1.setCrop(new Rect(0, 0, track.thumbWidth, track.thumbHeight));
      // If the encoded frame overflows dst, back the bitrate off in steps of 10
      // and retry until it fits or the rate floor is reached
      int rate = TARGET_RATE;
      do {
        try {
          encoder.encodeFrame8Bit(pic1, dst);
          break;
        } catch (BufferOverflowException ex) {
          Logger.warn("Frame too big for buffer (" + dst.capacity() + "), lowering rate and retrying");
          rate -= 10;
          rc.setRate(rate);
        }
      } while (rate > 10);
      rc.setRate(TARGET_RATE); // restore the target rate for subsequent frames

      // Convert Annex B start codes to MP4-style length prefixes
      H264Utils.encodeMOVPacket(dst);

      return dst;
    }
Example #4
  public static int readSE(BitReader bits, String message) {
    // Read an unsigned Exp-Golomb code number, then map it to a signed value
    int val = readUE(bits);

    val = H264Utils.golomb2Signed(val);

    trace(message, val);

    return val;
  }
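For reference, the signed mapping used above turns the unsigned code number k into the sequence 0, 1, -1, 2, -2, ...; a minimal standalone sketch of that mapping, mirroring the standard H.264 se(v) definition (an illustration, not necessarily jcodec's exact source):

  // Signed Exp-Golomb mapping: 0 -> 0, 1 -> 1, 2 -> -1, 3 -> 2, 4 -> -2, ...
  static int golomb2Signed(int k) {
    int sign = ((k & 0x1) << 1) - 1; // odd code numbers map to positive values
    return ((k >> 1) + (k & 0x1)) * sign;
  }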
Example #5
  public static void validateVideoOutputFormat(MediaFormat format) {
    String mime = format.getString(MediaFormat.KEY_MIME);
    // Refer: http://developer.android.com/guide/appendix/media-formats.html#core
    // Refer: http://en.wikipedia.org/wiki/MPEG-4_Part_14#Data_streams
    if (!MediaFormatExtraConstants.MIMETYPE_VIDEO_AVC.equals(mime)) {
      throw new InvalidOutputFormatException(
          "Video codecs other than AVC are not supported, actual mime type: " + mime);
    }
    // Parse the SPS out of the codec-specific data and check the declared profile
    ByteBuffer spsBuffer = AvcCsdUtils.getSpsBuffer(format);
    SeqParameterSet sps = H264Utils.readSPS(spsBuffer);
    if (sps.profile_idc != PROFILE_IDC_BASELINE) {
      throw new InvalidOutputFormatException(
          "Non-baseline AVC video profile is not supported by Android OS, actual profile_idc: "
              + sps.profile_idc);
    }
  }
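A hedged sketch of where such a check might sit: validating a MediaCodec encoder's negotiated output format before handing samples to a muxer (the encoder variable and call site are assumptions):

  // Hypothetical call site; 'encoder' is an assumed android.media.MediaCodec instance
  MediaFormat actualFormat = encoder.getOutputFormat();
  validateVideoOutputFormat(actualFormat); // throws InvalidOutputFormatException on mismatch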
Example #6
  @Override
  public int probe(ByteBuffer data) {
    boolean validSps = false, validPps = false, validSh = false;
    for (ByteBuffer nalUnit : H264Utils.splitFrame(data.duplicate())) {
      NALUnit marker = NALUnit.read(nalUnit);
      if (marker.type == NALUnitType.IDR_SLICE || marker.type == NALUnitType.NON_IDR_SLICE) {
        // A parseable slice header is the strongest evidence of an H.264 stream
        BitReader reader = new BitReader(nalUnit);
        validSh = validSh(new SliceHeaderReader().readPart1(reader));
        break;
      } else if (marker.type == NALUnitType.SPS) {
        validSps = validSps(SeqParameterSet.read(nalUnit));
      } else if (marker.type == NALUnitType.PPS) {
        validPps = validPps(PictureParameterSet.read(nalUnit));
      }
    }

    // Weighted confidence score: slice header 60, SPS 20, PPS 20 (max 100)
    return (validSh ? 60 : 0) + (validSps ? 20 : 0) + (validPps ? 20 : 0);
  }
Example #7
  private void writeH264WithStartCode(AVPacket frame) throws IOException {
    ByteBuffer data = frame.getByteBuffer();
    // Expect an Annex B frame: the buffer must begin with the 0x00000001 start code
    if (data.remaining() > 4 && 1 != data.getInt(data.position())) {
      logger.warn("H264 frame does not start with the 0x00000001 start code");
      return;
    }
    List<ByteBuffer> segs = H264Utils.splitFrame(data);

    // FLV VIDEODATA body: 5 header bytes plus a 4-byte length prefix per NAL unit
    int dataSize = 5 + 4 * segs.size();
    for (ByteBuffer seg : segs) {
      dataSize += seg.remaining();
    }

    ChannelBuffer avc = ChannelBuffers.buffer(11 + dataSize + 4 + 4 + 5);
    avc.writeByte(0x09); // tag type: video
    avc.writeMedium(dataSize); // tag data size

    // timestamp: lower 24 bits, then the extended upper 8 bits
    long timestamp = frame.getTimestamp(AVTimeUnit.MILLISECONDS);
    avc.writeMedium((int) (0xFFFFFF & timestamp));
    avc.writeByte((int) (0xFF & (timestamp >> 24)));

    avc.writeMedium(0x0); // stream id, always 0

    avc.writeByte(frame.isKeyFrame() ? 0x17 : 0x27); // frame type (key/inter) + codec id (AVC)
    avc.writeByte(0x01); // AVCPacketType: NALU
    avc.writeMedium(
        (int) frame.getCompositionTime(AVTimeUnit.MILLISECONDS)); // composition time offset

    // NAL units, each prefixed with its 4-byte big-endian length
    for (ByteBuffer seg : segs) {
      avc.writeInt(seg.remaining());
      avc.writeBytes(seg);
    }

    avc.writeInt(avc.readableBytes()); // previous tag size

    avc.readBytes(out, avc.readableBytes());
  }
Example #8
  public ByteBuffer transcode(ByteBuffer data, ByteBuffer _out) {
    // Split the Annex B frame into NAL units, then delegate to the list-based overload
    return transcode(H264Utils.splitFrame(data), _out);
  }
Example #9
  @Override
  public Frame decodeFrame8Bit(ByteBuffer data, byte[][] buffer) {
    return new FrameDecoder().decodeFrame(H264Utils.splitFrame(data), buffer);
  }
Example #10
  @Override
  public Picture decodeFrame(ByteBuffer data, int[][] buffer) {
    // Decode into a same-sized temporary 8-bit buffer, then convert to the caller's Picture
    Frame frame =
        new FrameDecoder().decodeFrame(H264Utils.splitFrame(data), getSameSizeBuffer(buffer));
    return frame == null ? null : frame.toPicture(8, buffer);
  }