@Override
  public void record(Buffer... samples) throws Exception {
    if (audio_st == null) {
      throw new Exception(
          "No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    int ret;

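    // Infer the input sample format, channel count, and bytes per sample from
    // the Buffer subtype; more than one Buffer means planar data (one per channel).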
    int inputSize = samples[0].limit() - samples[0].position();
    int inputFormat = AV_SAMPLE_FMT_NONE;
    int inputChannels = samples.length > 1 ? 1 : audioChannels;
    int inputDepth = 0;
    int outputFormat = audio_c.sample_fmt();
    int outputChannels = samples_out.length > 1 ? 1 : audioChannels;
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    if (samples[0] instanceof ByteBuffer) {
      inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
      inputDepth = 1;
      for (int i = 0; i < samples.length; i++) {
        ByteBuffer b = (ByteBuffer) samples[i];
        if (samples_in[i] instanceof BytePointer
            && samples_in[i].capacity() >= inputSize
            && b.hasArray()) {
          ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
        } else {
          samples_in[i] = new BytePointer(b);
        }
      }
    } else if (samples[0] instanceof ShortBuffer) {
      inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
      inputDepth = 2;
      for (int i = 0; i < samples.length; i++) {
        ShortBuffer b = (ShortBuffer) samples[i];
        if (samples_in[i] instanceof ShortPointer
            && samples_in[i].capacity() >= inputSize
            && b.hasArray()) {
          ((ShortPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
        } else {
          samples_in[i] = new ShortPointer(b);
        }
      }
    } else if (samples[0] instanceof IntBuffer) {
      inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
      inputDepth = 4;
      for (int i = 0; i < samples.length; i++) {
        IntBuffer b = (IntBuffer) samples[i];
        if (samples_in[i] instanceof IntPointer
            && samples_in[i].capacity() >= inputSize
            && b.hasArray()) {
          ((IntPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
        } else {
          samples_in[i] = new IntPointer(b);
        }
      }
    } else if (samples[0] instanceof FloatBuffer) {
      inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
      inputDepth = 4;
      for (int i = 0; i < samples.length; i++) {
        FloatBuffer b = (FloatBuffer) samples[i];
        if (samples_in[i] instanceof FloatPointer
            && samples_in[i].capacity() >= inputSize
            && b.hasArray()) {
          ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
        } else {
          samples_in[i] = new FloatPointer(b);
        }
      }
    } else if (samples[0] instanceof DoubleBuffer) {
      inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
      inputDepth = 8;
      for (int i = 0; i < samples.length; i++) {
        DoubleBuffer b = (DoubleBuffer) samples[i];
        if (samples_in[i] instanceof DoublePointer
            && samples_in[i].capacity() >= inputSize
            && b.hasArray()) {
          ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
        } else {
          samples_in[i] = new DoublePointer(b);
        }
      }
    } else {
      throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }

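    // Lazily create a swresample context that converts from the detected input
    // format to the codec's required sample format (same rate and channel layout).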
    if (samples_convert_ctx == null) {
      samples_convert_ctx =
          swr_alloc_set_opts(
              null,
              audio_c.channel_layout(),
              outputFormat,
              audio_c.sample_rate(),
              audio_c.channel_layout(),
              inputFormat,
              audio_c.sample_rate(),
              0,
              null);
      if (samples_convert_ctx == null) {
        throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
      } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
        throw new Exception(
            "swr_init() error " + ret + ": Cannot initialize the conversion context.");
      }
    }

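    // Rescale each input pointer's position and limit from sample units to byte
    // units so the drain loop below can work with any element size.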
    for (int i = 0; i < samples.length; i++) {
      samples_in[i]
          .position(samples_in[i].position() * inputDepth)
          .limit((samples_in[i].position() + inputSize) * inputDepth);
    }
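    // Drain the input: convert up to 'count' samples per pass, advancing the
    // input and output pointers by the number of samples actually converted.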
    while (samples_in[0].position() < samples_in[0].limit()) {
      int inputCount =
          (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth);
      int outputCount =
          (samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth);
      int count = Math.min(inputCount, outputCount);
      for (int i = 0; i < samples.length; i++) {
        samples_in_ptr.put(i, samples_in[i]);
      }
      for (int i = 0; i < samples_out.length; i++) {
        samples_out_ptr.put(i, samples_out[i]);
      }
      if ((ret = swr_convert(samples_convert_ctx, samples_out_ptr, count, samples_in_ptr, count))
          < 0) {
        throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
      }
      for (int i = 0; i < samples.length; i++) {
        samples_in[i].position(samples_in[i].position() + ret * inputChannels * inputDepth);
      }
      for (int i = 0; i < samples_out.length; i++) {
        samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
      }

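      // When the output buffers hold one full audio frame, wrap them in an
      // AVFrame and hand it to the encoding overload of record().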
      if (samples_out[0].position() >= samples_out[0].limit()) {
        frame.nb_samples(audio_input_frame_size);
        avcodec_fill_audio_frame(
            frame, audio_c.channels(), outputFormat, samples_out[0], samples_out[0].limit(), 0);
        for (int i = 0; i < samples_out.length; i++) {
          frame.data(i, samples_out[i].position(0));
          frame.linesize(i, samples_out[i].limit());
        }
        frame.quality(audio_c.global_quality());
        record(frame);
      }
    }
  }
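For context, a minimal usage sketch for the record(Buffer...) overload above, assuming the FFmpegFrameRecorder constructor and setters from the same JavaCV release; the output filename, frame size, and the 44.1 kHz mono parameters are illustrative only.

// Import path depends on the JavaCV release,
// e.g. com.googlecode.javacv.FFmpegFrameRecorder in older versions.
import java.nio.ShortBuffer;

public class RecordAudioSketch {
  public static void main(String[] args) throws Exception {
    // One audio channel plus a single interleaved buffer selects the
    // non-planar AV_SAMPLE_FMT_S16 branch of record(Buffer...) above.
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("out.mp4", 320, 240, 1);
    recorder.setSampleRate(44100);
    recorder.start();

    short[] pcm = new short[44100]; // one second of 16-bit silence
    recorder.record(ShortBuffer.wrap(pcm));

    recorder.stop();
    recorder.release();
  }
}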
Example #2
  public IplImage grab() throws Exception {
    if (pFormatCtx == null || pFormatCtx.isNull()) {
      throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
    }
    boolean done = false;
    long pts = 0;
    while (!done) {
      if (av_read_frame(pFormatCtx, packet) < 0) {
        // throw new Exception("Could not read frame");
        return null; // end of file?
      }

      // Is this a packet from the video stream?
      if (packet.stream_index() == videoStream) {
        // Decode video frame
        int len = avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);

        LongPointer opaque = new LongPointer(pFrame.opaque());
        if (packet.dts() != AV_NOPTS_VALUE) {
          pts = packet.dts();
        } else if (!opaque.isNull() && opaque.get() != AV_NOPTS_VALUE) {
          pts = opaque.get();
        } else {
          pts = 0;
        }
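        // Convert pts from stream time_base units to milliseconds.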
        AVRational time_base = pStream.time_base();
        pts = 1000 * pts * time_base.num() / time_base.den();

        // Did we get a video frame?
        if (len > 0 && frameFinished[0] != 0) {
          switch (colorMode) {
            case BGR:
            case GRAY:
              // Deinterlace Picture
              if (deinterlace) {
                avpicture_deinterlace(
                    pFrame, pFrame, pCodecCtx.pix_fmt(), pCodecCtx.width(), pCodecCtx.height());
              }

              // Convert the image from its native format to BGR or grayscale
              sws_scale(
                  img_convert_ctx,
                  new PointerPointer(pFrame),
                  pFrame.linesize(),
                  0,
                  pCodecCtx.height(),
                  new PointerPointer(pFrameRGB),
                  pFrameRGB.linesize());
              return_image.imageData(buffer);
              return_image.widthStep(pFrameRGB.linesize(0));
              break;
            case RAW:
              assert (pCodecCtx.width() == return_image.width()
                  && pCodecCtx.height() == return_image.height());
              return_image.imageData(pFrame.data(0));
              return_image.widthStep(pFrame.linesize(0));
              break;
            default:
              assert (false);
          }
          return_image.imageSize(return_image.height() * return_image.widthStep());

          done = true;
        }
      }

      // Free the packet that was allocated by av_read_frame
      av_free_packet(packet);
    }

    return_image.timestamp = pts;
    return return_image;
  }
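Likewise, a minimal usage sketch for grab(), assuming the matching FFmpegFrameGrabber and IplImage types from the same JavaCV release; the input filename is illustrative only.

// Import paths depend on the JavaCV release,
// e.g. com.googlecode.javacv.FFmpegFrameGrabber and
// com.googlecode.javacv.cpp.opencv_core.IplImage in older versions.
public class GrabSketch {
  public static void main(String[] args) throws Exception {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4");
    grabber.start();

    IplImage image;
    while ((image = grabber.grab()) != null) {
      // grab() stores the frame PTS in milliseconds in the timestamp field.
      System.out.println("frame at " + image.timestamp + " ms");
    }

    grabber.stop();
    grabber.release();
  }
}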