Example #1
0
 /**
  * Native callback: maps the given libav/ffmpeg audio attributes to a local {@link AudioFormat}
  * and queries {@link AudioSink#isSupported(AudioFormat)} for it.
  *
  * @param audioSampleFmt ffmpeg/libav audio-sample-format, see {@link AudioSampleFormat}.
  * @param audioSampleRate sample rate in Hz (1/s)
  * @param audioChannels number of channels
  * @return whether the {@link AudioSink} supports the resulting {@link AudioFormat}
  */
 final boolean isAudioFormatSupported(
     final int audioSampleFmt, final int audioSampleRate, final int audioChannels) {
   // Translate the native sample-format ordinal into our enum, then into a local AudioFormat.
   final AudioSampleFormat sampleFormat = AudioSampleFormat.valueOf(audioSampleFmt);
   final AudioFormat localFormat =
       avAudioFormat2Local(sampleFormat, audioSampleRate, audioChannels);
   final boolean supported = audioSink.isSupported(localFormat);
   if (DEBUG) {
     System.err.println(
         "AudioSink.isSupported: "
             + supported
             + ": av[fmt "
             + sampleFormat
             + ", rate "
             + audioSampleRate
             + ", chan "
             + audioChannels
             + "] -> "
             + localFormat);
   }
   return supported;
 }
Example #2
0
  /**
   * Native callback: applies the stream attributes reported by the native libav/ffmpeg layer,
   * deriving the backing texture geometry for the video stream and the chosen {@code AudioFormat}
   * for the audio stream. Fields are reset to defaults first, so a stream id of
   * {@code STREAM_ID_NONE} leaves the corresponding side disabled.
   *
   * @param vid video stream id, or {@code STREAM_ID_NONE} if no video stream is present
   * @param pixFmt ffmpeg/libav pixel-format, see {@link VideoPixelFormat}
   * @param planes number of video planes
   * @param bitsPerPixel video bits per pixel
   * @param bytesPerPixelPerPlane video bytes per pixel, per plane
   * @param tWd0 texture width of video plane 0
   * @param tWd1 texture width of video plane 1
   * @param tWd2 texture width of video plane 2
   * @param vW video width in pixels
   * @param vH video height in pixels
   * @param aid audio stream id, or {@code STREAM_ID_NONE} if no audio stream is present
   * @param audioSampleFmt ffmpeg/libav audio-sample-format, see {@link AudioSampleFormat}
   * @param audioSampleRate audio sample rate in Hz (1/s)
   * @param audioChannels number of audio channels
   * @param audioSamplesPerFrameAndChannel in audio samples per frame and channel
   * @throws RuntimeException if a video stream is present but its pixel-format is unsupported
   */
  void setupFFAttributes(
      final int vid,
      final int pixFmt,
      final int planes,
      final int bitsPerPixel,
      final int bytesPerPixelPerPlane,
      final int tWd0,
      final int tWd1,
      final int tWd2,
      final int vW,
      final int vH,
      final int aid,
      final int audioSampleFmt,
      final int audioSampleRate,
      final int audioChannels,
      final int audioSamplesPerFrameAndChannel) {
    // Video defaults: cleared unconditionally so a missing video stream disables video output.
    vPixelFmt = null;
    vPlanes = 0;
    vBitsPerPixel = 0;
    vBytesPerPixelPerPlane = 0;
    usesTexLookupShader = false;
    texWidth = 0;
    texHeight = 0;

    final int[] vTexWidth = {0, 0, 0}; // per plane

    if (STREAM_ID_NONE != vid) {
      vPixelFmt = VideoPixelFormat.valueOf(pixFmt);
      vPlanes = planes;
      vBitsPerPixel = bitsPerPixel;
      vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
      vTexWidth[0] = tWd0;
      vTexWidth[1] = tWd1;
      vTexWidth[2] = tWd2;

      // Derive the single backing texture's dimensions from the per-plane widths;
      // planar formats pack U/V planes next to Y, packed/RGB formats use plane 0 only.
      switch (vPixelFmt) {
        case YUVJ420P:
        case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
          usesTexLookupShader = true;
          // YUV420P: Adding U+V on right side of fixed height texture,
          //          since width is already aligned by decoder.
          //          Splitting texture to 4 quadrants:
          //            Y covers left top/low quadrant
          //            U on top-right quadrant.
          //            V on low-right quadrant.
          // Y=w*h, U=w/2*h/2, V=w/2*h/2
          //   w*h + 2 ( w/2 * h/2 )
          //   w*h + w*h/2
          texWidth = vTexWidth[0] + vTexWidth[1];
          texHeight = vH;
          break;
        case YUVJ422P:
        case YUV422P:
          usesTexLookupShader = true;
          // YUV422P: Adding U+V on right side of fixed height texture,
          //          since width is already aligned by decoder.
          //          Splitting texture to 4 columns
          //            Y covers columns 1+2
          //            U covers columns 3
          //            V covers columns 4
          texWidth = vTexWidth[0] + vTexWidth[1] + vTexWidth[2];
          texHeight = vH;
          break;
        case YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr - stuffed into RGBA half width
                      // texture
        case BGR24:
          usesTexLookupShader = true;
          texWidth = vTexWidth[0];
          texHeight = vH;
          break;

        case RGB24:
        case ARGB:
        case RGBA:
        case ABGR:
        case BGRA:
          // Directly GL-renderable formats; no lookup shader required.
          usesTexLookupShader = false;
          texWidth = vTexWidth[0];
          texHeight = vH;
          break;
        default: // FIXME: Add more formats !
          throw new RuntimeException("Unsupported pixelformat: " + vPixelFmt);
      }
    }

    // Audio defaults: cleared unconditionally so a missing audio stream disables audio output.
    final AudioSampleFormat aSampleFmt;
    avChosenAudioFormat = null;
    this.audioSamplesPerFrameAndChannel = 0;

    if (STREAM_ID_NONE != aid) {
      aSampleFmt = AudioSampleFormat.valueOf(audioSampleFmt);
      avChosenAudioFormat = avAudioFormat2Local(aSampleFmt, audioSampleRate, audioChannels);
      this.audioSamplesPerFrameAndChannel = audioSamplesPerFrameAndChannel;
    } else {
      aSampleFmt = null;
    }

    if (DEBUG) {
      System.err.println(
          "audio: id "
              + aid
              + ", fmt "
              + aSampleFmt
              + ", "
              + avChosenAudioFormat
              + ", aFrameSize/fc "
              + audioSamplesPerFrameAndChannel);
      System.err.println(
          "video: id "
              + vid
              + ", fmt "
              + vW
              + "x"
              + vH
              + ", "
              + vPixelFmt
              + ", planes "
              + vPlanes
              + ", bpp "
              + vBitsPerPixel
              + "/"
              + vBytesPerPixelPerPlane
              + ", usesTexLookupShader "
              + usesTexLookupShader);
      for (int i = 0; i < 3; i++) {
        System.err.println("video: p[" + i + "]: " + vTexWidth[i]);
      }
      System.err.println("video: total tex " + texWidth + "x" + texHeight);
      System.err.println(this.toString());
    }
  }