Example #1
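This is the GL-side initialization step of JogAmp's FFMPEG-backed GLMediaPlayer implementation. It resolves the GL entry points the native decoder uses for direct texture upload, (re)initializes the AudioSink for the negotiated audio format (falling back to a null sink when no audio stream is selected or the format is rejected), and maps the decoder's pixel layout to a GL texture format.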
  @Override
  protected final void initGLImpl(final GL gl) throws IOException, GLException {
    if (0 == moviePtr) {
      throw new GLException("FFMPEG native instance null");
    }
    if (null == audioSink) {
      throw new GLException("AudioSink null");
    }
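    // With a video stream selected, hand the native decoder the GL entry points it
    // needs for direct texture upload; the audio queue limit is then sized for A/V
    // playback rather than audio-only use.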
    final int audioQueueLimit;
    if (null != gl && STREAM_ID_NONE != getVID()) {
      final GLContextImpl ctx = (GLContextImpl) gl.getContext();
      AccessController.doPrivileged(
          new PrivilegedAction<Object>() {
            @Override
            public Object run() {
              final ProcAddressTable pt = ctx.getGLProcAddressTable();
              final long procAddrGLTexSubImage2D = pt.getAddressFor("glTexSubImage2D");
              final long procAddrGLGetError = pt.getAddressFor("glGetError");
              final long procAddrGLFlush = pt.getAddressFor("glFlush");
              final long procAddrGLFinish = pt.getAddressFor("glFinish");
              natives.setGLFuncs0(
                  moviePtr,
                  procAddrGLTexSubImage2D,
                  procAddrGLGetError,
                  procAddrGLFlush,
                  procAddrGLFinish);
              return null;
            }
          });
      audioQueueLimit = AudioSink.DefaultQueueLimitWithVideo;
    } else {
      audioQueueLimit = AudioSink.DefaultQueueLimitAudioOnly;
    }
    if (DEBUG) {
      System.err.println("initGL: p3 avChosen " + avChosenAudioFormat);
    }

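    // No audio stream selected: replace the sink with a null sink. Otherwise
    // initialize the chosen sink with the negotiated format, falling back to a
    // null sink if the sink rejects that format.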
    if (STREAM_ID_NONE == getAID()) {
      audioSink.destroy();
      audioSink = AudioSinkFactory.createNull();
      audioSink.init(
          AudioSink.DefaultFormat,
          0,
          AudioSink.DefaultInitialQueueSize,
          AudioSink.DefaultQueueGrowAmount,
          audioQueueLimit);
    } else {
      final float frameDuration;
      if (audioSamplesPerFrameAndChannel > 0) {
        frameDuration = avChosenAudioFormat.getSamplesDuration(audioSamplesPerFrameAndChannel);
      } else {
        frameDuration = AudioSink.DefaultFrameDuration;
      }
      final boolean audioSinkOK =
          audioSink.init(
              avChosenAudioFormat,
              frameDuration,
              AudioSink.DefaultInitialQueueSize,
              AudioSink.DefaultQueueGrowAmount,
              audioQueueLimit);
      if (!audioSinkOK) {
        System.err.println(
            "AudioSink "
                + audioSink.getClass().getName()
                + " does not support "
                + avChosenAudioFormat
                + ", using Null");
        audioSink.destroy();
        audioSink = AudioSinkFactory.createNull();
        audioSink.init(
            avChosenAudioFormat,
            0,
            AudioSink.DefaultInitialQueueSize,
            AudioSink.DefaultQueueGrowAmount,
            audioQueueLimit);
      }
    }
    if (DEBUG) {
      System.err.println("initGL: p4 chosen " + avChosenAudioFormat);
      System.err.println("initGL: p4 chosen " + audioSink);
    }

    if (null != gl && STREAM_ID_NONE != getVID()) {
      int tf; // texture upload (pixel) format
      int tif = GL.GL_RGBA; // texture internal format
      final int tt = GL.GL_UNSIGNED_BYTE; // texture data type
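      // Map the decoder's bytes-per-pixel-per-plane (and pixel format) to a
      // GL upload-format / internal-format pair.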
      switch (vBytesPerPixelPerPlane) {
        case 1:
          if (gl.isGL3ES3()) {
            // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core
            tf = GL2ES2.GL_RED;
            tif = GL2ES2.GL_RED;
            singleTexComp = "r";
          } else {
            // ALPHA is supported on ES2 and GL2, i.e. <= GL3 [core] or compatibility
            tf = GL.GL_ALPHA;
            tif = GL.GL_ALPHA;
            singleTexComp = "a";
          }
          break;

        case 2:
          if (vPixelFmt == VideoPixelFormat.YUYV422) {
            // YUYV422: packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr,
            // stuffed into an RGBA half-width texture
            tf = GL.GL_RGBA;
            tif = GL.GL_RGBA;
          } else {
            tf = GL2ES2.GL_RG;
            tif = GL2ES2.GL_RG;
          }
          break;
        case 3:
          tf = GL.GL_RGB;
          tif = GL.GL_RGB;
          break;
        case 4:
          if (vPixelFmt == VideoPixelFormat.BGRA) {
            tf = GL.GL_BGRA;
            tif = GL.GL_RGBA; // upload as BGRA, store as RGBA
          } else {
            tf = GL.GL_RGBA;
            tif = GL.GL_RGBA;
          }
          break;
        default:
          throw new RuntimeException(
              "Unsupported bytes-per-pixel / plane " + vBytesPerPixelPerPlane);
      }
      setTextureFormat(tif, tf);
      setTextureType(tt);
      if (DEBUG) {
        System.err.println(
            "initGL: p5: video "
                + vPixelFmt
                + ", planes "
                + vPlanes
                + ", bpp "
                + vBitsPerPixel
                + "/"
                + vBytesPerPixelPerPlane
                + ", tex "
                + texWidth
                + "x"
                + texHeight
                + ", usesTexLookupShader "
                + usesTexLookupShader);
      }
    }
  }
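For context, the upload-format / internal-format / type triple chosen above is what the player uses when allocating the backing texture. Below is a minimal sketch of that allocation, assuming JOGL's standard glTexImage2D binding; the helper class and parameter names are hypothetical, not from the original source.

import com.jogamp.opengl.GL;

final class TexAllocSketch {
  // Allocate the movie texture with the internal format (tif), upload format (tf)
  // and data type (tt) chosen in initGLImpl(..); the native decoder then updates
  // it per frame through the resolved glTexSubImage2D entry point.
  static void allocate(final GL gl, final int texName,
                       final int tif, final int tf, final int tt,
                       final int texWidth, final int texHeight) {
    gl.glBindTexture(GL.GL_TEXTURE_2D, texName);
    gl.glTexImage2D(GL.GL_TEXTURE_2D, /* level */ 0, tif,
                    texWidth, texHeight, /* border */ 0, tf, tt, /* pixels */ null);
  }
}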
Example #2
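The companion stream-level initialization: it selects an AudioSink, resolves the stream location (mapping camera input to a platform-specific device path and reading the requested size and rate from the camera properties), and passes everything to the native setStream0(..) call, which reports the chosen attributes back through callbacks.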
  @Override
  protected final void initStreamImpl(final int vid, final int aid) throws IOException {
    if (0 == moviePtr) {
      throw new GLException("FFMPEG native instance null");
    }
    if (DEBUG) {
      System.err.println("initStream: p1 " + this);
    }

    final String streamLocS = IOUtil.decodeURIIfFilePath(getURI());
    destroyAudioSink();
    if (GLMediaPlayer.STREAM_ID_NONE == aid) {
      audioSink = AudioSinkFactory.createNull();
    } else {
      audioSink = AudioSinkFactory.createDefault();
    }
    final AudioFormat preferredAudioFormat = audioSink.getPreferredFormat();
    if (DEBUG) {
      System.err.println("initStream: p2 preferred " + preferredAudioFormat + ", " + this);
    }

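    // Resolve the effective stream location; camera input is mapped to a
    // platform-specific device path below.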
    final boolean isCameraInput = null != cameraPath;
    final String resStreamLocS;
    // Requested camera width / height / frame-rate; -1 leaves the choice to the device.
    int rw = -1, rh = -1, rr = -1;
    String sizes = null;
    if (isCameraInput) {
      switch (PlatformPropsImpl.OS_TYPE) {
        case ANDROID:
          // fall through: assume a Linux-style video device path (unverified)
        case FREEBSD:
        case HPUX:
        case LINUX:
        case SUNOS:
          resStreamLocS = dev_video_linux + cameraPath;
          break;
        case WINDOWS:
          resStreamLocS = cameraPath;
          break;
        case MACOS:
        case OPENKODE:
        default:
          resStreamLocS = streamLocS; // FIXME: ??
          break;
      }
      if (null != cameraProps) {
        sizes = cameraProps.get(CameraPropSizeS);
        int v = getPropIntVal(cameraProps, CameraPropWidth);
        if (v > 0) {
          rw = v;
        }
        v = getPropIntVal(cameraProps, CameraPropHeight);
        if (v > 0) {
          rh = v;
        }
        v = getPropIntVal(cameraProps, CameraPropRate);
        if (v > 0) {
          rr = v;
        }
      }
    } else {
      resStreamLocS = streamLocS;
    }
    final int aMaxChannelCount = audioSink.getMaxSupportedChannels();
    final int aPrefSampleRate = preferredAudioFormat.sampleRate;
    // natives.setStream0(..) issues the updateAttributes*(..) callbacks and defines
    // avChosenAudioFormat as well as the effective vid/aid.
    if (DEBUG) {
      System.err.println(
          "initStream: p3 cameraPath " + cameraPath + ", isCameraInput " + isCameraInput);
      System.err.println(
          "initStream: p3 stream " + getURI() + " -> " + streamLocS + " -> " + resStreamLocS);
      System.err.println(
          "initStream: p3 vid "
              + vid
              + ", sizes "
              + sizes
              + ", reqVideo "
              + rw
              + "x"
              + rh
              + "@"
              + rr
              + ", aid "
              + aid
              + ", aMaxChannelCount "
              + aMaxChannelCount
              + ", aPrefSampleRate "
              + aPrefSampleRate);
    }
    natives.setStream0(
        moviePtr,
        resStreamLocS,
        isCameraInput,
        vid,
        sizes,
        rw,
        rh,
        rr,
        aid,
        aMaxChannelCount,
        aPrefSampleRate);
  }
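A usage sketch showing how these two methods get exercised: create the default player and start a stream. This assumes the JOGL 2.3-era 4-argument playStream(..) overload and a hypothetical file location; initStreamImpl(..) from Example #2 runs asynchronously as part of playStream(..), and initGLImpl(..) from Example #1 follows once a GL context is current.

import com.jogamp.common.net.Uri;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.av.GLMediaPlayerFactory;

public final class PlayerSketch {
  public static void main(final String[] args) throws Exception {
    // createDefault() picks the FFMPEG-backed implementation when the native
    // libav*/ffmpeg bindings are available, otherwise a platform fallback.
    final GLMediaPlayer player = GLMediaPlayerFactory.createDefault();
    // playStream(..) kicks off initStreamImpl(vid, aid) on a background thread;
    // initGLImpl(..) runs later, once the player is initialized against a
    // current GL context.
    player.playStream(Uri.cast("file:///path/to/movie.mp4"), // hypothetical location
        GLMediaPlayer.STREAM_ID_AUTO, // vid: auto-select the video stream
        GLMediaPlayer.STREAM_ID_AUTO, // aid: auto-select the audio stream
        GLMediaPlayer.TEXTURE_COUNT_DEFAULT);
  }
}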