コード例 #1
-10
ファイル: ClientPilot.java プロジェクト: karmeleon/414MP3
  /**
   * Builds and starts the GStreamer client pipeline that receives the server's RTP/JPEG
   * video stream (plus, when {@code attribute} is "active", an RTP/L16 stereo audio
   * stream) and renders the video into the given VideoComponent.
   *
   * UDP port layout relative to {@code port}:
   *   video RTP in: port        video RTCP in: port + 1   video RTCP out: port + 5
   *   audio RTP in: port + 2    audio RTCP in: port + 3   audio RTCP out: port + 7
   *
   * @param vc video component whose element is linked as the final video sink
   * @param settings unused in this method — TODO confirm caller contract
   * @param port base UDP port for all RTP/RTCP sockets (see layout above)
   */
  private static void startStreaming(final VideoComponent vc, String settings, int port) {
    Gst.init();
    clientPipe = new Pipeline("pipeline");
    pushLog("> CTRL: " + "PLAY");
    pushLog("> SYS: " + " INIT STREAM");

    System.out.println("Starting with: C=" + clientLoc + ", S=" + serverLoc);

    // VIDEO
    // RTP/JPEG source. The caps hard-code ssrc/clock-base/seqnum-base, so they only
    // match the specific stream the paired server emits — NOTE(review): brittle if
    // the server ever regenerates those values; confirm server keeps them fixed.
    Element udpVideoSrc = ElementFactory.make("udpsrc", "src1");
    udpVideoSrc.setCaps(
        Caps.fromString(
            "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)JPEG, payload=(int)96, ssrc=(uint)2156703816, clock-base=(uint)1678649553, seqnum-base=(uint)31324"));
    udpVideoSrc.set("uri", "udp://" + clientLoc + ":" + port);

    // Incoming RTCP for the video session.
    Element videoRtcpIn = ElementFactory.make("udpsrc", "src3");
    videoRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 1));

    // Outgoing RTCP reports back to the server; sync/async disabled so packets go out
    // immediately, independent of the pipeline clock.
    // NOTE(review): "port"/"sync"/"async" are set as Strings — presumably
    // gstreamer-java coerces them to int/boolean; confirm.
    Element videoRtcpOut = ElementFactory.make("udpsink", "snk1");
    videoRtcpOut.set("host", serverLoc);
    videoRtcpOut.set("port", "" + (port + 5));
    videoRtcpOut.set("sync", "false");
    videoRtcpOut.set("async", "false");

    Element udpAudioSrc = null, audioRtcpIn = null, audioRtcpOut = null, taud = null;

    // The audio receive path exists only for the "active" participant.
    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO
      // RTP/L16 stereo source; caps are hard-coded the same way as the video caps.
      udpAudioSrc = ElementFactory.make("udpsrc", "src2");
      udpAudioSrc.setCaps(
          Caps.fromString(
              "application/x-rtp, media=(string)audio, clock-rate=(int)8000, encoding-name=(string)L16, encoding-params=(string)2, channels=(int)2, payload=(int)96, ssrc=(uint)3489550614, clock-base=(uint)2613725642, seqnum-base=(uint)1704"));
      udpAudioSrc.set("uri", "udp://" + clientLoc + ":" + (port + 2));

      // Tee the raw RTP so an appsink can observe every buffer without disturbing
      // the branch that feeds rtpbin (linked via pad "src1" further below).
      taud = ElementFactory.make("tee", "taud");
      Element qaud = ElementFactory.make("queue", "qaud");
      AppSink appAudioSink = (AppSink) ElementFactory.make("appsink", "appAudioSink");
      appAudioSink.set("emit-signals", true);
      appAudioSink.setSync(false);
      audioQ = new LinkedList<FrameInfo>();
      appAudioSink.connect(
          new AppSink.NEW_BUFFER() {
            public void newBuffer(AppSink sink) {
              Buffer b = sink.getLastBuffer();
              if (b != null) {
                // Record arrival time and payload size — presumably for rate/sync
                // statistics; TODO confirm who consumes audioQ.
                audioQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              }
            }
          });

      audioRtcpIn = ElementFactory.make("udpsrc", "src4");
      audioRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 3));

      audioRtcpOut = ElementFactory.make("udpsink", "snk2");
      audioRtcpOut.set("host", serverLoc);
      audioRtcpOut.set("port", "" + (port + 7));
      audioRtcpOut.set("sync", "false");
      audioRtcpOut.set("async", "false");

      clientPipe.addMany(taud, qaud, appAudioSink);
      clientPipe.addMany(udpAudioSrc, audioRtcpIn, audioRtcpOut);
      Element.linkMany(udpAudioSrc, taud, qaud, appAudioSink);
    }

    // Same tee + observing-appsink arrangement for the video RTP stream.
    Element tvid = ElementFactory.make("tee", "tvid");
    Element qvid = ElementFactory.make("queue", "qvid");
    AppSink appVideoSink = (AppSink) ElementFactory.make("appsink", "appVideoSink");
    appVideoSink.set("emit-signals", true);
    appVideoSink.setSync(false);
    videoQ = new LinkedList<FrameInfo>();
    appVideoSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            Buffer b = sink.getLastBuffer();
            if (b != null) {
              videoQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              // System.out.println(System.currentTimeMillis());
            }
          }
        });
    clientPipe.addMany(tvid, qvid, appVideoSink);
    clientPipe.addMany(udpVideoSrc, videoRtcpIn, videoRtcpOut);
    Element.linkMany(udpVideoSrc, tvid, qvid, appVideoSink);

    // VIDEO BIN
    // Decode chain: depayload RTP/JPEG -> JPEG decode -> force 30 fps -> colorspace
    // conversion around a caps filter. videoColor2 is linked to the on-screen
    // component near the end of this method.

    videoBin = new Bin("videoBin");

    // src1
    Element videoDepay = ElementFactory.make("rtpjpegdepay", "depay");
    Element videoDecode = ElementFactory.make("jpegdec", "decode");
    Element videoRate = ElementFactory.make("videorate", "rate1");
    Element videoColor = ElementFactory.make("ffmpegcolorspace", "color");
    Element videoSrc1Caps = ElementFactory.make("capsfilter", "src1caps");
    videoSrc1Caps.setCaps(Caps.fromString("video/x-raw-yuv, framerate=30/1"));
    Element videoColor2 = ElementFactory.make("ffmpegcolorspace", "color2");

    videoBin.addMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);
    Element.linkMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);

    // Expose the depayloader's sink pad as the bin's sink so rtpbin can link to it.
    videoBin.addPad(new GhostPad("sink", videoDepay.getStaticPad("sink")));
    clientPipe.add(videoBin);

    final Bin audioBin = new Bin("audioBin");

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO BIN
      // Decode chain: depayload L16 -> convert -> volume (starts muted) -> auto sink.

      final Element audioDepay = ElementFactory.make("rtpL16depay", "auddepay");
      Element audioConvert = ElementFactory.make("audioconvert", "audconv");
      mute = ElementFactory.make("volume", "vol");
      mute.set("mute", "true");
      final Element audioSink = ElementFactory.make("autoaudiosink", "audsink");

      audioBin.addMany(audioDepay, audioConvert, mute, audioSink);
      Element.linkMany(audioDepay, audioConvert, mute, audioSink);

      audioBin.addPad(new GhostPad("sink", audioDepay.getStaticPad("sink")));
      clientPipe.add(audioBin);
    }

    // RTPBIN
    // Session 0 = video, session 1 = audio. Each tee feeds rtpbin through its
    // "src1" pad while the other tee branch feeds the stats appsink above.

    final RTPBin rtp = new RTPBin("rtp");
    clientPipe.add(rtp);

    Element.linkPads(tvid, "src1", rtp, "recv_rtp_sink_0");
    Element.linkPads(videoRtcpIn, "src", rtp, "recv_rtcp_sink_0");
    Element.linkPads(rtp, "send_rtcp_src_0", videoRtcpOut, "sink");

    if (attribute.equalsIgnoreCase("active")) {
      Element.linkPads(taud, "src1", rtp, "recv_rtp_sink_1");
      Element.linkPads(audioRtcpIn, "src", rtp, "recv_rtcp_sink_1");
      Element.linkPads(rtp, "send_rtcp_src_1", audioRtcpOut, "sink");
    }

    // BUS

    // rtpbin creates its recv_rtp_src_* pads dynamically; link them into the decode
    // bins as they appear, and dump a "success" dot graph each time.
    rtp.connect(
        new Element.PAD_ADDED() {
          @Override
          public void padAdded(Element arg0, Pad arg1) {
            if (arg1.getName().startsWith("recv_rtp_src_0")) {
              arg1.link(videoBin.getStaticPad("sink"));
            } else if (arg1.getName().startsWith("recv_rtp_src_1")
                && attribute.equalsIgnoreCase("active")) {
              arg1.link(audioBin.getStaticPad("sink"));
            }
            clientPipe.debugToDotFile(1, "clientsucc");
          }
        });

    Bus bus = clientPipe.getBus();

    // Log errors and dump a diagnostic dot graph on failure.
    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            pushLog("> GSTREAMER ERROR: code=" + code + " message=" + message);
            clientPipe.debugToDotFile(1, "clienterr");
          }
        });
    bus.connect(
        new Bus.EOS() {

          public void endOfStream(GstObject source) {
            clientPipe.setState(State.NULL);
            System.out.println("EOS");
          }
        });

    // Attach the on-screen video component inside the video bin; it is linked to
    // videoColor2 below.
    videoBin.add(vc.getElement());

    // NOTE(review): appJointSink is created and connected but never added to the
    // pipeline or linked to any element, and its handler body is commented out —
    // this looks like dead wiring left over from an A/V comparison experiment.
    AppSink appJointSink = (AppSink) ElementFactory.make("appsink", "appJointSink");
    appJointSink.set("emit-signals", true);
    appJointSink.setSync(false);
    jointQ = new LinkedList<CompareInfo>();
    appJointSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            /*
            int vs = 0; int as = 0;
            while (videoQ != null) {
            	vs++; videoQ.poll();
            }
            while (audioQ != null) {
            	as++; audioQ.poll();
            }
            System.out.println("Compare: " + as + " : " + vs);
            */
          }
        });

    Element.linkMany(videoColor2, vc.getElement());

    // Start playback from a separate thread so this method returns promptly.
    Thread videoThread =
        new Thread() {
          public void run() {
            clientPipe.setState(org.gstreamer.State.PLAYING);
          }
        };
    videoThread.start();
    clientPipe.debugToDotFile(0, "appsink");
  }
コード例 #2
-11
 private void hookUpBus() {
   logger.debug("Starting to hookup GStreamer Pipeline bus. ");
   // Route pipeline bus messages (EOS, errors, warnings) into our logger.
   final Bus pipelineBus = pipeline.getBus();

   // End-of-stream: tear the pipeline down and drop the reference.
   pipelineBus.connect(
       new Bus.EOS() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.EOS#endOfStream(org.gstreamer.GstObject)
          */
         public void endOfStream(GstObject arg0) {
           logger.debug("Pipeline received EOS.");
           pipeline.setState(State.NULL);
           pipeline = null;
         }
       });

   // Errors are logged at WARN level, mirroring the warning handler below.
   pipelineBus.connect(
       new Bus.ERROR() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.ERROR#errorMessage(org.gstreamer.GstObject, int,
          *     java.lang.String)
          */
         public void errorMessage(GstObject obj, int retCode, String msg) {
           logger.warn("{}: {}", obj.getName(), msg);
         }
       });

   pipelineBus.connect(
       new Bus.WARNING() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.WARNING#warningMessage(org.gstreamer.GstObject, int,
          *     java.lang.String)
          */
         public void warningMessage(GstObject obj, int retCode, String msg) {
           logger.warn("{}: {}", obj.getName(), msg);
         }
       });
   logger.debug("Successfully hooked up GStreamer Pipeline bus to Log4J.");
 }
コード例 #3
-15
  /**
   * Reads a WAV audio file, extracts its raw audio, and compresses it according to the
   * characteristics of the supplied codec description, returning the set of buffers that
   * hold the compressed audio.
   *
   * Pipeline: filesrc | wavparse | identity | audioresample | audioconvert | capsfilter |
   * (codec element) | appsink
   *
   * @param audioOriginal source audio file in WAV format
   * @param formatoObjetivo target compressed-audio properties; must be of type <code>
   *     FormatoCodecGst</code>
   * @return the buffers containing the compressed audio
   * @throws AudioException if an error occurs during processing
   * @see CodificadorAudio
   * @see FormatoCodecGst
   * @see Pipeline
   * @see Element
   */
  public synchronized BuffersAudio codificar(File audioOriginal, FormatoCodec formatoObjetivo)
      throws AudioException {

    final FormatoCodecGst targetFormat = (FormatoCodecGst) formatoObjetivo;
    final FormatoRawAudioGst rawFormat = (FormatoRawAudioGst) targetFormat.getFormatoRawAudio();

    this.pipeline = new Pipeline("pipeline");
    final Bus bus = this.pipeline.getBus();
    this.audioException = null;

    // Element definitions: the WAV file source and its demuxer.
    final FileSrc fileSource = (FileSrc) ElementFactory.make("filesrc", "source");
    fileSource.setLocation(audioOriginal);
    final Element wavDemuxer = ElementFactory.make("wavparse", "fileDecoder");
    final Element identity = ElementFactory.make("identity", "ident");
    final Element converter = ElementFactory.make("audioconvert", "audio converter");
    final Element resampler = ElementFactory.make("audioresample", "audio resampler");
    final Element capsFilter = ElementFactory.make("capsfilter", "capsfiltereq");
    capsFilter.setCaps(targetFormat.getCapsNecesarios());

    // The encoder element is chosen by the target format and configured with its
    // codec properties.
    final Element encoder = ElementFactory.make(targetFormat.getGstCodificador(), "codaudio");
    final PropiedadesCodificador codecProps = targetFormat.getPropiedadesCodec();
    final String[] propertyNames = codecProps.getNombrePropiedades();
    final int propertyCount = codecProps.getNumeroPropiedades();
    for (int idx = 0; idx < propertyCount; idx++) {
      encoder.set(propertyNames[idx], codecProps.getValorPropiedad(propertyNames[idx]));
    }

    final AppSink sink = (AppSink) ElementFactory.make("appsink", "appsink");
    sink.set("emit-signals", true);
    sink.set("sync", false);

    // Assemble the pipeline. wavparse exposes its source pad dynamically, so only
    // filesrc->wavparse is linked statically; the demuxer is joined to the rest of
    // the chain when its pad-added signal fires.
    this.pipeline.addMany(
        fileSource, wavDemuxer, identity, resampler, converter, capsFilter, encoder, sink);
    Element.linkMany(fileSource, wavDemuxer);
    Element.linkMany(identity, resampler, converter, capsFilter, encoder, sink);

    // Dynamic link from the demuxer into the identity element.
    final CodificadorAudioGst.SignalPadAdded padAddedHandler =
        new CodificadorAudioGst.SignalPadAdded(identity);
    wavDemuxer.connect(padAddedHandler);

    // Collects the encoded buffers flowing out of the appsink.
    final CodificadorAudioGst.NewBufferSignal bufferHandler =
        new CodificadorAudioGst.NewBufferSignal(sink);
    sink.connect(bufferHandler);

    // Bus listeners for end-of-stream and processing errors.
    bus.connect(new CodificadorAudioGst.SignalBusEos());
    bus.connect(new CodificadorAudioGst.SignalBusError());

    // Start encoding and block until the pipeline signals completion.
    this.pipeline.play();
    this.semaforo.acquireUninterruptibly();

    // Propagate the negotiated caps back into the format descriptors.
    targetFormat.setCapsCodificacion(bufferHandler.capsCodificacion);
    rawFormat.setCapsRawAudio(padAddedHandler.caps);

    bufferHandler.buffersAudio.setFormatoCodec(targetFormat);

    this.pipeline.dispose();

    if (this.audioException != null) {
      throw this.audioException;
    }

    return bufferHandler.buffersAudio;
  }
コード例 #4
-15
  /**
   * Takes a set of compressed-audio buffers, runs the decompression process, and writes
   * the result to a WAV file.
   *
   * Pipeline: appsrc | capsfilter | (decoder element) | audioconvert | audioresample |
   * capsfilter | wavenc | filesink
   *
   * @param targetFile the WAV file that will receive the decompressed audio
   * @param audioCodificado the compressed-audio buffers; their format properties must be
   *     obtainable through a <code>FormatoCodecGst</code> object
   * @return the WAV file containing the decompressed audio
   * @throws AudioException if an error occurs during processing
   * @see BuffersAudio
   * @see FormatoCodecGst
   * @see Pipeline
   * @see Element
   */
  public synchronized File decodificar(File targetFile, BuffersAudio audioCodificado)
      throws AudioException {

    final FormatoCodecGst sourceFormat = (FormatoCodecGst) audioCodificado.getFormatoCodec();
    final FormatoRawAudioGst rawFormat = (FormatoRawAudioGst) sourceFormat.getFormatoRawAudio();

    this.pipeline = new Pipeline("pipeline");
    final Bus bus = this.pipeline.getBus();
    this.audioException = null;

    // appsrc feeds the encoded buffers; the first capsfilter describes their format.
    final AppSrc source = (AppSrc) ElementFactory.make("appsrc", "appsrc");
    final Element inputCaps = ElementFactory.make("capsfilter", "capsfilterint");
    inputCaps.setCaps(sourceFormat.getCapsCodificacion());

    // Decoder element chosen by the source format.
    final Element decoder = ElementFactory.make(sourceFormat.getGstDecodificador(), "decodaudio");

    final Element converter = ElementFactory.make("audioconvert", "audioconvert");
    final Element resampler = ElementFactory.make("audioresample", "audioresample");
    final Element wavEncoder = ElementFactory.make("wavenc", "fileEncoder");
    final Element fileSink = ElementFactory.make("filesink", "filesink");
    fileSink.set("location", targetFile);
    final Element outputCaps = ElementFactory.make("capsfilter", "capsfilterout");
    outputCaps.setCaps(rawFormat.getCapsRawAudio());

    // The whole chain is static, so it can be added and linked in one pass.
    final Element[] chain = {
      source, inputCaps, decoder, converter, resampler, outputCaps, wavEncoder, fileSink
    };
    this.pipeline.addMany(chain);
    Element.linkMany(chain);

    // Pushes the encoded buffers into appsrc whenever it asks for more data.
    final CodificadorAudioGst.NeedDataSignal needDataHandler =
        new CodificadorAudioGst.NeedDataSignal(source, audioCodificado);
    source.connect(needDataHandler);

    // Bus listeners for end-of-stream and processing errors.
    bus.connect(new CodificadorAudioGst.SignalBusEos());
    bus.connect(new CodificadorAudioGst.SignalBusError());

    // Start decoding and block until the pipeline signals completion.
    this.pipeline.play();
    this.semaforo.acquireUninterruptibly();

    this.pipeline.dispose();
    if (this.audioException != null) {
      throw this.audioException;
    }
    return targetFile;
  }
コード例 #5
-37
ファイル: LiveMixer_old.java プロジェクト: lucasa/livemixer
  /**
   * Registers bus listeners on {@code pipe}: state-change logging with optional
   * auto-recovery, error logging, and EOS handling with optional auto-recovery.
   */
  public void prepareBus() {
    Bus bus = pipe.getBus();

    // NOTE(review): the field STATE_CHANGED shares its name with the listener type,
    // which makes this assignment read like a type reference — consider renaming the
    // field to lowerCamelCase.
    STATE_CHANGED =
        new STATE_CHANGED() {
          @Override
          public void stateChanged(GstObject source, State old, State current, State pend) {
            if (source == pipe) {
              System.out.println("Pipeline new state: " + current);
              // A PLAYING -> NULL transition is treated as an unexpected stop:
              // rebuild the pipeline from scratch when auto-recovery is enabled.
              if (old == State.PLAYING && current == State.NULL) {
                if (AUTO_RECOVERY) {
                  pipe.setState(State.NULL);
                  pipe = null;
                  inputElements.clear();
                  closeFFmpegProcess();
                  createPipeline();
                  playThread();
                }
              }
            }
          }
        };
    bus.connect(STATE_CHANGED);

    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            System.out.println("Error: code=" + code + " message=" + message);
            // code == 1 is treated as fatal — TODO confirm which GStreamer error
            // domain/code this corresponds to.
            if (code == 1) {
              pipe.setState(State.NULL);
            }
          }
        });
    bus.connect(
        new Bus.EOS() {
          public void endOfStream(GstObject source) {
            if (AUTO_RECOVERY) {
              // NOTE(review): this recovery path calls createPipeline() BEFORE
              // closeFFmpegProcess() — the opposite order of the state-changed
              // handler above — and does not clear inputElements; confirm whether
              // that difference is intentional.
              pipe.setState(State.NULL);
              pipe = null;
              createPipeline();
              closeFFmpegProcess();
              startFFmpegProcess();
              playThread();
              // writer.close();
              // if (outContainer.writeTrailer() < 0)
              // throw new RuntimeException();
              // outContainer.close();
              // System.exit(0);
            }
          }
        });
  }