Example #1
  private void addInput(String url) {

    INDEX++;
    int i = INDEX;
    System.out.println("\n------------\naddInput[" + i + "]: " + url + "\n-------------\n");

    Element input = null;
    if (!FAKE_INPUT) {
      /* create audio output */
      final Bin audioBin = new Bin("Audio Bin" + i);

      Element src;
      if (url.startsWith("http://")) {
        src = ElementFactory.make("gnomevfssrc", "Input" + i);
      } else if (url.startsWith("rtmp") && url.contains("://")) { // rtmp://, rtmpt://, rtmps://
        src = ElementFactory.make("rtmpsrc", "Input" + i);
        // src.set("do-timestamp", true);
      } else {
        src = ElementFactory.make("filesrc", "Input" + i);
      }
      src.set("location", url);

      DecodeBin2 decodeBin = new DecodeBin2("Decode Bin" + i);
      Element decodeQueue = ElementFactory.make("queue2", "Decode Queue" + i);

      Element conv = ElementFactory.make("audioconvert", "Audio Convert" + i);
      Element resample = ElementFactory.make("audioresample", "Audio Resample" + i);
      Element volume = ElementFactory.make("volume", "Audio Volume" + i);
      volume.set("volume", 1.0f);
      volumeElements.put(url, volume);
      audioBin.addMany(conv, resample, volume);
      Element.linkMany(conv, resample, volume);
      audioBin.addPad(new GhostPad("src", volume.getStaticPad("src")));
      audioBin.addPad(new GhostPad("sink", conv.getStaticPad("sink")));

      input = new Bin("Input Bin" + i);
      ((Bin) input).addMany(src, decodeQueue, decodeBin, audioBin);
      Element.linkMany(src, decodeQueue, decodeBin, audioBin);
      input.addPad(new GhostPad("src", audioBin.getSrcPads().get(0)));

      decodeBin.connect(
          new DecodeBin2.NEW_DECODED_PAD() {
            public void newDecodedPad(Element elem, Pad pad, boolean last) {
              /* only link once */
              if (pad.isLinked()) {
                return;
              }
              /* check media type */
              Caps caps = pad.getCaps();
              Structure struct = caps.getStructure(0);
              if (struct.getName().startsWith("audio/")) {
                System.out.println("Linking audio pad: " + struct.getName());
                if (audioBin.getStaticPad("sink").getPeer() == null) {
                  PadLinkReturn linked = pad.link(audioBin.getStaticPad("sink"));
                  System.out.println("Decodebin linked " + linked);
                }
              } else if (struct.getName().startsWith("video/")) {
                System.out.println("Linking video pad: " + struct.getName());
              } else {
                System.out.println("Unknown pad [" + struct.getName() + "]");
              }
            }
          });
    } else {
      input = ElementFactory.make("audiotestsrc", "Audio Fake" + i);
      int w = i;
      if (i > 1) w = 5;
      input.set("wave", w);
      input.set("is-live", true);
    }

    if (!inputURLs.contains(url)) {
      inputURLs.add(url);
    }
    inputElements.put(url, input);

    boolean playing = pipe.isPlaying();

    // Requesting a sink pad on the adder fires its pad-added callback, which
    // links this input to the new pad (see linkNewInputToPad in Example #2).
    Pad adderSink = adder.getRequestPad("sink%d");
    if (playing) {
      System.out.println("Adder inputs: " + adder.getSinkPads());
      // re-assert PLAYING so the newly added branch starts streaming
      pipe.setState(State.PLAYING);
    }
  }
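The helper that actually attaches a new input to the requested adder pad is not shown in this excerpt; Example #2 below wires a pad-added callback on the adder that calls a `linkNewInputToPad` method. A minimal sketch of what that helper presumably does, assuming a hypothetical `pendingInput` field that `addInput` sets before requesting the pad:

  /* Hypothetical sketch: link the most recently created input to a fresh adder pad. */
  private void linkNewInputToPad(Pad adderSink) {
    Element input = pendingInput; // assumed field set by addInput()
    if (input == null || adderSink.isLinked()) {
      return;
    }
    pipe.add(input); // the bin must be in the pipeline before linking
    PadLinkReturn linked = input.getStaticPad("src").link(adderSink);
    System.out.println("Adder linked " + linked);
    if (pipe.isPlaying()) {
      input.setState(State.PLAYING); // start the new branch on a live pipeline
    }
  }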
Example #2
  /**
   * Builds the mixer pipeline from the configured {@code inputURLs},
   * {@code outputURL} and {@code pathFifo} fields.
   */
  public void createPipeline() {
    String config = "rate=" + OUTPUT_FREQ + ",channels=" + OUTPUT_CHANELS + ",depth=16";
    pathFifo = "/tmp/" + outputURL.replace('/', '_') + "_" + config;
    this.capsAudio = Caps.fromString("audio/x-raw-int," + config + ";audio/x-raw-float," + config);
    pipe = new Pipeline("Audio Mixer Pipeline");
    try {
      adder = ElementFactory.make("liveadder", "liveadder");
      // adder.set("latency", 5 * 1000);
    } catch (Exception e) {
      e.printStackTrace(); // if liveadder is unavailable the pipeline cannot be built
    }
    adder.connect(
        new PAD_REMOVED() {
          @Override
          public void padRemoved(Element element, Pad pad) {
            if (removedInput != null) {
              pipe.remove(removedInput);
              pipe.remove(removedIdentity);
              removedInput.setState(State.NULL);
              removedIdentity.setState(State.NULL);
              System.out.println("padRemoved: " + removedInput);
              removedInput = null;
              removedIdentity = null;
              System.gc();
            }
          }
        });

    adder.connect(
        new PAD_ADDED() {
          @Override
          public void padAdded(Element element, Pad pad) {
            linkNewInputToPad(pad);
          }
        });

    Element tee = ElementFactory.make("tee", "tee");

    Element capsfilter = ElementFactory.make("capsfilter", null);
    capsfilter.setCaps(capsAudio);
    queueTee = ElementFactory.make("queue2", null);
    pipe.addMany(adder, queueTee, tee, capsfilter);
    Element.linkMany(adder, queueTee, capsfilter, tee);

    if (AUDIO_OUTPUT) {
      Element audioSink = ElementFactory.make("alsasink", "Audio Sink");
      audioSink.set("sync", false);
      pipe.addMany(audioSink);
      Element.linkMany(tee, audioSink);
    }

    if (STREAM_OUTPUT) {

      // fileFifo.deleteOnExit();
      fileFifo = new File(pathFifo);
      try {
        if (!fileFifo.exists()) {
          // fileFifo.delete();
          String command = "/usr/bin/mkfifo " + fileFifo.getAbsolutePath();
          ProcessBuilder b = new ProcessBuilder("/bin/sh", "-c", command);
          b.start().waitFor();
        }
      } catch (Exception e) {
        e.printStackTrace();
      }

      Element codecEnc = ElementFactory.make("lamemp3enc", "MP3 Encoder");
      Element mux = ElementFactory.make("flvmux", "FLV Muxer");
      Element queue = ElementFactory.make("queue2", "Fifo Queue");
      Element filesink = ElementFactory.make("filesink", "Fifo Sink");
      filesink.set("sync", false);
      filesink.set("location", fileFifo.getAbsolutePath());
      pipe.addMany(queue, codecEnc, mux, filesink);
      Element.linkMany(tee, queue, codecEnc, mux, filesink);

      startFFmpegProcess();
    }

    for (String url : inputURLs) {
      if (checkUrl(url)) {
        addInput(url);
      }
    }

    prepareBus();
  }
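`startFFmpegProcess` is not shown either. Since the FLV-muxed stream is written to the named pipe at `pathFifo`, the method presumably launches an external ffmpeg that reads the FIFO and relays the stream to `outputURL`. A minimal sketch under that assumption (the exact ffmpeg flags are illustrative):

  /* Hypothetical sketch: relay the FLV stream from the FIFO to the output URL. */
  private void startFFmpegProcess() {
    try {
      ProcessBuilder b = new ProcessBuilder(
          "ffmpeg", "-re", "-i", fileFifo.getAbsolutePath(),
          "-codec", "copy", "-f", "flv", outputURL);
      b.redirectErrorStream(true); // merge stderr into stdout for simpler logging
      b.start();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }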
Example #3
  private static void startStreaming(final VideoComponent vc, String settings, int port) {
    Gst.init();
    clientPipe = new Pipeline("pipeline");
    pushLog("> CTRL: " + "PLAY");
    pushLog("> SYS: " + " INIT STREAM");

    System.out.println("Starting with: C=" + clientLoc + ", S=" + serverLoc);

    // VIDEO
    Element udpVideoSrc = ElementFactory.make("udpsrc", "src1");
    udpVideoSrc.setCaps(
        Caps.fromString(
            "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)JPEG, payload=(int)96, ssrc=(uint)2156703816, clock-base=(uint)1678649553, seqnum-base=(uint)31324"));
    udpVideoSrc.set("uri", "udp://" + clientLoc + ":" + port);

    Element videoRtcpIn = ElementFactory.make("udpsrc", "src3");
    videoRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 1));

    Element videoRtcpOut = ElementFactory.make("udpsink", "snk1");
    videoRtcpOut.set("host", serverLoc);
    videoRtcpOut.set("port", "" + (port + 5));
    videoRtcpOut.set("sync", "false");
    videoRtcpOut.set("async", "false");

    Element udpAudioSrc = null, audioRtcpIn = null, audioRtcpOut = null, taud = null;

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO
      udpAudioSrc = ElementFactory.make("udpsrc", "src2");
      udpAudioSrc.setCaps(
          Caps.fromString(
              "application/x-rtp, media=(string)audio, clock-rate=(int)8000, encoding-name=(string)L16, encoding-params=(string)2, channels=(int)2, payload=(int)96, ssrc=(uint)3489550614, clock-base=(uint)2613725642, seqnum-base=(uint)1704"));
      udpAudioSrc.set("uri", "udp://" + clientLoc + ":" + (port + 2));

      taud = ElementFactory.make("tee", "taud");
      Element qaud = ElementFactory.make("queue", "qaud");
      AppSink appAudioSink = (AppSink) ElementFactory.make("appsink", "appAudioSink");
      appAudioSink.set("emit-signals", true);
      appAudioSink.setSync(false);
      audioQ = new LinkedList<FrameInfo>();
      appAudioSink.connect(
          new AppSink.NEW_BUFFER() {
            public void newBuffer(AppSink sink) {
              Buffer b = sink.getLastBuffer();
              if (b != null) {
                audioQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              }
            }
          });

      audioRtcpIn = ElementFactory.make("udpsrc", "src4");
      audioRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 3));

      audioRtcpOut = ElementFactory.make("udpsink", "snk2");
      audioRtcpOut.set("host", serverLoc);
      audioRtcpOut.set("port", "" + (port + 7));
      audioRtcpOut.set("sync", "false");
      audioRtcpOut.set("async", "false");

      clientPipe.addMany(taud, qaud, appAudioSink);
      clientPipe.addMany(udpAudioSrc, audioRtcpIn, audioRtcpOut);
      Element.linkMany(udpAudioSrc, taud, qaud, appAudioSink);
    }

    Element tvid = ElementFactory.make("tee", "tvid");
    Element qvid = ElementFactory.make("queue", "qvid");
    AppSink appVideoSink = (AppSink) ElementFactory.make("appsink", "appVideoSink");
    appVideoSink.set("emit-signals", true);
    appVideoSink.setSync(false);
    videoQ = new LinkedList<FrameInfo>();
    appVideoSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            Buffer b = sink.getLastBuffer();
            if (b != null) {
              videoQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              // System.out.println(System.currentTimeMillis());
            }
          }
        });
    clientPipe.addMany(tvid, qvid, appVideoSink);
    clientPipe.addMany(udpVideoSrc, videoRtcpIn, videoRtcpOut);
    Element.linkMany(udpVideoSrc, tvid, qvid, appVideoSink);

    // VIDEO BIN

    videoBin = new Bin("videoBin");

    // src1
    Element videoDepay = ElementFactory.make("rtpjpegdepay", "depay");
    Element videoDecode = ElementFactory.make("jpegdec", "decode");
    Element videoRate = ElementFactory.make("videorate", "rate1");
    Element videoColor = ElementFactory.make("ffmpegcolorspace", "color");
    Element videoSrc1Caps = ElementFactory.make("capsfilter", "src1caps");
    videoSrc1Caps.setCaps(Caps.fromString("video/x-raw-yuv, framerate=30/1"));
    Element videoColor2 = ElementFactory.make("ffmpegcolorspace", "color2");

    videoBin.addMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);
    Element.linkMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);

    videoBin.addPad(new GhostPad("sink", videoDepay.getStaticPad("sink")));
    clientPipe.add(videoBin);

    final Bin audioBin = new Bin("audioBin");

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO BIN

      final Element audioDepay = ElementFactory.make("rtpL16depay", "auddepay");
      Element audioConvert = ElementFactory.make("audioconvert", "audconv");
      mute = ElementFactory.make("volume", "vol");
      mute.set("mute", "true");
      final Element audioSink = ElementFactory.make("autoaudiosink", "audsink");

      audioBin.addMany(audioDepay, audioConvert, mute, audioSink);
      Element.linkMany(audioDepay, audioConvert, mute, audioSink);

      audioBin.addPad(new GhostPad("sink", audioDepay.getStaticPad("sink")));
      clientPipe.add(audioBin);
    }

    // RTPBIN

    final RTPBin rtp = new RTPBin("rtp");
    clientPipe.add(rtp);

    Element.linkPads(tvid, "src1", rtp, "recv_rtp_sink_0");
    Element.linkPads(videoRtcpIn, "src", rtp, "recv_rtcp_sink_0");
    Element.linkPads(rtp, "send_rtcp_src_0", videoRtcpOut, "sink");

    if (attribute.equalsIgnoreCase("active")) {
      Element.linkPads(taud, "src1", rtp, "recv_rtp_sink_1");
      Element.linkPads(audioRtcpIn, "src", rtp, "recv_rtcp_sink_1");
      Element.linkPads(rtp, "send_rtcp_src_1", audioRtcpOut, "sink");
    }

    // BUS

    rtp.connect(
        new Element.PAD_ADDED() {
          @Override
          public void padAdded(Element arg0, Pad arg1) {
            if (arg1.getName().startsWith("recv_rtp_src_0")) {
              arg1.link(videoBin.getStaticPad("sink"));
            } else if (arg1.getName().startsWith("recv_rtp_src_1")
                && attribute.equalsIgnoreCase("active")) {
              arg1.link(audioBin.getStaticPad("sink"));
            }
            clientPipe.debugToDotFile(1, "clientsucc");
          }
        });

    Bus bus = clientPipe.getBus();

    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            pushLog("> GSTREAMER ERROR: code=" + code + " message=" + message);
            clientPipe.debugToDotFile(1, "clienterr");
          }
        });
    bus.connect(
        new Bus.EOS() {

          public void endOfStream(GstObject source) {
            clientPipe.setState(State.NULL);
            System.out.println("EOS");
          }
        });

    videoBin.add(vc.getElement());

    AppSink appJointSink = (AppSink) ElementFactory.make("appsink", "appJointSink");
    appJointSink.set("emit-signals", true);
    appJointSink.setSync(false);
    jointQ = new LinkedList<CompareInfo>();
    appJointSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            /*
            int vs = 0; int as = 0;
            while (!videoQ.isEmpty()) {
              vs++; videoQ.poll();
            }
            while (!audioQ.isEmpty()) {
              as++; audioQ.poll();
            }
            System.out.println("Compare: " + as + " : " + vs);
            */
          }
        });

    Element.linkMany(videoColor2, vc.getElement());

    Thread videoThread =
        new Thread() {
          public void run() {
            clientPipe.setState(org.gstreamer.State.PLAYING);
          }
        };
    videoThread.start();
    clientPipe.debugToDotFile(0, "appsink");
  }
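The `FrameInfo` objects queued by both app sinks are not defined in this excerpt. From their use, `new FrameInfo(System.currentTimeMillis(), b.getSize())`, they are presumably a simple timestamp/size value pair, e.g.:

  /* Hypothetical sketch: per-buffer record used by the monitoring queues. */
  private static final class FrameInfo {
    final long timestampMs; // wall-clock arrival time of the buffer
    final int size;         // buffer size in bytes

    FrameInfo(long timestampMs, int size) {
      this.timestampMs = timestampMs;
      this.size = size;
    }
  }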
  /**
   * Takes a WAV audio file, extracts the raw audio and compresses it according
   * to the requested characteristics, returning the set of buffers that contain
   * the compressed audio.
   *
   * @param audioOriginal Audio file in WAV format
   * @param formatoObjetivo Properties of the compressed audio. Must be of type
   *     <code>FormatoCodecGst</code>
   * @return The buffers containing the compressed audio
   * @throws AudioException If an error occurs during processing
   * @see CodificadorAudio
   * @see FormatoCodecGst
   * @see Pipeline
   * @see Element
   */
  public synchronized BuffersAudio codificar(File audioOriginal, FormatoCodec formatoObjetivo)
      throws AudioException {

    FormatoCodecGst formatoObjetivoGst = (FormatoCodecGst) formatoObjetivo;
    FormatoRawAudioGst formatoRawAudioGst =
        (FormatoRawAudioGst) formatoObjetivoGst.getFormatoRawAudio();

    // The gstreamer encoding pipeline consists of:
    // filesrc | wavparse | audioresample | audioconvert | capsfilter | element(encoder) |
    // appsink
    this.pipeline = new Pipeline("pipeline");
    Bus bus = this.pipeline.getBus();
    this.audioException = null;

    // element definitions
    FileSrc filesrc = (FileSrc) ElementFactory.make("filesrc", "source");
    filesrc.setLocation(audioOriginal);
    // System.out.println(filesrc.get("location"));
    Element demuxer = ElementFactory.make("wavparse", "fileDecoder");
    Element ident = ElementFactory.make("identity", "ident");
    Element audioconvert = ElementFactory.make("audioconvert", "audio converter");
    Element audioresample = ElementFactory.make("audioresample", "audio resampler");
    Element capsfilterreq = ElementFactory.make("capsfilter", "capsfilterreq");

    capsfilterreq.setCaps(formatoObjetivoGst.getCapsNecesarios());

    Element codaudio = ElementFactory.make(formatoObjetivoGst.getGstCodificador(), "codaudio");

    // apply the configured codec properties
    PropiedadesCodificador pc = formatoObjetivoGst.getPropiedadesCodec();
    int numeroPropiedades = pc.getNumeroPropiedades();
    String[] nombrePropiedades = pc.getNombrePropiedades();
    for (int i = 0; i < numeroPropiedades; i++) {
      codaudio.set(nombrePropiedades[i], pc.getValorPropiedad(nombrePropiedades[i]));
    }

    AppSink appsink = (AppSink) ElementFactory.make("appsink", "appsink");
    appsink.set("emit-signals", true);
    appsink.set("sync", false);

    // assemble the pipeline
    this.pipeline.addMany(
        filesrc, demuxer, ident, audioresample, audioconvert, capsfilterreq, codaudio, appsink);
    Element.linkMany(filesrc, demuxer);
    Element.linkMany(ident, audioresample, audioconvert, capsfilterreq, codaudio, appsink);

    // add listeners
    // dynamic linking for the file decoder (wavparse exposes its source pad once parsed)
    CodificadorAudioGst.SignalPadAdded signalPadAdded =
        new CodificadorAudioGst.SignalPadAdded(ident);
    demuxer.connect(signalPadAdded);

    // collect the encoded byte stream leaving the pipeline
    CodificadorAudioGst.NewBufferSignal newBufferSignal =
        new CodificadorAudioGst.NewBufferSignal(appsink);
    appsink.connect(newBufferSignal);

    // end of stream
    bus.connect(new CodificadorAudioGst.SignalBusEos());
    // processing errors
    bus.connect(new CodificadorAudioGst.SignalBusError());

    // start encoding
    this.pipeline.play();

    // wait for encoding to finish
    this.semaforo.acquireUninterruptibly();

    formatoObjetivoGst.setCapsCodificacion(newBufferSignal.capsCodificacion);
    formatoRawAudioGst.setCapsRawAudio(signalPadAdded.caps);

    newBufferSignal.buffersAudio.setFormatoCodec(formatoObjetivoGst);

    this.pipeline.dispose();

    if (this.audioException != null) throw this.audioException;

    return newBufferSignal.buffersAudio;
  }
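`codificar` blocks on `semaforo` until the bus reports EOS or an error, so `SignalBusEos` and `SignalBusError` (not shown) must release it. A minimal sketch of both handlers, assuming `AudioException` offers a String constructor:

  /* Hypothetical sketch: bus handlers that unblock codificar()/decodificar(). */
  private class SignalBusEos implements Bus.EOS {
    public void endOfStream(GstObject source) {
      semaforo.release(); // normal completion: wake the waiting thread
    }
  }

  private class SignalBusError implements Bus.ERROR {
    public void errorMessage(GstObject source, int code, String message) {
      audioException = new AudioException("GStreamer error " + code + ": " + message);
      semaforo.release(); // abnormal completion: the caller rethrows audioException
    }
  }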
  /**
   * Takes a set of audio buffers and decompresses them, producing a WAV file
   * with the result of the decompression.
   *
   * @param targetFile The WAV audio file that will hold the decompressed audio
   * @param audioCodificado The compressed audio buffers. Their audio properties must be
   *     obtained through an object of type <code>FormatoCodecGst</code>
   * @return The WAV audio file containing the decompressed audio
   * @throws AudioException If an error occurs during processing
   * @see BuffersAudio
   * @see FormatoCodecGst
   * @see Pipeline
   * @see Element
   */
  public synchronized File decodificar(File targetFile, BuffersAudio audioCodificado)
      throws AudioException {

    FormatoCodecGst formatoOrigenGst = (FormatoCodecGst) audioCodificado.getFormatoCodec();
    FormatoRawAudioGst formatoRawAudioGst =
        (FormatoRawAudioGst) formatoOrigenGst.getFormatoRawAudio();
    // The gstreamer decoding pipeline consists of:
    // appsrc | capsfilter | element(decoder) | audioconvert | audioresample | capsfilter |
    // wavenc | filesink
    this.pipeline = new Pipeline("pipeline");
    Bus bus = this.pipeline.getBus();
    this.audioException = null;

    // element definitions
    AppSrc appsrc = (AppSrc) ElementFactory.make("appsrc", "appsrc");
    Element capsfilterin = ElementFactory.make("capsfilter", "capsfilterin");

    capsfilterin.setCaps(formatoOrigenGst.getCapsCodificacion());

    Element decodaudio = ElementFactory.make(formatoOrigenGst.getGstDecodificador(), "decodaudio");

    Element audioconvert = ElementFactory.make("audioconvert", "audioconvert");
    Element audioresample = ElementFactory.make("audioresample", "audioresample");
    Element muxer = ElementFactory.make("wavenc", "fileEncoder");
    Element filesink = ElementFactory.make("filesink", "filesink");
    filesink.set("location", targetFile);
    Element capsfilterout = ElementFactory.make("capsfilter", "capsfilterout");
    capsfilterout.setCaps(formatoRawAudioGst.getCapsRawAudio());

    this.pipeline.addMany(
        appsrc,
        capsfilterin,
        decodaudio,
        audioconvert,
        audioresample,
        capsfilterout,
        muxer,
        filesink);
    Element.linkMany(
        appsrc,
        capsfilterin,
        decodaudio,
        audioconvert,
        audioresample,
        capsfilterout,
        muxer,
        filesink);

    // feed the compressed byte stream into the pipeline
    CodificadorAudioGst.NeedDataSignal needDataSignal =
        new CodificadorAudioGst.NeedDataSignal(appsrc, audioCodificado);
    appsrc.connect(needDataSignal);

    // add listeners
    bus.connect(new CodificadorAudioGst.SignalBusEos());
    bus.connect(new CodificadorAudioGst.SignalBusError());

    // start decoding
    this.pipeline.play();
    // wait for decoding to finish
    this.semaforo.acquireUninterruptibly();

    this.pipeline.dispose();
    if (this.audioException != null) throw this.audioException;
    return targetFile;
  }
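`NeedDataSignal` is the counterpart on the producing side: it feeds the compressed buffers into the appsrc whenever the pipeline asks for data, and signals EOS when they run out. A minimal sketch, assuming gstreamer-java's `AppSrc.NEED_DATA` callback and hypothetical sequential accessors on `BuffersAudio`:

  /* Hypothetical sketch: push encoded buffers into appsrc on demand. */
  private static class NeedDataSignal implements AppSrc.NEED_DATA {
    private final AppSrc appsrc;
    private final BuffersAudio buffers;
    private int next = 0; // index of the next buffer to push

    NeedDataSignal(AppSrc appsrc, BuffersAudio buffers) {
      this.appsrc = appsrc;
      this.buffers = buffers;
    }

    public void needData(AppSrc elem, int size) {
      if (next < buffers.getNumeroBuffers()) { // assumed accessor
        appsrc.pushBuffer(buffers.getBuffer(next++)); // assumed accessor
      } else {
        appsrc.endOfStream(); // no more data: let wavenc finalize the file
      }
    }
  }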