Example #1
  public static void main(String[] args) {
    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init("AudioPlayer", args);

    if (args.length < 1) {
      System.out.println("Usage: AudioPlayer <file to play>");
      System.exit(1);
    }
    //
    // Create a PlayBin2 to play the media file.  A PlayBin2 is a Pipeline that
    // creates all the needed elements and automatically links them together.
    //
    PlayBin2 playbin = new PlayBin2("AudioPlayer");

    // Make sure a video window does not appear.
    playbin.setVideoSink(ElementFactory.make("fakesink", "videosink"));

    // Set the file to play
    playbin.setInputFile(new File(args[0]));

    // Start the pipeline playing
    playbin.play();
    Gst.main();

    // Clean up (gstreamer requires elements to be in State.NULL before disposal)
    playbin.stop();
  }
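Gst.main() blocks until Gst.quit() is called, so the stop() call above only runs once the main loop exits. A minimal sketch (using the same gstreamer-java Bus API shown elsewhere in these examples, placed before Gst.main()) that ends the loop when playback finishes:

    // End the GLib main loop on end-of-stream so the cleanup code after
    // Gst.main() actually runs.
    playbin.getBus().connect(new Bus.EOS() {
      public void endOfStream(GstObject source) {
        Gst.quit();
      }
    });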
Example #2
 @BeforeTest
 public void initGStreamer() {
   if (System.getProperty("jna.library.path") == null) {
     System.setProperty("jna.library.path", "/opt/local/lib");
   }
   // Initialize GStreamer regardless of whether the property was already set.
   Gst.init();
 }
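If the property may already carry other paths, a variant that appends instead of overwriting could look like this (a sketch; /opt/local/lib is just the install location assumed by the example):

   String existing = System.getProperty("jna.library.path");
   String gstLibs = "/opt/local/lib"; // assumed GStreamer library directory
   System.setProperty("jna.library.path",
       existing == null ? gstLibs : existing + java.io.File.pathSeparator + gstLibs);
   Gst.init();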
Example #3
  public static void main(String[] args) throws InterruptedException {
    if (args.length < 2)
      throw new IllegalArgumentException(
          "Parameters: input_file_or_url_1 [input_file_or_url_2] ... [input_file_or_url_N] output_file_or_url");

    // Gst.init("StreamAdder", new String[] { "--gst-debug=liveadder:4" });
    Gst.init("StreamAdder", new String[] {"--gst-debug-level=1"});

    List<String> inputUrls = new ArrayList<String>();
    for (int i = 0; i < args.length - 1; i++) {
      if (args[i].equalsIgnoreCase("false") || args[i].equalsIgnoreCase("true"))
        DEBUG = Boolean.valueOf(args[i]);
      else inputUrls.add(args[i]);
    }

    String outputUrl = args[args.length - 1];
    final LiveMixer_old adder = new LiveMixer_old("rtmp", inputUrls, outputUrl);
    adder.playThread();

    Runtime.getRuntime()
        .addShutdownHook(
            new Thread(
                new Runnable() {
                  public void run() {
                    System.err.println("System is Shutting Down...");
                    adder.getFfmpegProcess().destroy();
                  }
                }));

    String url2 = "rtmp://wowza01.dyb.fm/teste/teste/media3";
    String url3 = "rtmp://wowza01.dyb.fm/dyb/runningradio/media";

    //		Thread.sleep(5000);
    //		adder.pipe.setState(State.NULL);
    //		Thread.sleep(5000);
    //		adder.pipe.setState(State.PLAYING);

    // Toggle url3 in and out every 20 seconds, for ten cycles.
    for (int i = 0; i < 10; i++) {
      Thread.sleep(20000);
      adder.addInputURL(url3);
      Thread.sleep(20000);
      adder.removeInput(url3);
    }

    Gst.main();
  }
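The sleep loop above occupies the main thread for its full duration. As an alternative sketch (ScheduledExecutorService from java.util.concurrent is a stand-in here, not something the original code uses; it assumes url3 is declared final), the same 20-second add/remove cycle can run off the main thread while Gst.main() starts immediately:

    final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
    timer.scheduleAtFixedRate(new Runnable() {
      private boolean added = false;
      public void run() {
        // Alternate between adding and removing the extra input, as the loop does.
        if (added) adder.removeInput(url3); else adder.addInputURL(url3);
        added = !added;
      }
    }, 20, 20, TimeUnit.SECONDS);
    Gst.main();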
Example #4
  GstreamerPlayer(ElphelVision parent) {
    this.Parent = parent;

    args = new String[2];
    args[1] = "";
    args[0] = "";

    try {
      args = Gst.init("ElphelVision", args);
    } catch (GstException e) {
      Parent.WriteErrortoConsole("Gst.init error: " + e);
    }

    //        Playbin = new PlayBin2("VideoPlayer");
    //        rtspsource = "rtspsrc location=rtsp://" + Parent.Camera.GetIP()[0] + ":554"
    //            + " protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue"
    //            + " ! jp462bayer ! queue ! bayer2rgb2 method=0 ! ffmpegcolorspace name=elphelstream";
    //        Playbin.setInputFile(new File("test.avi"));
    /*
    java.net.URI uri = null;
    try {
      uri = new URI("rtsp://" + Parent.Camera.GetIP()[0] + ":554");
    } catch (URISyntaxException ex) {
      Logger.getLogger(GstreamerPlayer.class.getName()).log(Level.SEVERE, null, ex);
    }
    Playbin.setURI(uri);
    */

    videoComponent = new VideoComponent();
    videoComponent.setPreferredSize(new Dimension(850, 480));
    //     Playbin.setVideoSink(videoComponent.getElement());

    /*
    Playbin.getBus().connect(new Bus.EOS() {
      public void endOfStream(GstObject source) {
        System.out.println("Finished playing file");
        Gst.quit();
      }
    });
    Playbin.getBus().connect(new Bus.ERROR() {
      public void errorMessage(GstObject source, int code, String message) {
        System.out.println("Error occurred: " + message);
        Gst.quit();
      }
    });
    Playbin.getBus().connect(new Bus.STATE_CHANGED() {
      public void stateChanged(GstObject source, State old, State current, State pending) {
        if (source == Playbin) {
          System.out.println("Pipeline state changed from " + old + " to " + current);
        }
      }
    });
    */
  }
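The commented-out rtspsource string spells out the intended camera pipeline. If it were re-enabled, one way to build it (a sketch assuming gstreamer-java's Pipeline.launch, a gst_parse_launch wrapper, and that Parent.Camera.GetIP()[0] yields the camera address) would be:

    String desc = "rtspsrc location=rtsp://" + Parent.Camera.GetIP()[0] + ":554"
        + " protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue"
        + " ! jp462bayer ! queue ! bayer2rgb2 method=0 ! ffmpegcolorspace name=elphelstream";
    Pipeline pipe = Pipeline.launch(desc);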
Example #5
  /**
   * Initializes the pipeline itself, but does not start capturing
   *
   * @param devices The list of devices to capture from.
   * @param confidence True to enable confidence monitoring.
   * @return The created {@code Pipeline}; devices that fail to link are logged as errors.
   */
  private Pipeline startPipeline(ArrayList<CaptureDevice> devices, boolean confidence) {
    logger.info("Successfully initialised {} devices.", devices.size());
    for (int i = 0; i < devices.size(); i++) logger.debug("Device #{}: {}.", i, devices.get(i));

    // setup gstreamer pipeline using capture devices
    Gst.init(); // the gst library cannot be used without first initialising it

    Pipeline pipeline = new Pipeline();
    for (CaptureDevice c : devices) {
      if (!addCaptureDeviceBinsToPipeline(c, pipeline))
        logger.error("Failed to create pipeline for {}.", c);
    }

    pipeline.debugToDotFile(Pipeline.DEBUG_GRAPH_SHOW_ALL, pipeline.getName());
    return pipeline;
  }
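Note that debugToDotFile only produces output when the GST_DEBUG_DUMP_DOT_DIR environment variable points at a writable directory before the process starts; otherwise GStreamer silently skips the dump. A defensive check (a sketch using the example's own logger) might be:

    if (System.getenv("GST_DEBUG_DUMP_DOT_DIR") == null) {
      logger.warn("GST_DEBUG_DUMP_DOT_DIR is not set; debugToDotFile will write nothing.");
    }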
Example #6
 @Before
 public void setUp() {
   try {
     Gst.init();
   } catch (Throwable t) {
     logger.warn("Skipping agent tests due to unsatisifed gstreamer installation");
     gstreamerInstalled = false;
   }
   gstreamerCapture = new GStreamerCaptureFramework();
   mockRecording = EasyMock.createNiceMock(RecordingImpl.class);
   properties = new XProperties();
   properties.put(CaptureParameters.CAPTURE_DEVICE_NAMES, "vga, cam, mic");
   EasyMock.expect(mockRecording.getProperties()).andReturn(properties);
   EasyMock.replay(mockRecording);
   captureFailureHandler = EasyMock.createNiceMock(CaptureFailureHandler.class);
   configurationManager = EasyMock.createNiceMock(ConfigurationManager.class);
 }
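Because setUp() records a failed Gst.init() in gstreamerInstalled rather than failing outright, a test method would consult that flag to skip gracefully. A hypothetical sketch (JUnit 4 Assume; the test body is a placeholder, not the class's real API):

   @Test
   public void testStartCapture() {
     // Skip rather than fail on machines without a working GStreamer install.
     org.junit.Assume.assumeTrue(gstreamerInstalled);
     // ... exercise gstreamerCapture against mockRecording here ...
   }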
Example #7
  public static void main(String[] args) {
    final CountDownLatch done = new CountDownLatch(1);

    //
    // Initialize the gstreamer framework, and let it interpret any command
    // line flags it is interested in.
    //
    args = Gst.init(progname, args);

    if (args.length < 1) {
      System.out.println("Usage: " + progname + " <filename>");
      System.exit(1);
    }
    //
    // Instead of using a playbin, it would be possible to use a pipeline with
    // a typefind element and a demuxer, and wire them up manually.
    //
    final PlayBin pipe = new PlayBin(progname);
    pipe.setInputFile(new File(args[0]));
    FakeSink audio = (FakeSink) ElementFactory.make("fakesink", "audio-sink");
    FakeSink video = (FakeSink) ElementFactory.make("fakesink", "video-sink");
    pipe.setAudioSink(audio);
    pipe.setVideoSink(video);

    pipe.getBus()
        .connect(
            new Bus.TAG() {
              public void tagsFound(GstObject source, TagList tagList) {
                for (String tag : tagList.getTagNames()) {
                  System.out.println("Found tag " + tag + " = " + tagList.getValue(tag, 0));
                }
              }
            });

    //
    // In theory, an ASYNC_DONE from the pipeline corresponds with the demux
    // completing parsing the media file
    //
    pipe.getBus()
        .connect(
            new Bus.ASYNC_DONE() {
              public void asyncDone(GstObject source) {
                pipe.stop();
                done.countDown();
              }
            });
    audio.set("signal-handoffs", true);
    video.set("signal-handoffs", true);

    //
    // As soon as data starts to flow, it means all tags have been found
    //
    BaseSink.HANDOFF handoff =
        new BaseSink.HANDOFF() {
          public void handoff(BaseSink sink, Buffer buffer, Pad pad) {
            pipe.stop();
            done.countDown();
          }
        };
    audio.connect(handoff);
    video.connect(handoff);

    // Start the pipeline playing
    pipe.pause();
    try {
      done.await();
    } catch (InterruptedException ex) {
      // Restore the interrupt flag and fall through to clean up the pipeline.
      Thread.currentThread().interrupt();
    }
    pipe.stop();
  }
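Note that the pipeline is only ever set to pause(): reaching PAUSED is enough to make the pipeline pre-roll, which forces the demuxer to parse the stream headers and post its tags, without actually rendering any audio or video.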
Example #8
  private static void startStreaming(final VideoComponent vc, String settings, int port) {
    Gst.init();
    clientPipe = new Pipeline("pipeline");
    pushLog("> CTRL: " + "PLAY");
    pushLog("> SYS: " + " INIT STREAM");

    System.out.println("Starting with: C=" + clientLoc + ", S=" + serverLoc);

    // VIDEO
    Element udpVideoSrc = ElementFactory.make("udpsrc", "src1");
    udpVideoSrc.setCaps(
        Caps.fromString(
            "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)JPEG, payload=(int)96, ssrc=(uint)2156703816, clock-base=(uint)1678649553, seqnum-base=(uint)31324"));
    udpVideoSrc.set("uri", "udp://" + clientLoc + ":" + port);

    Element videoRtcpIn = ElementFactory.make("udpsrc", "src3");
    videoRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 1));

    Element videoRtcpOut = ElementFactory.make("udpsink", "snk1");
    videoRtcpOut.set("host", serverLoc);
    videoRtcpOut.set("port", "" + (port + 5));
    videoRtcpOut.set("sync", "false");
    videoRtcpOut.set("async", "false");

    Element udpAudioSrc = null, audioRtcpIn = null, audioRtcpOut = null, taud = null;

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO
      udpAudioSrc = ElementFactory.make("udpsrc", "src2");
      udpAudioSrc.setCaps(
          Caps.fromString(
              "application/x-rtp, media=(string)audio, clock-rate=(int)8000, encoding-name=(string)L16, encoding-params=(string)2, channels=(int)2, payload=(int)96, ssrc=(uint)3489550614, clock-base=(uint)2613725642, seqnum-base=(uint)1704"));
      udpAudioSrc.set("uri", "udp://" + clientLoc + ":" + (port + 2));

      taud = ElementFactory.make("tee", "taud");
      Element qaud = ElementFactory.make("queue", "qaud");
      AppSink appAudioSink = (AppSink) ElementFactory.make("appsink", "appAudioSink");
      appAudioSink.set("emit-signals", true);
      appAudioSink.setSync(false);
      audioQ = new LinkedList<FrameInfo>();
      appAudioSink.connect(
          new AppSink.NEW_BUFFER() {
            public void newBuffer(AppSink sink) {
              Buffer b = sink.getLastBuffer();
              if (b != null) {
                audioQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              }
            }
          });

      audioRtcpIn = ElementFactory.make("udpsrc", "src4");
      audioRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 3));

      audioRtcpOut = ElementFactory.make("udpsink", "snk2");
      audioRtcpOut.set("host", serverLoc);
      audioRtcpOut.set("port", "" + (port + 7));
      audioRtcpOut.set("sync", "false");
      audioRtcpOut.set("async", "false");

      clientPipe.addMany(taud, qaud, appAudioSink);
      clientPipe.addMany(udpAudioSrc, audioRtcpIn, audioRtcpOut);
      Element.linkMany(udpAudioSrc, taud, qaud, appAudioSink);
    }

    Element tvid = ElementFactory.make("tee", "tvid");
    Element qvid = ElementFactory.make("queue", "qvid");
    AppSink appVideoSink = (AppSink) ElementFactory.make("appsink", "appVideoSink");
    appVideoSink.set("emit-signals", true);
    appVideoSink.setSync(false);
    videoQ = new LinkedList<FrameInfo>();
    appVideoSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            Buffer b = sink.getLastBuffer();
            if (b != null) {
              videoQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              // System.out.println(System.currentTimeMillis());
            }
          }
        });
    clientPipe.addMany(tvid, qvid, appVideoSink);
    clientPipe.addMany(udpVideoSrc, videoRtcpIn, videoRtcpOut);
    Element.linkMany(udpVideoSrc, tvid, qvid, appVideoSink);

    // VIDEO BIN

    videoBin = new Bin("videoBin");

    // src1
    Element videoDepay = ElementFactory.make("rtpjpegdepay", "depay");
    Element videoDecode = ElementFactory.make("jpegdec", "decode");
    Element videoRate = ElementFactory.make("videorate", "rate1");
    Element videoColor = ElementFactory.make("ffmpegcolorspace", "color");
    Element videoSrc1Caps = ElementFactory.make("capsfilter", "src1caps");
    videoSrc1Caps.setCaps(Caps.fromString("video/x-raw-yuv, framerate=30/1"));
    Element videoColor2 = ElementFactory.make("ffmpegcolorspace", "color2");

    videoBin.addMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);
    Element.linkMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);

    videoBin.addPad(new GhostPad("sink", videoDepay.getStaticPad("sink")));
    clientPipe.add(videoBin);

    final Bin audioBin = new Bin("audioBin");

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO BIN

      final Element audioDepay = ElementFactory.make("rtpL16depay", "auddepay");
      Element audioConvert = ElementFactory.make("audioconvert", "audconv");
      mute = ElementFactory.make("volume", "vol");
      mute.set("mute", "true");
      final Element audioSink = ElementFactory.make("autoaudiosink", "audsink");

      audioBin.addMany(audioDepay, audioConvert, mute, audioSink);
      Element.linkMany(audioDepay, audioConvert, mute, audioSink);

      audioBin.addPad(new GhostPad("sink", audioDepay.getStaticPad("sink")));
      clientPipe.add(audioBin);
    }

    // RTPBIN

    final RTPBin rtp = new RTPBin("rtp");
    clientPipe.add(rtp);
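
    // gstrtpbin pad naming (as used below, per the gstreamer 0.10 scheme):
    // session 0 carries video and session 1 audio; recv_rtp_sink_* accept
    // incoming RTP, recv_rtcp_sink_* accept incoming RTCP, and
    // send_rtcp_src_* supply the RTCP packets sent back to the server.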

    Element.linkPads(tvid, "src1", rtp, "recv_rtp_sink_0");
    Element.linkPads(videoRtcpIn, "src", rtp, "recv_rtcp_sink_0");
    Element.linkPads(rtp, "send_rtcp_src_0", videoRtcpOut, "sink");

    if (attribute.equalsIgnoreCase("active")) {
      Element.linkPads(taud, "src1", rtp, "recv_rtp_sink_1");
      Element.linkPads(audioRtcpIn, "src", rtp, "recv_rtcp_sink_1");
      Element.linkPads(rtp, "send_rtcp_src_1", audioRtcpOut, "sink");
    }

    // DYNAMIC PADS: rtpbin creates its recv_rtp_src_* pads only once RTP data arrives.

    rtp.connect(
        new Element.PAD_ADDED() {
          @Override
          public void padAdded(Element arg0, Pad arg1) {
            if (arg1.getName().startsWith("recv_rtp_src_0")) {
              arg1.link(videoBin.getStaticPad("sink"));
            } else if (arg1.getName().startsWith("recv_rtp_src_1")
                && attribute.equalsIgnoreCase("active")) {
              arg1.link(audioBin.getStaticPad("sink"));
            }
            clientPipe.debugToDotFile(1, "clientsucc");
          }
        });

    // BUS

    Bus bus = clientPipe.getBus();

    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            pushLog("> GSTREAMER ERROR: code=" + code + " message=" + message);
            clientPipe.debugToDotFile(1, "clienterr");
          }
        });
    bus.connect(
        new Bus.EOS() {

          public void endOfStream(GstObject source) {
            clientPipe.setState(State.NULL);
            System.out.println("EOS");
          }
        });

    videoBin.add(vc.getElement());

    AppSink appJointSink = (AppSink) ElementFactory.make("appsink", "appJointSink");
    appJointSink.set("emit-signals", true);
    appJointSink.setSync(false);
    jointQ = new LinkedList<CompareInfo>();
    appJointSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            /*
            // Drain both queues and report how many audio vs. video buffers arrived.
            int vs = 0; int as = 0;
            while (videoQ.poll() != null) vs++;
            while (audioQ.poll() != null) as++;
            System.out.println("Compare: " + as + " : " + vs);
            */
          }
        });

    Element.linkMany(videoColor2, vc.getElement());

    Thread videoThread =
        new Thread() {
          public void run() {
            clientPipe.setState(org.gstreamer.State.PLAYING);
          }
        };
    videoThread.start();
    clientPipe.debugToDotFile(0, "appsink");
  }
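A side note on the trailing videoThread: setting a pipeline to PLAYING normally returns quickly with an asynchronous state change, so a dedicated thread for setState is likely unnecessary here, though it is harmless.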