/**
   * Waits for the pipeline to shut down on its own; if the maximum timeout is exceeded, the
   * pipeline is forcibly set to the NULL state.
   *
   * @param timeout maximum time to wait, in milliseconds, before forcing the shutdown
   */
  public void stop(long timeout) {
    // Stop the capture devices first, then wait for the pipeline to wind down.
    for (CaptureDeviceBin captureDeviceBin : captureDeviceBins) {
      captureDeviceBin.shutdown();
    }

    long startWait = System.currentTimeMillis();

    while (pipeline != null
        && pipeline.getState() != State.PAUSED
        && pipeline.getState() != State.NULL) {
      try {
        Thread.sleep(WAIT_FOR_NULL_SLEEP_TIME);
      } catch (InterruptedException e) {
        // Ignore the interrupt and keep polling until the timeout expires.
      }
      // If we've timed out then force kill the pipeline
      if (System.currentTimeMillis() - startWait >= timeout) {
        if (pipeline != null) {
          logger.debug("The pipeline took too long to shut down, now sending State.NULL.");
          pipeline.setState(State.NULL);
        }
        pipeline = null;
      }
    }

    if (pipeline != null) {
      pipeline.setState(State.NULL);
    }
    pipeline = null;
  }
Example #2
  /** @param pad the sink pad newly added to the adder, to which the latest input will be linked */
  public void linkNewInputToPad(Pad pad) {
    String addedUrl = inputURLs.get(inputURLs.size() - 1);
    final Element newInput = inputElements.get(addedUrl);

    EVENT_PROBE probeListener =
        new EVENT_PROBE() {
          public boolean eventReceived(Pad pad, Event event) {
            String type = event.getStructure().toString();
            if (DEBUG) System.out.println("eventReceived: " + type);
            if (type.toLowerCase().indexOf("error") >= 0) {
              System.out.println("Error");
              return false;
            }
            return true;
          }
        };
    adder.getSrcPads().get(0).addEventProbe(probeListener);

    Element identity = ElementFactory.make("identity", "identity" + INDEX);
    identity.set("sync", true);
    identity.set("single-segment", true);
    // identity.set("silent", false);
    // Element mux = ElementFactory.make("flvmux", "mux-timestamp" + INDEX);
    // Element enc = ElementFactory.make("lamemp3enc", "enc-timestamp" +
    // INDEX);
    // Element demux = ElementFactory.make("flvdemux", "demux-timestamp" +
    // INDEX);
    // Element dec = ElementFactory.make("flump3dec", "dec-timestamp" +
    // INDEX);
    //
    // // identity.set("silent", false);
    // pipe.addMany(identity, enc, mux, demux, dec);
    //
    // identity.setState(State.PAUSED);
    // mux.setState(State.PAUSED);
    // enc.setState(State.PAUSED);
    // demux.setState(State.PAUSED);
    // dec.setState(State.PAUSED);
    //
    pipe.addMany(newInput, identity);
    PadLinkReturn linked = newInput.getSrcPads().get(0).link(identity.getSinkPads().get(0));
    System.out.println("new input linked: " + linked);
    //
    // boolean l = Element.linkMany(identity, enc, mux, demux, dec);
    // System.out.println("mux demux linked: "+l);
    //
    // PadLinkReturn linked2 = dec.getSrcPads().get(0).link(pad);
    // System.out.println("new dec linked: " + linked2);
    if (pipe.isPlaying()) {
      State state = State.READY;
      // newInput.setState(state);
      pipe.setState(state);
    }
    PadLinkReturn linked3 = identity.getSrcPads().get(0).link(pad);
    System.out.println("new identity linked: " + linked3);
  }
  /** Collects the pipeline contents into locals so they can be inspected under a debugger. */
  public void debugGST() {

    List<Element> sinks = pipe.getSinks();
    List<Element> elements = pipe.getElementsRecursive();
    List<Element> sources = pipe.getSources();

    State state = pipe.getState();

    int a = 1; // convenient place for a breakpoint
  }
  /**
   * Initializes the pipeline itself, but does not start capturing
   *
   * @param devices The list of devices to capture from.
   * @param confidence True to enable confidence monitoring.
   * @return The created {@code Pipeline}, or null in the case of an error.
   */
  private Pipeline startPipeline(ArrayList<CaptureDevice> devices, boolean confidence) {
    logger.info("Successfully initialised {} devices.", devices.size());
    for (int i = 0; i < devices.size(); i++) logger.debug("Device #{}: {}.", i, devices.get(i));

    // setup gstreamer pipeline using capture devices
    Gst.init(); // the gst library cannot be used before it has been initialised

    Pipeline pipeline = new Pipeline();
    for (CaptureDevice c : devices) {
      if (!addCaptureDeviceBinsToPipeline(c, pipeline))
        logger.error("Failed to create pipeline for {}.", c);
    }

    pipeline.debugToDotFile(Pipeline.DEBUG_GRAPH_SHOW_ALL, pipeline.getName());
    return pipeline;
  }
  /**
   * Creates the GStreamer pipeline and blocks until it starts successfully.
   *
   * @param newRec The RecordingImpl of the capture we wish to perform.
   * @throws UnableToStartCaptureException if the pipeline cannot be created or does not reach the
   *     PLAYING state within the configured wait time
   */
  public void start(RecordingImpl newRec) {
    // Create the pipeline
    try {
      pipeline = create(newRec.getProperties(), false);
    } catch (UnsatisfiedLinkError e) {
      throw new UnableToStartCaptureException(
          e.getMessage() + " : please add libjv4linfo.so to /usr/lib to correct this issue.");
    }

    // Check if the pipeline came up ok
    if (pipeline == null) {
      // logger.error("Capture {} could not start, pipeline was null!", newRec.getID());
      captureFailureHandler.resetOnFailure(newRec.getID());
      throw new UnableToStartCaptureException(
          "Capture " + newRec.getID() + " could not start, pipeline was null!");
    }

    logger.info("Initializing devices for capture.");

    hookUpBus();

    // Grab time to wait for pipeline to start
    int wait;
    String waitProp = newRec.getProperty(CaptureParameters.CAPTURE_START_WAIT);
    if (waitProp != null) {
      wait = Integer.parseInt(waitProp);
    } else {
      wait = 5; // Default taken from gstreamer docs
    }

    pipeline.debugToDotFile(Pipeline.DEBUG_GRAPH_SHOW_ALL, pipeline.getName());
    // Try and start the pipeline
    pipeline.play();
    if (pipeline.getState(wait * GStreamerPipeline.GST_SECOND) != State.PLAYING) {
      // In case of an error call stop to clean up the pipeline.
      logger.debug("Pipeline was unable to start after " + wait + " seconds.");
      stop(GStreamerPipeline.DEFAULT_PIPELINE_SHUTDOWN_TIMEOUT);
      throw new UnableToStartCaptureException(
          "Unable to start pipeline after " + wait + " seconds.  Aborting!");
    }
    logger.info("{} started.", pipeline.getName());
  }
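A minimal sketch of how start() and stop() might be driven together; the wrapper name runCapture
and the incoming RecordingImpl instance are assumptions of this sketch, everything else is taken
from the methods above:

  // Hypothetical caller, assumed to live in the same class as start() and stop().
  public void runCapture(RecordingImpl recording) {
    try {
      start(recording); // blocks until the pipeline reaches PLAYING, or throws
    } catch (UnableToStartCaptureException e) {
      logger.error("Could not start capture {}: {}", recording.getID(), e.getMessage());
      return;
    }
    // ... capture runs here; stop() then asks each CaptureDeviceBin to shut down and waits
    // for the pipeline to reach PAUSED or NULL before forcing NULL on timeout.
    stop(GStreamerPipeline.DEFAULT_PIPELINE_SHUTDOWN_TIMEOUT);
  }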
Example #6
  public void removeInput(final String url) {

    if (!inputElements.containsKey(url)) return;

    System.out.println("\n------------\nremoveInput: " + url + "\n-------------\n");
    removedInput = inputElements.get(url);

    boolean playing = pipe.isPlaying();
    Pad inputSrcPad = removedInput.getSrcPads().get(0);
    removedIdentity = inputSrcPad.getPeer().getParentElement();
    Pad adderSinkPad = removedIdentity.getSrcPads().get(0).getPeer();
    pipe.setState(State.READY);
    boolean removed = adder.removePad(adderSinkPad);
    System.out.println("Pad removed: " + removed);

    inputElements.remove(url);
    if (playing) {
      System.out.println("Adder inputs: " + adder.getSinkPads());
      pipe.setState(State.PLAYING);
    }
  }
Example #7
  public static void commandStop(VideoComponent vc) {
    commandPause();

    clientPipe.setState(State.PAUSED);
    videoBin.setState(State.PAUSED);
    videoBin.unlink(vc.getElement());
    videoBin.remove(vc.getElement());

    clientPipe = null;
    JSONObject json_pause = new JSONObject();
    json_pause.put("command", "stop");
    out.println(json_pause.toString());
  }
  /**
   * Adds a {@code CaptureDeviceBin} built around the specified capture device to the pipeline.
   *
   * @param captureDevice {@code CaptureDevice} to create the bin around
   * @param pipeline The {@code Pipeline} to add the bin to
   * @return True, if successful
   */
  protected boolean addCaptureDeviceBinsToPipeline(CaptureDevice captureDevice, Pipeline pipeline) {

    CaptureDeviceBin captureDeviceBin = null;
    try {
      captureDeviceBin = new CaptureDeviceBin(captureDevice, properties);
    } catch (Exception e) {
      e.printStackTrace();
      return false;
    }
    pipeline.add(captureDeviceBin.getBin());
    // Add them to a list so that we can send EOS's to their source Elements.
    captureDeviceBins.add(captureDeviceBin);
    return true;
  }
  public void StopVideoStream() {
    Parent.WriteLogtoConsole("Stopping Gstreamer Video Player");

    pipe.setState(State.READY);

    // debugGST();

    Element videosink = videoComponent.getElement();
    videoComponent.repaint();
    pipe.getElementByName("elphelstream").unlink(videosink);
    pipe.remove(videosink);

    // debugGST();

    pipe.setState(State.NULL);

    // debugGST();

    List<Element> elements = pipe.getElementsRecursive();
    for (int i = 0; i < elements.size(); i++) {
      pipe.unlink(elements.get(i));
      pipe.remove(elements.get(i));
      elements.get(i).dispose();
    }

    List<Element> sources = pipe.getSources();
    for (int i = 0; i < sources.size(); i++) {
      pipe.unlink(sources.get(i));
      pipe.remove(sources.get(i));
      sources.get(i).dispose();
    }

    // debugGST();
    pipe = null;
    // debugGST();
  }
Example #10
 public void play() {
   pipe.setState(State.PLAYING);
 }
Example #11
  private void addInput(String url) {

    INDEX++;
    int i = INDEX;
    System.out.println("\n------------\naddInput[" + i + "]: " + url + "\n-------------\n");

    Element input = null;
    if (!FAKE_INPUT) {
      /* create audio output */
      final Bin audioBin = new Bin("Audio Bin" + i);

      Element src = null;
      if (url.contains("http://")) {
        src = ElementFactory.make("gnomevfssrc", "Input" + i);
        src.set("location", url);
      } else if (url.contains("rtmp") && url.contains("://")) {
        src = ElementFactory.make("rtmpsrc", "Input" + i);
        // src.set("do-timestamp", true);
        src.set("location", url);
      } else {
        src = ElementFactory.make("filesrc", "Input" + i);
        src.set("location", url);
      }

      DecodeBin2 decodeBin = new DecodeBin2("Decode Bin" + i);
      Element decodeQueue = ElementFactory.make("queue2", "Decode Queue" + i);

      Element conv = ElementFactory.make("audioconvert", "Audio Convert" + i);
      Element resample = ElementFactory.make("audioresample", "Audio Resample" + i);
      Element volume = ElementFactory.make("volume", "Audio Volume" + i);
      volume.set("volume", 1.0f);
      volumeElements.put(url, volume);
      audioBin.addMany(conv, resample, volume);
      Element.linkMany(conv, resample, volume);
      audioBin.addPad(new GhostPad("src", volume.getStaticPad("src")));
      audioBin.addPad(new GhostPad("sink", conv.getStaticPad("sink")));

      input = new Bin("Input Bin" + i);
      ((Bin) input).addMany(src, decodeQueue, decodeBin, audioBin);
      Element.linkMany(src, decodeQueue, decodeBin, audioBin);
      input.addPad(new GhostPad("src", audioBin.getSrcPads().get(0)));

      decodeBin.connect(
          new DecodeBin2.NEW_DECODED_PAD() {
            public void newDecodedPad(Element elem, Pad pad, boolean last) {
              /* only link once */
              if (pad.isLinked()) {
                return;
              }
              /* check media type */
              Caps caps = pad.getCaps();
              Structure struct = caps.getStructure(0);
              if (struct.getName().startsWith("audio/")) {
                System.out.println("Linking audio pad: " + struct.getName());
                if (audioBin.getStaticPad("sink").getPeer() == null) {
                  PadLinkReturn linked = pad.link(audioBin.getStaticPad("sink"));
                  System.out.println("Decodebin linked " + linked);
                }
              } else if (struct.getName().startsWith("video/")) {
                System.out.println("Linking video pad: " + struct.getName());
              } else {
                System.out.println("Unknown pad [" + struct.getName() + "]");
              }
            }
          });
    } else {
      input = ElementFactory.make("audiotestsrc", "Audio Fake" + i);
      int w = i;
      if (i > 1) w = 5;
      input.set("wave", w);
      input.set("is-live", true);
    }

    if (!inputURLs.contains(url)) {
      inputURLs.add(url);
    }
    inputElements.put(url, input);

    boolean playing = pipe.isPlaying();

    // Requesting a new sink pad triggers the adder's PAD_ADDED callback, which calls
    // linkNewInputToPad() to hook this input up to the returned pad.
    Pad adderSink = adder.getRequestPad("sink%d");
    if (playing) {
      System.out.println("Adder inputs: " + adder.getSinkPads());
      pipe.setState(State.PLAYING);
    }
  }
Example #12
  /**
   * Builds the audio mixer pipeline (liveadder, caps filter, tee and the configured outputs) from
   * the instance's input URLs, output URL and FIFO path.
   */
  public void createPipeline() {
    String config = "rate=" + OUTPUT_FREQ + ",channels=" + OUTPUT_CHANELS + ",depth=16";
    pathFifo = "/tmp/" + outputURL.replace('/', '_') + "_" + config;
    this.capsAudio = Caps.fromString("audio/x-raw-int," + config + ";audio/x-raw-float," + config);
    pipe = new Pipeline("Audio Mixer Pipeline");
    try {
      adder = ElementFactory.make("liveadder", "liveadder");
      // adder.set("latency", 5 * 1000);
    } catch (Exception e) {
      // liveadder is unavailable if its plugin is not installed; adder stays null in that case.
    }
    adder.connect(
        new PAD_REMOVED() {
          @Override
          public void padRemoved(Element element, Pad pad) {
            if (removedInput != null) {
              pipe.remove(removedInput);
              pipe.remove(removedIdentity);
              removedInput.setState(State.NULL);
              removedIdentity.setState(State.NULL);
              System.out.println("padRemoved: " + removedInput);
              removedInput = null;
              removedIdentity = null;
              System.gc();
            }
          }
        });

    adder.connect(
        new PAD_ADDED() {
          @Override
          public void padAdded(Element element, Pad pad) {
            linkNewInputToPad(pad);
          }
        });

    Element tee = ElementFactory.make("tee", "tee");

    Element capsfilter = ElementFactory.make("capsfilter", null);
    capsfilter.setCaps(capsAudio);
    queueTee = ElementFactory.make("queue2", null);
    pipe.addMany(adder, queueTee, tee, capsfilter);
    Element.linkMany(adder, queueTee, capsfilter, tee);

    if (AUDIO_OUTPUT) {
      Element audioSink = ElementFactory.make("alsasink", "Audio Sink");
      audioSink.set("sync", false);
      pipe.addMany(audioSink);
      Element.linkMany(tee, audioSink);
    }

    if (STREAM_OUTPUT) {

      // fileFifo.deleteOnExit();
      fileFifo = new File(pathFifo);
      try {
        if (!fileFifo.exists()) {
          // fileFifo.delete();
          String command = "/usr/bin/mkfifo " + fileFifo.getAbsolutePath();
          ProcessBuilder b = new ProcessBuilder("/bin/sh", "-c", command);
          b.start().waitFor();
        }
      } catch (Exception e) {
        e.printStackTrace();
      }

      Element codecEnc = ElementFactory.make("lamemp3enc", "MP3 Encoder");
      Element mux = ElementFactory.make("flvmux", "FLV Muxer");
      Element queue = ElementFactory.make("queue2", "Fifo Queue");
      Element filesink = ElementFactory.make("filesink", "Fifo Sink");
      filesink.set("sync", false);
      filesink.set("location", fileFifo.getAbsolutePath());
      pipe.addMany(queue, codecEnc, mux, filesink);
      Element.linkMany(tee, queue, codecEnc, mux, filesink);

      startFFmpegProcess();
    }

    int i = 0;
    for (String url : inputURLs) {
      i++;
      if (checkUrl(url)) {
        addInput(url);
      }
    }

    prepareBus();
  }
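The mixer methods in these examples (createPipeline(), play(), addInput(), removeInput()) are
meant to be driven together; below is a rough sketch of that lifecycle, assuming inputURLs and
outputURL have been populated beforehand. The class name AudioMixer and the RTMP URL are
placeholders; the calls themselves come from the snippets:

  AudioMixer mixer = new AudioMixer();
  mixer.createPipeline(); // builds liveadder -> queue2 -> capsfilter -> tee and the optional ALSA/FLV outputs
  mixer.play();           // sets the pipeline to PLAYING
  mixer.addInput("rtmp://example.org/live/voice");    // placeholder URL; the requested adder pad
                                                      // fires PAD_ADDED -> linkNewInputToPad()
  // ... later ...
  mixer.removeInput("rtmp://example.org/live/voice"); // detaches the input and its identity element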
  public void PlayVideoStream() {
    // debugGST();

    String rtspsource = "";
    if (Parent.Camera.GetColorMode() == ColorMode.RGB) {
      rtspsource =
          "rtspsrc location=rtsp://"
              + Parent.Camera.GetIP()[0]
              + ":554 protocols=0x00000001 latency=60 name=camerasource ! rtpjpegdepay ! jpegdec ! decodebin2 ! ffmpegcolorspace name=elphelstream";
    } else if (Parent.Camera.GetColorMode() == ColorMode.JP46) {
      //            rtspsrc location=rtsp://" + Parent.Camera.GetIP() + ":554 protocols=0x00000001
      // latency=50 ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 !
      // ffmpegcolorspace ! videorate ! "video/x-raw-yuv, format=(fourcc)I420, width=(int)1920,
      // height=(int)1088, framerate=(fraction)25/1" ! xvimagesink sync=false max-lateness=-1
      rtspsource =
          "rtspsrc location=rtsp://"
              + Parent.Camera.GetIP()[0]
              + ":554 protocols=0x00000001 latency=60 name=camerasource ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 method=0 ! ffmpegcolorspace  name=elphelstream";
    } else {
      // TODO in this mode we don't see anything from the non-jpeg compliant stream so the jp46
      // filter won't help, but what else should we show?
      rtspsource =
          "rtspsrc location=rtsp://"
              + Parent.Camera.GetIP()[0]
              + ":554 protocols=0x00000001 latency=60 name=camerasource ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 method=0 name=elphelstream";
    }

    pipe = Pipeline.launch(rtspsource);

    videoComponent.setKeepAspect(true);
    Element videosink = videoComponent.getElement();
    pipe.add(videosink);

    pipe.getElementByName("elphelstream").link(videosink);

    pipe.getBus()
        .connect(
            new Bus.EOS() {

              public void endOfStream(GstObject source) {
                System.out.println("Finished playing file");
                // Gst.quit();
              }
            });
    /*pipe.getBus().connect(new Bus.INFO() {

    public void infoMessage(GstObject source, int code, String message) {
    System.out.println("message: " + message);
    }
    });
    pipe.getBus().connect(new Bus.MESSAGE() {

    public void busMessage(Bus bus, Message message) {
    System.out.println("message: " + message);
    }
    });
    pipe.getBus().connect(new Bus.WARNING() {

    public void warningMessage(GstObject source, int code, String message) {
    System.out.println("warning: " + message);
    }
    });*/
    pipe.getBus()
        .connect(
            new Bus.ERROR() {

              public void errorMessage(GstObject source, int code, String message) {
                System.out.println("Error occurred: " + message);
                Gst.quit();
              }
            });
    /*pipe.getBus().connect(new Bus.STATE_CHANGED() {

    public void stateChanged(GstObject source, State old, State current, State pending) {
    if (source == pipe) {
    System.out.println("Pipeline state changed from " + old + " to " + current);
    }
    }
    });*/

    pipe.setState(State.PLAYING);

    /*
    Playbin.add(videosink);
    //     Pipeline pipe = new Pipeline("SimplePipeline");
    /*
    Element src = ElementFactory.make("rtspsrc", "elphelsrc");
    src.set("location", "rtsp://" + Parent.Camera.GetIP()[0] + ":554");
    src.set("protocols", "00000001"); // "0x00000001" doesnt work
    src.set("latency", "50");
    Playbin.add(src);*/
    // Playbin.getElementByName("elphelsrc").link(Playbin.getSinks().get(0));
    // Element sink = ElementFactory.make("fakesink", "Destination");
    // Playbin.addMany(src, sink);
    // src.link(sink);*/
    // Playbin2
    /*
    try {
    Playbin.setURI(new URI("rtsp://" + Parent.Camera.GetIP()[0] + ":554"));
    } catch (URISyntaxException ex) {
    Logger.getLogger(GstreamerPlayer.class.getName()).log(Level.SEVERE, null, ex);
    }
    //.getElementByName("uri").set("protocols", "00000001");
    //Playbin.getElementByName("uri").set("latency", "50");

    //Playbin.setInputFile(new File("test.avi")); // works

    // Playbin.set("buffer-duration", 0); // NO Effect
    // Playbin.set("buffer-size", 0); // NO Effect

    Playbin.setState(State.PLAYING);
     */
    /*
    final List<URI> playList = new LinkedList<URI>();
    for (String arg : args) {
    playList.add(new File(arg).toURI());
    }

    GSTPlayer.setPreferredSize(new Dimension(640, 480));
    GSTPlayer.setControlsVisible(false);
    //player.setKeepAspect(true);
    GSTPlayer.getMediaPlayer().setPlaylist(playList);
    GSTPlayer.getMediaPlayer().play();

     */
    // GstreamerPlayer.SetVideocomponent(GstreamerVideoComponent);
    /*
    String rtspsource = "";
    if (Parent.Camera.GetColorMode() == ColorMode.RGB) {
    rtspsource = "rtspsrc location=rtsp://" + Parent.Camera.GetIP()[0] + ":554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! decodebin ! ffmpegcolorspace name=elphelstream";
    } else if (Parent.Camera.GetColorMode() == ColorMode.JP46) {
    //            rtspsrc location=rtsp://" + Parent.Camera.GetIP() + ":554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 ! ffmpegcolorspace ! videorate ! "video/x-raw-yuv, format=(fourcc)I420, width=(int)1920, height=(int)1088, framerate=(fraction)25/1" ! xvimagesink sync=false max-lateness=-1
    rtspsource = "rtspsrc location=rtsp://" + Parent.Camera.GetIP()[0] + ":554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 method=0 ! ffmpegcolorspace  name=elphelstream";
    } else {
    //TODO in this mode we dont see anything from the non-jpeg compliant stream so the jp46 filter wont help, but what else should we show?
    rtspsource = "rtspsrc location=rtsp://" + Parent.Camera.GetIP()[0] + ":554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! jp462bayer ! queue ! bayer2rgb2 method=0 name=elphelstream";
    }

    //Notes

    //LUT
    //gst-launch rtspsrc location=rtsp://192.168.10.141:554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! ffmpegcolorspace ! videorate ! coloreffects preset=heat ! ffmpegcolorspace ! autovideosink -v

    //edge detection:
    //gst-launch rtspsrc location=rtsp://192.168.10.141:554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! ffmpegcolorspace ! videorate ! edgetv ! ffmpegcolorspace ! autovideosink -v

    // kind of scopes
    //gst-launch rtspsrc location=rtsp://192.168.10.141:554 protocols=0x00000001 latency=50 ! rtpjpegdepay ! jpegdec ! queue ! ffmpegcolorspace ! videorate ! revtv ! ffmpegcolorspace ! autovideosink -v

    pipe = Pipeline.launch(rtspsource);

    videoComponent.setKeepAspect(true);
    Element videosink = videoComponent.getElement();
    pipe.add(videosink);
    pipe.getElementByName("elphelstream").link(videosink);


    pipe.setState(State.PLAYING);
    List<Element> sinks = pipe.getSinks();

    /*

    String rtspsource = "rtspsrc location=rtsp://" + Parent.Camera.GetIP() + ":554 latency=30 ! rtpjpegdepay ! jpegdec name=elphelstream";
    pipe = Pipeline.launch(rtspsource);

    //pipe = new Pipeline("GstreamerViewer");

    //videosrc = ElementFactory.make("videotestsrc", "source");
    //videosrc = ElementFactory.make("rtspsource ", "source");
    ///videosrc.setCaps(Caps.fromString("location=rtsp://192.168.10.141:554 latency=30"));
    //Element rtpjpegdepay = ElementFactory.make("rtpjpegdepay", "rtpjpegdepay");
    //Element jpegdec = ElementFactory.make("jpegdec", "jpegdec");

    Element videosink = ElementFactory.make("xvimagesink", "sink");

    //Element videosink = videoComponent.getElement();
    pipe.add(videosink);
    pipe.getElementByName("elphelstream").link(videosink);

    //playbin.setInputFile(new File(args[0]));
    //pipe.addMany(videosrc, rtpjpegdepay, jpegdec, videosink);
    //videosrc.link(videosink);
    //Element.linkMany(videosrc, rtpjpegdepay, jpegdec, videosink);
    pipe.setState(State.PLAYING);
    Gst.main();
    pipe.setState(State.NULL);


    //gst-launch rtspsrc location=rtsp://192.168.10.141:554 latency=30 ! rtpjpegdepay ! jpegdec ! xvimagesink

    //VideoComponent videoComponent = new VideoComponent();
    //videosink = videoComponent.getElement();
    //playbin.setVideoSink(videosink);


    //Element.linkMany(videosrc, videosink);*/
  }
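PlayVideoStream() and StopVideoStream() (shown earlier in these examples) form a pair; a short
hedged usage sketch, with player standing in for whatever object holds both methods:

  player.PlayVideoStream(); // builds the rtspsrc pipeline and links it to the Swing video component
  // ... the stream is displayed ...
  player.StopVideoStream(); // unlinks the sink, sets the pipeline to NULL and disposes its elements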
Example #14
  private static void startStreaming(final VideoComponent vc, String settings, int port) {
    Gst.init();
    clientPipe = new Pipeline("pipeline");
    pushLog("> CTRL: " + "PLAY");
    pushLog("> SYS: " + " INIT STREAM");

    System.out.println("Starting with: C=" + clientLoc + ", S=" + serverLoc);

    // VIDEO
    Element udpVideoSrc = ElementFactory.make("udpsrc", "src1");
    udpVideoSrc.setCaps(
        Caps.fromString(
            "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)JPEG, payload=(int)96, ssrc=(uint)2156703816, clock-base=(uint)1678649553, seqnum-base=(uint)31324"));
    udpVideoSrc.set("uri", "udp://" + clientLoc + ":" + port);

    Element videoRtcpIn = ElementFactory.make("udpsrc", "src3");
    videoRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 1));

    Element videoRtcpOut = ElementFactory.make("udpsink", "snk1");
    videoRtcpOut.set("host", serverLoc);
    videoRtcpOut.set("port", "" + (port + 5));
    videoRtcpOut.set("sync", "false");
    videoRtcpOut.set("async", "false");

    Element udpAudioSrc = null, audioRtcpIn = null, audioRtcpOut = null, taud = null;

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO
      udpAudioSrc = ElementFactory.make("udpsrc", "src2");
      udpAudioSrc.setCaps(
          Caps.fromString(
              "application/x-rtp, media=(string)audio, clock-rate=(int)8000, encoding-name=(string)L16, encoding-params=(string)2, channels=(int)2, payload=(int)96, ssrc=(uint)3489550614, clock-base=(uint)2613725642, seqnum-base=(uint)1704"));
      udpAudioSrc.set("uri", "udp://" + clientLoc + ":" + (port + 2));

      taud = ElementFactory.make("tee", "taud");
      Element qaud = ElementFactory.make("queue", "qaud");
      AppSink appAudioSink = (AppSink) ElementFactory.make("appsink", "appAudioSink");
      appAudioSink.set("emit-signals", true);
      appAudioSink.setSync(false);
      audioQ = new LinkedList<FrameInfo>();
      appAudioSink.connect(
          new AppSink.NEW_BUFFER() {
            public void newBuffer(AppSink sink) {
              Buffer b = sink.getLastBuffer();
              if (b != null) {
                audioQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              }
            }
          });

      audioRtcpIn = ElementFactory.make("udpsrc", "src4");
      audioRtcpIn.set("uri", "udp://" + clientLoc + ":" + (port + 3));

      audioRtcpOut = ElementFactory.make("udpsink", "snk2");
      audioRtcpOut.set("host", serverLoc);
      audioRtcpOut.set("port", "" + (port + 7));
      audioRtcpOut.set("sync", "false");
      audioRtcpOut.set("async", "false");

      clientPipe.addMany(taud, qaud, appAudioSink);
      clientPipe.addMany(udpAudioSrc, audioRtcpIn, audioRtcpOut);
      Element.linkMany(udpAudioSrc, taud, qaud, appAudioSink);
    }

    Element tvid = ElementFactory.make("tee", "tvid");
    Element qvid = ElementFactory.make("queue", "qvid");
    AppSink appVideoSink = (AppSink) ElementFactory.make("appsink", "appVideoSink");
    appVideoSink.set("emit-signals", true);
    appVideoSink.setSync(false);
    videoQ = new LinkedList<FrameInfo>();
    appVideoSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            Buffer b = sink.getLastBuffer();
            if (b != null) {
              videoQ.offer(new FrameInfo(System.currentTimeMillis(), b.getSize()));
              // System.out.println(System.currentTimeMillis());
            }
          }
        });
    clientPipe.addMany(tvid, qvid, appVideoSink);
    clientPipe.addMany(udpVideoSrc, videoRtcpIn, videoRtcpOut);
    Element.linkMany(udpVideoSrc, tvid, qvid, appVideoSink);

    // VIDEO BIN

    videoBin = new Bin("videoBin");

    // src1
    Element videoDepay = ElementFactory.make("rtpjpegdepay", "depay");
    Element videoDecode = ElementFactory.make("jpegdec", "decode");
    Element videoRate = ElementFactory.make("videorate", "rate1");
    Element videoColor = ElementFactory.make("ffmpegcolorspace", "color");
    Element videoSrc1Caps = ElementFactory.make("capsfilter", "src1caps");
    videoSrc1Caps.setCaps(Caps.fromString("video/x-raw-yuv, framerate=30/1"));
    Element videoColor2 = ElementFactory.make("ffmpegcolorspace", "color2");

    videoBin.addMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);
    Element.linkMany(videoDepay, videoDecode, videoRate, videoColor, videoSrc1Caps, videoColor2);

    videoBin.addPad(new GhostPad("sink", videoDepay.getStaticPad("sink")));
    clientPipe.add(videoBin);

    final Bin audioBin = new Bin("audioBin");

    if (attribute.equalsIgnoreCase("active")) {
      // AUDIO BIN

      final Element audioDepay = ElementFactory.make("rtpL16depay", "auddepay");
      Element audioConvert = ElementFactory.make("audioconvert", "audconv");
      mute = ElementFactory.make("volume", "vol");
      mute.set("mute", "true");
      final Element audioSink = ElementFactory.make("autoaudiosink", "audsink");

      audioBin.addMany(audioDepay, audioConvert, mute, audioSink);
      Element.linkMany(audioDepay, audioConvert, mute, audioSink);

      audioBin.addPad(new GhostPad("sink", audioDepay.getStaticPad("sink")));
      clientPipe.add(audioBin);
    }

    // RTPBIN

    final RTPBin rtp = new RTPBin("rtp");
    clientPipe.add(rtp);

    Element.linkPads(tvid, "src1", rtp, "recv_rtp_sink_0");
    Element.linkPads(videoRtcpIn, "src", rtp, "recv_rtcp_sink_0");
    Element.linkPads(rtp, "send_rtcp_src_0", videoRtcpOut, "sink");

    if (attribute.equalsIgnoreCase("active")) {
      Element.linkPads(taud, "src1", rtp, "recv_rtp_sink_1");
      Element.linkPads(audioRtcpIn, "src", rtp, "recv_rtcp_sink_1");
      Element.linkPads(rtp, "send_rtcp_src_1", audioRtcpOut, "sink");
    }

    // BUS

    rtp.connect(
        new Element.PAD_ADDED() {
          @Override
          public void padAdded(Element arg0, Pad arg1) {
            if (arg1.getName().startsWith("recv_rtp_src_0")) {
              arg1.link(videoBin.getStaticPad("sink"));
            } else if (arg1.getName().startsWith("recv_rtp_src_1")
                && attribute.equalsIgnoreCase("active")) {
              arg1.link(audioBin.getStaticPad("sink"));
            }
            clientPipe.debugToDotFile(1, "clientsucc");
          }
        });

    Bus bus = clientPipe.getBus();

    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            pushLog("> GSTREAMER ERROR: code=" + code + " message=" + message);
            clientPipe.debugToDotFile(1, "clienterr");
          }
        });
    bus.connect(
        new Bus.EOS() {

          public void endOfStream(GstObject source) {
            clientPipe.setState(State.NULL);
            System.out.println("EOS");
          }
        });

    videoBin.add(vc.getElement());

    AppSink appJointSink = (AppSink) ElementFactory.make("appsink", "appJointSink");
    appJointSink.set("emit-signals", true);
    appJointSink.setSync(false);
    jointQ = new LinkedList<CompareInfo>();
    appJointSink.connect(
        new AppSink.NEW_BUFFER() {
          public void newBuffer(AppSink sink) {
            /*
            int vs = 0; int as = 0;
            while (videoQ != null) {
            	vs++; videoQ.poll();
            }
            while (audioQ != null) {
            	as++; audioQ.poll();
            }
            System.out.println("Compare: " + as + " : " + vs);
            */
          }
        });

    Element.linkMany(videoColor2, vc.getElement());

    Thread videoThread =
        new Thread() {
          public void run() {
            clientPipe.setState(org.gstreamer.State.PLAYING);
          }
        };
    videoThread.start();
    clientPipe.debugToDotFile(0, "appsink");
  }
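startStreaming() builds the receiving side and commandStop() (Example #7 above) tears it down; a
hedged sketch of the pairing, with the empty settings string and the port number as placeholders:

  VideoComponent vc = new VideoComponent(); // Swing video component from gstreamer-java
  startStreaming(vc, "", 5000); // starts the RTP/RTCP receive pipeline on a background thread
  // ... later, on user request ...
  commandStop(vc);              // pauses the pipeline, detaches the component and notifies the server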
 private void hookUpBus() {
   logger.debug("Starting to hookup GStreamer Pipeline bus. ");
   // Hook up the shutdown handlers
   Bus bus = pipeline.getBus();
   bus.connect(
       new Bus.EOS() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.EOS#endOfStream(org.gstreamer.GstObject)
          */
         public void endOfStream(GstObject arg0) {
           logger.debug("Pipeline received EOS.");
           pipeline.setState(State.NULL);
           pipeline = null;
         }
       });
   bus.connect(
       new Bus.ERROR() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.ERROR#errorMessage(org.gstreamer.GstObject, int,
          *     java.lang.String)
          */
         public void errorMessage(GstObject obj, int retCode, String msg) {
           logger.error("{}: {}", obj.getName(), msg);
         }
       });
   bus.connect(
       new Bus.WARNING() {
         /**
          * {@inheritDoc}
          *
          * @see org.gstreamer.Bus.WARNING#warningMessage(org.gstreamer.GstObject, int,
          *     java.lang.String)
          */
         public void warningMessage(GstObject obj, int retCode, String msg) {
           logger.warn("{}: {}", obj.getName(), msg);
         }
       });
   logger.debug("Successfully hooked up GStreamer Pipeline bus to Log4J.");
 }
Example #16
  public void prepareBus() {
    Bus bus = pipe.getBus();

    STATE_CHANGED =
        new STATE_CHANGED() {
          @Override
          public void stateChanged(GstObject source, State old, State current, State pend) {
            if (source == pipe) {
              System.out.println("Pipeline new state: " + current);
              if (old == State.PLAYING && current == State.NULL) {
                if (AUTO_RECOVERY) {
                  pipe.setState(State.NULL);
                  pipe = null;
                  inputElements.clear();
                  closeFFmpegProcess();
                  createPipeline();
                  playThread();
                }
              }
            }
          }
        };
    bus.connect(STATE_CHANGED);

    bus.connect(
        new Bus.ERROR() {
          public void errorMessage(GstObject source, int code, String message) {
            System.out.println("Error: code=" + code + " message=" + message);
            if (code == 1) {
              pipe.setState(State.NULL);
            }
          }
        });
    bus.connect(
        new Bus.EOS() {
          public void endOfStream(GstObject source) {
            if (AUTO_RECOVERY) {
              pipe.setState(State.NULL);
              pipe = null;
              createPipeline();
              closeFFmpegProcess();
              startFFmpegProcess();
              playThread();
              // writer.close();
              // if (outContainer.writeTrailer() < 0)
              // throw new RuntimeException();
              // outContainer.close();
              // System.exit(0);
            }
          }
        });
  }