Example #1
  public void setMainSource() {
    setProcessing(false);
    // Ask JMF for capture devices that can deliver raw RGB video (i.e. webcams).
    VideoFormat vidformat = new VideoFormat(VideoFormat.RGB);
    Vector devices = CaptureDeviceManager.getDeviceList(vidformat);
    CaptureDeviceInfo di = null;

    if (devices.size() > 0) di = (CaptureDeviceInfo) devices.elementAt(0);
    else {
      JOptionPane.showMessageDialog(
          parent, "Your camera is not connected", "No webcam found", JOptionPane.WARNING_MESSAGE);
      return;
    }

    try {
      // Build the capture DataSource from the selected device's MediaLocator.
      ml = di.getLocator();
      setMainCamSource(Manager.createDataSource(ml));
    } catch (Exception e) {
      JOptionPane.showMessageDialog(
          parent,
          "Exception locating media: " + e.getMessage(),
          "Error",
          JOptionPane.WARNING_MESSAGE);
      return;
    }
  }
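
The method above hands the capture DataSource to setMainCamSource(), which is not shown. As a rough sketch of what a consumer of that DataSource can look like, the snippet below realizes a Player on it and shows its visual component in a Swing frame; the method name showPreview and the frame handling are illustrative assumptions, not part of the original class.

  // Sketch only: render the webcam DataSource in a Swing window.
  // showPreview and the JFrame handling are illustrative, not original code.
  public void showPreview(DataSource camSource) {
    try {
      // createRealizedPlayer blocks until the Player reaches the Realized state.
      Player preview = Manager.createRealizedPlayer(camSource);
      JFrame frame = new JFrame("Webcam preview");
      java.awt.Component visual = preview.getVisualComponent();
      if (visual != null) frame.add(visual);
      frame.pack();
      frame.setVisible(true);
      preview.start();
    } catch (Exception e) {
      JOptionPane.showMessageDialog(
          parent,
          "Cannot preview camera: " + e.getMessage(),
          "Error",
          JOptionPane.WARNING_MESSAGE);
    }
  }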
Example #2
  public void player_begin(URL url) {
    if (h4JmfPlugin.playMP3 != null) {
      logger.severe("playMP3!=null");
      h4JmfPlugin.cnsl.append("playMP3!=null");
      return;
    }
    if (url == null) {
      h4JmfPlugin.cnsl.append("url==null");
      return;
    }
    MediaLocator mediaLocator = new MediaLocator(url);
    try {
      DataSource ds = Manager.createDataSource(mediaLocator);
      h4JmfPlugin.playMP3 = Manager.createPlayer(ds);
      // ControllerListener moved to the outer class;
      // addControllerListener is done in h4JmfPlugin.
      h4JmfPlugin.player_begin();
    } catch (Exception e) {
      logger.severe(e.getMessage());
      h4JmfPlugin.cnsl.append(e);
      return;
    }
    // h4JmfPlugin.playMP3.realize();
    // logger.info("after realize()");
    // but realize() here can fail, e.g.:
    //   [JMF thread: com.sun.media.PlaybackEngine@1ac13d7 (realizeThread)] [error]
    //   Unable to handle format: mpeglayer3, 16000.0 Hz, 16-bit, Mono,
    //   LittleEndian, Signed, 2000.0 frame rate, FrameSize=16384 bits
    // and, running tshvr under hedwig at 11:17:08 PM:
    //   [JMF thread: com.sun.media.content.unknown.Handler@8c7be5 (prefetchThread)] [error]
    //   Error: Unable to prefetch com.sun.media.PlaybackEngine@6d3b92
  } // player_begin
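
The comment in player_begin() notes that the ControllerListener lives in the outer h4JmfPlugin class, which is not shown here. A typical JMF listener for this kind of player follows the pattern sketched below; it illustrates the usual event handling and is not the actual h4JmfPlugin code.

  // Sketch of a typical javax.media.ControllerListener; the real listener
  // lives in h4JmfPlugin and may differ.
  private final ControllerListener playbackListener =
      new ControllerListener() {
        public void controllerUpdate(ControllerEvent event) {
          Player p = (Player) event.getSourceController();
          if (event instanceof RealizeCompleteEvent) {
            p.prefetch(); // realized: buffer the media next
          } else if (event instanceof PrefetchCompleteEvent) {
            p.start(); // buffers filled: begin playback
          } else if (event instanceof EndOfMediaEvent) {
            p.setMediaTime(new Time(0)); // rewind so the clip can be replayed
            p.stop();
          } else if (event instanceof ControllerErrorEvent) {
            logger.severe(((ControllerErrorEvent) event).getMessage());
          }
        }
      };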
Example #3
  private String createProcessor() {
    if (locator == null) return "Locator is null";

    DataSource ds;
    DataSource clone;

    try {
      ds = Manager.createDataSource(locator);
    } catch (Exception e) {
      return "Couldn't create DataSource";
    }

    // Try to create a processor to handle the input media locator
    try {
      processor = Manager.createProcessor(ds);
    } catch (NoProcessorException npe) {
      return "Couldn't create processor";
    } catch (IOException ioe) {
      return "IOException creating processor";
    }

    // Wait for it to configure
    boolean result = waitForState(processor, Processor.Configured);
    if (result == false) return "Couldn't configure processor";

    // Get the tracks from the processor
    TrackControl[] tracks = processor.getTrackControls();

    // Do we have at least one track?
    if (tracks == null || tracks.length < 1) return "Couldn't find tracks in processor";

    boolean programmed = false;

    // Search through the tracks for a video track
    for (int i = 0; i < tracks.length; i++) {
      Format format = tracks[i].getFormat();
      if (tracks[i].isEnabled() && format instanceof VideoFormat && !programmed) {

        // Found a video track. Try to program it to output JPEG/RTP
        // Make sure the sizes are multiple of 8's.
        Dimension size = ((VideoFormat) format).getSize();
        float frameRate = ((VideoFormat) format).getFrameRate();
        int w = (size.width % 8 == 0 ? size.width : (int) (size.width / 8) * 8);
        int h = (size.height % 8 == 0 ? size.height : (int) (size.height / 8) * 8);
        VideoFormat jpegFormat =
            new VideoFormat(
                VideoFormat.JPEG_RTP,
                new Dimension(w, h),
                Format.NOT_SPECIFIED,
                Format.byteArray,
                frameRate);
        tracks[i].setFormat(jpegFormat);
        System.err.println("Video transmitted as:");
        System.err.println("  " + jpegFormat);
        // Assume successful
        programmed = true;
      } else tracks[i].setEnabled(false);
    }

    if (!programmed) return "Couldn't find video track";

    // Set the output content descriptor to RAW_RTP
    ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
    processor.setContentDescriptor(cd);

    // Realize the processor. This will internally create a flow
    // graph and attempt to create an output datasource for JPEG/RTP
    // video frames.
    result = waitForState(processor, Controller.Realized);
    if (result == false) return "Couldn't realize processor";

    // Set the JPEG quality to .5.
    setJPEGQuality(processor, 0.5f);

    // Get the output data source of the processor
    dataOutput = processor.getDataOutput();
    return null;
  }
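
createProcessor() relies on two helpers that are not shown: waitForState() and setJPEGQuality(). The usual waitForState() implementation, as in Sun's VideoTransmit sample, registers a ControllerListener, kicks off the state transition, and blocks until the target state is reached or the processor fails; the sketch below follows that pattern and is an assumption about the missing helper, not the original code.

  // Sketch of the waitForState() helper used above, modelled on Sun's
  // VideoTransmit example; the original implementation may differ.
  private final Object stateLock = new Object();
  private boolean stateFailed = false;

  private boolean waitForState(Processor p, int state) {
    p.addControllerListener(new ControllerListener() {
      public void controllerUpdate(ControllerEvent ce) {
        // Wake the waiting thread on both progress and failure events.
        if (ce instanceof ControllerClosedEvent) stateFailed = true;
        synchronized (stateLock) {
          stateLock.notifyAll();
        }
      }
    });

    // Start the asynchronous transition toward the requested state.
    if (state == Processor.Configured) p.configure();
    else if (state == Controller.Realized) p.realize();

    // Block until the processor reaches the target state or fails.
    synchronized (stateLock) {
      try {
        while (p.getState() < state && !stateFailed) stateLock.wait();
      } catch (InterruptedException ie) {
        return false;
      }
    }
    return !stateFailed;
  }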