Example #1
  // See comment in createericlist regarding HandleViewParams
  private /*synchronized*/ String HandleViewParams(
      String tempString, LinkedList tempList, StringTokenizer st) throws VisualizerLoadException {
    while (tempString.toUpperCase().startsWith("VIEW")) {
      StringTokenizer t = new StringTokenizer(tempString, " \t");
      String s1 = t.nextToken().toUpperCase();
      String s2 = t.nextToken().toUpperCase();
      String s3 = t.nextToken();

      // HERE PROCESS URL's AS YOU DID QUESTIONS

      if (s2.compareTo("ALGO") == 0) {
        add_a_pseudocode_URL(s3);
        // System.out.println("Adding to urls: "+Snaps+":"+s3);
        //                 urlList.append(Snaps+1 +":"+s3);
      } else if (s2.compareTo("DOCS") == 0) {
        add_a_documentation_URL(s3);
        // System.out.println("Adding to urls: "+Snaps+":"+s3);
        //                 if (debug) System.out.println("Adding to urlList: "+Snaps+1+":"+s3);
        //                 urlList.append(Snaps+1 +":"+s3);
      } else if (s2.compareTo("SCALE") == 0) {
        GKS.scale(Format.atof(s3.toUpperCase()), tempList, this);
      } else if (s2.compareTo("WINDOWS") == 0) {
        GKS.windows(Format.atoi(s3.toUpperCase()), tempList, this);
      } else if (s2.compareTo("JUMP") == 0) {
        GKS.jump(Format.atoi(s3.toUpperCase()), tempList, this);
      } else throw (new VisualizerLoadException(s2 + " is an invalid VIEW parameter"));
      tempString = st.nextToken();
    }
    //         if (urlList.size() == 0)
    //             urlList.append("**");
    return (tempString);
  }
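
The loader appears to consume newline-delimited records of the form "VIEW <PARAM> <value>", where PARAM is ALGO, DOCS, SCALE, WINDOWS, or JUMP, until the first non-VIEW record is reached. The following is a minimal standalone sketch of that tokenization, with a hypothetical input script and without the GKS calls or URL bookkeeping:

import java.util.StringTokenizer;

public class ViewParamsSketch {
  public static void main(String[] args) {
    // Hypothetical records; the values are made up for illustration.
    String script = "VIEW SCALE 1.5\nVIEW WINDOWS 2\nVIEW JUMP 3\n29 10 10 50 50";
    StringTokenizer st = new StringTokenizer(script, "\n");
    String line = st.nextToken();
    while (line.toUpperCase().startsWith("VIEW")) {
      StringTokenizer t = new StringTokenizer(line, " \t");
      t.nextToken();                                // "VIEW"
      String param = t.nextToken().toUpperCase();   // SCALE, WINDOWS, JUMP, ALGO, DOCS
      String value = t.nextToken();
      System.out.println(param + " -> " + value);
      line = st.nextToken();
    }
    System.out.println("first non-VIEW record: " + line);
  }
}
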
Example #2
  // Maps the numeric color code in the first token of "values" to an AWT Color.
  public /*synchronized*/ Color colorSet(String values) {
    int x;
    Color c = Color.black;

    StringTokenizer st = new StringTokenizer(values);
    x = Format.atoi(st.nextToken());
    if (x == 1) c = Color.black;
    else if (x == 2) c = Color.blue;
    else if (x == 6) c = Color.cyan;
    else if (x == 13) c = Color.darkGray;
    else if (x == 11) c = Color.gray;
    else if (x == 3) c = Color.green;
    else if (x == 9) c = Color.lightGray;
    else if (x == 5) c = Color.magenta;
    else if (x == 10) c = Color.orange;
    else if (x == 12) c = Color.pink;
    else if (x == 4) c = Color.red;
    else if (x == 8) c = Color.white;
    else if (x == 7) c = Color.yellow;
    else if (x < 0) c = new Color(-x); // negative codes carry a packed RGB value
    return c;
  }
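
For reference, here is a standalone sketch of the same code-to-color mapping, with Integer.parseInt standing in for the codebase's Format.atoi. The table mirrors the branches above, and negative codes are interpreted as a packed RGB value:

import java.awt.Color;
import java.util.StringTokenizer;

public class ColorSetSketch {
  static Color colorSet(String values) {
    int x = Integer.parseInt(new StringTokenizer(values).nextToken());
    if (x < 0) return new Color(-x);   // packed 0xRRGGBB
    switch (x) {
      case 2:  return Color.blue;
      case 3:  return Color.green;
      case 4:  return Color.red;
      case 5:  return Color.magenta;
      case 6:  return Color.cyan;
      case 7:  return Color.yellow;
      case 8:  return Color.white;
      case 9:  return Color.lightGray;
      case 10: return Color.orange;
      case 11: return Color.gray;
      case 12: return Color.pink;
      case 13: return Color.darkGray;
      default: return Color.black;     // code 1 and anything unrecognized
    }
  }

  public static void main(String[] args) {
    System.out.println(colorSet("3 trailing tokens are ignored")); // green
    System.out.println(colorSet("-255"));                          // new Color(0x0000FF), i.e. blue
  }
}
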
Example #3
  // creates the list of snapshots
  public /*synchronized*/ obj dothis(String inputString) {
    /* Snapshot codes

    29 - rectangle draw
    2 - oval draw
    5 - fill oval
    6 - string
    7 - line & text color
    8 - fill color
    9 - text color (possible to ignore)
    10 - text height
    11 - polydraw
    4 - fill poly
    14 - arc draw
    30 - fill arc
    12 - text style, centered horizontal/vertical
    ???? 45 - url.  Note: when bringing up multiple algorithms, the URLs for the most recently
    run algorithm are posted in the upper browser frame.
    THE CODE BELOW WOULD INDICATE THIS IS 54, NOT 45 ????

    20 - number of windows.  For static algorithms, 1, 2, 3, 4 have the obvious meaning.
    21 - scale factor
    22 - jump factor

    For 20, 21, 22, the last factor loaded is the one that will affect all snapshots in
    the show

     */

    obj temp = new rectDraw("0 0 0 0", lineC);
    Object urlTest;
    String arrg;

    int graphic_obj_code = Format.atoi(inputString.substring(0, 3));
    StringTokenizer tmp = null;

    switch (graphic_obj_code) {
      case 29:
        temp = new rectDraw(inputString.substring(3, (inputString.length())), lineC);
        break;
      case 2:
        temp = new ovalDraw(inputString.substring(3, (inputString.length())), lineC);
        break;
      case 5:
        temp = new fillOvalDraw(inputString.substring(3, (inputString.length())), fillC);
        break;
      case 6:
        temp =
            new stringDraw(
                inputString.substring(3, (inputString.length())), textC, LineH, LineV, fontMult);
        // 		System.out.println(" printing " + inputString);
        break;
      case 7:
        lineC = colorSet(inputString.substring(3, (inputString.length())));
        textC = lineC;
        break;
      case 8:
        tmp = new StringTokenizer(inputString.substring(2, (inputString.length())));
        fillC = colorSet(tmp.nextToken());
        break;
      case 9:
        textC = colorSet(inputString.substring(3, (inputString.length())));
        break;
      case 10:
        StringTokenizer st = new StringTokenizer(inputString.substring(3, (inputString.length())));
        fontMult = Format.atof(st.nextToken());
        // 		System.out.println("setting fontMult= " + fontMult);
        break;
        // TLN changed on 10/14/97 to accommodate condensed prm files
        // temp=new textHeight(inputString.substring(3,(inputString.length())));
      case 11:
        temp = new polyDraw(inputString.substring(3, (inputString.length())), lineC);
        break;
      case 4:
        temp = new fillPolyDraw(inputString.substring(3, (inputString.length())), fillC);
        break;
      case 64:
        temp = new animated_fillPolyDraw(inputString.substring(3, (inputString.length())), fillC);
        break;
      case 14:
        temp = new arcDraw(inputString.substring(3, (inputString.length())), lineC);
        break;
      case 30:
        temp = new fillArcDraw(inputString.substring(3, (inputString.length())), fillC);
        break;
      case 12:
        tmp = new StringTokenizer(inputString.substring(3, (inputString.length())));
        LineH = Format.atoi(tmp.nextToken());
        LineV = Format.atoi(tmp.nextToken());
        break;
      case 20:
        tmp = new StringTokenizer(inputString.substring(3, (inputString.length())));
        // graphWin.setNumViews(Format.atoi(tmp.nextToken()));
        // multiTrigger=true;
        break;
      case 21:
        tmp = new StringTokenizer(inputString.substring(3, (inputString.length())));
        double tempFloat = Format.atof(tmp.nextToken());
        zoom = tempFloat;
        break;
      case 22:
        tmp = new StringTokenizer(inputString.substring(3, (inputString.length())));
        //            graphWin.setJump(Format.atoi(tmp.nextToken()));  // This is now a noop in
        // gaigs2
        break;
      case 54:
        tmp = new StringTokenizer(inputString.substring(3, (inputString.length())));
        //             if (tmp.hasMoreElements()){
        //                 urlTest=tmp.nextToken();
        //                 urlList.append(urlTest);
        //             }
        //             else{
        //                 tmp=new StringTokenizer("**");
        //                 urlTest=tmp.nextToken();
        //                 urlList.append(urlTest);
        //             }
        break;
    } // end switch

    return (temp);
  }
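
The dispatch convention appears to be that the first three characters of each snapshot record carry the numeric graphic-object code and the remainder carries that object's parameters. Below is a minimal sketch with made-up records, using Integer.parseInt as a stand-in for Format.atoi:

public class SnapshotDispatchSketch {
  public static void main(String[] args) {
    // Hypothetical records; coordinates and values are made up for illustration.
    String[] records = {
      " 29 0.1 0.1 0.5 0.3",   // 29 -> rectangle draw
      "  6 0.2 0.2 hello",     //  6 -> string draw
      " 21 1.5"                // 21 -> scale factor
    };
    for (String r : records) {
      int code = Integer.parseInt(r.substring(0, 3).trim());
      String params = r.substring(3).trim();
      System.out.println("code=" + code + "  params=\"" + params + "\"");
    }
  }
}
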
Example #4
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
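
For context, a minimal sketch of how a Processor might be wired to this listener in JMF: create a Processor over the stream's DataSource, register the ControllerListener, and call configure(); the rest of the lifecycle shown above (realize, DataSink creation, start) is then driven by controllerUpdate(). The helper below is illustrative and not part of the original class:

import java.io.IOException;
import javax.media.ControllerListener;
import javax.media.Manager;
import javax.media.NoProcessorException;
import javax.media.Processor;
import javax.media.protocol.DataSource;

class ProcessorSetupSketch {
  // Begins configuring a Processor for the given DataSource and registers the
  // recorder as its ControllerListener; ConfigureCompleteEvent and the later
  // events are delivered asynchronously to controllerUpdate().
  static Processor configureFor(DataSource ds, ControllerListener recorder)
      throws IOException, NoProcessorException {
    Processor processor = Manager.createProcessor(ds);
    processor.addControllerListener(recorder);
    processor.configure();
    return processor;
  }
}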