Example #1
  public void open() {

    // nOpen is presumably a native call; it returns 0 when the capture
    // buffer cannot be created (checked below).
    dsBuffer =
        nOpen(
            (int) format.getSampleRate(),
            format.getSampleSizeInBits(),
            format.getChannels(),
            bufSize);
    if (dsBuffer == 0) throw new Error("Couldn't create capture buffer");
  }
Example #2
  protected Format[] getMatchingOutputFormats(Format in) {

    AudioFormat af = (AudioFormat) in;

    supportedOutputFormats =
        new AudioFormat[] {
          new AudioFormat(
              Constants.ALAW_RTP,
              af.getSampleRate(),
              8,
              1,
              Format.NOT_SPECIFIED,
              Format.NOT_SPECIFIED,
              8,
              Format.NOT_SPECIFIED,
              Format.byteArray)
        };
    return supportedOutputFormats;
  }
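This getMatchingOutputFormats implementation advertises a single ALAW_RTP output derived from the input's sample rate. It is one half of JMF's plug-in negotiation: the caller feeds in an input format, picks one of the advertised outputs, and only then opens the codec. A minimal driver sketch, assuming FMJ's A-law packetizer as the codec under test (any javax.media.Codec is driven the same way):

  import javax.media.Codec;
  import javax.media.Format;
  import javax.media.format.AudioFormat;

  public class CodecNegotiationSketch {
    public static void main(String[] args) throws Exception {
      // Assumption: FMJ's A-law packetizer; substitute any Codec implementation.
      Codec codec = new net.sf.fmj.media.codec.audio.alaw.Packetizer();
      AudioFormat in = new AudioFormat(AudioFormat.ALAW, 8000.0, 8, 1);
      codec.setInputFormat(in);
      Format[] outs = codec.getSupportedOutputFormats(in);
      if (outs.length == 0 || outs[0] == null)
        throw new IllegalStateException("codec does not accept " + in);
      codec.setOutputFormat(outs[0]); // first advertised format; a real caller may pick the best match
      codec.open();
      System.out.println("negotiated " + in + " -> " + outs[0]);
      codec.close();
    }
  }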
Example #3
    @Override
    public void read(Buffer buffer) throws IOException {
      pbs.read(buffer);

      // Remap the time stamps so it won't wrap around
      // while changing to a new file.
      if (buffer.getTimeStamp() != Buffer.TIME_UNKNOWN) {
        long diff = buffer.getTimeStamp() - lastTS;
        lastTS = buffer.getTimeStamp();
        if (diff > 0) timeStamp += diff;
        buffer.setTimeStamp(timeStamp);
      }

      // If this track is to be used as the master time base,
      // we'll need to compute the master time based on this track.
      if (useAsMaster) {
        if (buffer.getFormat() instanceof AudioFormat) {
          AudioFormat af = (AudioFormat) buffer.getFormat();
          masterAudioLen += buffer.getLength();
          long t = af.computeDuration(masterAudioLen);
          if (t > 0) {
            masterTime = t;
          } else {
            masterTime = buffer.getTimeStamp();
          }
        } else {
          masterTime = buffer.getTimeStamp();
        }
      }

      if (buffer.isEOM()) {
        tInfo.done = true;
        if (!ds.handleEOM(tInfo)) {
          // This is not the last processor to be done.
          // We'll need to un-set the EOM flag.
          buffer.setEOM(false);
          buffer.setDiscard(true);
        }
      }
    }
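The remapping above keeps the outgoing clock monotonic across file changes: only positive deltas are accumulated, so a new file whose timestamps restart at zero does not rewind the stream. The same rule in isolation, as a standalone sketch:

  // Minimal sketch of the timestamp-remapping rule used in read(Buffer) above.
  public class MonotonicTimestamps {
    private long lastTS;
    private long timeStamp;

    // Remaps a raw timestamp so the result never moves backwards.
    public long remap(long rawTS) {
      long diff = rawTS - lastTS;
      lastTS = rawTS;
      if (diff > 0) timeStamp += diff; // a backward jump (new file) is ignored
      return timeStamp;
    }

    public static void main(String[] args) {
      MonotonicTimestamps m = new MonotonicTimestamps();
      long[] raw = {0, 100, 200, 0, 100}; // the second file restarts at 0
      for (long t : raw) System.out.println(t + " -> " + m.remap(t)); // 0 100 200 200 300
    }
  }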
Example #4
  public Format[] getSupportedOutputFormats(Format in) {

    if (in == null) return new Format[] {new AudioFormat(AudioFormat.ULAW)};

    // No match: returns a one-element array whose single entry is null.
    if (matches(in, inputFormats) == null) return new Format[1];

    if (!(in instanceof AudioFormat)) return new Format[] {new AudioFormat(AudioFormat.ULAW)};

    AudioFormat af = (AudioFormat) in;
    return new Format[] {
      new AudioFormat(
          AudioFormat.ULAW, af.getSampleRate(), af.getSampleSizeInBits(), af.getChannels())
    };
  }
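Note the no-match branch: it returns new Format[1], a one-element array whose single entry is null, rather than an empty array, so callers must null-check the entries and not just the length. A small defensive helper, as a sketch:

  import javax.media.Format;

  public final class FormatArrays {
    // Returns the first non-null entry, or null; tolerates the {null} no-match convention above.
    public static Format firstUsable(Format[] formats) {
      if (formats == null) return null;
      for (Format f : formats) if (f != null) return f;
      return null;
    }
  }

Typical use: Format out = FormatArrays.firstUsable(codec.getSupportedOutputFormats(in)); followed by a null check before calling setOutputFormat.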
Example #5
  public Format setInputFormat(Format input, int trackID) {
    if (!(input instanceof AudioFormat)) return null;
    AudioFormat format = (AudioFormat) input;
    double sampleRate = format.getSampleRate();

    String reason = null;
    double epsilon = 0.25;

    // Check to see if some of these restrictions can be removed
    if (!format.getEncoding().equalsIgnoreCase(AudioFormat.GSM)) reason = "Encoding has to be GSM";
    else if (Math.abs(sampleRate - 8000.0) > epsilon)
      reason = "Sample rate should be 8000. Cannot handle sample rate " + sampleRate;
    else if (format.getFrameSizeInBits() != (33 * 8)) reason = "Frame size should be 33 bytes";
    else if (format.getChannels() != 1) reason = "Number of channels should be 1";

    if (reason != null) {
      return null;
    } else {
      inputs[0] = format;
      return format;
    }
  }
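For reference, an input that passes all four checks above can be built with the long AudioFormat constructor; note that the frame size argument is in bits, so 33-byte GSM frames become 33 * 8. A sketch:

  import javax.media.Format;
  import javax.media.format.AudioFormat;

  public class GsmInputFormatSketch {
    public static void main(String[] args) {
      AudioFormat gsm =
          new AudioFormat(
              AudioFormat.GSM, // encoding must be GSM
              8000.0, // sample rate is checked against 8000 with epsilon 0.25
              Format.NOT_SPECIFIED, // sample size is not checked
              1, // mono
              Format.NOT_SPECIFIED, // endian
              Format.NOT_SPECIFIED, // signed
              33 * 8, // frame size in bits (33-byte GSM frames)
              Format.NOT_SPECIFIED, // frame rate
              Format.byteArray);
      System.out.println(gsm);
    }
  }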
  @Override
  public Format[] getSupportedOutputFormats(Format input) {
    if (input == null) return outputFormats;
    else {
      if (!(input instanceof AudioFormat)) {
        logger.warning(
            this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input);
        // Returning {null} can cause an NPE in JMF if it ever happens.
        return new Format[] {null};
      }
      final AudioFormat inputCast = (AudioFormat) input;
      if (!inputCast.getEncoding().equals(AudioFormat.ALAW)
          || (inputCast.getSampleSizeInBits() != 8
              && inputCast.getSampleSizeInBits() != Format.NOT_SPECIFIED)
          || (inputCast.getChannels() != 1 && inputCast.getChannels() != Format.NOT_SPECIFIED)
          || (inputCast.getFrameSizeInBits() != 8
              && inputCast.getFrameSizeInBits() != Format.NOT_SPECIFIED)) {
        logger.warning(
            this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input);
        // Returning {null} can cause an NPE in JMF if it ever happens.
        return new Format[] {null};
      }
      final AudioFormat result =
          new AudioFormat(
              BonusAudioFormatEncodings.ALAW_RTP,
              inputCast.getSampleRate(),
              8,
              1,
              inputCast.getEndian(),
              inputCast.getSigned(),
              8,
              inputCast.getFrameRate(),
              inputCast.getDataType());

      return new Format[] {result};
    }
  }
  public static void main(String[] args) {
    if (args.length != 2) {
      System.out.println("Usage: rtpaudio <targetIP> <targetPort>");
      System.exit(0);
    }

    try {
      RegistryDefaults.setDefaultFlags(RegistryDefaults.FMJ);

      // create a clean registry
      RegistryDefaults.unRegisterAll(RegistryDefaults.ALL);
      RegistryDefaults.registerAll(RegistryDefaults.FMJ);

      // remove all capture devices
      Vector deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      for (int i = 0; i < deviceList.size(); i++) {
        CaptureDeviceInfo cdi = (CaptureDeviceInfo) deviceList.elementAt(i);
        CaptureDeviceManager.removeDevice(cdi);
      }

      // update capture device list
      new net.sf.fmj.media.cdp.javasound.CaptureDevicePlugger().addCaptureDevices();
      PlugInManager.commit();

      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      if ((null == deviceList) || (deviceList.size() == 0)) {
        System.out.println("### ERROR found no audio capture device");
        System.exit(0);
      }

      // enumerate all codecs
      Vector codecList = PlugInManager.getPlugInList(null, null, PlugInManager.CODEC);
      System.out.println("found " + codecList.size() + " codecs");
      for (int i = 0; i < codecList.size(); i++) {
        String aCodecClass = (String) codecList.elementAt(i);
        System.out.println("# " + (i + 1) + " " + aCodecClass);
      }

      // fetch first available audio capture device
      deviceList = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
      CaptureDeviceInfo captureDeviceInfo = (CaptureDeviceInfo) deviceList.elementAt(0);
      System.out.println("### using " + captureDeviceInfo.getName());
      System.out.println("### locator " + captureDeviceInfo.getLocator());

      javax.media.protocol.DataSource dataSource =
          javax.media.Manager.createDataSource(
              new javax.media.MediaLocator(captureDeviceInfo.getLocator().toString()));
      // javax.media.protocol.DataSource dataSource =
      // javax.media.Manager.createDataSource(new
      // javax.media.MediaLocator("javasound://"));
      System.out.println("### created datasource " + dataSource.getClass().getName());

      javax.media.control.FormatControl[] formatControls =
          ((javax.media.protocol.CaptureDevice) dataSource).getFormatControls();
      System.out.println("got format control " + formatControls[0].getClass().getName());

      System.out.println("current format is " + formatControls[0].getFormat());

      // set audio capture format
      javax.media.Format[] formats = formatControls[0].getSupportedFormats();
      for (int i = 0; i < formats.length; i++) {
        javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) formats[i];
        if ((af.getChannels() == 1) && (af.getSampleSizeInBits() == 16)) {
          if (af.getSampleRate() == Format.NOT_SPECIFIED) {
            javax.media.format.AudioFormat newAudioFormat =
                new javax.media.format.AudioFormat(
                    af.getEncoding(),
                    8000.0f,
                    javax.media.Format.NOT_SPECIFIED,
                    javax.media.Format.NOT_SPECIFIED);
            // javax.media.format.AudioFormat newAudioFormat = new
            // javax.media.format.AudioFormat(af.getEncoding(),
            // 44100.0f, javax.media.Format.NOT_SPECIFIED,
            // javax.media.Format.NOT_SPECIFIED);
            formatControls[0].setFormat(newAudioFormat.intersects(af));
            break;
          }
        }
      }
      System.out.println("current format is now " + formatControls[0].getFormat());

      FrameProcessingControl fpc = null;

      // adjust recording buffer (to adjust latency)
      dataSource.stop();
      Object[] controls = dataSource.getControls();
      for (int i = 0; i < controls.length; i++) {
        String className = controls[i].getClass().getName();
        if (-1 != className.indexOf("JavaSoundBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(40);
          System.out.println(
              "### current javasound buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("JitterBufferControl")) {
          javax.media.control.BufferControl bc = (javax.media.control.BufferControl) controls[i];
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
          bc.setBufferLength(80);
          System.out.println("### current jitter buffer length is " + bc.getBufferLength() + " ms");
        } else if (-1 != className.indexOf("FPC")) {
          fpc = (FrameProcessingControl) controls[i];
          System.out.println("### found frame processing control " + fpc.getClass());
        }
      }
      dataSource.start();

      // create processor
      javax.media.Processor processor = javax.media.Manager.createProcessor(dataSource);
      System.out.println("### created processor " + processor.getClass().getName());

      processor.configure();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Processor.Configured) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      processor.setContentDescriptor(
          new javax.media.protocol.ContentDescriptor(ContentDescriptor.RAW_RTP));

      javax.media.control.TrackControl[] tracks = processor.getTrackControls();
      // /tracks[0].setFormat(new
      // javax.media.format.AudioFormat(javax.media.format.AudioFormat.ULAW_RTP,
      // 8000, 8, 1));
      tracks[0].setFormat(
          new javax.media.format.AudioFormat(javax.media.format.AudioFormat.GSM_RTP, 8000, 8, 1));

      processor.realize();
      for (int idx = 0; idx < 100; idx++) {
        if (processor.getState() == Controller.Realized) {
          break;
        }
        Thread.sleep(100);
      }
      System.out.println("### processor state " + processor.getState());

      javax.media.protocol.DataSource dataOutput = processor.getDataOutput();
      System.out.println("### processor data output " + dataOutput.getClass().getName());

      // BitRateControl
      BitRateControl bitrateControl = null;

      Object[] controls2 = dataOutput.getControls();
      for (int i = 0; i < controls2.length; i++) {
        if (controls2[i] instanceof BitRateControl) {
          bitrateControl = (BitRateControl) controls2[i];
          System.out.println("### found bitrate control " + bitrateControl.getClass());
          break;
        }
      }

      // PacketSizeControl
      Object[] controls3 = processor.getControls();
      for (int i = 0; i < controls3.length; i++) {
        if (controls3[i] instanceof PacketSizeControl) {
          PacketSizeControl psc = (PacketSizeControl) controls3[i];
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          psc.setPacketSize(66);
          System.out.println("### current packetsize is " + psc.getPacketSize() + " bytes");
          break;
        }
      }

      // enumerate all controls of the processor
      Object[] pcontrols = processor.getControls();
      for (int i = 0; i < pcontrols.length; i++) {
        System.out.println("processor control " + i + " " + pcontrols[i]);
      }

      javax.media.rtp.RTPManager rtpManager = javax.media.rtp.RTPManager.newInstance();

      javax.media.rtp.SessionAddress local =
          new javax.media.rtp.SessionAddress(
              InetAddress.getLocalHost(), Integer.parseInt(args[1]));
      javax.media.rtp.SessionAddress target =
          new javax.media.rtp.SessionAddress(
              InetAddress.getByName(args[0]), Integer.parseInt(args[1]));

      rtpManager.initialize(local);
      rtpManager.addTarget(target);

      javax.media.rtp.SendStream sendStream = rtpManager.createSendStream(dataOutput, 0);
      sendStream.start();

      processor.start();
      Thread.sleep(1000);

      System.out.println("\n>>>>>>  TRANSMITTING ULAW/RTP AUDIO NOW");
      while (2 > 1) {
        Thread.sleep(1000);

        if (null != bitrateControl) {
          TransmissionStats stats = sendStream.getSourceTransmissionStats();
          System.out.println(
              "rtp audio send: bitrate="
                  + bitrateControl.getBitRate()
                  + " (pdu="
                  + stats.getPDUTransmitted()
                  + " bytes="
                  + stats.getBytesTransmitted()
                  + " overrun="
                  + fpc.getFramesDropped()
                  + ")");
        }
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }

    System.exit(0);
  }
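The two polling loops above (waiting for Configured and then Realized) busy-wait in 100 ms steps. The standard JMF alternative is to block on a ControllerListener; a minimal sketch (failure events such as ResourceUnavailableEvent are omitted for brevity):

  import javax.media.ConfigureCompleteEvent;
  import javax.media.ControllerEvent;
  import javax.media.ControllerListener;
  import javax.media.Processor;
  import javax.media.RealizeCompleteEvent;

  public class ProcessorStates {
    // Blocks until the processor reaches the given state
    // (Processor.Configured or Controller.Realized).
    public static boolean waitForState(Processor p, int state) throws InterruptedException {
      final Object lock = new Object();
      ControllerListener listener =
          new ControllerListener() {
            public void controllerUpdate(ControllerEvent e) {
              if (e instanceof ConfigureCompleteEvent || e instanceof RealizeCompleteEvent) {
                synchronized (lock) {
                  lock.notifyAll();
                }
              }
            }
          };
      p.addControllerListener(listener);
      try {
        if (state == Processor.Configured) p.configure();
        else p.realize();
        synchronized (lock) {
          // JMF state constants are ordered, so < reads as "not yet reached".
          while (p.getState() < state) lock.wait(100);
        }
      } finally {
        p.removeControllerListener(listener);
      }
      return p.getState() >= state;
    }
  }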
    /**
     * Opens a connection to the media source of the associated <tt>DataSource</tt>.
     *
     * @throws IOException if anything goes wrong while opening a connection to the media source of
     *     the associated <tt>DataSource</tt>
     */
    public synchronized void connect() throws IOException {
      javax.media.format.AudioFormat af = (javax.media.format.AudioFormat) getFormat();
      int channels = af.getChannels();
      int channelConfig;

      switch (channels) {
        case Format.NOT_SPECIFIED:
        case 1:
          channelConfig = AudioFormat.CHANNEL_IN_MONO;
          break;
        case 2:
          channelConfig = AudioFormat.CHANNEL_IN_STEREO;
          break;
        default:
          throw new IOException("channels");
      }

      int sampleSizeInBits = af.getSampleSizeInBits();
      int audioFormat;

      switch (sampleSizeInBits) {
        case 8:
          audioFormat = AudioFormat.ENCODING_PCM_8BIT;
          break;
        case 16:
          audioFormat = AudioFormat.ENCODING_PCM_16BIT;
          break;
        default:
          throw new IOException("sampleSizeInBits");
      }

      double sampleRate = af.getSampleRate();

      length =
          (int)
              Math.round(
                  20 /* milliseconds */ * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));

      /*
       * Apart from the thread in which #read(Buffer) is executed, use the
       * thread priority for the thread which will create the AudioRecord.
       */
      setThreadPriority();
      try {
        int minBufferSize =
            AudioRecord.getMinBufferSize((int) sampleRate, channelConfig, audioFormat);

        audioRecord =
            new AudioRecord(
                MediaRecorder.AudioSource.DEFAULT,
                (int) sampleRate,
                channelConfig,
                audioFormat,
                Math.max(length, minBufferSize));

        // tries to configure audio effects if available
        configureEffects();
      } catch (IllegalArgumentException iae) {
        IOException ioe = new IOException();

        ioe.initCause(iae);
        throw ioe;
      }

      setThreadPriority = true;
    }
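The length computation above sizes the buffer to 20 ms of audio: milliseconds x samples-per-millisecond x channels x bytes-per-sample. At 8000 Hz, mono, 16-bit that is 20 x 8 x 1 x 2 = 320 bytes. A standalone check of the same arithmetic:

  public class BufferLengthSketch {
    // Bytes needed for the given number of milliseconds of PCM audio
    // (same formula as the connect() method above).
    static int bytesForMillis(int millis, double sampleRate, int channels, int sampleSizeInBits) {
      return (int) Math.round(millis * (sampleRate / 1000) * channels * (sampleSizeInBits / 8));
    }

    public static void main(String[] args) {
      System.out.println(bytesForMillis(20, 8000.0, 1, 16)); // 320
      System.out.println(bytesForMillis(20, 44100.0, 2, 16)); // 3528
    }
  }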
  public static javax.sound.sampled.AudioFormat convertFormat(AudioFormat format) {

    String encodingString = format.getEncoding();
    int channels = format.getChannels();
    double frameRate = format.getFrameRate();
    int frameSize = format.getFrameSizeInBits() / 8;
    double sampleRate = format.getSampleRate();
    int sampleSize = format.getSampleSizeInBits();
    boolean endian = (format.getEndian() == AudioFormat.BIG_ENDIAN);
    int signed = format.getSigned();

    Encoding encoding;
    if (AudioFormat.LINEAR.equals(encodingString)) {
      switch (signed) {
        case AudioFormat.SIGNED:
          encoding = Encoding.PCM_SIGNED;
          break;
        case AudioFormat.UNSIGNED:
          encoding = Encoding.PCM_UNSIGNED;
          break;
        default:
          encoding = Encoding.PCM_SIGNED; // TODO: return null
      }
    } else if (AudioFormat.ALAW.equals(encodingString)) {
      encoding = Encoding.ALAW;
    } else if (AudioFormat.ULAW.equals(encodingString)) {
      encoding = Encoding.ULAW;
    } else if (toMpegEncoding(encodingString) != null) {
      encoding = toMpegEncoding(encodingString);
    } else if (toVorbisEncoding(encodingString) != null) {
      encoding = toVorbisEncoding(encodingString);
    } else {
      encoding = new CustomEncoding(encodingString);
    }

    final javax.sound.sampled.AudioFormat sampledFormat;

    if (encoding == Encoding.PCM_SIGNED) {
      sampledFormat =
          new javax.sound.sampled.AudioFormat(
              (float) sampleRate, sampleSize, channels, true, endian);

    } else if (encoding == Encoding.PCM_UNSIGNED) {
      sampledFormat =
          new javax.sound.sampled.AudioFormat(
              (float) sampleRate, sampleSize, channels, false, endian);
    } else if (encoding instanceof MpegEncoding) {
      // TODO: perhaps we should use reflection to avoid class not found problems if javazoom is not
      // in the classpath.
      return new MpegAudioFormat(
          encoding,
          (float) sampleRate,
          sampleSize,
          channels,
          // signed,
          frameSize,
          (float) frameRate,
          endian,
          new HashMap());
    } else if (encoding instanceof VorbisEncoding) {
      // TODO: perhaps we should use reflection to avoid class not found problems if javazoom is not
      // in the classpath.
      return new VorbisAudioFormat(
          encoding,
          (float) sampleRate,
          sampleSize,
          channels,
          // signed,
          frameSize,
          (float) frameRate,
          endian,
          new HashMap());
    } else {
      sampledFormat =
          new javax.sound.sampled.AudioFormat(
              encoding,
              (float) sampleRate,
              sampleSize,
              channels,
              frameSize,
              (float) frameRate,
              endian);
    }

    return sampledFormat;
  }
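For the common LINEAR case the conversion reduces to the five-argument javax.sound.sampled.AudioFormat constructor. A quick sketch, written as an extra main in the same class so the convertFormat above is visible:

  public static void main(String[] args) {
    AudioFormat jmf =
        new AudioFormat(
            AudioFormat.LINEAR, 44100.0, 16, 2, AudioFormat.BIG_ENDIAN, AudioFormat.SIGNED);
    // Expected: PCM_SIGNED, 44100.0 Hz, 16 bit, stereo, big-endian
    System.out.println(convertFormat(jmf));
  }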
  /**
   * Open the plugin. Must be called after the formats have been determined and before "process" is
   * called.
   *
   * <p>Open the DataLine.
   */
  public void open() throws ResourceUnavailableException {
    javax.sound.sampled.AudioFormat audioFormat = convertFormat(inputFormat);
    logger.info("opening with javax.sound format: " + audioFormat);
    try {

      if (!inputFormat.getEncoding().equals(AudioFormat.LINEAR)) {
        logger.info("JavaSoundRenderer: Audio format is not linear, creating conversion");

        if (inputFormat.getEncoding().equals(AudioFormat.ULAW))
          codec =
              new net.sf.fmj.media.codec.audio.ulaw
                  .Decoder(); // much more efficient than JavaSoundCodec
        else if (inputFormat.getEncoding().equals(AudioFormat.ALAW))
          codec =
              new net.sf.fmj.media.codec.audio.alaw
                  .Decoder(); // much more efficient than JavaSoundCodec
        else
          throw new ResourceUnavailableException(
              "Unsupported input format encoding: " + inputFormat.getEncoding());
        // codec = new net.sf.fmj.media.codec.JavaSoundCodec();
        codec.setInputFormat(inputFormat);
        final Format[] outputFormats = codec.getSupportedOutputFormats(inputFormat);
        if (outputFormats.length < 1)
          throw new ResourceUnavailableException(
              "Unable to get an output format for input format: " + inputFormat);
        final AudioFormat codecOutputFormat =
            (AudioFormat) outputFormats[0]; // TODO: choose the best quality one.
        codec.setOutputFormat(codecOutputFormat);
        audioFormat = convertFormat(codecOutputFormat);

        codec.open();

        logger.info(
            "JavaSoundRenderer: Audio format is not linear, created conversion from "
                + inputFormat
                + " to "
                + codecOutputFormat);
      }

      sourceLine = getSourceDataLine(audioFormat);
      sourceLine.open(audioFormat);

      {
        FloatControl gainFloatControl = null;
        BooleanControl muteBooleanControl = null;

        try {
          gainFloatControl = (FloatControl) sourceLine.getControl(FloatControl.Type.MASTER_GAIN);
        } catch (Exception e) {
          e.printStackTrace();
        }

        try {
          muteBooleanControl = (BooleanControl) sourceLine.getControl(BooleanControl.Type.MUTE);
        } catch (Exception e) {
          e.printStackTrace();
        }

        // TODO add other controls
        JavaSoundGainControl gainControl =
            new JavaSoundGainControl(gainFloatControl, muteBooleanControl);
        controls.addControl(gainControl);
      }

      logControls(sourceLine.getControls());
    } catch (LineUnavailableException e) {
      throw new ResourceUnavailableException(e.getMessage());
    }
  }
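The two try/catch blocks around getControl exist because a line throws IllegalArgumentException for control types it does not expose. Line.isControlSupported makes the probe explicit instead; a sketch:

  import javax.sound.sampled.BooleanControl;
  import javax.sound.sampled.FloatControl;
  import javax.sound.sampled.SourceDataLine;

  public final class LineControls {
    // Returns the master gain control if the line exposes one, else null.
    static FloatControl gainOrNull(SourceDataLine line) {
      return line.isControlSupported(FloatControl.Type.MASTER_GAIN)
          ? (FloatControl) line.getControl(FloatControl.Type.MASTER_GAIN)
          : null;
    }

    // Returns the mute control if the line exposes one, else null.
    static BooleanControl muteOrNull(SourceDataLine line) {
      return line.isControlSupported(BooleanControl.Type.MUTE)
          ? (BooleanControl) line.getControl(BooleanControl.Type.MUTE)
          : null;
    }
  }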