Example #1
  public void onIncomingEncodedFrame(byte[] frame) {
    Logger.hysteria("AudioProcessor.onIncomingEncodedFrame");
    // Normal path: hand the encoded frame to the sender for transmission.
    if (sender != null) sender.onReceivedFrame(frame);
    else {
      // No sender configured: loop the frame back into the local receive path.
      Logger.debug("sender == null. Redirecting to receiver");
      onIncomingReceivedFrame(frame, payloadType);
    }
  }
Example #2
  public void onIncomingReceivedFrame(byte[] frame, int payloadType) {
    if (frame == null) {
      Logger.error("onIncomingReceivedFrame:frame=null");
      return; // nothing to decode
    }
    Logger.hysteria("AudioProcessor: onIncomingReceivedFrame " + frameCounter++);
    // Payload type 19 is treated here as a pause indication; it carries no audio to decode.
    if (payloadType == 19) {
      if (!audioPaused) pauseAudio();
      return;
    }
    if (decoder != null && payloadType == this.payloadType) {
      if (audioPaused) resumeAudio();
      if (decoder.canDecode()) onIncomingDecodedFrame(decoder.decode(frame));
      else Logger.error("Selected codec does not support decoding");
    } else Logger.error("Unsupported RTP payload type (" + payloadType + ").");
  }
Example #3
  public void onIncomingRawFrame(byte[] frame) {
    Logger.hysteria("AudioProcessor.onIncomingRawFrame");
    if (encoder != null) {
      // Encode the raw frame before handing it to the send path.
      if (encoder.canEncode()) onIncomingEncodedFrame(encoder.encode(frame));
      else Logger.error("Selected codec does not support encoding");
    } else {
      // No encoder configured: forward the raw frame unchanged.
      onIncomingEncodedFrame(frame);
    }
  }
Example #4
  public AudioProcessor(
      int pt, AudioSource src, AudioSender snd, AudioDestination dst, AudioReceiver rcv, int fs) {
    Logger.debug("in AudioProcessor constructor");
    // When outgoing audio is disabled, drop the capture source; the sender is
    // deliberately left in place.
    if (disableOutgoing) {
      src = null;
    }
    init(pt, src, snd, dst, rcv, fs);
  }
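
For orientation, a minimal usage sketch of this constructor follows. The MicrophoneSource, RtpSender, SpeakerDestination, and RtpReceiver classes, the address and ports, and the frame-size value are hypothetical placeholders, not part of the code in these examples; only payload type 0 (PCMU) is grounded in the init() switch shown in Example #7.

  // Hypothetical wiring of an AudioProcessor; every concrete class, address,
  // and numeric value below is an illustrative placeholder.
  AudioSource src = new MicrophoneSource();              // captures raw frames
  AudioSender snd = new RtpSender("192.0.2.10", 40000);  // transmits encoded frames
  AudioDestination dst = new SpeakerDestination();       // plays decoded frames
  AudioReceiver rcv = new RtpReceiver(40002);             // feeds received frames in

  // Payload type 0 selects the PCMU codec pair in init(); 160 is used here only
  // as an example frame size (20 ms of 8 kHz audio is 160 samples).
  AudioProcessor processor = new AudioProcessor(0, src, snd, dst, rcv, 160);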
Example #5
  public void stop() {
    Logger.info("AudioProcessor.stop: closing all inputs and outputs");
    if (source != null) source.close();
    if (sender != null) sender.close();
    if (destination != null) destination.close();
    if (receiver != null) receiver.close();
  }
Example #6
  public boolean resumeSuspendedSource() {
    Logger.debug("AudioProcessor.resumeSuspendedSource starts");
    if (suspendedSource == null) return false;
    // Close the currently active source before switching back to the suspended one.
    if (source != null) {
      Logger.debug("closing current source");
      source.close();
    }
    // Restore the old source and take it out of its halted state.
    source = suspendedSource;
    suspendedSource = null;
    boolean ok = source.unhalt();
    Logger.debug("AudioProcessor.resumeSuspendedSource ends, unhalt=" + ok);
    return ok;
  }
Example #7
  private void init(
      int payloadType,
      AudioSource src,
      AudioSender snd,
      AudioDestination dst,
      AudioReceiver rcv,
      int fs) {
    Logger.debug("AudioProcessor.init:");
    if (dst != null) Logger.debug("		destination=" + dst.getClass());
    else Logger.debug("		destination=null");
    if (src != null) Logger.debug("		source=" + src.getClass());
    else Logger.debug("		source=null");
    if (snd != null) Logger.debug("		sender=" + snd.getClass());
    else Logger.debug("		sender=null");
    if (rcv != null) Logger.debug("		receiver=" + rcv.getClass());
    else Logger.debug("		receiver=null");
    this.payloadType = payloadType;
    frameSize = fs;
    // Select the codec pair for the negotiated RTP payload type
    // (0 = PCMU / G.711 µ-law, 8 = PCMA / G.711 A-law).
    switch (this.payloadType) {
      case 0:
        encoder = new PCMUCodec4(this);
        decoder = new PCMUCodec4(this);
        break;
      case 8:
        encoder = new PCMACodec(this);
        decoder = new PCMACodec(this);
        break;
      default:
        encoder = null;
        decoder = null;
        break;
    }
    if (encoder != null) Logger.debug("encoder=" + encoder.getClass());
    if (decoder != null) Logger.debug("decoder=" + decoder.getClass());
    if (encoder != null) encoder.init();
    if (decoder != null) decoder.init();
    source = src;
    sender = snd;
    destination = dst;
    receiver = rcv;
    if (sender != null) sender.init(this.payloadType);
    // 8 kHz, 16-bit signed PCM, mono: 2-byte frames at 8000 frames per second.
    AudioFormat format =
        new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 8000, 16, 1, 2, 8000, bigEndian);
    if (source != null) source.init(this, format, frameSize);
    if (destination != null) destination.init(this, format, frameSize);
    else Logger.error("AudioProcessor: destination=null");
    if (receiver != null) receiver.init(this);
  }
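
If the AudioFormat used above is javax.sound.sampled.AudioFormat (an assumption; the import is not shown in these examples), its positional arguments map as in the sketch below. The named locals exist only for readability and do not appear in the original code.

  // Assumes javax.sound.sampled.AudioFormat; every local variable here is an
  // explanatory placeholder.
  boolean bigEndian = false;                  // placeholder; the real flag is a class field
  AudioFormat.Encoding encoding = AudioFormat.Encoding.PCM_SIGNED; // linear signed PCM
  float sampleRate = 8000f;                   // 8 kHz narrowband audio
  int sampleSizeInBits = 16;                  // 16 bits per sample
  int channels = 1;                           // mono
  int frameSizeInBytes = 2;                   // one 16-bit sample per frame
  float frameRate = 8000f;                    // frame rate equals the sample rate
  AudioFormat format =
      new AudioFormat(
          encoding, sampleRate, sampleSizeInBits, channels, frameSizeInBytes, frameRate, bigEndian);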
Example #8
  public void onIncomingDecodedFrame(byte[] b) {
    Logger.hysteria("AudioProcessor.onIncomingDecodedFrame");
    if (b == null) {
      Logger.error("Decoded frame = null");
      return; // nothing to play out
    }
    // Hand the decoded frame to the playback destination.
    if (destination != null) destination.onReceivedDestinationFrame(b);
    else Logger.error("Audio destination = null");
  }