/**
 * Routes one encoded audio frame outwards.
 *
 * <p>If an {@code AudioSender} is configured the frame goes out through it;
 * otherwise the frame is looped straight back into the receive path using
 * this processor's own payload type (local loopback behaviour).
 */
public void onIncomingEncodedFrame(byte[] frame) {
  Logger.hysteria("AudioProcessor.onIncomingEncodedFrame");
  if (sender == null) {
    // No sender configured: feed the frame back into our own receive chain.
    onIncomingReceivedFrame(frame, payloadType);
    Logger.debug("sender == null. Redirecting to receiver");
    return;
  }
  sender.onReceivedFrame(frame);
}
/**
 * Handles one incoming RTP frame: decodes it with the configured codec and
 * forwards the decoded data via {@link #onIncomingDecodedFrame(byte[])}.
 *
 * @param frame       the encoded payload; must not be null
 * @param payloadType the RTP payload type of the received frame
 */
public void onIncomingReceivedFrame(byte[] frame, int payloadType) {
  if (frame == null) {
    Logger.error("onIncomingReceivedFrame:frame=null");
    // FIX: previously execution fell through and handed null to decoder.decode().
    return;
  }
  Logger.hysteria("AudioProcessor: onIncomingReceivedFrame " + frameCounter++);
  // Payload type 19 appears to be used here as a "pause audio" signal —
  // NOTE(review): 19 is a reserved static RTP payload type; confirm the
  // peer actually sends it for silence suppression.
  if (payloadType == 19 && !audioPaused) pauseAudio();
  if (decoder != null && payloadType == this.payloadType) {
    // A frame with our negotiated payload type ends any pause state.
    if (audioPaused) resumeAudio();
    if (decoder.canDecode()) onIncomingDecodedFrame(decoder.decode(frame));
    else Logger.error("Selected codec does not support decoding");
  } else Logger.error("Unsupported RTP payload type (" + payloadType + ").");
}
public void onIncomingRawFrame(byte[] frame) { // TODO Auto-generated method stub if (encoder != null) { Logger.hysteria("AudioProcessor.onIncomingRawFrame"); if (encoder.canEncode()) onIncomingEncodedFrame(encoder.encode(frame)); else Logger.error("Selected codec does not support encoding"); } else { onIncomingEncodedFrame(frame); } }
/**
 * Creates an audio processor and wires up its endpoints.
 *
 * <p>When {@code disableOutgoing} is set, the capture source is dropped so no
 * outgoing audio is produced (the sender itself is deliberately kept — see the
 * retained commented-out line).
 */
public AudioProcessor(
    int pt, AudioSource src, AudioSender snd, AudioDestination dst, AudioReceiver rcv, int fs) {
  Logger.debug("in AudioProcessor constructor");
  if (disableOutgoing) {
    src = null;
    // snd = null;  (intentionally left enabled)
  }
  init(pt, src, snd, dst, rcv, fs);
}
/** Shuts the processor down by closing every configured audio endpoint. */
public void stop() {
  Logger.info("AudioProcessor.stop: closing all inputs and outputs");
  if (source != null) {
    source.close();
  }
  if (sender != null) {
    sender.close();
  }
  if (destination != null) {
    destination.close();
  }
  if (receiver != null) {
    receiver.close();
  }
}
/**
 * Restores a previously suspended audio source: closes the currently active
 * source (if any), swaps the suspended one back in and un-halts it.
 *
 * @return true if the suspended source was resumed successfully,
 *         false when there is no suspended source to restore
 */
public boolean resumeSuspendedSource() {
  Logger.debug("audioprosessor.resumesuspendedsource starts");
  if (suspendedSource == null) return false;
  Logger.debug("close source");
  // FIX: guard against NPE — the old code called source.close() unconditionally,
  // which crashed when no source was currently active.
  if (source != null) source.close();
  Logger.debug("source closed");
  // ("vanha" is Finnish for "old" — we are restoring the old source.)
  Logger.debug("vanha source");
  source = suspendedSource;
  Logger.debug("unhalt vanha source");
  boolean ok = source.unhalt();
  Logger.debug("vanha source unhalted");
  // NOTE(review): suspendedSource is intentionally not cleared here — confirm
  // whether a second resume call is supposed to succeed again.
  Logger.debug("audioprosessor.resumesuspendedsource ends");
  return ok;
}
private void init( int payloadType, AudioSource src, AudioSender snd, AudioDestination dst, AudioReceiver rcv, int fs) { Logger.debug("AudioProcessor.init:"); Logger.debug(" destination=" + dst.getClass()); if (src != null) Logger.debug(" source=" + src.getClass()); else Logger.debug(" source=null"); if (snd != null) Logger.debug(" sender=" + snd.getClass()); else Logger.debug(" sender=null"); if (rcv != null) Logger.debug(" receiver=" + rcv.getClass()); else Logger.debug(" receiver=null"); this.payloadType = payloadType; frameSize = fs; { Logger.debug(""); switch (this.payloadType) { case 0: encoder = new PCMUCodec4(this); decoder = new PCMUCodec4(this); break; case 8: encoder = new PCMACodec(this); decoder = new PCMACodec(this); break; default: encoder = null; decoder = null; break; } if (encoder != null) Logger.debug("encoder=" + encoder.getClass()); if (decoder != null) Logger.debug("decoder=" + decoder.getClass()); if (encoder != null) encoder.init(); if (decoder != null) decoder.init(); } source = src; sender = snd; destination = dst; // Logger.debug("AudioProcessor constructor: destination="+destination); receiver = rcv; if (sender != null) sender.init(this.payloadType); AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 8000, 16, 1, 2, 8000, bigEndian); // AudioFormat format=new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,22050,8,1,1,22050,false); if (source != null) source.init(this, format, frameSize); if (destination != null) destination.init(this, format, frameSize); else Logger.error("AudioProcessor: destination=null"); if (receiver != null) receiver.init(this); }
/**
 * Delivers one decoded (PCM) frame to the audio destination for playback.
 *
 * @param b decoded frame; a null frame is logged and dropped
 */
public void onIncomingDecodedFrame(byte[] b) {
  // FIX: corrected typo "AudioPorcessor" in the trace message.
  Logger.hysteria("AudioProcessor.onIncomingDecodedFrame");
  if (b == null) {
    Logger.error("Decoded frame = null");
    // FIX: previously the null frame was still forwarded to the destination.
    return;
  }
  if (destination != null) destination.onReceivedDestinationFrame(b);
  else Logger.error("Audio destination = null");
}