public void setActive(boolean active) {
    this.active = active;
    Visualizer visualizer = getVizualiser();
    if (visualizer == null) {
        return;
    }
    // Clamp the preferred rate to what this device's Visualizer supports.
    int captureRate = Math.min(PREFERRED_CAPTURE_RATE_MILLIHERTZ, Visualizer.getMaxCaptureRate());
    if (active) {
        visualizer.setDataCaptureListener(
                new Visualizer.OnDataCaptureListener() {
                    @Override
                    public void onWaveFormDataCapture(
                            Visualizer visualizer, byte[] waveform, int samplingRate) {
                        updateVisualizer(waveform);
                    }

                    @Override
                    public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {}
                },
                captureRate,
                true,   // capture waveform
                false); // no FFT needed
    } else {
        visualizer.setDataCaptureListener(null, captureRate, false, false);
    }
    visualizer.setEnabled(active);
    invalidate();
}
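/*
 * A minimal sketch of the updateVisualizer(byte[]) method the listener above calls,
 * assuming this view caches the latest capture and redraws itself; the field name
 * mWaveformBytes is an assumption, not taken from the source. Requires java.util.Arrays.
 */
private byte[] mWaveformBytes;

public void updateVisualizer(byte[] waveform) {
    // Copy the capture so a later callback cannot mutate it mid-draw.
    mWaveformBytes = Arrays.copyOf(waveform, waveform.length);
    invalidate(); // schedule onDraw() with the fresh waveform
}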
public VisualizerGLSurfaceView(Context context, int mediaPlayerSessionId) {
    super(context);
    mWaveformData = null;
    mVisualizer = new Visualizer(mediaPlayerSessionId);
    mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
    mVisualizer.setDataCaptureListener(this, Visualizer.getMaxCaptureRate(), true, false);
    mVisualizer.setEnabled(true);
    setEGLContextClientVersion(1);
    setRenderer(new VisualizerGLRenderer(this));
}
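/*
 * Sketch of a matching teardown, assuming the mVisualizer field created above: the
 * constructor enables the Visualizer but nothing shown here releases it, and an
 * unreleased Visualizer holds its capture slot until the process dies.
 */
@Override
protected void onDetachedFromWindow() {
    mVisualizer.setEnabled(false);
    mVisualizer.release(); // free the native AudioEffect resources
    super.onDetachedFromWindow();
}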
// Runs on a SECONDARY thread
@Override
public void processFrame(android.media.audiofx.Visualizer visualizer, boolean playing) {
    if (!lock.lockLowPriority()) return;
    try {
        if (transmitting) {
            // We alternate via ignoreInput because capturing 1024 samples, 60 times
            // a second, is wasteful: there are only 44100 or 48000 samples per second.
            if (ignoreInput == 0) {
                // We must never call any method on the visualizer
                // while the player is not actually playing.
                if (!playing) Arrays.fill(waveform, 0, 1024, (byte) 0x80); // unsigned 8-bit silence
                else visualizer.getWaveForm(waveform);
            }
            if (framesToSkip <= 0) {
                framesToSkip = framesToSkipOriginal;
                bt.getOutputStream()
                        .write(
                                waveform,
                                0,
                                SimpleVisualizerJni.commonProcess(waveform, size | ignoreInput | dataType));
                packetsSent++;
            } else {
                SimpleVisualizerJni.commonProcess(waveform, ignoreInput | dataType);
                framesToSkip--;
            }
            ignoreInput ^= IGNORE_INPUT;
        }
        int stateI = state.getAndSet(0);
        if (stateI != 0) {
            // Build and send a player-state message:
            // SOH, message type, payload length, 0, payload..., EOT.
            waveform[0] = StartOfHeading;
            waveform[1] = (byte) MessagePlayerState;
            waveform[3] = 0;
            int len = 0;
            len = writeByte(waveform, len, stateI & 3);
            len = writeByte(waveform, len, stateVolume);
            // Song position, little-endian.
            stateI = stateSongPosition;
            len = writeByte(waveform, len, stateI);
            len = writeByte(waveform, len, stateI >> 8);
            len = writeByte(waveform, len, stateI >> 16);
            len = writeByte(waveform, len, stateI >> 24);
            // Song length, little-endian.
            stateI = stateSongLength;
            len = writeByte(waveform, len, stateI);
            len = writeByte(waveform, len, stateI >> 8);
            len = writeByte(waveform, len, stateI >> 16);
            len = writeByte(waveform, len, stateI >> 24);
            waveform[2] = (byte) (len << 1);
            waveform[4 + len] = EndOfTransmission;
            bt.getOutputStream().write(waveform, 0, len + 5);
            packetsSent++;
        }
    } catch (IOException ex) {
        // Bluetooth error
        if (connected) MainHandler.sendMessage(this, MSG_BLUETOOTH_RXTX_ERROR);
    } catch (Throwable ex) {
        ex.printStackTrace();
    } finally {
        lock.releaseLowPriority();
    }
}
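/*
 * Hedged sketch of the writeByte(byte[], int, int) helper referenced above. Assumption:
 * payload bytes live after the 4-byte header (SOH, type, length, 0), so logical payload
 * index len maps to buffer index 4 + len, which matches the EndOfTransmission write at
 * waveform[4 + len]. The real helper may also escape SOH/EOT bytes inside the payload;
 * that detail is not recoverable from this snippet.
 */
private static int writeByte(byte[] packet, int len, int value) {
    packet[4 + len] = (byte) value;
    return len + 1;
}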
public EqualizerController(View rootView, Context context) {
    mView = rootView;
    mContext = context;
    mPlayer = MediaPlayer.create(context, R.raw.testmp3);
    setupVisualizerUI();
    mEqualizer = new Equalizer(0, mPlayer.getAudioSessionId());
    mNumFrequencyBands = mEqualizer.getNumberOfBands();
    mLowerBandLevel = mEqualizer.getBandLevelRange()[0];
    mUpperBandLevel = mEqualizer.getBandLevelRange()[1];
    mEqualizer.setEnabled(true);
    setupEqualizerUI();
    mVisualizer.setEnabled(true);
    // When the stream ends, we don't need to collect any more data. We don't
    // do this in setupVisualizerUI because we likely want to have more,
    // non-Visualizer-related code in this callback.
    mPlayer.setOnCompletionListener(
            new MediaPlayer.OnCompletionListener() {
                public void onCompletion(MediaPlayer mediaPlayer) {
                    mVisualizer.setEnabled(false);
                }
            });
    mPlayer.start();
}
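/*
 * Sketch of the per-band wiring setupEqualizerUI() presumably performs, assuming one
 * SeekBar per band; the container id R.id.eq_container is hypothetical, and the real
 * method may lay things out differently. Uses only the Equalizer calls shown above
 * plus the standard SeekBar API.
 */
private void setupEqualizerUI() {
    for (short band = 0; band < mNumFrequencyBands; band++) {
        final short b = band;
        SeekBar bar = new SeekBar(mContext);
        // SeekBar progress is 0-based, so shift the signed millibel range up.
        bar.setMax(mUpperBandLevel - mLowerBandLevel);
        bar.setProgress(mEqualizer.getBandLevel(b) - mLowerBandLevel);
        bar.setOnSeekBarChangeListener(
                new SeekBar.OnSeekBarChangeListener() {
                    @Override
                    public void onProgressChanged(SeekBar s, int progress, boolean fromUser) {
                        // Map the progress back into the signed millibel range.
                        mEqualizer.setBandLevel(b, (short) (progress + mLowerBandLevel));
                    }

                    @Override public void onStartTrackingTouch(SeekBar s) {}

                    @Override public void onStopTrackingTouch(SeekBar s) {}
                });
        ((ViewGroup) mView.findViewById(R.id.eq_container)).addView(bar);
    }
}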
private void setupVisualizerUI() {
    // Create the Visualizer object and attach it to our media player.
    mVisualizerView = (VisualizerView) mView.findViewById(R.id.myvisualizerview);
    mVisualizer = new Visualizer(mPlayer.getAudioSessionId());
    mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
    mVisualizer.setDataCaptureListener(
            new Visualizer.OnDataCaptureListener() {
                public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
                    mVisualizerView.updateVisualizer(bytes);
                }

                public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
            },
            Visualizer.getMaxCaptureRate() / 2,
            true,
            false);
}
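/*
 * The Visualizer constructor above throws unless the app holds
 * android.permission.RECORD_AUDIO, which on API 23+ must also be granted at runtime.
 * A minimal check, assuming AndroidX and an Activity context; the request code
 * REQUEST_RECORD_AUDIO is a hypothetical constant, not from the source.
 */
private static final int REQUEST_RECORD_AUDIO = 1;

private boolean ensureRecordAudioPermission(Activity activity) {
    if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(
                activity, new String[] {Manifest.permission.RECORD_AUDIO}, REQUEST_RECORD_AUDIO);
        return false; // defer Visualizer creation until the grant callback fires
    }
    return true;
}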
@Override
public void onPause() {
    if (started) {
        recordTask.cancel(true);
    }
    started = false;
    analyseButtonText.setText("Start Analyzing");
    // The Visualizer only exists in mix-capture mode; release it so other
    // apps can use the capture slot.
    if (!MainActivity.analyzeMode) {
        visualizer.release();
    }
    super.onPause();
}
/**
 * Links the visualizer to a player.
 *
 * @param player - MediaPlayer instance to link to
 */
public void link(MediaPlayer player) {
    if (player == null) {
        throw new NullPointerException("Cannot link to null MediaPlayer");
    }

    // Create the Visualizer object and attach it to our media player.
    mVisualizer = new Visualizer(player.getAudioSessionId());
    mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);

    // Pass through Visualizer data to VisualizerView.
    Visualizer.OnDataCaptureListener captureListener =
            new Visualizer.OnDataCaptureListener() {
                @Override
                public void onWaveFormDataCapture(
                        Visualizer visualizer, byte[] bytes, int samplingRate) {
                    updateVisualizer(bytes);
                }

                @Override
                public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
                    updateVisualizerFFT(bytes);
                }
            };
    mVisualizer.setDataCaptureListener(
            captureListener, Visualizer.getMaxCaptureRate() / 2, true, true);

    // Enable the Visualizer, and disable it when we're done with the stream.
    mVisualizer.setEnabled(true);
    player.setOnCompletionListener(
            new MediaPlayer.OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mediaPlayer) {
                    mVisualizer.setEnabled(false);
                }
            });
}
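/*
 * Sketch of an unlink/cleanup counterpart to link(), assuming the same mVisualizer
 * field; the method name release() is an assumption, not from the source. The
 * completion listener above only disables capture, so something must eventually
 * release the Visualizer itself.
 */
public void release() {
    if (mVisualizer != null) {
        mVisualizer.setEnabled(false);
        mVisualizer.release(); // return the capture slot to the audio framework
        mVisualizer = null;
    }
}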
/** Turns analyzing on and off -- makes a new canvas (and visualizer). */
public void toggleAnalyse(View v) {
    if (started) {
        // Stop analyzing.
        if (MainActivity.autoSave) {
            T.cancel();
        }
        timeCount = 0;
        started = false;
        analyseButtonText.setText("Start Analyzing");
        recordTask.cancel(true);
        // Wipe the display by replacing the bitmap behind the ImageView.
        bitmap = Bitmap.createBitmap((int) imageWidth, (int) imageHeight, Bitmap.Config.ARGB_8888);
        canvas = new Canvas(bitmap);
        imageView.setImageBitmap(bitmap);
        canvas.drawColor(0xFF444444);
        imageView.invalidate();
        note[0] = ""; // reset the detected note
        if (!MainActivity.analyzeMode) {
            visualizer.release();
        }
    } else {
        // Start analyzing.
        timeCount = 0;
        if (MainActivity.autoSave) {
            // Count elapsed time in 10 ms ticks, incremented on the UI thread.
            T = new Timer();
            T.scheduleAtFixedRate(
                    new TimerTask() {
                        @Override
                        public void run() {
                            getActivity()
                                    .runOnUiThread(
                                            new Runnable() {
                                                @Override
                                                public void run() {
                                                    timeCount++;
                                                }
                                            });
                        }
                    },
                    10,
                    10);
            setFileName();
            writeToFile("", false);
        }
        drawVisualization(imageView, "start");
        if (!MainActivity.analyzeMode) {
            visualizer = new Visualizer(0); // session 0 = global output mix
        }
    }
}
@Override
protected Void doInBackground(Void... arg0) {
    if (MainActivity.analyzeMode) {
        // RECORD FROM MICROPHONE
        try {
            // Set up the AudioRecord.
            int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
            AudioRecord audioRecord =
                    new AudioRecord(
                            MediaRecorder.AudioSource.VOICE_RECOGNITION,
                            frequency,
                            channelConfiguration,
                            audioEncoding,
                            bufferSize);
            short[] buffer = new short[blockSize];
            double[] toTransform = new double[blockSize];
            audioRecord.startRecording();

            // Record audio and perform the FFT, block by block.
            while (started) {
                int bufferReadResult = audioRecord.read(buffer, 0, blockSize);
                for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
                    // Normalize 16-bit PCM to [-1, 1).
                    toTransform[i] = (double) buffer[i] / 32768.0;
                }
                transformer.ft(toTransform);
                getFrequency(toTransform, 0, 1, 2);
                publishProgress(toTransform);
            }
            audioRecord.stop();
        } catch (Throwable t) {
            t.printStackTrace();
            Log.e("AudioRecord", "Recording Failed");
        }
    } else {
        // RECORD FROM SOUND MIX
        // Set up the Visualizer on the output mix (audio session 0).
        visualizer = new Visualizer(0);
        visualizer.setEnabled(false);
        int capRate = Visualizer.getMaxCaptureRate();
        visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);

        // Use the Visualizer to return audio, then perform the FFT.
        Visualizer.OnDataCaptureListener captureListener =
                new Visualizer.OnDataCaptureListener() {
                    public void onWaveFormDataCapture(
                            Visualizer visualizer, byte[] bytes, int samplingRate) {
                        double[] toTransform = new double[blockSize];
                        // Guard against captures larger than the FFT block.
                        for (int i = 0; i < bytes.length && i < blockSize; i++) {
                            toTransform[i] = (double) bytes[i] / 8192.0;
                        }
                        transformer.ft(toTransform);
                        getFrequency(toTransform, 0, 1, 2);
                        publishProgress(toTransform);
                    }

                    public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
                };
        int status =
                visualizer.setDataCaptureListener(captureListener, capRate, true /*wave*/, false /*no fft needed*/);
        if (status != Visualizer.SUCCESS) {
            Log.e("Visualizer", "setDataCaptureListener failed: " + status);
        }
        visualizer.setScalingMode(Visualizer.SCALING_MODE_AS_PLAYED);
        visualizer.setEnabled(true);
    }
    return null;
}
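/*
 * Sketch of the onProgressUpdate counterpart that receives the publishProgress(toTransform)
 * calls above on the UI thread, assuming the task is declared as
 * AsyncTask<Void, double[], Void> and is an inner class with access to the fragment's
 * fields; drawSpectrum is a hypothetical helper, not from the source.
 */
@Override
protected void onProgressUpdate(double[]... transformed) {
    if (started) {
        drawSpectrum(transformed[0]); // hypothetical: paint the FFT magnitudes
        imageView.invalidate();
    }
}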
@Override
public void onDetach() {
    mVisualizer.setEnabled(false);
    mVisualizer.release(); // the fragment is going away; free the capture slot
    mPlayer.stop();
    super.onDetach();
}
@Override
public void onPause() {
    mVisualizer.setEnabled(false); // stop capture while in the background
    mPlayer.pause();
    super.onPause();
}
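/*
 * Matching resume sketch, assuming the same mVisualizer and mPlayer fields: re-enable
 * the capture and playback that onPause() above suspended. Whether playback should
 * auto-resume is an app-level decision not stated in the source.
 */
@Override
public void onResume() {
    super.onResume();
    mPlayer.start();
    mVisualizer.setEnabled(true);
}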