private int getValidSampleRates(int channelConfiguration, int audioEncoding) { for (int rate : new int[]{ 8000, 11025, 16000, 22050, 44100, 48000 }) { // add the rates you wish to check against int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfiguration, audioEncoding); if (bufferSize > 0) { return rate; } } return 0; }
/**
 * Computes the minimum AudioRecord buffer size (in bytes) for the given
 * audio configuration.
 *
 * Fix: the deprecated {@code CHANNEL_CONFIGURATION_MONO/STEREO} constants are
 * replaced by their modern equivalents {@code CHANNEL_IN_MONO/STEREO}, which
 * is what AudioRecord APIs expect on current platform versions.
 *
 * @param audioConfiguration carries the sample frequency, PCM encoding and
 *                           channel count (2 selects stereo, anything else mono)
 * @return the minimum buffer size in bytes, or a negative AudioRecord error code
 */
public static int getRecordBufferSize(AudioConfiguration audioConfiguration) {
    int frequency = audioConfiguration.frequency;
    int audioEncoding = audioConfiguration.encoding;
    // Default to mono; only an explicit channelCount of 2 selects stereo.
    int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
    if (audioConfiguration.channelCount == 2) {
        channelConfiguration = AudioFormat.CHANNEL_IN_STEREO;
    }
    return AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
}
public AudioRecordManager() { bufferSize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat .ENCODING_PCM_16BIT); mRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2); }
/**
 * Configures the recorder (8 kHz, default input channel, 16-bit PCM) and
 * starts it. Idempotent: does nothing if a recorder already exists.
 *
 * Fixes: validates the {@code getMinBufferSize} result (it returns a negative
 * error code for unsupported parameters), verifies the recorder actually
 * initialized before calling {@code startRecording()} (which would otherwise
 * throw and leak the native instance), and logs the informational buffer-size
 * message at debug level instead of error level.
 *
 * @throws IOException           if the recorder cannot be configured or initialized
 * @throws IllegalStateException if the recorder cannot start recording
 */
public void start() throws IllegalStateException, IOException {
    if (recorder == null) {
        minSize = AudioRecord.getMinBufferSize(
                8000, AudioFormat.CHANNEL_IN_DEFAULT, AudioFormat.ENCODING_PCM_16BIT);
        if (minSize <= 0) {
            // ERROR or ERROR_BAD_VALUE: the device rejects these parameters.
            throw new IOException("Unsupported recording parameters, getMinBufferSize returned " + minSize);
        }
        Log.d("AudioCodec", "Minimum size is " + minSize);
        recorder = new AudioRecord(
                MediaRecorder.AudioSource.MIC,
                8000,
                AudioFormat.CHANNEL_IN_DEFAULT,
                AudioFormat.ENCODING_PCM_16BIT,
                minSize);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            // Release the failed native instance before surfacing the error.
            recorder.release();
            recorder = null;
            throw new IOException("AudioRecord failed to initialize (is RECORD_AUDIO granted?)");
        }
        recorder.startRecording();
    }
}
/**
 * Creates and starts a 16-bit PCM microphone recorder at the requested
 * sampling rate.
 *
 * Fix: when initialization fails, the broken AudioRecord instance is now
 * released before throwing, so its native resources are not leaked.
 *
 * @param samplingRate sample rate in Hz
 * @param isMono       true for mono capture, false for stereo
 * @throws GdxRuntimeException if the recorder fails to initialize (commonly
 *                             a missing RECORD_AUDIO permission)
 */
public AndroidAudioRecorder (int samplingRate, boolean isMono) {
    int channelConfig = isMono ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int minBufferSize = AudioRecord.getMinBufferSize(samplingRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, samplingRate, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
        // Free the native resources of the failed instance before bailing out.
        recorder.release();
        throw new GdxRuntimeException("Unable to initialize AudioRecorder.\nDo you have the RECORD_AUDIO permission?");
    }
    recorder.startRecording();
}
/**
 * Creates and starts a 16-bit PCM microphone recorder at the requested
 * sampling rate.
 *
 * Fix: when initialization fails, the broken AudioRecord instance is now
 * released before throwing, so its native resources are not leaked.
 *
 * @param samplingRate sample rate in Hz
 * @param isMono       true for mono capture, false for stereo
 * @throws GdxRuntimeException if the recorder fails to initialize (commonly
 *                             a missing RECORD_AUDIO permission)
 */
public AndroidAudioRecorder (int samplingRate, boolean isMono) {
    int channelConfig = isMono ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int minBufferSize = AudioRecord.getMinBufferSize(samplingRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, samplingRate, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
        // Free the native resources of the failed instance before bailing out.
        recorder.release();
        throw new GdxRuntimeException("Unable to initialize AudioRecorder.\nDo you have the RECORD_AUDIO permission?");
    }
    recorder.startRecording();
}
/**
 * Prepares the microphone AudioRecord and the reusable byte/short read
 * buffers for the capture loop.
 *
 * @param sampleRate        sample rate in Hz
 * @param channelConfig     an {@code AudioFormat.CHANNEL_IN_*} constant
 * @param audioFormat       an {@code AudioFormat.ENCODING_*} constant
 * @param byteBufferSize    caller-requested read buffer size in bytes
 * @param audioDataCallback sink for captured audio data
 */
AudioRecordRunnable(int sampleRate, int channelConfig, int audioFormat, int byteBufferSize,
        @NonNull AudioDataCallback audioDataCallback) {
    mAudioFormat = audioFormat;
    mAudioDataCallback = audioDataCallback;
    mByteBufferSize = byteBufferSize;
    mShortBufferSize = byteBufferSize / 2; // two bytes per 16-bit sample
    mByteBuffer = new byte[mByteBufferSize];
    mShortBuffer = new short[mShortBufferSize];
    // Never hand AudioRecord a buffer smaller than the platform minimum;
    // getMinBufferSize's negative error codes are also neutralized by max().
    final int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, mAudioFormat);
    final int recordBufferSize = Math.max(minBufferSize, byteBufferSize);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfig,
            audioFormat, recordBufferSize);
}
// NOTE(review): this is a fragment of an anonymous class — the trailing
// "}).start();" closes a Thread/Runnable construct that begins outside this view.
@SuppressWarnings("ResultOfMethodCallIgnored") @Override public void runImpl() {
    // Use at least the platform minimum buffer size for this rate/channel/encoding
    // (a positive fixed size also masks getMinBufferSize's negative error codes).
    int bufferSize = Math.max(BUFFER_BYTES_ELEMENTS * BUFFER_BYTES_PER_ELEMENT,
            AudioRecord.getMinBufferSize(RECORDER_SAMPLE_RATE, RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING));
    AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, RECORDER_SAMPLE_RATE,
            RECORDER_CHANNELS_IN, RECORDER_AUDIO_ENCODING, bufferSize);
    try {
        // Transition STARTING -> BUSY; the loop below runs while BUSY.
        if (recorderState == RECORDER_STATE_STARTING) {
            recorderState = RECORDER_STATE_BUSY;
        }
        recorder.startRecording();
        byte recordBuffer[] = new byte[bufferSize];
        do {
            // Blocking read; a non-positive return value is an error code.
            int bytesRead = recorder.read(recordBuffer, 0, bufferSize);
            if (bytesRead > 0) {
                recordingCallback.onDataReady(recordBuffer);
            } else {
                Log.e(AudioRecorder.class.getSimpleName(), "error: " + bytesRead);
                onRecordFailure();
            }
        } while (recorderState == RECORDER_STATE_BUSY);
    } finally {
        // Always free the native AudioRecord resources, even on failure.
        recorder.release();
    }
    onExit();
} }).start();
private boolean configure() { if ("input".equals(io)) { minRecorderBufferSize = AudioRecord.getMinBufferSize(SAMPLING_RATE, RECORDING_CHANNEL, RECORDER_AUDIO_ENCODING); if (minRecorderBufferSize == AudioRecord.ERROR || minRecorderBufferSize == AudioRecord.ERROR_BAD_VALUE) { minRecorderBufferSize = SAMPLING_RATE * 2;
/**
 * Returns the PCM capture buffer size: the platform-recommended minimum for
 * 16-bit PCM at the configured sample rate and channel, rounded up to the
 * next multiple of 8192 bytes.
 */
private int getPcmBufferSize() {
    final int minSize = AudioRecord.getMinBufferSize(
            sampleRate, channel, AudioFormat.ENCODING_PCM_16BIT);
    // Ceiling-round to an 8 KiB boundary (equivalent to add-8191-then-truncate).
    return ((minSize + 8191) / 8192) * 8192;
}
private StreamPublisherParam(int width, int height, int videoBitRate, int frameRate, int iframeInterval, int samplingRate, int audioBitRate, int audioSource, int channelCfg) { this.width = width; this.height = height; this.videoBitRate = videoBitRate; this.frameRate = frameRate; this.iframeInterval = iframeInterval; this.samplingRate = samplingRate; this.audioBitRate = audioBitRate; this.audioBufferSize = AudioRecord.getMinBufferSize(samplingRate, channelCfg, AudioFormat.ENCODING_PCM_16BIT) * 2; this.audioSource = audioSource; this.channelCfg = channelCfg; }
/**
 * Converts the platform's minimum AudioRecord buffer size (in bytes) into a
 * frame count, where one frame is one sample per channel.
 *
 * NOTE(review): the buffer-size query hard-codes CHANNEL_IN_MONO, so the
 * method only supports mono; the assertion below enforces that numChannels
 * equals the project constant CHANNELS (presumably 1 — confirm).
 */
private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
    // Bytes per frame = channels * bytes-per-sample; BITS_PER_SAMPLE is
    // presumably 16 for ENCODING_PCM_16BIT — confirm against its declaration.
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    assertTrue(numChannels == CHANNELS);
    return AudioRecord.getMinBufferSize(
        sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) / bytesPerFrame;
}
/**
 * Builds the recording thread's AudioRecord (microphone source, mono,
 * 16-bit PCM) using the platform minimum buffer size.
 *
 * @param handler handler that receives messages from this thread
 */
public RecordThread(Handler handler) {
    mHandler = handler;
    mBufferSize = AudioRecord.getMinBufferSize(
            SAMPLE_RATE_IN_HZ, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    mAudioRecord = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            SAMPLE_RATE_IN_HZ,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            mBufferSize);
}
/**
 * Allocates the capture buffer and the microphone AudioRecord for the
 * required frequency/channel/format, notifying the listener on failure.
 *
 * Fix: a recorder that fails to initialize is now {@code release()}d before
 * the reference is dropped, so its native resources are not leaked.
 */
private void initialize() {
    mBufferSize = AudioRecord.getMinBufferSize(REQUIRED_FREQUENCY, REQUIRED_CHANNEL, REQUIRED_FORMAT);
    Log.d(TAG, "Recommended bufferSize: " + mBufferSize);
    // A non-positive value is an AudioRecord error code; skip setup entirely.
    if (mBufferSize > 0) {
        // NOTE(review): getMinBufferSize is in BYTES, so short[mBufferSize]
        // over-allocates 2x for 16-bit PCM; kept as-is because other code may
        // rely on buffer.length — confirm before shrinking.
        buffer = new short[mBufferSize];
        mAudio = new AudioRecord(MediaRecorder.AudioSource.MIC, REQUIRED_FREQUENCY, REQUIRED_CHANNEL, REQUIRED_FORMAT, mBufferSize);
        if (mAudio.getState() == AudioRecord.STATE_UNINITIALIZED) {
            Log.d(TAG, "Unable to initialize AudioRecord. Ensure nothing else is using the microphone.");
            this.mListener.quit(OnUpdateListener.ERROR_CODE_MICROPHONE_LOCKED);
            // Release the failed instance's native resources before dropping it.
            mAudio.release();
            mAudio = null;
        }
    }
}
/**
 * Probes for a working AudioRecord by doubling the buffer size until the
 * recorder initializes, starting from half the platform minimum (so the
 * first attempt uses exactly the minimum).
 *
 * Fixes: (1) failed candidate recorders are now released, so their native
 * resources are not leaked; (2) success is decided by checking the final
 * candidate's state rather than the attempt counter — previously a recorder
 * that initialized on the last (TENth) attempt was discarded and leaked.
 *
 * @return an initialized AudioRecord with {@code this.bufferSize} set to the
 *         size that worked, or null after TEN failed attempts
 */
private AudioRecord getRecorderAndInitBufferSize (final int rate, final int channelConfig, final int audioFormat) {
    int foundBufferSize = AudioRecord.getMinBufferSize (rate, channelConfig, audioFormat) / AndroidRecordSoundProcessor.TWICE;
    AudioRecord candidateRecorder = null;
    int attempts = 0;
    while (!this.recorderIsInitialized (candidateRecorder) && attempts < AndroidRecordSoundProcessor.TEN) {
        if (candidateRecorder != null) {
            // Free the native resources of the previous failed candidate.
            candidateRecorder.release ();
        }
        foundBufferSize *= AndroidRecordSoundProcessor.TWICE;
        candidateRecorder = new AudioRecord (AudioSource.DEFAULT, rate, channelConfig, audioFormat, foundBufferSize);
        attempts++;
    }
    if (!this.recorderIsInitialized (candidateRecorder)) {
        if (candidateRecorder != null) {
            candidateRecorder.release ();
        }
        return null;
    }
    this.bufferSize = foundBufferSize;
    return candidateRecorder;
}
/**
 * Builds default loopback test settings: the device's native output sample
 * rate for STREAM_MUSIC, plus the minimum mono 16-bit PCM buffer sizes for
 * both playback and recording at that rate.
 */
public static TestSettings computeDefaultSettings() {
    final int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
    final int playerBufferBytes = AudioTrack.getMinBufferSize(
            samplingRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    final int recorderBufferBytes = AudioRecord.getMinBufferSize(
            samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    return new TestSettings(samplingRate, playerBufferBytes, recorderBufferBytes);
}
/**
 * Sets up the stereo 16-bit PCM microphone recorder, its byte buffer, and
 * the FDK-AAC encoder at the default sample rate.
 *
 * Fix: the deprecated {@code CHANNEL_CONFIGURATION_STEREO} constant is
 * replaced by its modern equivalent {@code CHANNEL_IN_STEREO}.
 */
private void InitAudioRecord() {
    _iRecorderBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE_DEF,
            AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    _AudioRecorder = new AudioRecord(AudioSource.MIC, SAMPLE_RATE_DEF,
            AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, _iRecorderBufferSize);
    _RecorderBuffer = new byte[_iRecorderBufferSize];
    _fdkaacEnc = new FdkAacEncode();
    _fdkaacHandle = _fdkaacEnc.FdkAacInit(SAMPLE_RATE_DEF, CHANNEL_NUMBER_DEF);
}
/**
 * Initializes the recognizer's audio pipeline under the recognizer lock:
 * the microphone AudioRecord (minimum buffer size), the voice-activity
 * detector, and a MediaPlayer wired back to this object for error and
 * completion callbacks.
 */
private void init() {
    synchronized (recognizerLock) {
        final int bufferSize =
                AudioRecord.getMinBufferSize(SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT, bufferSize);

        // VAD is toggled by configuration and reports speech events here.
        vad.setEnabled(config.isVoiceActivityDetectionEnabled());
        vad.setSpeechListener(this);

        mediaPlayer = new MediaPlayer();
        mediaPlayer.setOnErrorListener(this);
        mediaPlayer.setOnCompletionListener(this);
    }
}
/**
 * Sets up audio capture and playback for the recognizer while holding the
 * recognizer lock: an AudioRecord at the platform minimum buffer size, the
 * configurable voice-activity detector, and a MediaPlayer whose error and
 * completion events are routed back to this object.
 */
private void init() {
    synchronized (recognizerLock) {
        final int minBufferSize =
                AudioRecord.getMinBufferSize(SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, AUDIO_FORMAT, minBufferSize);

        // Voice-activity detection: enable per config, listen for speech here.
        vad.setSpeechListener(this);
        vad.setEnabled(config.isVoiceActivityDetectionEnabled());

        mediaPlayer = new MediaPlayer();
        mediaPlayer.setOnCompletionListener(this);
        mediaPlayer.setOnErrorListener(this);
    }
}
/**
 * Translates the stream info into AudioFormat parameters and delegates to
 * getRecorderAndInitBufferSize to locate a working AudioRecord.
 *
 * @param streamInfo source of sample size, channel count and sample rate
 * @return the recorder wrapped by maybeNull (which handles the null case)
 * @throws SoundTransformException propagated from maybeNull on failure
 */
private AudioRecord findAudioRecorder (final StreamInfo streamInfo) throws SoundTransformException {
    final boolean eightBit = streamInfo.getSampleSize () == 1;
    final int audioFormat = eightBit ? AudioFormat.ENCODING_PCM_8BIT : AudioFormat.ENCODING_PCM_16BIT;
    final boolean mono = streamInfo.getChannels () == 1;
    final int channelConfig = mono ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    final int rate = (int) streamInfo.getSampleRate ();
    // Seed bufferSize with the platform minimum; the helper grows it while probing.
    this.bufferSize = AudioRecord.getMinBufferSize (rate, channelConfig, audioFormat);
    return maybeNull (getRecorderAndInitBufferSize (rate, channelConfig, audioFormat), streamInfo);
}