Example usage for android.media AudioRecord STATE_INITIALIZED

Introduction

This page lists example usages of android.media.AudioRecord.STATE_INITIALIZED.

Prototype

int STATE_INITIALIZED

Document

Indicates that the AudioRecord is initialized and ready to be used.
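
Before calling startRecording(), a caller should verify that getState() returns AudioRecord.STATE_INITIALIZED, because the constructor can leave the instance in STATE_UNINITIALIZED (for example when the requested configuration is not supported or the RECORD_AUDIO permission is missing) without throwing. The following minimal sketch illustrates the check; the 44100 Hz mono PCM 16-bit configuration is an assumption chosen for illustration, not a value taken from the examples below.

int sampleRate = 44100;
int minBufSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
    recorder.startRecording(); // only safe once the state is INITIALIZED
    // ... read audio data ...
    recorder.stop();
}
recorder.release(); // always free the native resources, even if initialization failed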

Usage

From source file:Main.java

/**
 * Find a buffer size for which an AudioRecord instance can be initialized.
 *
 * @param audioSource
 *     the audio source (a MediaRecorder.AudioSource constant)
 * @param fs
 *     the sampling frequency in Hz
 * @param channelConfiguration
 *     the channel configuration
 * @param audioEncoding
 *     the audio encoding
 * @return the first buffer size (in bytes) that yields an initialized AudioRecord, or 0 if none does
 */
public static int getValidBufferSize(int audioSource, int fs, int channelConfiguration, int audioEncoding) {
    for (int bufferSize : new int[] { 256, 512, 1024, 2048, 4096 }) { // add the buffer sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration, audioEncoding,
                bufferSize);
        boolean initialized = audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED;
        audioRecordTemp.release(); // free the native resources of the probe instance
        if (initialized) {
            return bufferSize;
        }
    }
    return 0;
}
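
A caller might use this helper as follows; the MIC source and the 44100 Hz mono PCM 16-bit parameters are illustrative assumptions, not values taken from the original source.

int bufferSize = getValidBufferSize(MediaRecorder.AudioSource.MIC, 44100,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
if (bufferSize > 0) {
    AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    // ... use the recorder ...
    recorder.release();
}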

From source file:Main.java

/**
 * Check for a correct buffer size for an AudioRecord instance.
 *
 * @param audioSource
 *     the audio source (a MediaRecorder.AudioSource constant)
 * @param fs
 *     the sampling frequency in Hz
 * @param channelConfiguration
 *     the channel configuration
 * @param audioEncoding
 *     the audio encoding
 * @return the first buffer size (in bytes) that yields an initialized AudioRecord, or 0 if none does
 */
public static int checkCorrectBufferSize(int audioSource, int fs, int channelConfiguration, int audioEncoding) {
    for (int buffer : new int[] { 256, 512, 1024, 2048, 4096 }) { // add the buffer sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration, audioEncoding,
                buffer);
        boolean initialized = audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED;
        audioRecordTemp.release(); // free the native resources of the probe instance
        if (initialized) {
            return buffer;
        }
    }
    return 0;
}

From source file:com.example.rttytranslator.Dsp_service.java

public void startAudio() {
    if (!_enableDecoder)
        return;

    //boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);

    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);

        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder.release(); // release the failed instance before retrying with the default source
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();

        ct.start();
    }
}

From source file:edu.polyu.screamalert.SoundProcessing.java

public static AudioRecord findAudioRecord() {
    try {
        recordBufferSize = AudioRecord.getMinBufferSize(Config.RECORDER_SAMPLERATE, Config.RECORDER_CHANNELS,
                Config.RECORDER_AUDIO_ENCODING);
        nSubframePerBuf = recordBufferSize / frameShift / 2; // e.g., 8192/128/2 = 32             
        System.out.println("recordBufferSize: " + recordBufferSize);
        if (recordBufferSize != AudioRecord.ERROR_BAD_VALUE) {
            // check if we can instantiate and have a success
            AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, Config.RECORDER_SAMPLERATE,
                    Config.RECORDER_CHANNELS, Config.RECORDER_AUDIO_ENCODING, recordBufferSize);
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                return recorder;
            }
            recorder.release(); // release the instance that failed to initialize
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    Toast.makeText(thisContext, "Failed to create AudioRecord object", Toast.LENGTH_LONG).show();
    return null;
}
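
A caller would typically use this helper along the following lines (a sketch; error handling and the Config constants referenced above are assumed to be defined elsewhere in the project).

AudioRecord recorder = findAudioRecord();
if (recorder != null) {
    recorder.startRecording();
    // ... read buffers of recordBufferSize bytes and process them ...
    recorder.stop();
    recorder.release();
}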

From source file:com.inmobi.ultrapush.InfoRecActivity.java

@Override
protected void onResume() {
    super.onResume();
    TextView tv = (TextView) findViewById(R.id.textview_info_rec);
    tv.setMovementMethod(new ScrollingMovementMethod());

    tv.setText("Testing..."); // TODO: No use...
    tv.invalidate();

    // Show supported sample rate and corresponding minimum buffer size.
    String[] requested = new String[] { "8000", "11025", "16000", "22050", "32000", "44100", "48000", "96000" };
    String st = "sampleRate minBufSize\n";
    ArrayList<String> validated = new ArrayList<String>();
    for (String s : requested) {
        int rate = Integer.parseInt(s);
        int minBufSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (minBufSize != AudioRecord.ERROR_BAD_VALUE) {
            validated.add(s);
            st += s + "  \t" + Integer.toString(minBufSize) + "\n";
        }
    }
    requested = validated.toArray(new String[0]);

    tv.setText(st);
    tv.invalidate();

    // Test audio source
    String[] audioSourceString = new String[] { "DEFAULT", "MIC", "VOICE_UPLINK", "VOICE_DOWNLINK",
            "VOICE_CALL", "CAMCORDER", "VOICE_RECOGNITION" };
    int[] audioSourceId = new int[] { MediaRecorder.AudioSource.DEFAULT, // Default audio source
            MediaRecorder.AudioSource.MIC, // Microphone audio source
            MediaRecorder.AudioSource.VOICE_UPLINK, // Voice call uplink (Tx) audio source
            MediaRecorder.AudioSource.VOICE_DOWNLINK, // Voice call downlink (Rx) audio source
            MediaRecorder.AudioSource.VOICE_CALL, // Voice call uplink + downlink audio source
            MediaRecorder.AudioSource.CAMCORDER, // Microphone audio source with same orientation as camera if available, the main device microphone otherwise (apilv7)
            MediaRecorder.AudioSource.VOICE_RECOGNITION, // Microphone audio source tuned for voice recognition if available, behaves like DEFAULT otherwise. (apilv7)
            //            MediaRecorder.AudioSource.VOICE_COMMUNICATION, // Microphone audio source tuned for voice communications such as VoIP. It will for instance take advantage of echo cancellation or automatic gain control if available. It otherwise behaves like DEFAULT if no voice processing is applied. (apilv11)
            //            MediaRecorder.AudioSource.REMOTE_SUBMIX,       // Audio source for a submix of audio streams to be presented remotely. (apilv19)
    };
    tv.append("\n-- Audio Source Test --");
    for (String s : requested) {
        int sampleRate = Integer.parseInt(s);
        int recBufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        tv.append("\n(" + Integer.toString(sampleRate) + "Hz, MONO, 16BIT)\n");
        for (int iass = 0; iass < audioSourceId.length; iass++) {
            st = "";
            // wait for AudioRecord fully released...
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            AudioRecord record;
            record = new AudioRecord(audioSourceId[iass], sampleRate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, recBufferSize);
            if (record.getState() == AudioRecord.STATE_INITIALIZED) {
                st += audioSourceString[iass] + " successed";
                int as = record.getAudioSource();
                if (as != audioSourceId[iass]) {
                    int i = 0;
                    while (i < audioSourceId.length) {
                        if (as == audioSourceId[i]) {
                            break;
                        }
                        i++;
                    }
                    if (i >= audioSourceId.length) {
                        st += "(auto set to \"unknown source\")";
                    } else {
                        st += "(auto set to " + audioSourceString[i] + ")";
                    }
                }
                st += "\n";
            } else {
                st += audioSourceString[iass] + " failed\n";
            }
            record.release();
            record = null;
            tv.append(st);
            tv.invalidate();
        }
    }

}

From source file:com.brejza.matt.habmodem.Dsp_service.java

public void startAudio() {
    if (!_enableDecoder)
        return;

    boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);
    logEvent("Starting Audio. Mic avaliable: " + mic, false);
    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);

        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder.release(); // release the failed instance before retrying with the default source
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);

            if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                logEvent("Error - Could not initialise audio", true);
                return;
            }
            logEvent("Using default audio source", false);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        logEvent("Starting Audio Thread.", false);
        setDecoderRunningNotification();
        ct.start();
    }
}

From source file:com.example.sensingapp.SensingApp.java

private void startAudioRecording() {
    m_audioRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, m_nAudioSampleRate,
            AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, m_nBufferSize);

    if (m_audioRecorder == null)
        return;

    int i = m_audioRecorder.getState();
    if (i == AudioRecord.STATE_INITIALIZED) {
        m_audioRecorder.startRecording();
    } else {
        return;
    }

    if (m_blnRecordSoundLevel == true || m_blnRecordSoundFile == true) {
        m_processSoundThread = new Thread(new Runnable() {
            public void run() {
                processAudioData();
            }
        }, "Audio Thread");

        m_processSoundThread.start();

        if (m_blnRecordSoundLevel == true) {
            m_soundLevelThread = new Thread(new Runnable() {
                public void run() {
                    calculateAudioSoundLevel();
                }
            }, "Sould Level Thread");

            m_soundLevelThread.start();
        }
    }

}

From source file:com.example.sensingapp.SensingApp.java

private void stopAudioRecording() {

    if (m_audioRecorder != null) {
        int i = m_audioRecorder.getState();

        if (i == AudioRecord.STATE_INITIALIZED) {
            m_audioRecorder.stop();
            m_audioRecorder.release();
        }

        m_audioRecorder = null;

        if (m_blnRecordSoundLevel == true || m_blnRecordSoundFile == true) {
            m_processSoundThread = null;
            m_soundLevelThread = null;
        }

    }

    if (m_blnRecordSoundFile == true) {
        copyWaveFile();
        deleteTempFile();
    }
}