Example usage for android.media AudioFormat CHANNEL_OUT_MONO

Introduction

This page collects example usages of android.media.AudioFormat.CHANNEL_OUT_MONO, the channel mask identifying a single-channel (mono) output configuration. It is typically passed to APIs such as AudioTrack.getMinBufferSize() and the AudioTrack constructor.

Prototype

public static final int CHANNEL_OUT_MONO
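
Before the examples, here is a minimal sketch of the typical pattern (not taken from any of the sources below; the 44100 Hz rate and STREAM_MUSIC stream type are illustrative choices): CHANNEL_OUT_MONO is passed as the channel configuration when sizing and then creating an AudioTrack.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

// Minimal sketch: size and create a mono, 16-bit PCM streaming AudioTrack.
int sampleRate = 44100; // illustrative rate
int minBuf = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBuf, AudioTrack.MODE_STREAM);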

Usage

From source file: Main.java

public static int getOutFormat(int outChannels) {
    switch (outChannels) {
    case 1:
        return AudioFormat.CHANNEL_OUT_MONO;
    case 2:
        return AudioFormat.CHANNEL_OUT_STEREO;
    case 4:
        return AudioFormat.CHANNEL_OUT_QUAD;
    case 6:
        return AudioFormat.CHANNEL_OUT_5POINT1;
    case 8:
        return AudioFormat.CHANNEL_OUT_7POINT1;
    default:
        throw new IllegalArgumentException("illegal number of output channels: " + outChannels);
    }
}
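
A hypothetical call site for this helper (the channelCount variable is assumed to come from, for example, a decoder's reported output format):

// Hypothetical usage: map a reported channel count to an output mask.
int channelMask = getOutFormat(channelCount); // 1 -> CHANNEL_OUT_MONO, 2 -> CHANNEL_OUT_STEREO, ...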

From source file: Main.java

public static final int getMinimumBufferSize(int sampleRate) {
    return AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
}

From source file: Main.java

public static int outChannelMaskFromInChannelMask(int channelMask) {
    switch (channelMask) {
    case AudioFormat.CHANNEL_IN_MONO:
        return AudioFormat.CHANNEL_OUT_MONO;
    case AudioFormat.CHANNEL_IN_STEREO:
        return AudioFormat.CHANNEL_OUT_STEREO;
    default:
        return AudioFormat.CHANNEL_INVALID;
    }
}
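
One plausible use of this mapping is a capture-then-playback path where the output configuration should mirror the input; a sketch under that assumption:

// Sketch: mirror a mono capture mask on the playback side.
int inMask = AudioFormat.CHANNEL_IN_MONO;
int outMask = outChannelMaskFromInChannelMask(inMask); // AudioFormat.CHANNEL_OUT_MONO
if (outMask == AudioFormat.CHANNEL_INVALID) {
    throw new IllegalArgumentException("no output equivalent for input mask: " + inMask);
}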

From source file: zlyh.dmitry.recaller.threading.PlayBlockThread.java

@Override
public void run() {
    AudioTrack audioTrack = null;
    FileInputStream in = null;

    try {
        File rawpcm = new File(path);
        if (!rawpcm.exists()) {
            // interrupt() only sets the interrupt flag; the read loop
            // below checks isInterrupted() and exits.
            this.interrupt();
        }

        togglePlaying(true);

        final int audioLength = (int) rawpcm.length();
        final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
                AudioTrack.MODE_STREAM);

        final int block = 256 * 1024;
        byte[] byteData = new byte[block];

        try {
            in = new FileInputStream(rawpcm);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            this.interrupt();
        }

        if (in != null) {
            try {
                int bytesread = 0;
                int offset;
                audioTrack.play();
                while (bytesread < audioLength && !isInterrupted()) {
                    offset = in.read(byteData, 0, block);
                    if (offset != -1) {
                        audioTrack.write(byteData, 0, offset);
                        bytesread += offset;
                    } else {
                        break;
                    }
                }
                in.close();

                togglePlaying(false);

                // Note: PLAYSTATE_* values pair with getPlayState(), not getState().
                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }

                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
            } catch (Exception e) {
                e.printStackTrace();
                try {
                    in.close();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }

                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }
                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
                togglePlaying(false);

            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        if (audioTrack != null) {
            if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                audioTrack.stop();
            }
            if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                audioTrack.release();
            }
        }

        if (in != null) {
            try {
                in.close();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
        togglePlaying(false);

    }

}

From source file: com.ibm.watson.developer_cloud.android.text_to_speech.v1.TTSUtility.java

private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
        // The constructor never returns null; a failed initialization is
        // reported through getState(), so this null check is effectively redundant.
        if (audioTrack != null)
            audioTrack.play();
    }
}

From source file: com.example.rttytranslator.Dsp_service.java

public void startAudio() {
    if (!_enableDecoder)
        return;

    //boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);

    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);

        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {

            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);

        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();

        ct.start();
    }
}

From source file: com.xperia64.timidityae.Globals.java

public static int[] validRates(boolean stereo, boolean sixteen) {
    ArrayList<Integer> valid = new ArrayList<Integer>();
    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000, 88200, 96000 }) {

        int bufferSize = AudioTrack.getMinBufferSize(rate,
                (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT);
        if (bufferSize > 0) {
            //System.out.println(rate+" "+bufferSize);
            // buffer size is valid, Sample rate supported
            valid.add(rate);
        }
    }
    int[] rates = new int[valid.size()];
    for (int i = 0; i < rates.length; i++)
        rates[i] = valid.get(i);
    return rates;
}

From source file: com.xperia64.timidityae.Globals.java

public static SparseIntArray validBuffers(int[] rates, boolean stereo, boolean sixteen) {
    SparseIntArray buffers = new SparseIntArray();
    for (int rate : rates) {
        buffers.put(rate,
                AudioTrack.getMinBufferSize(rate,
                        (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                        (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT));
    }
    return buffers;
}
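
The two helpers above compose naturally; a sketch of probing a device (the "AudioProbe" log tag is illustrative):

// Sketch: find the supported mono/16-bit sample rates, then look up
// the minimum AudioTrack buffer size for each.
int[] rates = Globals.validRates(false, true);
SparseIntArray sizes = Globals.validBuffers(rates, false, true);
for (int rate : rates) {
    Log.d("AudioProbe", rate + " Hz -> min buffer " + sizes.get(rate) + " bytes");
}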

From source file: net.reichholf.dreamdroid.fragment.SignalFragment.java

void playSound(double freqOfTone) {
    double duration = 0.1; // seconds
    int sampleRate = 8000; // samples per second

    double dnumSamples = duration * sampleRate;
    dnumSamples = Math.ceil(dnumSamples);
    int numSamples = (int) dnumSamples;
    double sample[] = new double[numSamples];
    byte generatedSnd[] = new byte[2 * numSamples];

    for (int i = 0; i < numSamples; ++i) { // Fill the sample array
        sample[i] = Math.sin(freqOfTone * 2 * Math.PI * i / (sampleRate));
    }

    // convert to 16 bit pcm sound array
    // assumes the sample buffer is normalized.
    int idx = 0;
    int i = 0;

    int ramp = numSamples / 20; // Amplitude ramp as a percent of sample
    // count

    for (i = 0; i < numSamples; ++i) { // Ramp amplitude up (to avoid
        // clicks)
        if (i < ramp) {
            double dVal = sample[i];
            // Ramp up to maximum
            final short val = (short) ((dVal * 32767 * i / ramp));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else if (i < numSamples - ramp) {
            // Max amplitude for most of the samples
            double dVal = sample[i];
            // scale to maximum amplitude
            final short val = (short) ((dVal * 32767));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else {
            double dVal = sample[i];
            // Ramp down to zero
            final short val = (short) ((dVal * 32767 * (numSamples - i) / ramp));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        }
    }

    AudioTrack audioTrack = null; // Get audio track
    try {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, (int) numSamples * 2, AudioTrack.MODE_STATIC);
        // Load the track
        audioTrack.write(generatedSnd, 0, generatedSnd.length);
        audioTrack.play(); // Play the track
    } catch (Exception e) {
        // Ignored: if the AudioTrack cannot be created, the tone is simply skipped.
    }

    int x = 0;
    do { // Monitor playback (busy-wait) until the whole tone has played
        if (audioTrack != null)
            x = audioTrack.getPlaybackHeadPosition();
        else
            x = numSamples;
    } while (x < numSamples);

    if (audioTrack != null)
        audioTrack.release(); // Track play done. Release track.
}

From source file: uk.co.armedpineapple.cth.SDLActivity.java

public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig = isStereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit")
            + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames = Math.max(desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
            desiredFrames * frameSize, AudioTrack.MODE_STREAM);

    audioStartThread();

    Log.v("SDL",
            "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " "
                    + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit")
                    + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    if (is16Bit) {
        audioBuffer = new short[desiredFrames * (isStereo ? 2 : 1)];
    } else {
        audioBuffer = new byte[desiredFrames * (isStereo ? 2 : 1)];
    }
    return audioBuffer;
}