Example usage for android.speech RecognizerIntent EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS

List of usage examples for android.speech RecognizerIntent EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS

Introduction

On this page you can find example usage for android.speech RecognizerIntent EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS.

Prototype

String EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS

Document

The amount of time that it should take after we stop hearing speech to consider the input complete.
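
For orientation before the full examples below, here is a minimal sketch of how this extra is typically attached to a recognizer intent, assuming the code runs inside an Activity. The 2000 ms value and the REQUEST_CODE_SPEECH constant are illustrative assumptions, not values taken from the examples on this page.

private static final int REQUEST_CODE_SPEECH = 1; // hypothetical request code

private void startRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // How long silence must last, in milliseconds, before the recognizer
    // considers the spoken input complete; typically supplied as a long.
    intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 2000L);
    startActivityForResult(intent, REQUEST_CODE_SPEECH);
}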

Usage

From source file: com.mobicage.rogerthat.plugins.messaging.widgets.TextLineWidget.java

@Override
public void initializeWidget() {
    mEditText = (EditText) findViewById(R.id.edit_text);
    if (mColorScheme == BrandingMgr.ColorScheme.DARK) {
        UIUtils.setColors(ContextCompat.getColor(mActivity, R.color.mc_white), mEditText);
    } else {
        UIUtils.setColors(mActivity, mEditText);
    }
    mEditText.setTextColor(mTextColor);
    mEditText.setText((String) mWidgetMap.get("value"));
    mEditText.setHint((String) mWidgetMap.get("place_holder"));
    mEditText.setFilters(new InputFilter[] {
            new InputFilter.LengthFilter(((Long) mWidgetMap.get("max_chars")).intValue()) });
    mEditText.setInputType(
            getDefaultInputTypes() | KeyboardType.getInputType((String) mWidgetMap.get("keyboard_type")));

    ImageButton btnSpeak = (ImageButton) findViewById(R.id.btn_speak);
    if (AppConstants.SPEECH_TO_TEXT && isSpeechRecognitionActivityPresented(mActivity)) {
        IconicsDrawable icon = new IconicsDrawable(mActivity, FontAwesome.Icon.faw_microphone)
                .color(LookAndFeelConstants.getPrimaryIconColor(mActivity)).sizeDp(20);
        btnSpeak.setVisibility(View.VISIBLE);
        btnSpeak.setImageDrawable(icon);
        btnSpeak.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                try {
                    Intent voiceIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                    voiceIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
                            1500L);
                    voiceIntent.putExtra(
                            RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 1500L);
                    voiceIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 15000L);
                    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
                    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
                    mActivity.startActivityForResult(voiceIntent, REQUEST_CODE_VOICE);
                } catch (ActivityNotFoundException e) {
                    L.bug(e);
                }
            }
        });
    } else {
        btnSpeak.setVisibility(View.GONE);
    }
}
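
The result of the startActivityForResult call above is delivered back through onActivityResult. The snippet below is a generic sketch of that standard pattern, not the widget's actual handler: REQUEST_CODE_VOICE comes from the example, the rest is illustrative.

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_CODE_VOICE && resultCode == Activity.RESULT_OK && data != null) {
        // The recognizer returns its candidate transcriptions, best match first.
        ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (results != null && !results.isEmpty()) {
            String bestMatch = results.get(0);
            // e.g. hand bestMatch back to the widget's EditText
        }
    }
}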

From source file: de.dfki.iui.mmir.plugins.speech.android.AndroidSpeechRecognizer.java

private void _startSpeechRecognitionActivity(JSONArray args, CallbackContext callbackContext,
        boolean isWithEndOfSpeechDetection) {
    int maxMatches = 0;
    String prompt = "";//TODO remove? (not used when ASR is directly used as service here...)
    String language = Locale.getDefault().toString();
    boolean isIntermediate = false;

    try {
        if (args.length() > 0) {
            // Optional language specified
            language = args.getString(0);
        }
        if (args.length() > 1) {
            isIntermediate = args.getBoolean(1);
        }
        if (args.length() > 2) {
            // Maximum number of matches, 0 means that the recognizer "decides"
            String temp = args.getString(2);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 3) {
            // Optional text prompt
            prompt = args.getString(3);
        }

        //TODO if ... withoutEndOfSpeechDetection = ...
    } catch (Exception e) {
        Log.e(PLUGIN_NAME, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (!isWithEndOfSpeechDetection) {

        // try to simulate start/stop-recording behavior (without end-of-speech detection) 

        //NOTE these settings do not seem to have any effect for the default Google recognizer on API level > 16

        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 10000L);
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 6000L);
    }

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);

    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);

    if (isIntermediate)
        intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

    //NOTE the extra package seems to be required for older Android versions, but not since API level 17(?)
    if (SDK_VERSION <= Build.VERSION_CODES.JELLY_BEAN)
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());

    synchronized (speechLock) {

        if (speech != null) {
            speech.destroy();
        }
        speech = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());

        disableSoundFeedback();

        ++recCounter;
        currentRecognizer = new ASRHandler(recCounter, enableMicLevelsListeners, callbackContext, this);
        currentRecognizer.setHapticPrompt(
                (Vibrator) this.cordova.getActivity().getSystemService(Context.VIBRATOR_SERVICE));
        speech.setRecognitionListener(currentRecognizer);
        speech.startListening(intent);

    }
}
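
Unlike the first example, this plugin hands the intent to a SpeechRecognizer instance instead of launching the recognition activity, so results arrive through a RecognitionListener (here the plugin's ASRHandler, which is not shown). As a generic sketch of how such a listener extracts the matches, independent of the plugin's implementation (all other RecognitionListener callbacks must also be overridden; only onResults is shown):

// Inside a RecognitionListener implementation:
@Override
public void onResults(Bundle results) {
    ArrayList<String> matches =
            results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // matches holds the candidate transcriptions, best match first (may be null or empty)
}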

From source file: root.gast.playground.speech.SpeechRecognitionPlay.java

/**
 * create the {@link RecognizerIntent} based on the many preferences
 */
private Intent readRecognizerIntentFromPreferences() {
    Intent intentToSend;

    //web search handling
    boolean isWebSearchAction = preferences.getBoolean(this, R.string.pref_websearch,
            R.string.pref_websearch_default);

    boolean isHandsFreeAction = preferences.getBoolean(this, R.string.pref_handsfree,
            R.string.pref_handsfree_default);

    if (isWebSearchAction) {
        intentToSend = RecognizerIntentFactory.getWebSearchRecognizeIntent();
        final boolean ADD_ORIGIN = true;
        if (ADD_ORIGIN && Build.VERSION.SDK_INT >= 14) {
            intentToSend.putExtra(RecognizerIntent.EXTRA_ORIGIN, true);
        }
    } else {
        if (isHandsFreeAction && Build.VERSION.SDK_INT >= 16) {
            intentToSend = RecognizerIntentFactory.getHandsFreeRecognizeIntent();
        } else {
            intentToSend = RecognizerIntentFactory.getBlankRecognizeIntent();
        }
    }

    //language model
    boolean isFreeFormModel = preferences.getBoolean(this, R.string.pref_languagemodel,
            R.string.pref_languagemodel_default);
    if (isFreeFormModel) {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    } else {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    }

    //common extras
    String language = preferences.getString(getResources().getString(R.string.pref_language),
            getResources().getString(R.string.pref_language_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    String prompt = getResources().getString(R.string.speech_prompt) + ": "
            + whatYouAreTryingToSay.getText().toString();
    intentToSend.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    intentToSend.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,
            preferences.getInt(this, R.string.pref_maxresults, R.string.pref_maxresults_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS,
            preferences.getBoolean(this, R.string.pref_partial, R.string.pref_partial_default));

    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_complete_silence, R.string.pref_complete_silence_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS,
            R.string.pref_minimum_input_length, R.string.pref_minimum_input_length_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_possibly_complete_silence_length,
            R.string.pref_possibly_complete_silence_length_default, intentToSend);

    //pendingIntent handling
    boolean doPending = preferences.getBoolean(this, R.string.pref_withpendingintent,
            R.string.pref_withpendingintent);
    if (doPending) {
        Intent pendingIntentSource = new Intent(this, SpeechRecognitionResultsActivity.class);
        PendingIntent pi = PendingIntent.getActivity(this, 0, pendingIntentSource, 0);

        Bundle extraInfoBundle = new Bundle();
        // pass in what you are trying to say so the results activity can
        // show it
        extraInfoBundle.putString(SpeechRecognitionResultsActivity.WHAT_YOU_ARE_TRYING_TO_SAY_INTENT_INPUT,
                whatYouAreTryingToSay.getText().toString());
        // set the variables in the intent this is sending
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pi);
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, extraInfoBundle);
    }

    Log.d(TAG, "sending recognizer intent: " + intentToSend.getExtras().toString());
    return intentToSend;
}
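
The setIfValueSpecified helper is not part of this listing. A plausible reconstruction, assuming the preference stores the millisecond value as a string and an empty string means "not specified", might look like the sketch below; it is a hypothetical stand-in, not the project's actual code.

// Hypothetical reconstruction: only attach the extra when the user configured a value.
private void setIfValueSpecified(String extraName, int prefKeyId, int prefDefaultId, Intent intent) {
    String value = preferences.getString(getResources().getString(prefKeyId),
            getResources().getString(prefDefaultId));
    if (value != null && value.length() > 0) {
        // The silence-length extras are typically supplied as long millisecond values.
        intent.putExtra(extraName, Long.parseLong(value));
    }
}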