Example usage for android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL

List of usage examples for android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL

Introduction

This page lists example usages of android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL.

Prototype

String EXTRA_LANGUAGE_MODEL

Document

Informs the recognizer which speech model to prefer when performing ACTION_RECOGNIZE_SPEECH. Valid values are LANGUAGE_MODEL_FREE_FORM and LANGUAGE_MODEL_WEB_SEARCH.
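
For example, a minimal recognition request (a sketch, not taken from the sources below) sets this extra to one of the LANGUAGE_MODEL_* constants before launching the recognizer:

Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); // or LANGUAGE_MODEL_WEB_SEARCH
startActivityForResult(intent, REQUEST_SPEECH);     // REQUEST_SPEECH is an arbitrary request code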

Usage

From source file:com.eugene.fithealthmaingit.UI.ChooseAddMealSearchFragment.java

private void promptSpeechInput() {
    ((InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE))
            .hideSoftInputFromWindow(mEtSearch.getWindowToken(), 0);
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say Something");
    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getActivity().getApplicationContext(), "Not Supported", Toast.LENGTH_SHORT).show();
    }
}
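
None of the snippets on this page show how the recognized text comes back. A minimal sketch of the matching onActivityResult, assuming the same REQ_CODE_SPEECH_INPUT request code and the mEtSearch field used above, could look like this:

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQ_CODE_SPEECH_INPUT && resultCode == Activity.RESULT_OK && data != null) {
        // EXTRA_RESULTS holds the recognition hypotheses, best match first
        ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (results != null && !results.isEmpty()) {
            mEtSearch.setText(results.get(0));
        }
    }
}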

From source file:com.meetingninja.csse.MainActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (drawerToggle.onOptionsItemSelected(item)) {
        return true;
    }

    // Handle other action bar items...
    switch (item.getItemId()) {
    case R.id.action_refresh:
        switch (DrawerLabel.values()[session.getPage()]) {
        case MEETINGS:
            Toast.makeText(this, "Refreshing Meetings", Toast.LENGTH_SHORT).show();
            frag_meetings.fetchMeetings();
            // frag_meetings.populateList();
            return true;
        case NOTES:
            Toast.makeText(this, "Refreshing Notes", Toast.LENGTH_SHORT).show();
            // notesFrag.fetchNotes();
            frag_notes.populateList();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }

    case R.id.action_new_meeting:
        frag_meetings.editMeeting(null);
        return true;
    case R.id.action_new_note:
        Intent createNote = new Intent(this, EditNoteActivity.class);
        createNote.putExtra(Note.CREATE_NOTE, true);
        startActivityForResult(createNote, 3);
        return true;
    case R.id.action_logout:
        logout();
        return true;
    case R.id.action_settings:
        return true;
    case R.id.action_speak:
        Intent i = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en-US");
        i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Go to...");
        try {
            startActivityForResult(i, VOICE_RECOGNITION_REQUEST_CODE);

        } catch (Exception e) {
            Toast.makeText(this, "Error initializing speech to text engine.", Toast.LENGTH_LONG).show();
        }
        return true;
    default:
        return super.onOptionsItemSelected(item);
    }

}
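
Note that this example passes the language tag "en-US" as the value of EXTRA_LANGUAGE_MODEL. The values documented for this extra are RecognizerIntent.LANGUAGE_MODEL_FREE_FORM and RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH; a language tag belongs in EXTRA_LANGUAGE. A corrected sketch of the same request:

Intent i = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
i.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // the language tag goes here
i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Go to...");
startActivityForResult(i, VOICE_RECOGNITION_REQUEST_CODE);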

From source file:com.example.navigationsearchview.NavigationSearchView.java

public NavigationSearchView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);

    final TintTypedArray a = TintTypedArray.obtainStyledAttributes(context, attrs, R.styleable.SearchView,
            defStyleAttr, 0);
    // Keep the TintManager in case we need it later
    mTintManager = a.getTintManager();

    final LayoutInflater inflater = LayoutInflater.from(context);
    final int layoutResId = com.example.navigationsearchview.R.layout.abc_search_view;
    inflater.inflate(layoutResId, this, true);// original true

    mSearchSrcTextView = (SearchAutoComplete) findViewById(R.id.search_src_text);
    mSearchSrcTextView.setSearchView(this);

    mSearchEditFrame = findViewById(R.id.search_edit_frame);
    mSearchPlate = findViewById(R.id.search_plate);
    mSubmitArea = findViewById(R.id.submit_area);
    mSearchButton = (ImageView) findViewById(R.id.search_button);
    mGoButton = (ImageView) findViewById(R.id.search_go_btn);
    mCloseButton = (ImageView) findViewById(R.id.search_close_btn);
    mVoiceButton = (ImageView) findViewById(R.id.search_voice_btn);
    mCollapsedIcon = (ImageView) findViewById(R.id.search_mag_icon);
    // next and previous
    mNextButton = (ImageView) findViewById(R.id.search_next_btn);
    mPrevButton = (ImageView) findViewById(R.id.search_previous_btn);

    // Set up icons and backgrounds.
    mSearchPlate.setBackground(a.getDrawable((R.styleable.SearchView_queryBackground)));
    mSubmitArea.setBackground(a.getDrawable((R.styleable.SearchView_submitBackground)));
    mSearchButton.setImageDrawable(a.getDrawable(R.styleable.SearchView_searchIcon));
    mGoButton.setImageDrawable(a.getDrawable(R.styleable.SearchView_goIcon));
    mCloseButton.setImageDrawable(a.getDrawable(R.styleable.SearchView_closeIcon));
    mVoiceButton.setImageDrawable(a.getDrawable(R.styleable.SearchView_voiceIcon));
    mCollapsedIcon.setImageDrawable(a.getDrawable(R.styleable.SearchView_searchIcon));

    // next and previous
    mNextButton.setImageResource(R.drawable.arrow_right);
    mPrevButton.setImageResource(R.drawable.arrow_left);
    mSearchHintIcon = a.getDrawable(R.styleable.SearchView_iconifiedByDefault);

    // Extract dropdown layout resource IDs for later use.
    mSuggestionRowLayout = a.getResourceId(R.styleable.SearchView_suggestionRowLayout,
            R.layout.abc_search_dropdown_item_icons_2line);
    mSuggestionCommitIconResId = a.getResourceId(R.styleable.SearchView_commitIcon, 0);

    mSearchButton.setOnClickListener(mOnClickListener);
    mCloseButton.setOnClickListener(mOnClickListener);
    mGoButton.setOnClickListener(mOnClickListener);
    mVoiceButton.setOnClickListener(mOnClickListener);
    mSearchSrcTextView.setOnClickListener(mOnClickListener);
    // next and previous button
    mNextButton.setOnClickListener(mOnClickListener);
    mPrevButton.setOnClickListener(mOnClickListener);

    mSearchSrcTextView.addTextChangedListener(mTextWatcher);
    mSearchSrcTextView.setOnEditorActionListener(mOnEditorActionListener);
    mSearchSrcTextView.setOnItemClickListener(mOnItemClickListener);
    mSearchSrcTextView.setOnItemSelectedListener(mOnItemSelectedListener);
    mSearchSrcTextView.setOnKeyListener(mTextKeyListener);

    // Inform any listener of focus changes
    mSearchSrcTextView.setOnFocusChangeListener(new OnFocusChangeListener() {

        public void onFocusChange(View v, boolean hasFocus) {
            if (mOnQueryTextFocusChangeListener != null) {
                mOnQueryTextFocusChangeListener.onFocusChange(NavigationSearchView.this, hasFocus);
            }
        }
    });
    setIconifiedByDefault(a.getBoolean(R.styleable.SearchView_iconifiedByDefault, true));

    final int maxWidth = a.getDimensionPixelSize(R.styleable.SearchView_android_maxWidth, -1);
    if (maxWidth != -1) {
        setMaxWidth(maxWidth);
    }

    final CharSequence queryHint = a.getText(R.styleable.SearchView_queryHint);
    if (!TextUtils.isEmpty(queryHint)) {
        setQueryHint(queryHint);
    }

    final int imeOptions = a.getInt(R.styleable.SearchView_android_imeOptions, -1);
    if (imeOptions != -1) {
        setImeOptions(imeOptions);
    }

    final int inputType = a.getInt(R.styleable.SearchView_android_inputType, -1);
    if (inputType != -1) {
        setInputType(inputType);
    }

    boolean focusable = true;
    focusable = a.getBoolean(0, focusable);
    setFocusable(focusable);

    a.recycle();

    // Save voice intent for later queries/launching
    mVoiceWebSearchIntent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
    mVoiceWebSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    mVoiceWebSearchIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);

    mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

    mDropDownAnchor = findViewById(mSearchSrcTextView.getDropDownAnchor());
    if (mDropDownAnchor != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            addOnLayoutChangeListenerToDropDownAnchorSDK11();
        } else {
            addOnLayoutChangeListenerToDropDownAnchorBase();
        }
    }

    updateViewsVisibility(mIconifiedByDefault);
    updateQueryHint();
}

From source file:com.eugene.fithealthmaingit.UI.NavFragments.FragmentSearch.java

private void promptSpeechInput(EditText e) {
    ((InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE))
            .hideSoftInputFromWindow(e.getWindowToken(), 0);
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say Something");
    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getActivity().getApplicationContext(), "Not Supported", Toast.LENGTH_SHORT).show();
    }
}

From source file:root.gast.playground.speech.SpeechRecognitionPlay.java

/**
 * create the {@link RecognizerIntent} based on the many preferences
 */
private Intent readRecognizerIntentFromPreferences() {
    Intent intentToSend;

    //web search handling
    boolean isWebSearchAction = preferences.getBoolean(this, R.string.pref_websearch,
            R.string.pref_websearch_default);

    boolean isHandsFreeAction = preferences.getBoolean(this, R.string.pref_handsfree,
            R.string.pref_handsfree_default);

    if (isWebSearchAction) {
        intentToSend = RecognizerIntentFactory.getWebSearchRecognizeIntent();
        final boolean ADD_ORIGIN = true;
        if (ADD_ORIGIN && Build.VERSION.SDK_INT >= 14) {
            intentToSend.putExtra(RecognizerIntent.EXTRA_ORIGIN, true);
        }
    } else {
        if (isHandsFreeAction && Build.VERSION.SDK_INT >= 16) {
            intentToSend = RecognizerIntentFactory.getHandsFreeRecognizeIntent();
        } else {
            intentToSend = RecognizerIntentFactory.getBlankRecognizeIntent();
        }
    }

    //language model
    boolean isFreeFormModel = preferences.getBoolean(this, R.string.pref_languagemodel,
            R.string.pref_languagemodel_default);
    if (isFreeFormModel) {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    } else {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    }

    //common extras
    String language = preferences.getString(getResources().getString(R.string.pref_language),
            getResources().getString(R.string.pref_language_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    String prompt = getResources().getString(R.string.speech_prompt) + ": "
            + whatYouAreTryingToSay.getText().toString();
    intentToSend.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    intentToSend.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,
            preferences.getInt(this, R.string.pref_maxresults, R.string.pref_maxresults_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS,
            preferences.getBoolean(this, R.string.pref_partial, R.string.pref_partial_default));

    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_complete_silence, R.string.pref_complete_silence_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS,
            R.string.pref_minimum_input_length, R.string.pref_minimum_input_length_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_possibly_complete_silence_length,
            R.string.pref_possibly_complete_silence_length_default, intentToSend);

    //pendingIntent handling
    boolean doPending = preferences.getBoolean(this, R.string.pref_withpendingintent,
            R.string.pref_withpendingintent);
    if (doPending) {
        Intent pendingIntentSource = new Intent(this, SpeechRecognitionResultsActivity.class);
        PendingIntent pi = PendingIntent.getActivity(this, 0, pendingIntentSource, 0);

        Bundle extraInfoBundle = new Bundle();
        // pass in what you are trying to say so the results activity can
        // show it
        extraInfoBundle.putString(SpeechRecognitionResultsActivity.WHAT_YOU_ARE_TRYING_TO_SAY_INTENT_INPUT,
                whatYouAreTryingToSay.getText().toString());
        // set the variables in the intent this is sending
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pi);
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, extraInfoBundle);
    }

    Log.d(TAG, "sending recognizer intent: " + intentToSend.getExtras().toString());
    return intentToSend;
}

From source file:com.example.castCambot.MainActivity.java

/**
  * Android voice recognition
  */
private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_CODE);
}
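
Unlike the earlier examples, this snippet does not wrap startActivityForResult in a try/catch. An alternative guard (a sketch, not from this source file) is to check that a recognizer activity is installed before launching:

PackageManager pm = getPackageManager();
List<ResolveInfo> recognizers = pm.queryIntentActivities(
        new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
if (recognizers.isEmpty()) {
    Toast.makeText(this, "Speech recognition is not available", Toast.LENGTH_SHORT).show();
    return;
}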

From source file:de.azapps.mirakel.main_activity.tasks_fragment.TasksFragment.java

public void updateButtons() {
    // a) Android 2.3 doesn't support speech-to-text
    // b) The user can switch off the button
    if (this.view == null) {
        return;
    }
    if (android.os.Build.VERSION.SDK_INT <= android.os.Build.VERSION_CODES.HONEYCOMB
            || !MirakelCommonPreferences.useBtnSpeak()) {
        this.view.findViewById(R.id.btnSpeak_tasks).setVisibility(View.GONE);
    } else {
        final ImageButton btnSpeak = (ImageButton) this.view.findViewById(R.id.btnSpeak_tasks);
        // txtText = newTask;
        btnSpeak.setVisibility(View.VISIBLE);
        btnSpeak.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(final View v) {
                final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                        TasksFragment.this.main.getString(R.string.speak_lang_code));
                try {
                    getActivity().startActivityForResult(intent, MainActivity.RESULT_SPEECH);
                    TasksFragment.this.newTask.setText("");
                } catch (final ActivityNotFoundException a) {
                    ErrorReporter.report(ErrorType.NO_SPEACH_RECOGNITION);
                }
            }
        });
    }
    if (!MirakelCommonPreferences.useBtnAudioRecord()) {
        this.view.findViewById(R.id.btnAudio_tasks).setVisibility(View.GONE);
    } else {
        final ImageButton btnAudio = (ImageButton) this.view.findViewById(R.id.btnAudio_tasks);
        btnAudio.setVisibility(View.VISIBLE);
        btnAudio.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(final View v) {
                // TODO BAHHHH this is ugly!
                final Task task = Task.getDummy(getActivity(), TasksFragment.this.main.getCurrentList());
                TaskDialogHelpers.handleAudioRecord(TasksFragment.this.main, task, new ExecInterfaceWithTask() {
                    @Override
                    public void exec(final Task t) {
                        TasksFragment.this.main.setCurrentList(t.getList());
                        TasksFragment.this.main.setCurrentTask(t, true);
                    }
                });
            }
        });
    }
    if (!MirakelCommonPreferences.useBtnCamera()
            || !Helpers.isIntentAvailable(this.main, MediaStore.ACTION_IMAGE_CAPTURE)) {
        this.view.findViewById(R.id.btnCamera).setVisibility(View.GONE);
    } else {
        final ImageButton btnCamera = (ImageButton) this.view.findViewById(R.id.btnCamera);
        btnCamera.setVisibility(View.VISIBLE);
        btnCamera.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(final View v) {
                try {
                    final Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
                    final Uri fileUri = FileUtils.getOutputMediaFileUri(FileUtils.MEDIA_TYPE_IMAGE);
                    if (fileUri == null) {
                        return;
                    }
                    TasksFragment.this.main.setFileUri(fileUri);
                    cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);
                    getActivity().startActivityForResult(cameraIntent, MainActivity.RESULT_CAMERA);
                } catch (final ActivityNotFoundException a) {
                    ErrorReporter.report(ErrorType.PHOTO_NO_CAMERA);
                } catch (final IOException e) {
                    if (e.getMessage().equals(FileUtils.ERROR_NO_MEDIA_DIR)) {
                        ErrorReporter.report(ErrorType.PHOTO_NO_MEDIA_DIRECTORY);
                    }
                }
            }
        });
    }
}

From source file:org.botlibre.sdk.activity.MicConfiguration.java

@TargetApi(23)
private void beginListening() {
    setStreamVolume();
    lastReply = System.currentTimeMillis();

    muteMicBeep(true);

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    if (MainActivity.offlineSpeech) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);

        if (!this.failedOfflineLanguage) {
            //en-US will use the English in offline.
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
            // intent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
        }
        intent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
    } else {
        if (MainActivity.voice.language != null && !MainActivity.voice.language.isEmpty()) {
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
            if (!this.failedOfflineLanguage) {
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, MainActivity.voice.language);
            }
        } else {
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en");
            if (!this.failedOfflineLanguage) {
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en");
            }
        }
    }

    try {
        Log.d("BeginListening", "StartListening");
        this.speech.startListening(intent);
        setMicIcon(true, false);
    } catch (ActivityNotFoundException a) {
        Log.d("BeginListening", "CatchError: " + a.getMessage());
        Toast t = Toast.makeText(getApplicationContext(), "Your device doesn't support Speech to Text",
                Toast.LENGTH_SHORT);
        t.show();
        txt.setText("Status: Your device doesn't support Speech to text.");
    }
}

From source file:com.example.michel.facetrack.FaceTrackerActivity.java

/**
 * Starts a speech-to-text intent. This opens the Google speech recognition dialog to listen for speech input.
 */
private void startSpeechToText() {
    Log.e("start speech to text", " start speech to text");
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak something...");
    try {
        startActivityForResult(intent, SPEECH_RECOGNITION_CODE);
        System.out.println("hello 2");
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), "Sorry! Speech recognition is not supported in this device.",
                Toast.LENGTH_SHORT).show();
    }
}

From source file:android.support.v17.leanback.app.SearchSupportFragment.java

/**
 * Returns an intent that can be used to request speech recognition.
 * Built from the base {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH} plus
 * extras:
 *
 * <ul>
 * <li>{@link RecognizerIntent#EXTRA_LANGUAGE_MODEL} set to
 * {@link RecognizerIntent#LANGUAGE_MODEL_FREE_FORM}</li>
 * <li>{@link RecognizerIntent#EXTRA_PARTIAL_RESULTS} set to true</li>
 * <li>{@link RecognizerIntent#EXTRA_PROMPT} set to the search bar hint text</li>
 * </ul>
 *
 * For handling the intent returned from the service, see
 * {@link #setSearchQuery(Intent, boolean)}.
 */
public Intent getRecognizerIntent() {
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    if (mSearchBar != null && mSearchBar.getHint() != null) {
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, mSearchBar.getHint());
    }
    recognizerIntent.putExtra(EXTRA_LEANBACK_BADGE_PRESENT, mBadgeDrawable != null);
    return recognizerIntent;
}
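
As the javadoc above notes, the returned intent is typically launched for a result and the response handed back through setSearchQuery(Intent, boolean). A usage sketch from the hosting code (SPEECH_REQUEST_CODE is an arbitrary request code, not part of the fragment):

startActivityForResult(getRecognizerIntent(), SPEECH_REQUEST_CODE);

// Later, in onActivityResult:
if (requestCode == SPEECH_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
    setSearchQuery(data, true); // true submits the recognized query immediately
}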