/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.speech;

import java.util.ArrayList;

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;

/**
 * Constants for supporting speech recognition by starting an {@link Intent}.
 */
public class RecognizerIntent {
    /**
     * The extra key used in an intent to the speech recognizer for voice search. Not
     * generally to be used by developers. The system search dialog uses this, for example,
     * to set a calling package for identification by a voice search API. If this extra
     * is set by anyone but the system process, it should be overridden by the voice search
     * implementation.
     */
    public static final String EXTRA_CALLING_PACKAGE = "calling_package";

    private RecognizerIntent() {
        // Not for instantiating.
    }

    /**
     * Starts an activity that will prompt the user for speech and sends it through a
     * speech recognizer.  The results will be returned via activity results (in
     * {@link Activity#onActivityResult}, if you start the intent using
     * {@link Activity#startActivityForResult(Intent, int)}), or forwarded via a PendingIntent
     * if one is provided.
     *
     * <p>Starting this intent with just {@link Activity#startActivity(Intent)} is not supported.
     * You must either use {@link Activity#startActivityForResult(Intent, int)}, or provide a
     * PendingIntent, to receive recognition results.
     *
     * <p>Required extras:
     * <ul>
     *   <li>{@link #EXTRA_LANGUAGE_MODEL}
     * </ul>
     *
     * <p>Optional extras:
     * <ul>
     *   <li>{@link #EXTRA_PROMPT}
     *   <li>{@link #EXTRA_LANGUAGE}
     *   <li>{@link #EXTRA_MAX_RESULTS}
     *   <li>{@link #EXTRA_RESULTS_PENDINGINTENT}
     *   <li>{@link #EXTRA_RESULTS_PENDINGINTENT_BUNDLE}
     * </ul>
     *
     * <p>Result extras (returned in the result, not to be specified in the request):
     * <ul>
     *   <li>{@link #EXTRA_RESULTS}
     * </ul>
     *
     * <p>NOTE: There may not be any applications installed to handle this action, so you should
     * make sure to catch {@link ActivityNotFoundException}.
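     *
     * <p>A minimal usage sketch from within an {@link Activity} (the prompt string and the
     * request code below are arbitrary example values chosen by the caller, not defined by
     * this API):
     * <pre>
     * Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
     * intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");   // example prompt text
     * try {
     *     startActivityForResult(intent, 1234);                      // example request code
     * } catch (ActivityNotFoundException e) {
     *     // No activity is installed that can handle this action.
     * }
     * </pre>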
     */
    public static final String ACTION_RECOGNIZE_SPEECH = "android.speech.action.RECOGNIZE_SPEECH";

    /**
     * Starts an activity that will prompt the user for speech, sends it through a
     * speech recognizer, and invokes and displays a web search result.
     *
     * <p>Required extras:
     * <ul>
     *   <li>{@link #EXTRA_LANGUAGE_MODEL}
     * </ul>
     *
     * <p>Optional extras:
     * <ul>
     *   <li>{@link #EXTRA_PROMPT}
     *   <li>{@link #EXTRA_LANGUAGE}
     *   <li>{@link #EXTRA_MAX_RESULTS}
     *   <li>{@link #EXTRA_PARTIAL_RESULTS}
     * </ul>
     *
     * <p>Result extras (returned in the result, not to be specified in the request):
     * <ul>
     *   <li>{@link #EXTRA_RESULTS}
     * </ul>
     *
     * <p>NOTE: There may not be any applications installed to handle this action, so you should
     * make sure to catch {@link ActivityNotFoundException}.
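     *
     * <p>A minimal sketch of triggering a voice web search from an {@link Activity} (use
     * {@link Activity#startActivityForResult(Intent, int)} instead if you also want the
     * recognition results delivered back to you; the try/catch mirrors the note above):
     * <pre>
     * Intent intent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
     * try {
     *     startActivity(intent);
     * } catch (ActivityNotFoundException e) {
     *     // No activity is installed that can handle this action.
     * }
     * </pre>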
     */
    public static final String ACTION_WEB_SEARCH = "android.speech.action.WEB_SEARCH";

    /**
     * The minimum length of an utterance. We will not stop recording before this amount of time.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If you don't
     * have a very good reason to change it, you should leave it as is. Note also that certain
     * values may cause undesired or unexpected results - use judiciously! Additionally, depending
     * on the recognizer implementation, this value may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_MINIMUM_LENGTH_MILLIS";

    /**
     * The amount of time that it should take after we stop hearing speech to consider the input
     * complete.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If you don't
     * have a very good reason to change it, you should leave it as is. Note also that certain
     * values may cause undesired or unexpected results - use judiciously! Additionally, depending
     * on the recognizer implementation, this value may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS";

    /**
     * The amount of time that it should take after we stop hearing speech to consider the input
     * possibly complete. This is used to prevent the endpointer from cutting off during very
     * short mid-speech pauses.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If you don't
     * have a very good reason to change it, you should leave it as is. Note also that certain
     * values may cause undesired or unexpected results - use judiciously! Additionally, depending
     * on the recognizer implementation, this value may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS";

    /**
     * Informs the recognizer which speech model to prefer when performing
     * {@link #ACTION_RECOGNIZE_SPEECH}. The recognizer uses this
     * information to fine-tune the results. This extra is required. Activities implementing
     * {@link #ACTION_RECOGNIZE_SPEECH} may interpret the values as they see fit.
     *
     * @see #LANGUAGE_MODEL_FREE_FORM
     * @see #LANGUAGE_MODEL_WEB_SEARCH
     */
    public static final String EXTRA_LANGUAGE_MODEL = "android.speech.extra.LANGUAGE_MODEL";

    /**
     * Use a language model based on free-form speech recognition.  This is a value to use for
     * {@link #EXTRA_LANGUAGE_MODEL}.
     * @see #EXTRA_LANGUAGE_MODEL
     */
    public static final String LANGUAGE_MODEL_FREE_FORM = "free_form";

    /**
     * Use a language model based on web search terms.  This is a value to use for
     * {@link #EXTRA_LANGUAGE_MODEL}.
     * @see #EXTRA_LANGUAGE_MODEL
     */
    public static final String LANGUAGE_MODEL_WEB_SEARCH = "web_search";

    /** Optional text prompt to show to the user when asking them to speak. */
    public static final String EXTRA_PROMPT = "android.speech.extra.PROMPT";

    /**
     * Optional IETF language tag (as defined by BCP 47), for example "en-US". This tag informs
     * the recognizer to perform speech recognition in a language different from the default
     * locale returned by {@link java.util.Locale#getDefault()}.
     */
    public static final String EXTRA_LANGUAGE = "android.speech.extra.LANGUAGE";

    /**
     * Optional limit on the maximum number of results to return. If omitted the recognizer
     * will choose how many results to return. Must be an integer.
     */
    public static final String EXTRA_MAX_RESULTS = "android.speech.extra.MAX_RESULTS";

    /**
     * Optional boolean to indicate whether partial results should be returned by the recognizer
     * as the user speaks (default is false).  The server may ignore a request for partial
     * results in some or all cases.
     */
    public static final String EXTRA_PARTIAL_RESULTS = "android.speech.extra.PARTIAL_RESULTS";

    /**
     * When the intent is {@link #ACTION_RECOGNIZE_SPEECH}, the speech input activity will
     * return results to you via the activity results mechanism.  Alternatively, if you use this
     * extra to supply a PendingIntent, the results will be added to its bundle and the
     * PendingIntent will be sent to its target.
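     *
     * <p>A rough sketch of supplying a PendingIntent instead of using activity results. The
     * class ResultsReceiverActivity and the "hypothetical_key" extra are placeholders for
     * components in your own application:
     * <pre>
     * Intent resultIntent = new Intent(context, ResultsReceiverActivity.class);
     * PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, resultIntent, 0);
     *
     * Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
     * intent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pendingIntent);
     *
     * Bundle extraInfo = new Bundle();   // optional extras merged with the results
     * extraInfo.putString("hypothetical_key", "value");
     * intent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, extraInfo);
     * </pre>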
     */
    public static final String EXTRA_RESULTS_PENDINGINTENT =
            "android.speech.extra.RESULTS_PENDINGINTENT";

    /**
     * If you use {@link #EXTRA_RESULTS_PENDINGINTENT} to supply a forwarding intent, you can
     * also use this extra to supply additional extras for the final intent.  The search results
     * will be added to this bundle, and the combined bundle will be sent to the target.
     */
    public static final String EXTRA_RESULTS_PENDINGINTENT_BUNDLE =
            "android.speech.extra.RESULTS_PENDINGINTENT_BUNDLE";

    /** Result code returned when no matches are found for the given speech */
    public static final int RESULT_NO_MATCH = Activity.RESULT_FIRST_USER;
    /** Result code returned when there is a generic client error */
    public static final int RESULT_CLIENT_ERROR = Activity.RESULT_FIRST_USER + 1;
    /** Result code returned when the recognition server returns an error */
    public static final int RESULT_SERVER_ERROR = Activity.RESULT_FIRST_USER + 2;
    /** Result code returned when a network error was encountered */
    public static final int RESULT_NETWORK_ERROR = Activity.RESULT_FIRST_USER + 3;
    /** Result code returned when an audio error was encountered */
    public static final int RESULT_AUDIO_ERROR = Activity.RESULT_FIRST_USER + 4;

    /**
     * An ArrayList&lt;String&gt; of the recognition results when performing
     * {@link #ACTION_RECOGNIZE_SPEECH}. Returned in the results; not to be specified in the
     * recognition request. Only present when {@link Activity#RESULT_OK} is returned in
     * an activity result. In a PendingIntent, the lack of this extra indicates failure.
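     *
     * <p>A sketch of reading the results inside {@link Activity#onActivityResult} (the request
     * code 1234 is an arbitrary example value matching whatever was passed to
     * {@link Activity#startActivityForResult(Intent, int)}):
     * <pre>
     * protected void onActivityResult(int requestCode, int resultCode, Intent data) {
     *     if (requestCode == 1234 &amp;&amp; resultCode == RESULT_OK) {
     *         ArrayList&lt;String&gt; matches =
     *                 data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
     *         // Use the candidate transcriptions returned by the recognizer.
     *     } else if (resultCode == RecognizerIntent.RESULT_NO_MATCH) {
     *         // Speech was heard but could not be matched to any result.
     *     }
     * }
     * </pre>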
     */
    public static final String EXTRA_RESULTS = "android.speech.extra.RESULTS";

    /**
     * Returns the broadcast intent to fire with
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, Bundle)}
     * to receive details from the package that implements voice search.
     * <p>
     * This is based on the value specified by the voice search {@link Activity} in
     * {@link #DETAILS_META_DATA}; if that value is not specified, this returns null. It also
     * returns null if there is no chosen default to resolve for {@link #ACTION_WEB_SEARCH}.
     * <p>
     * If an intent is returned and is fired, a {@link Bundle} of extras will be returned to the
     * provided result receiver, and should ideally contain values for
     * {@link #EXTRA_LANGUAGE_PREFERENCE} and {@link #EXTRA_SUPPORTED_LANGUAGES}.
     * <p>
     * (Whether these are actually provided is up to the particular implementation. It is
     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
     * information, but it is not required.)
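     *
     * <p>A rough sketch of firing the returned intent and reading the reply; the anonymous
     * receiver and the {@link Activity#RESULT_OK} initial code are illustrative choices, not
     * requirements of this API:
     * <pre>
     * Intent detailsIntent = RecognizerIntent.getVoiceDetailsIntent(context);
     * if (detailsIntent != null) {
     *     context.sendOrderedBroadcast(detailsIntent, null, new BroadcastReceiver() {
     *         public void onReceive(Context context, Intent intent) {
     *             Bundle results = getResultExtras(false);
     *             if (results != null) {
     *                 String preferredLanguage =
     *                         results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE);
     *                 ArrayList&lt;String&gt; supported = results.getStringArrayList(
     *                         RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES);
     *             }
     *         }
     *     }, null, Activity.RESULT_OK, null, null);
     * }
     * </pre>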
     *
     * @param context a context object
     * @return the broadcast intent to fire or null if not available
     */
    public static final Intent getVoiceDetailsIntent(Context context) {
        Intent voiceSearchIntent = new Intent(ACTION_WEB_SEARCH);
        ResolveInfo ri = context.getPackageManager().resolveActivity(
                voiceSearchIntent, PackageManager.GET_META_DATA);
        if (ri == null || ri.activityInfo == null || ri.activityInfo.metaData == null) return null;

        String className = ri.activityInfo.metaData.getString(DETAILS_META_DATA);
        if (className == null) return null;

        Intent detailsIntent = new Intent(ACTION_GET_LANGUAGE_DETAILS);
        detailsIntent.setComponent(new ComponentName(ri.activityInfo.packageName, className));
        return detailsIntent;
    }

    /**
     * Meta-data name that an {@link Activity} implementing {@link #ACTION_WEB_SEARCH} can use
     * to expose the class name of a {@link BroadcastReceiver} which can respond to requests for
     * more information, from any of the broadcast intents specified in this class.
     * <p>
     * Broadcast intents can be directed to the class name specified in the meta-data by creating
     * an {@link Intent}, setting the component with
     * {@link Intent#setComponent(android.content.ComponentName)}, and using
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)}
     * with another {@link BroadcastReceiver} which can receive the results.
     * <p>
     * The {@link #getVoiceDetailsIntent(Context)} method is provided as a convenience to create
     * a broadcast intent based on the value of this meta-data, if available.
     * <p>
     * This is optional and not all {@link Activity}s which implement {@link #ACTION_WEB_SEARCH}
     * are required to implement this. Thus retrieving this meta-data may return null.
     */
    public static final String DETAILS_META_DATA = "android.speech.DETAILS";

    /**
     * A broadcast intent which can be fired to the {@link BroadcastReceiver} component specified
     * in the {@link #DETAILS_META_DATA} meta-data of an {@link Activity} satisfying
     * {@link #ACTION_WEB_SEARCH}.
     * <p>
     * When fired with
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)},
     * a {@link Bundle} of extras will be returned to the provided result receiver, and should
     * ideally contain values for {@link #EXTRA_LANGUAGE_PREFERENCE} and
     * {@link #EXTRA_SUPPORTED_LANGUAGES}.
     * <p>
     * (Whether these are actually provided is up to the particular implementation. It is
     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
     * information, but it is not required.)
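     *
     * <p>A sketch of how an implementation's {@link BroadcastReceiver} might answer this
     * broadcast (the "en-US" preference and the supportedLanguages list are placeholder
     * values):
     * <pre>
     * public void onReceive(Context context, Intent intent) {
     *     Bundle extras = getResultExtras(true);
     *     extras.putString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
     *     extras.putStringArrayList(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES,
     *             supportedLanguages);
     *     setResultExtras(extras);
     * }
     * </pre>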
     */
    public static final String ACTION_GET_LANGUAGE_DETAILS =
            "android.speech.action.GET_LANGUAGE_DETAILS";

    /**
     * Specify this boolean extra in a broadcast of {@link #ACTION_GET_LANGUAGE_DETAILS} to
     * indicate that only the current language preference is needed in the response. This
     * avoids any additional computation if all you need is {@link #EXTRA_LANGUAGE_PREFERENCE}
     * in the response.
     */
    public static final String EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE =
            "android.speech.extra.ONLY_RETURN_LANGUAGE_PREFERENCE";

    /**
     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
     * which is a {@link String} that represents the current language preference this user has
     * specified - a locale string like "en-US".
     */
    public static final String EXTRA_LANGUAGE_PREFERENCE =
            "android.speech.extra.LANGUAGE_PREFERENCE";

    /**
     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
     * which is an {@link ArrayList} of {@link String}s that represents the languages supported by
     * this implementation of voice recognition - a list of strings like "en-US", "cmn-Hans-CN",
     * etc.
     */
    public static final String EXTRA_SUPPORTED_LANGUAGES =
            "android.speech.extra.SUPPORTED_LANGUAGES";
}