-rw-r--r--   api/current.xml                                    | 74
-rw-r--r--   core/java/android/speech/RecognitionManager.java   | 49
-rw-r--r--   core/java/android/speech/RecognizerIntent.java     | 96
3 files changed, 208 insertions, 11 deletions
diff --git a/api/current.xml b/api/current.xml
index c0a76f8..40739da 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -13833,7 +13833,7 @@
 >
 <parameter name="parcel" type="android.os.Parcel">
 </parameter>
-<parameter name="flags" type="int">
+<parameter name="flags" type="int">
 </parameter>
 </method>
 <field name="CREATOR"
@@ -137116,6 +137116,21 @@
 <parameter name="context" type="android.content.Context">
 </parameter>
 </method>
+<method name="createRecognitionManager"
+ return="android.speech.RecognitionManager"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="context" type="android.content.Context">
+</parameter>
+<parameter name="serviceComponent" type="android.content.ComponentName">
+</parameter>
+</method>
 <method name="destroy"
  return="void"
  abstract="false"
@@ -137514,6 +137529,30 @@
  deprecated="not deprecated"
  visibility="public"
 >
+<method name="getVoiceDetailsIntent"
+ return="android.content.Intent"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="context" type="android.content.Context">
+</parameter>
+</method>
+<field name="ACTION_GET_LANGUAGE_DETAILS"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.action.GET_LANGUAGE_DETAILS&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="ACTION_RECOGNIZE_SPEECH"
  type="java.lang.String"
  transient="false"
@@ -137536,6 +137575,17 @@
  visibility="public"
 >
 </field>
+<field name="DETAILS_META_DATA"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.DETAILS&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="EXTRA_LANGUAGE"
  type="java.lang.String"
  transient="false"
@@ -137558,6 +137608,17 @@
  visibility="public"
 >
 </field>
+<field name="EXTRA_LANGUAGE_PREFERENCE"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.extra.LANGUAGE_PREFERENCE&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="EXTRA_MAX_RESULTS"
  type="java.lang.String"
  transient="false"
@@ -137657,6 +137718,17 @@
  visibility="public"
 >
 </field>
+<field name="EXTRA_SUPPORTED_LANGUAGES"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.extra.SUPPORTED_LANGUAGES&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="LANGUAGE_MODEL_FREE_FORM"
  type="java.lang.String"
  transient="false"
diff --git a/core/java/android/speech/RecognitionManager.java b/core/java/android/speech/RecognitionManager.java
index 7f55ad6..16b1f89 100644
--- a/core/java/android/speech/RecognitionManager.java
+++ b/core/java/android/speech/RecognitionManager.java
@@ -98,6 +98,9 @@ public class RecognitionManager {
 
     /** Context with which the manager was created */
     private final Context mContext;
+
+    /** Component to direct service intent to */
+    private final ComponentName mServiceComponent;
 
     /** Handler that will execute the main tasks */
     private Handler mHandler = new Handler() {
@@ -133,8 +136,9 @@ public class RecognitionManager {
      * The right way to create a {@code RecognitionManager} is by using
      * {@link #createRecognitionManager} static factory method
      */
-    private RecognitionManager(final Context context) {
+    private RecognitionManager(final Context context, final ComponentName serviceComponent) {
         mContext = context;
+        mServiceComponent = serviceComponent;
     }
 
     /**
@@ -184,11 +188,31 @@ public class RecognitionManager {
      * @return a new {@code RecognitionManager}
      */
     public static RecognitionManager createRecognitionManager(final Context context) {
+        return createRecognitionManager(context, null);
+    }
+
+    /**
+     * Factory method to create a new {@code RecognitionManager}, please note that
+     * {@link #setRecognitionListener(RecognitionListener)} must be called before dispatching any
+     * command to the created {@code RecognitionManager}.
+     *
+     * Use this version of the method to specify a specific service to direct this
+     * {@link RecognitionManager} to. Normally you would not use this; use
+     * {@link #createRecognitionManager(Context)} instead to use the system default
+     * recognition service.
+     *
+     * @param context in which to create {@code RecognitionManager}
+     * @param serviceComponent the {@link ComponentName} of a specific service to direct this
+     *        {@code RecognitionManager} to
+     * @return a new {@code RecognitionManager}
+     */
+    public static RecognitionManager createRecognitionManager(final Context context,
+            final ComponentName serviceComponent) {
         if (context == null) {
             throw new IllegalArgumentException("Context cannot be null)");
         }
         checkIsCalledFromMainThread();
-        return new RecognitionManager(context);
+        return new RecognitionManager(context, serviceComponent);
     }
 
     /**
@@ -222,17 +246,22 @@ public class RecognitionManager {
 
         mConnection = new Connection();
 
         Intent serviceIntent = new Intent(RecognitionService.SERVICE_INTERFACE);
-        String serviceComponent = Settings.Secure.getString(mContext.getContentResolver(),
-                Settings.Secure.VOICE_RECOGNITION_SERVICE);
-        if (TextUtils.isEmpty(serviceComponent)) {
-            Log.e(TAG, "no selected voice recognition service");
-            mListener.onError(ERROR_CLIENT);
-            return;
+        if (mServiceComponent == null) {
+            String serviceComponent = Settings.Secure.getString(mContext.getContentResolver(),
+                    Settings.Secure.VOICE_RECOGNITION_SERVICE);
+
+            if (TextUtils.isEmpty(serviceComponent)) {
+                Log.e(TAG, "no selected voice recognition service");
+                mListener.onError(ERROR_CLIENT);
+                return;
+            }
+
+            serviceIntent.setComponent(ComponentName.unflattenFromString(serviceComponent));
+        } else {
+            serviceIntent.setComponent(mServiceComponent);
         }
 
-        serviceIntent.setComponent(ComponentName.unflattenFromString(serviceComponent));
-
         if (!mContext.bindService(serviceIntent, mConnection, Context.BIND_AUTO_CREATE)) {
             Log.e(TAG, "bind to recognition service failed");
             mConnection = null;
diff --git a/core/java/android/speech/RecognizerIntent.java b/core/java/android/speech/RecognizerIntent.java
index 5f651e1..7c15cec 100644
--- a/core/java/android/speech/RecognizerIntent.java
+++ b/core/java/android/speech/RecognizerIntent.java
@@ -16,9 +16,17 @@
 
 package android.speech;
 
+import java.util.ArrayList;
+
 import android.app.Activity;
 import android.content.ActivityNotFoundException;
+import android.content.BroadcastReceiver;
+import android.content.ComponentName;
+import android.content.Context;
 import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.content.pm.ResolveInfo;
+import android.os.Bundle;
 
 /**
  * Constants for supporting speech recognition through starting an {@link Intent}
@@ -208,4 +216,92 @@
      * an activity result. In a PendingIntent, the lack of this extra indicates failure.
      */
     public static final String EXTRA_RESULTS = "android.speech.extra.RESULTS";
+
+    /**
+     * Returns the broadcast intent to fire with
+     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, Bundle)}
+     * to receive details from the package that implements voice search.
+     * <p>
+     * This is based on the value specified by the voice search {@link Activity} in
+     * {@link #DETAILS_META_DATA}, and if this is not specified, will return null. Also if there
+     * is no chosen default to resolve for {@link #ACTION_WEB_SEARCH}, this will return null.
+     * <p>
+     * If an intent is returned and is fired, a {@link Bundle} of extras will be returned to the
+     * provided result receiver, and should ideally contain values for
+     * {@link #EXTRA_LANGUAGE_PREFERENCE} and {@link #EXTRA_SUPPORTED_LANGUAGES}.
+     * <p>
+     * (Whether these are actually provided is up to the particular implementation. It is
+     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
+     * information, but it is not required.)
+     *
+     * @param context a context object
+     * @return the broadcast intent to fire or null if not available
+     */
+    public static final Intent getVoiceDetailsIntent(Context context) {
+        Intent voiceSearchIntent = new Intent(ACTION_WEB_SEARCH);
+        ResolveInfo ri = context.getPackageManager().resolveActivity(
+                voiceSearchIntent, PackageManager.GET_META_DATA);
+        if (ri == null || ri.activityInfo == null || ri.activityInfo.metaData == null) return null;
+
+        String className = ri.activityInfo.metaData.getString(DETAILS_META_DATA);
+        if (className == null) return null;
+
+        Intent detailsIntent = new Intent(ACTION_GET_LANGUAGE_DETAILS);
+        detailsIntent.setComponent(new ComponentName(ri.activityInfo.packageName, className));
+        return detailsIntent;
+    }
+
+    /**
+     * Meta-data name under which an {@link Activity} implementing {@link #ACTION_WEB_SEARCH} can
+     * use to expose the class name of a {@link BroadcastReceiver} which can respond to request for
+     * more information, from any of the broadcast intents specified in this class.
+     * <p>
+     * Broadcast intents can be directed to the class name specified in the meta-data by creating
+     * an {@link Intent}, setting the component with
+     * {@link Intent#setComponent(android.content.ComponentName)}, and using
+     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)}
+     * with another {@link BroadcastReceiver} which can receive the results.
+     * <p>
+     * The {@link #getVoiceDetailsIntent(Context)} method is provided as a convenience to create
+     * a broadcast intent based on the value of this meta-data, if available.
+     * <p>
+     * This is optional and not all {@link Activity}s which implement {@link #ACTION_WEB_SEARCH}
+     * are required to implement this. Thus retrieving this meta-data may be null.
+     */
+    public static final String DETAILS_META_DATA = "android.speech.DETAILS";
+
+    /**
+     * A broadcast intent which can be fired to the {@link BroadcastReceiver} component specified
+     * in the meta-data defined in the {@link #DETAILS_META_DATA} meta-data of an
+     * {@link Activity} satisfying {@link #ACTION_WEB_SEARCH}.
+     * <p>
+     * When fired with
+     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)},
+     * a {@link Bundle} of extras will be returned to the provided result receiver, and should
+     * ideally contain values for {@link #EXTRA_LANGUAGE_PREFERENCE} and
+     * {@link #EXTRA_SUPPORTED_LANGUAGES}.
+     * <p>
+     * (Whether these are actually provided is up to the particular implementation. It is
+     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
+     * information, but it is not required.)
+     */
+    public static final String ACTION_GET_LANGUAGE_DETAILS =
+            "android.speech.action.GET_LANGUAGE_DETAILS";
+
+    /**
+     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
+     * which is a {@link String} that represents the current language preference this user has
+     * specified - a locale string like "en-US".
+     */
+    public static final String EXTRA_LANGUAGE_PREFERENCE =
+            "android.speech.extra.LANGUAGE_PREFERENCE";
+
+    /**
+     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
+     * which is an {@link ArrayList} of {@link String}s that represents the languages supported by
+     * this implementation of voice recognition - a list of strings like "en-US", "cmn-Hans-CN",
+     * etc.
+     */
+    public static final String EXTRA_SUPPORTED_LANGUAGES =
+            "android.speech.extra.SUPPORTED_LANGUAGES";
 }
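
The two-argument factory added above only changes which RecognitionService startListening() binds to; when no component is given, the manager still resolves the user's default from Settings.Secure.VOICE_RECOGNITION_SERVICE exactly as before. A minimal usage sketch, not part of this patch (the service component name is hypothetical, and the caller supplies its own RecognitionListener):

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.speech.RecognitionListener;
import android.speech.RecognitionManager;
import android.speech.RecognizerIntent;

public class DirectedRecognitionExample {
    /** Starts recognition against one specific (hypothetical) recognition service. */
    public static RecognitionManager startDirectedRecognition(
            Context context, RecognitionListener listener) {
        // Hypothetical component; a real caller would name an installed RecognitionService.
        ComponentName service = new ComponentName(
                "com.example.recognizer",
                "com.example.recognizer.ExampleRecognitionService");

        // Must be called from the main thread, like the one-argument factory.
        RecognitionManager manager =
                RecognitionManager.createRecognitionManager(context, service);
        manager.setRecognitionListener(listener);   // required before any other command

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        manager.startListening(intent);
        return manager;
    }
}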
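
For the RecognizerIntent additions, getVoiceDetailsIntent() plus an ordered broadcast is the round trip the new javadoc describes. A sketch of the querying side, assuming the voice search implementation fills in the result extras as the javadoc recommends (it is not required to):

import java.util.ArrayList;

import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognizerIntent;

public class LanguageDetailsQueryExample {
    /** Asks the default voice search package which languages it supports. */
    public static void requestLanguageDetails(Context context) {
        Intent details = RecognizerIntent.getVoiceDetailsIntent(context);
        if (details == null) {
            // No voice search activity, or it does not declare DETAILS_META_DATA.
            return;
        }
        context.sendOrderedBroadcast(details, null, new BroadcastReceiver() {
            @Override
            public void onReceive(Context receiverContext, Intent intent) {
                // The implementation is expected to put its answers into the result extras.
                Bundle extras = getResultExtras(true);
                String preference =
                        extras.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE);
                ArrayList<String> supported =
                        extras.getStringArrayList(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES);
                // Use preference/supported here, e.g. to populate a language picker.
            }
        }, null, Activity.RESULT_OK, null, null);
    }
}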
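
On the providing side, a voice search application that wants to answer these queries would register a BroadcastReceiver, point DETAILS_META_DATA at its class name from the ACTION_WEB_SEARCH activity's manifest entry, and reply through the ordered broadcast's result extras. A sketch of such a receiver; the class name and the hard-coded locale values are illustrative only:

import java.util.ArrayList;
import java.util.Arrays;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognizerIntent;

public class ExampleLanguageDetailsReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (!RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS.equals(intent.getAction())) {
            return;
        }
        // Answer by filling the ordered broadcast's result extras.
        Bundle extras = getResultExtras(true);
        extras.putString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
        extras.putStringArrayList(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES,
                new ArrayList<String>(Arrays.asList("en-US", "cmn-Hans-CN")));
        setResultExtras(extras);
    }
}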