-rw-r--r--  core/java/android/provider/Settings.java                             |  15
-rw-r--r--  core/java/android/widget/RelativeLayout.java                         |  19
-rw-r--r--  core/java/android/widget/VideoView.java                              |  69
-rw-r--r--  core/jni/android_media_AudioTrack.cpp                                |  25
-rw-r--r--  data/keyboards/Android.mk                                            |  28
-rw-r--r--  data/keyboards/common.mk                                             |   6
-rw-r--r--  data/keyboards/keyboards.mk                                          |  12
-rw-r--r--  docs/html/about/dashboards/index.jd                                  |  88
-rw-r--r--  media/java/android/media/AudioTimestamp.java                         |  47
-rw-r--r--  media/java/android/media/AudioTrack.java                             |  50
-rw-r--r--  media/java/android/media/MediaPlayer.java                            | 208
-rw-r--r--  packages/InputDevices/Android.mk                                     |  24
-rw-r--r--  services/java/com/android/server/display/WifiDisplayController.java |  33
13 files changed, 524 insertions(+), 100 deletions(-)
diff --git a/core/java/android/provider/Settings.java b/core/java/android/provider/Settings.java
index 83e1544..9b982efb 100644
--- a/core/java/android/provider/Settings.java
+++ b/core/java/android/provider/Settings.java
@@ -5107,6 +5107,21 @@ public final class Settings {
"wifi_display_certification_on";
/**
+ * WPS configuration method used by Wifi display. This setting only
+ * takes effect when WIFI_DISPLAY_CERTIFICATION_ON is 1 (enabled).
+ *
+ * Possible values are:
+ *
+ * WpsInfo.INVALID: use default WPS method chosen by framework
+ * WpsInfo.PBC : use Push button
+ * WpsInfo.KEYPAD : use Keypad
+ * WpsInfo.DISPLAY: use Display
+ * @hide
+ */
+ public static final String WIFI_DISPLAY_WPS_CONFIG =
+ "wifi_display_wps_config";
+
+ /**
* Whether to notify the user of open networks.
* <p>
* If not connected and the scan results have an open network, we will
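For illustration, a minimal sketch (not part of the patch) of how a certification test harness might drive the new setting. The Settings.Global constants involved are @hide, so raw key strings are used; the class name is made up, and the caller is assumed to hold WRITE_SECURE_SETTINGS:

    import android.content.ContentResolver;
    import android.net.wifi.WpsInfo;
    import android.provider.Settings;

    final class WfdCertSettings {
        // Hypothetical helper: force Push-Button Config while certifying Wifi display.
        static void enablePbcForCertification(ContentResolver resolver) {
            // The WPS method below is only honored while certification mode is enabled.
            Settings.Global.putInt(resolver, "wifi_display_certification_on", 1);
            Settings.Global.putInt(resolver, "wifi_display_wps_config", WpsInfo.PBC);
        }
    }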
diff --git a/core/java/android/widget/RelativeLayout.java b/core/java/android/widget/RelativeLayout.java
index f73e2c4..b9b6b08 100644
--- a/core/java/android/widget/RelativeLayout.java
+++ b/core/java/android/widget/RelativeLayout.java
@@ -462,6 +462,7 @@ public class RelativeLayout extends ViewGroup {
views = mSortedVerticalChildren;
count = views.length;
+ final int targetSdkVersion = getContext().getApplicationInfo().targetSdkVersion;
for (int i = 0; i < count; i++) {
View child = views[i];
@@ -476,14 +477,26 @@ public class RelativeLayout extends ViewGroup {
if (isWrapContentWidth) {
if (isLayoutRtl()) {
- width = Math.max(width, myWidth - params.mLeft);
+ if (targetSdkVersion < Build.VERSION_CODES.KEY_LIME_PIE) {
+ width = Math.max(width, myWidth - params.mLeft);
+ } else {
+ width = Math.max(width, myWidth - params.mLeft - params.leftMargin);
+ }
} else {
- width = Math.max(width, params.mRight);
+ if (targetSdkVersion < Build.VERSION_CODES.KEY_LIME_PIE) {
+ width = Math.max(width, params.mRight);
+ } else {
+ width = Math.max(width, params.mRight + params.rightMargin);
+ }
}
}
if (isWrapContentHeight) {
- height = Math.max(height, params.mBottom);
+ if (targetSdkVersion < Build.VERSION_CODES.KEY_LIME_PIE) {
+ height = Math.max(height, params.mBottom);
+ } else {
+ height = Math.max(height, params.mBottom + params.bottomMargin);
+ }
}
if (child != ignore || verticalGravity) {
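For illustration, a sketch (not part of the patch) of the behavior change above: with a wrap_content RelativeLayout, a child's right/bottom margins (left in RTL) now contribute to the measured size once the app targets KEY_LIME_PIE or later, while older targets keep the previous margin-less measurement. The class name and pixel values are made up:

    import android.content.Context;
    import android.view.ViewGroup;
    import android.widget.RelativeLayout;
    import android.widget.TextView;

    final class WrapContentMarginExample {
        // Hypothetical helper: the container's wrap_content size includes the child's
        // margins only when the app targets KEY_LIME_PIE or later.
        static RelativeLayout build(Context context) {
            RelativeLayout container = new RelativeLayout(context);
            RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(
                    ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
            lp.bottomMargin = 24;  // counted toward wrap_content height on new targets
            lp.rightMargin = 16;   // counted toward wrap_content width (leftMargin in RTL)
            container.addView(new TextView(context), lp);
            return container;
        }
    }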
diff --git a/core/java/android/widget/VideoView.java b/core/java/android/widget/VideoView.java
index 0ddc131..f449797 100644
--- a/core/java/android/widget/VideoView.java
+++ b/core/java/android/widget/VideoView.java
@@ -29,9 +29,12 @@ import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnInfoListener;
import android.media.Metadata;
+import android.media.SubtitleController;
+import android.media.WebVttRenderer;
import android.net.Uri;
import android.util.AttributeSet;
import android.util.Log;
+import android.util.Pair;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
@@ -54,7 +57,8 @@ import java.util.Vector;
* it can be used in any layout manager, and provides various display options
* such as scaling and tinting.
*/
-public class VideoView extends SurfaceView implements MediaPlayerControl {
+public class VideoView extends SurfaceView
+ implements MediaPlayerControl, SubtitleController.Anchor {
private String TAG = "VideoView";
// settable by the client
private Uri mUri;
@@ -208,7 +212,7 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
setFocusable(true);
setFocusableInTouchMode(true);
requestFocus();
- mPendingSubtitleTracks = 0;
+ mPendingSubtitleTracks = new Vector<Pair<InputStream, MediaFormat>>();
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
@@ -256,23 +260,19 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
* specify "und" for the language.
*/
public void addSubtitleSource(InputStream is, MediaFormat format) {
- // always signal unsupported message for now
- try {
- if (is != null) {
- is.close();
- }
- } catch (IOException e) {
- }
-
if (mMediaPlayer == null) {
- ++mPendingSubtitleTracks;
+ mPendingSubtitleTracks.add(Pair.create(is, format));
} else {
- mInfoListener.onInfo(
- mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
+ try {
+ mMediaPlayer.addSubtitleSource(is, format);
+ } catch (IllegalStateException e) {
+ mInfoListener.onInfo(
+ mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
+ }
}
}
- private int mPendingSubtitleTracks;
+ private Vector<Pair<InputStream, MediaFormat>> mPendingSubtitleTracks;
public void stopPlayback() {
if (mMediaPlayer != null) {
@@ -300,6 +300,15 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
release(false);
try {
mMediaPlayer = new MediaPlayer();
+ // TODO: create SubtitleController in MediaPlayer, but we need
+ // a context for the subtitle renderers
+ SubtitleController controller = new SubtitleController(
+ getContext(),
+ mMediaPlayer.getMediaTimeProvider(),
+ mMediaPlayer);
+ controller.registerRenderer(new WebVttRenderer(getContext(), null));
+ mMediaPlayer.setSubtitleAnchor(controller, this);
+
if (mAudioSession != 0) {
mMediaPlayer.setAudioSessionId(mAudioSession);
} else {
@@ -318,9 +327,13 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
mMediaPlayer.setScreenOnWhilePlaying(true);
mMediaPlayer.prepareAsync();
- for (int ix = 0; ix < mPendingSubtitleTracks; ix++) {
- mInfoListener.onInfo(
- mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
+ for (Pair<InputStream, MediaFormat> pending: mPendingSubtitleTracks) {
+ try {
+ mMediaPlayer.addSubtitleSource(pending.first, pending.second);
+ } catch (IllegalStateException e) {
+ mInfoListener.onInfo(
+ mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
+ }
}
// we don't set the target state here either, but preserve the
@@ -340,7 +353,7 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
return;
} finally {
- mPendingSubtitleTracks = 0;
+ mPendingSubtitleTracks.clear();
}
}
@@ -604,7 +617,7 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
- mPendingSubtitleTracks = 0;
+ mPendingSubtitleTracks.clear();
mCurrentState = STATE_IDLE;
if (cleartargetstate) {
mTargetState = STATE_IDLE;
@@ -874,4 +887,22 @@ public class VideoView extends SurfaceView implements MediaPlayerControl {
overlay.layout(left, top, right, bottom);
}
}
+
+ /** @hide */
+ @Override
+ public void setSubtitleView(View view) {
+ if (mSubtitleView == view) {
+ return;
+ }
+
+ if (mSubtitleView != null) {
+ removeOverlay(mSubtitleView);
+ }
+ mSubtitleView = view;
+ if (mSubtitleView != null) {
+ addOverlay(mSubtitleView);
+ }
+ }
+
+ private View mSubtitleView;
}
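For illustration, a hypothetical caller of the new VideoView.addSubtitleSource() (not part of the patch). The class and asset names are made up; MediaFormat.createSubtitleFormat() is the same factory this patch uses for in-band tracks:

    import android.content.res.AssetManager;
    import android.media.MediaFormat;
    import android.widget.VideoView;
    import java.io.IOException;
    import java.io.InputStream;

    final class CaptionAttacher {
        // Hypothetical helper: the source is queued if the player does not exist yet,
        // parsed on a background thread, and closed by the player when done.
        static void attach(VideoView videoView, AssetManager assets) throws IOException {
            InputStream vtt = assets.open("captions_en.vtt");  // hypothetical WebVTT asset
            MediaFormat format = MediaFormat.createSubtitleFormat("text/vtt", "en");
            videoView.addSubtitleSource(vtt, format);
        }
    }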
diff --git a/core/jni/android_media_AudioTrack.cpp b/core/jni/android_media_AudioTrack.cpp
index 51ba52a..225bf06 100644
--- a/core/jni/android_media_AudioTrack.cpp
+++ b/core/jni/android_media_AudioTrack.cpp
@@ -742,6 +742,30 @@ static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) {
// ----------------------------------------------------------------------------
+static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz, jlongArray jTimestamp) {
+ sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
+
+ if (lpTrack == NULL) {
+ ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
+ return AUDIOTRACK_ERROR;
+ }
+ AudioTimestamp timestamp;
+ status_t status = lpTrack->getTimestamp(timestamp);
+ if (status == OK) {
+ jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
+ if (nTimestamp == NULL) {
+ ALOGE("Unable to get array for getTimestamp()");
+ return AUDIOTRACK_ERROR;
+ }
+ nTimestamp[0] = (jlong) timestamp.mPosition;
+ nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
+ env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
+ }
+ return (jint) android_media_translateErrorCode(status);
+}
+
+
+// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz,
jint loopStart, jint loopEnd, jint loopCount) {
sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
@@ -869,6 +893,7 @@ static JNINativeMethod gMethods[] = {
{"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position},
{"native_get_position", "()I", (void *)android_media_AudioTrack_get_position},
{"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency},
+ {"native_get_timestamp", "([J)I", (void *)android_media_AudioTrack_get_timestamp},
{"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop},
{"native_reload_static", "()I", (void *)android_media_AudioTrack_reload},
{"native_get_output_sample_rate",
diff --git a/data/keyboards/Android.mk b/data/keyboards/Android.mk
index a66a884..898efe8 100644
--- a/data/keyboards/Android.mk
+++ b/data/keyboards/Android.mk
@@ -21,17 +21,21 @@ include $(LOCAL_PATH)/common.mk
# Validate all key maps.
include $(CLEAR_VARS)
-validatekeymaps := $(HOST_OUT_EXECUTABLES)/validatekeymaps$(HOST_EXECUTABLE_SUFFIX)
-files := \
- $(foreach file,$(keylayouts),frameworks/base/data/keyboards/$(file)) \
- $(foreach file,$(keycharmaps),frameworks/base/data/keyboards/$(file)) \
- $(foreach file,$(keyconfigs),frameworks/base/data/keyboards/$(file))
-
LOCAL_MODULE := validate_framework_keymaps
-LOCAL_MODULE_TAGS := optional
-LOCAL_REQUIRED_MODULES := validatekeymaps
+intermediates := $(call intermediates-dir-for,ETC,$(LOCAL_MODULE),,COMMON)
+LOCAL_BUILT_MODULE := $(intermediates)/stamp
-validate_framework_keymaps: $(files)
- $(hide) $(validatekeymaps) $(files)
-
-include $(BUILD_PHONY_PACKAGE)
+validatekeymaps := $(HOST_OUT_EXECUTABLES)/validatekeymaps$(HOST_EXECUTABLE_SUFFIX)
+$(LOCAL_BUILT_MODULE): PRIVATE_VALIDATEKEYMAPS := $(validatekeymaps)
+$(LOCAL_BUILT_MODULE) : $(framework_keylayouts) $(framework_keycharmaps) $(framework_keyconfigs) | $(validatekeymaps)
+ $(hide) $(PRIVATE_VALIDATEKEYMAPS) $^
+ $(hide) mkdir -p $(dir $@) && touch $@
+
+# Run validatekeymaps unconditionally for platform build.
+droidcore all_modules : $(LOCAL_BUILT_MODULE)
+
+# Reset temp vars.
+validatekeymaps :=
+framework_keylayouts :=
+framework_keycharmaps :=
+framework_keyconfigs :=
diff --git a/data/keyboards/common.mk b/data/keyboards/common.mk
index 87c2ef5..d75b691 100644
--- a/data/keyboards/common.mk
+++ b/data/keyboards/common.mk
@@ -15,8 +15,8 @@
# This is the list of framework provided keylayouts and key character maps to include.
# Used by Android.mk and keyboards.mk.
-keylayouts := $(notdir $(wildcard $(LOCAL_PATH)/*.kl))
+framework_keylayouts := $(wildcard $(LOCAL_PATH)/*.kl)
-keycharmaps := $(notdir $(wildcard $(LOCAL_PATH)/*.kcm))
+framework_keycharmaps := $(wildcard $(LOCAL_PATH)/*.kcm)
-keyconfigs := $(notdir $(wildcard $(LOCAL_PATH)/*.idc))
+framework_keyconfigs := $(wildcard $(LOCAL_PATH)/*.idc)
diff --git a/data/keyboards/keyboards.mk b/data/keyboards/keyboards.mk
index c964961..d545241 100644
--- a/data/keyboards/keyboards.mk
+++ b/data/keyboards/keyboards.mk
@@ -16,11 +16,11 @@
include $(LOCAL_PATH)/common.mk
-PRODUCT_COPY_FILES := $(foreach file,$(keylayouts),\
- frameworks/base/data/keyboards/$(file):system/usr/keylayout/$(file))
+PRODUCT_COPY_FILES := $(foreach file,$(framework_keylayouts),\
+ $(file):system/usr/keylayout/$(file))
-PRODUCT_COPY_FILES += $(foreach file,$(keycharmaps),\
- frameworks/base/data/keyboards/$(file):system/usr/keychars/$(file))
+PRODUCT_COPY_FILES += $(foreach file,$(framework_keycharmaps),\
+ $(file):system/usr/keychars/$(file))
-PRODUCT_COPY_FILES += $(foreach file,$(keyconfigs),\
- frameworks/base/data/keyboards/$(file):system/usr/idc/$(file))
+PRODUCT_COPY_FILES += $(foreach file,$(framework_keyconfigs),\
+ $(file):system/usr/idc/$(file))
diff --git a/docs/html/about/dashboards/index.jd b/docs/html/about/dashboards/index.jd
index 6e4a03c..ff19476 100644
--- a/docs/html/about/dashboards/index.jd
+++ b/docs/html/about/dashboards/index.jd
@@ -30,16 +30,20 @@ you optimize your app.</p>
<p>This page provides information about the relative number of devices that share a certain
characteristic, such as Android version or screen size. This information may
help you prioritize efforts for <a
-href="{@docRoot}training/basics/supporting-devices/index.html">supporting different devices</a>.</p>
+href="{@docRoot}training/basics/supporting-devices/index.html">supporting different devices</a>
+by revealing which devices are active in the Android and Google Play ecosystem.</p>
-<p>Each snapshot of data represents all the devices that visited the Google Play Store in the
-prior 14 days.</p>
+<p>This data reflects devices running the latest Google Play Store app, which is compatible
+with Android 2.2 and higher. Each snapshot of data represents all the devices that visited the
+Google Play Store in the prior 7 days.</p>
-<p class="note"><strong>Note:</strong> Beginning in April, 2013, these charts are now built
-using data collected from each device when the user visits the Google Play Store. Previously, the
-data was collected when the device simply checked-in to Google servers. We believe the new
-data more accurately reflects those users who are most engaged in the Android and Google Play
-ecosystem.</p>
+
+<div class="note">
+<p><strong>Note:</strong> Beginning in September, 2013, devices running versions older than Android
+2.2 do not appear in this data because those devices do not support the new Google Play Store
+app. Only the new app is able to measure the number of devices that actively visit Google Play Store
+and we believe this measurement best reflects your potential user-base.</p>
+</div>
<h2 id="Platform">Platform Versions</h2>
@@ -57,10 +61,15 @@ Platform Versions</a>.</p>
</div>
-<p style="clear:both"><em>Data collected during a 14-day period ending on August 1, 2013.
+<p style="clear:both"><em>Data collected during a 7-day period ending on September 4, 2013.
<br/>Any versions with less than 0.1% distribution are not shown.</em>
</p>
+<p class="note"><strong>Note:</strong> Because this data is gathered from the new Google Play
+Store app, which supports Android 2.2 and above, devices running older versions are not included.
+However, in August, 2013, versions older than Android 2.2 accounted for about 1% of devices that
+<em>checked in</em> to Google servers (not those that actually visited Google Play Store).
+</p>
@@ -83,7 +92,7 @@ Screens</a>.</p>
</div>
-<p style="clear:both"><em>Data collected during a 14-day period ending on August 1, 2013
+<p style="clear:both"><em>Data collected during a 7-day period ending on September 4, 2013
<br/>Any screen configurations with less than 0.1% distribution are not shown.</em></p>
@@ -130,7 +139,7 @@ uses.</p>
-<p style="clear:both"><em>Data collected during a 14-day period ending on August 1, 2013</em></p>
+<p style="clear:both"><em>Data collected during a 7-day period ending on September 4, 2013</em></p>
@@ -148,32 +157,17 @@ uses.</p>
var VERSION_DATA =
[
{
- "chart": "//chart.googleapis.com/chart?cht=p&chs=500x250&chl=Eclair%7CFroyo%7CGingerbread%7CHoneycomb%7CIce%20Cream%20Sandwich%7CJelly%20Bean&chf=bg%2Cs%2C00000000&chd=t%3A1.3%2C2.5%2C33.1%2C0.1%2C22.5%2C40.5&chco=c4df9b%2C6fad0c",
+ "chart": "//chart.googleapis.com/chart?chs=500x250&cht=p&chco=c4df9b%2C6fad0c&chd=t%3A2.4%2C30.7%2C0.1%2C21.7%2C45.1&chf=bg%2Cs%2C00000000&chl=Froyo%7CGingerbread%7CHoneycomb%7CIce%20Cream%20Sandwich%7CJelly%20Bean",
"data": [
{
- "api": 4,
- "name": "Donut",
- "perc": "0.1"
- },
- {
- "api": 7,
- "name": "Eclair",
- "perc": "1.2"
- },
- {
"api": 8,
"name": "Froyo",
- "perc": "2.5"
- },
- {
- "api": 9,
- "name": "Gingerbread",
- "perc": "0.1"
+ "perc": "2.4"
},
{
"api": 10,
"name": "Gingerbread",
- "perc": "33.0"
+ "perc": "30.7"
},
{
"api": 13,
@@ -183,17 +177,17 @@ var VERSION_DATA =
{
"api": 15,
"name": "Ice Cream Sandwich",
- "perc": "22.5"
+ "perc": "21.7"
},
{
"api": 16,
"name": "Jelly Bean",
- "perc": "34.0"
+ "perc": "36.6"
},
{
"api": 17,
"name": "Jelly Bean",
- "perc": "6.5"
+ "perc": "8.5"
}
]
}
@@ -209,30 +203,29 @@ var SCREEN_DATA =
"data": {
"Large": {
"hdpi": "0.4",
- "ldpi": "0.5",
- "mdpi": "3.2",
- "tvdpi": "1.1",
+ "ldpi": "0.6",
+ "mdpi": "3.4",
+ "tvdpi": "1.2",
"xhdpi": "0.5"
},
"Normal": {
- "hdpi": "34.5",
+ "hdpi": "33.6",
"ldpi": "0.1",
- "mdpi": "15.9",
- "xhdpi": "23.9",
- "xxhdpi": "5.7"
+ "mdpi": "15.7",
+ "xhdpi": "23.1",
+ "xxhdpi": "7.1"
},
"Small": {
- "hdpi": "0.1",
- "ldpi": "9.7"
+ "ldpi": "9.5"
},
"Xlarge": {
- "hdpi": "0.2",
- "mdpi": "4.1",
+ "hdpi": "0.3",
+ "mdpi": "4.4",
"xhdpi": "0.1"
}
},
- "densitychart": "//chart.googleapis.com/chart?cht=p&chs=400x250&chl=ldpi%7Cmdpi%7Ctvdpi%7Chdpi%7Cxhdpi%7Cxxhdpi&chf=bg%2Cs%2C00000000&chd=t%3A10.3%2C23.2%2C1.1%2C35.2%2C24.5%2C5.7&chco=c4df9b%2C6fad0c",
- "layoutchart": "//chart.googleapis.com/chart?cht=p&chs=400x250&chl=Xlarge%7CLarge%7CNormal%7CSmall&chf=bg%2Cs%2C00000000&chd=t%3A4.4%2C5.7%2C80.2%2C9.8&chco=c4df9b%2C6fad0c"
+ "densitychart": "//chart.googleapis.com/chart?chs=400x250&cht=p&chco=c4df9b%2C6fad0c&chd=t%3A10.2%2C23.5%2C1.2%2C34.3%2C23.7%2C7.1&chf=bg%2Cs%2C00000000&chl=ldpi%7Cmdpi%7Ctvdpi%7Chdpi%7Cxhdpi%7Cxxhdpi",
+ "layoutchart": "//chart.googleapis.com/chart?chs=400x250&cht=p&chco=c4df9b%2C6fad0c&chd=t%3A4.8%2C6.1%2C79.6%2C9.5&chf=bg%2Cs%2C00000000&chl=Xlarge%7CLarge%7CNormal%7CSmall"
}
];
@@ -294,6 +287,11 @@ var VERSION_NAMES =
"api":17,
"link":"<a href='/about/versions/android-4.2.html'>4.2.x</a>",
"codename":"Jelly Bean"
+ },
+ {
+ "api":18,
+ "link":"<a href='/about/versions/android-4.3.html'>4.3</a>",
+ "codename":"Jelly Bean"
}
];
diff --git a/media/java/android/media/AudioTimestamp.java b/media/java/android/media/AudioTimestamp.java
new file mode 100644
index 0000000..437a0c6
--- /dev/null
+++ b/media/java/android/media/AudioTimestamp.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Structure that groups a position in frame units relative to an assumed audio stream,
+ * together with the estimated time when that frame was presented or is committed to be
+ * presented.
+ * In the case of audio output, "present" means that audio produced on device
+ * is detectable by an external observer off device.
+ * The time is based on the implementation's best effort, using whatever knowledge
+ * is available to the system, but cannot account for any delay unknown to the implementation.
+ *
+ * @see AudioTrack#getTimestamp
+ * @see AudioTrack.TimestampListener
+ *
+ * @hide
+ */
+public final class AudioTimestamp
+{
+ /**
+ * Position in frames relative to start of an assumed audio stream.
+ * The low-order 32 bits of position is in wrapping frame units similar to
+ * {@link AudioTrack#getPlaybackHeadPosition}.
+ */
+ public long framePosition;
+
+ /**
+ * The estimated time when frame was presented or is committed to be presented,
+ * in the same units and timebase as {@link java.lang.System#nanoTime}.
+ */
+ public long nanoTime;
+}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index d9227bd..88539f28 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -732,6 +732,51 @@ public class AudioTrack
return mSessionId;
}
+ /**
+ * Poll for a timestamp on demand.
+ *
+ * Use if {@link TimestampListener} is not delivered often enough for your needs,
+ * or if you need to get the most recent timestamp outside of the event callback handler.
+ * Calling this method too often may be inefficient;
+ * if you need a high-resolution mapping between frame position and presentation time,
+ * consider implementing that at application level, based on low-resolution timestamps.
+ * The audio data at the returned position may either already have been
+ * presented, or may have not yet been presented but is committed to be presented.
+ * It is not possible to request the time corresponding to a particular position,
+ * or to request the (fractional) position corresponding to a particular time.
+ * If you need such features, consider implementing them at application level.
+ *
+ * @param timestamp a reference to an AudioTimestamp instance allocated
+ * and owned by the caller, or null.
+ * @return that same instance if timestamp parameter is non-null and a timestamp is available,
+ * or a reference to a new AudioTimestamp instance which is now owned by caller
+ * if timestamp parameter is null and a timestamp is available,
+ * or null if no timestamp is available. In either successful case,
+ * the AudioTimestamp instance is filled in with a position in frame units, together
+ * with the estimated time when that frame was presented or is committed to
+ * be presented.
+ * In the case that no timestamp is available, any supplied instance is left unaltered.
+ *
+ * @hide
+ */
+ public AudioTimestamp getTimestamp(AudioTimestamp timestamp)
+ {
+ // It's unfortunate, but we have to either create garbage every time or use synchronized
+ long[] longArray = new long[2];
+ int ret = native_get_timestamp(longArray);
+ if (ret == SUCCESS) {
+ if (timestamp == null) {
+ timestamp = new AudioTimestamp();
+ }
+ timestamp.framePosition = longArray[0];
+ timestamp.nanoTime = longArray[1];
+ } else {
+ timestamp = null;
+ }
+ return timestamp;
+ }
+
+
//--------------------------------------------------------------------------
// Initialization / configuration
//--------------------
@@ -1321,6 +1366,11 @@ public class AudioTrack
private native final int native_get_latency();
+ // longArray must be a non-null array of length >= 2
+ // [0] is assigned the frame position
+ // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
+ private native final int native_get_timestamp(long[] longArray);
+
private native final int native_set_loop(int start, int end, int loopCount);
static private native final int native_get_output_sample_rate(int streamType);
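For illustration, a sketch (not part of the patch) of polling the new, still @hide getTimestamp() API: one AudioTimestamp instance is reused so the common path allocates nothing, a null return means no timestamp is available yet, and the (framePosition, nanoTime) anchor is extrapolated at the nominal sample rate. The class and method names are made up:

    import android.media.AudioTimestamp;
    import android.media.AudioTrack;

    final class TimestampPoller {
        private final AudioTimestamp mTimestamp = new AudioTimestamp();

        // Estimated CLOCK_MONOTONIC time (ns) at which frameOfInterest is presented, or -1.
        long estimatePresentationTimeNanos(AudioTrack track, long frameOfInterest, int sampleRate) {
            AudioTimestamp ts = track.getTimestamp(mTimestamp);
            if (ts == null) {
                return -1;  // e.g. playback not started, or the output path reports no timestamps
            }
            long frameDelta = frameOfInterest - ts.framePosition;
            return ts.nanoTime + (frameDelta * 1000000000L) / sampleRate;
        }
    }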
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index bcb1cbd..d286be4 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -26,11 +26,13 @@ import android.net.Proxy;
import android.net.ProxyProperties;
import android.net.Uri;
import android.os.Handler;
+import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.ParcelFileDescriptor;
+import android.os.Process;
import android.os.PowerManager;
import android.util.Log;
import android.view.Surface;
@@ -41,14 +43,18 @@ import android.media.AudioManager;
import android.media.MediaFormat;
import android.media.MediaTimeProvider;
import android.media.MediaTimeProvider.OnMediaTimeListener;
+import android.media.SubtitleController;
import android.media.SubtitleData;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
+import java.io.InputStream;
+import java.lang.Runnable;
import java.net.InetSocketAddress;
import java.util.Map;
+import java.util.Scanner;
import java.util.Set;
import java.util.Vector;
import java.lang.ref.WeakReference;
@@ -520,7 +526,7 @@ import java.lang.ref.WeakReference;
* thread by default has a Looper running).
*
*/
-public class MediaPlayer
+public class MediaPlayer implements SubtitleController.Listener
{
/**
Constant to retrieve only the new metadata since the last
@@ -594,6 +600,9 @@ public class MediaPlayer
}
mTimeProvider = new TimeProvider(this);
+ mOutOfBandSubtitleTracks = new Vector<SubtitleTrack>();
+ mOpenSubtitleSources = new Vector<InputStream>();
+ mInbandSubtitleTracks = new SubtitleTrack[0];
/* Native setup requires a weak reference to our object.
* It's easier to create it here than in C++.
@@ -1356,6 +1365,22 @@ public class MediaPlayer
* data source and calling prepare().
*/
public void reset() {
+ mSelectedSubtitleTrackIndex = -1;
+ synchronized(mOpenSubtitleSources) {
+ for (final InputStream is: mOpenSubtitleSources) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ }
+ }
+ mOpenSubtitleSources.clear();
+ }
+ mOutOfBandSubtitleTracks.clear();
+ mInbandSubtitleTracks = new SubtitleTrack[0];
+ if (mSubtitleController != null) {
+ mSubtitleController.reset();
+ }
+
stayAwake(false);
_reset();
// make sure none of the listeners get called anymore
@@ -1575,6 +1600,12 @@ public class MediaPlayer
}
}
+ /** @hide */
+ TrackInfo(int type, MediaFormat format) {
+ mTrackType = type;
+ mFormat = format;
+ }
+
/**
* {@inheritDoc}
*/
@@ -1619,6 +1650,19 @@ public class MediaPlayer
* @throws IllegalStateException if it is called in an invalid state.
*/
public TrackInfo[] getTrackInfo() throws IllegalStateException {
+ TrackInfo trackInfo[] = getInbandTrackInfo();
+ // add out-of-band tracks
+ TrackInfo allTrackInfo[] = new TrackInfo[trackInfo.length + mOutOfBandSubtitleTracks.size()];
+ System.arraycopy(trackInfo, 0, allTrackInfo, 0, trackInfo.length);
+ int i = trackInfo.length;
+ for (SubtitleTrack track: mOutOfBandSubtitleTracks) {
+ allTrackInfo[i] = new TrackInfo(TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE, track.getFormat());
+ ++i;
+ }
+ return allTrackInfo;
+ }
+
+ private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
Parcel request = Parcel.obtain();
Parcel reply = Parcel.obtain();
try {
@@ -1651,6 +1695,143 @@ public class MediaPlayer
return false;
}
+ private SubtitleController mSubtitleController;
+
+ /** @hide */
+ public void setSubtitleAnchor(
+ SubtitleController controller,
+ SubtitleController.Anchor anchor) {
+ // TODO: create SubtitleController in MediaPlayer
+ mSubtitleController = controller;
+ mSubtitleController.setAnchor(anchor);
+ }
+
+ private SubtitleTrack[] mInbandSubtitleTracks;
+ private int mSelectedSubtitleTrackIndex = -1;
+ private Vector<SubtitleTrack> mOutOfBandSubtitleTracks;
+ private Vector<InputStream> mOpenSubtitleSources;
+
+ private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
+ @Override
+ public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
+ int index = data.getTrackIndex();
+ if (index >= mInbandSubtitleTracks.length) {
+ return;
+ }
+ SubtitleTrack track = mInbandSubtitleTracks[index];
+ if (track != null) {
+ try {
+ long runID = data.getStartTimeUs() + 1;
+ // TODO: move conversion into track
+ track.onData(new String(data.getData(), "UTF-8"), true /* eos */, runID);
+ track.setRunDiscardTimeMs(
+ runID,
+ (data.getStartTimeUs() + data.getDurationUs()) / 1000);
+ } catch (java.io.UnsupportedEncodingException e) {
+ Log.w(TAG, "subtitle data for track " + index + " is not UTF-8 encoded: " + e);
+ }
+ }
+ }
+ };
+
+ /** @hide */
+ @Override
+ public void onSubtitleTrackSelected(SubtitleTrack track) {
+ if (mSelectedSubtitleTrackIndex >= 0) {
+ deselectTrack(mSelectedSubtitleTrackIndex);
+ }
+ mSelectedSubtitleTrackIndex = -1;
+ setOnSubtitleDataListener(null);
+ for (int i = 0; i < mInbandSubtitleTracks.length; i++) {
+ if (mInbandSubtitleTracks[i] == track) {
+ Log.v(TAG, "Selecting subtitle track " + i);
+ selectTrack(i);
+ mSelectedSubtitleTrackIndex = i;
+ setOnSubtitleDataListener(mSubtitleDataListener);
+ break;
+ }
+ }
+ // no need to select out-of-band tracks
+ }
+
+ /** @hide */
+ public void addSubtitleSource(InputStream is, MediaFormat format)
+ throws IllegalStateException
+ {
+ final InputStream fIs = is;
+ final MediaFormat fFormat = format;
+
+ // Ensure all input streams are closed. It is also a handy
+ // way to implement timeouts in the future.
+ synchronized(mOpenSubtitleSources) {
+ mOpenSubtitleSources.add(is);
+ }
+
+ // process each subtitle in its own thread
+ final HandlerThread thread = new HandlerThread("SubtitleReadThread",
+ Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
+ thread.start();
+ Handler handler = new Handler(thread.getLooper());
+ handler.post(new Runnable() {
+ private int addTrack() {
+ if (fIs == null || mSubtitleController == null) {
+ return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
+ }
+
+ SubtitleTrack track = mSubtitleController.addTrack(fFormat);
+ if (track == null) {
+ return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
+ }
+
+ // TODO: do the conversion in the subtitle track
+ Scanner scanner = new Scanner(fIs, "UTF-8");
+ String contents = scanner.useDelimiter("\\A").next();
+ synchronized(mOpenSubtitleSources) {
+ mOpenSubtitleSources.remove(fIs);
+ }
+ scanner.close();
+ mOutOfBandSubtitleTracks.add(track);
+ track.onData(contents, true /* eos */, ~0 /* runID: keep forever */);
+ // update default track selection
+ mSubtitleController.selectDefaultTrack();
+ return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
+ }
+
+ public void run() {
+ int res = addTrack();
+ if (mEventHandler != null) {
+ Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
+ mEventHandler.sendMessage(m);
+ }
+ thread.getLooper().quitSafely();
+ }
+ });
+ }
+
+ private void scanInternalSubtitleTracks() {
+ if (mSubtitleController == null) {
+ Log.e(TAG, "Should have subtitle controller already set");
+ return;
+ }
+
+ TrackInfo[] tracks = getInbandTrackInfo();
+ SubtitleTrack[] inbandTracks = new SubtitleTrack[tracks.length];
+ for (int i=0; i < tracks.length; i++) {
+ if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
+ if (i < mInbandSubtitleTracks.length) {
+ inbandTracks[i] = mInbandSubtitleTracks[i];
+ } else {
+ MediaFormat format = MediaFormat.createSubtitleFormat(
+ "text/vtt", tracks[i].getLanguage());
+ SubtitleTrack track = mSubtitleController.addTrack(format);
+ inbandTracks[i] = track;
+ }
+ }
+ }
+ mInbandSubtitleTracks = inbandTracks;
+ mSubtitleController.selectDefaultTrack();
+ }
+
/* TODO: Limit the total number of external timed text source to a reasonable number.
*/
/**
@@ -1841,6 +2022,13 @@ public class MediaPlayer
private void selectOrDeselectTrack(int index, boolean select)
throws IllegalStateException {
+ // ignore out-of-band tracks
+ TrackInfo[] trackInfo = getInbandTrackInfo();
+ if (index >= trackInfo.length &&
+ index < trackInfo.length + mOutOfBandSubtitleTracks.size()) {
+ return;
+ }
+
Parcel request = Parcel.obtain();
Parcel reply = Parcel.obtain();
try {
@@ -1953,6 +2141,7 @@ public class MediaPlayer
}
switch(msg.what) {
case MEDIA_PREPARED:
+ scanInternalSubtitleTracks();
if (mOnPreparedListener != null)
mOnPreparedListener.onPrepared(mMediaPlayer);
return;
@@ -2008,9 +2197,18 @@ public class MediaPlayer
return;
case MEDIA_INFO:
- if (msg.arg1 != MEDIA_INFO_VIDEO_TRACK_LAGGING) {
+ switch (msg.arg1) {
+ case MEDIA_INFO_VIDEO_TRACK_LAGGING:
Log.i(TAG, "Info (" + msg.arg1 + "," + msg.arg2 + ")");
+ break;
+ case MEDIA_INFO_METADATA_UPDATE:
+ scanInternalSubtitleTracks();
+ break;
+ case MEDIA_INFO_EXTERNAL_METADATA_UPDATE:
+ msg.arg1 = MEDIA_INFO_METADATA_UPDATE;
+ break;
}
+
if (mOnInfoListener != null) {
mOnInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
}
@@ -2409,6 +2607,12 @@ public class MediaPlayer
*/
public static final int MEDIA_INFO_METADATA_UPDATE = 802;
+ /** A new set of external-only metadata is available. Used by
+ * JAVA framework to avoid triggering track scanning.
+ * @hide
+ */
+ public static final int MEDIA_INFO_EXTERNAL_METADATA_UPDATE = 803;
+
/** Failed to handle timed text track properly.
* @see android.media.MediaPlayer.OnInfoListener
*
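For illustration, an application-side sketch (not part of the patch): once an out-of-band source has been parsed, the handler above rewrites MEDIA_INFO_EXTERNAL_METADATA_UPDATE into the existing MEDIA_INFO_METADATA_UPDATE, so a client can re-query getTrackInfo() to pick up the new subtitle track. Here `player` is assumed to be an existing MediaPlayer, the log tag is arbitrary, and MEDIA_TRACK_TYPE_SUBTITLE is the subtitle track type referenced in this patch:

    player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
        @Override
        public boolean onInfo(MediaPlayer mp, int what, int extra) {
            if (what == MediaPlayer.MEDIA_INFO_METADATA_UPDATE) {
                // The track list changed (possibly a freshly parsed subtitle source):
                // refresh whatever UI lists the available subtitle tracks.
                for (MediaPlayer.TrackInfo info : mp.getTrackInfo()) {
                    if (info.getTrackType() == MediaPlayer.TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
                        Log.d("Captions", "subtitle track, language=" + info.getLanguage());
                    }
                }
            }
            return true;
        }
    });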
diff --git a/packages/InputDevices/Android.mk b/packages/InputDevices/Android.mk
index 095655c..f537022 100644
--- a/packages/InputDevices/Android.mk
+++ b/packages/InputDevices/Android.mk
@@ -30,14 +30,20 @@ include $(BUILD_PACKAGE)
# Validate all key maps.
include $(CLEAR_VARS)
-validatekeymaps := $(HOST_OUT_EXECUTABLES)/validatekeymaps$(HOST_EXECUTABLE_SUFFIX)
-files := frameworks/base/packages/InputDevices/res/raw/*.kcm
-
LOCAL_MODULE := validate_input_devices_keymaps
-LOCAL_MODULE_TAGS := optional
-LOCAL_REQUIRED_MODULES := validatekeymaps
+intermediates := $(call intermediates-dir-for,ETC,$(LOCAL_MODULE),,COMMON)
+LOCAL_BUILT_MODULE := $(intermediates)/stamp
-validate_input_devices_keymaps: $(files)
- $(hide) $(validatekeymaps) $(files)
-
-include $(BUILD_PHONY_PACKAGE)
+validatekeymaps := $(HOST_OUT_EXECUTABLES)/validatekeymaps$(HOST_EXECUTABLE_SUFFIX)
+input_devices_keymaps := $(wildcard $(LOCAL_PATH)/res/raw/*.kcm)
+$(LOCAL_BUILT_MODULE): PRIVATE_VALIDATEKEYMAPS := $(validatekeymaps)
+$(LOCAL_BUILT_MODULE) : $(input_devices_keymaps) | $(validatekeymaps)
+ $(hide) $(PRIVATE_VALIDATEKEYMAPS) $^
+ $(hide) mkdir -p $(dir $@) && touch $@
+
+# Run validatekeymaps unconditionally for platform build.
+droidcore all_modules : $(LOCAL_BUILT_MODULE)
+
+# Reset temp vars.
+validatekeymaps :=
+input_devices_keymaps :=
diff --git a/services/java/com/android/server/display/WifiDisplayController.java b/services/java/com/android/server/display/WifiDisplayController.java
index cd201f5..9a4cfb7 100644
--- a/services/java/com/android/server/display/WifiDisplayController.java
+++ b/services/java/com/android/server/display/WifiDisplayController.java
@@ -150,6 +150,8 @@ final class WifiDisplayController implements DumpUtils.Dump {
// Certification
private boolean mWifiDisplayCertMode;
+ private int mWifiDisplayWpsConfig = WpsInfo.INVALID;
+
private WifiP2pDevice mThisDevice;
public WifiDisplayController(Context context, Handler handler, Listener listener) {
@@ -179,6 +181,8 @@ final class WifiDisplayController implements DumpUtils.Dump {
Settings.Global.WIFI_DISPLAY_ON), false, settingsObserver);
resolver.registerContentObserver(Settings.Global.getUriFor(
Settings.Global.WIFI_DISPLAY_CERTIFICATION_ON), false, settingsObserver);
+ resolver.registerContentObserver(Settings.Global.getUriFor(
+ Settings.Global.WIFI_DISPLAY_WPS_CONFIG), false, settingsObserver);
updateSettings();
}
@@ -189,6 +193,12 @@ final class WifiDisplayController implements DumpUtils.Dump {
mWifiDisplayCertMode = Settings.Global.getInt(resolver,
Settings.Global.WIFI_DISPLAY_CERTIFICATION_ON, 0) != 0;
+ mWifiDisplayWpsConfig = WpsInfo.INVALID;
+ if (mWifiDisplayCertMode) {
+ mWifiDisplayWpsConfig = Settings.Global.getInt(resolver,
+ Settings.Global.WIFI_DISPLAY_WPS_CONFIG, WpsInfo.INVALID);
+ }
+
updateWfdEnableState();
}
@@ -286,6 +296,25 @@ final class WifiDisplayController implements DumpUtils.Dump {
}
} else {
// WFD should be disabled.
+ if (mWfdEnabled || mWfdEnabling) {
+ WifiP2pWfdInfo wfdInfo = new WifiP2pWfdInfo();
+ wfdInfo.setWfdEnabled(false);
+ mWifiP2pManager.setWFDInfo(mWifiP2pChannel, wfdInfo, new ActionListener() {
+ @Override
+ public void onSuccess() {
+ if (DEBUG) {
+ Slog.d(TAG, "Successfully set WFD info.");
+ }
+ }
+
+ @Override
+ public void onFailure(int reason) {
+ if (DEBUG) {
+ Slog.d(TAG, "Failed to set WFD info with reason " + reason + ".");
+ }
+ }
+ });
+ }
mWfdEnabling = false;
mWfdEnabled = false;
reportFeatureState();
@@ -589,7 +618,9 @@ final class WifiDisplayController implements DumpUtils.Dump {
mConnectingDevice = mDesiredDevice;
WifiP2pConfig config = new WifiP2pConfig();
WpsInfo wps = new WpsInfo();
- if (mConnectingDevice.wpsPbcSupported()) {
+ if (mWifiDisplayWpsConfig != WpsInfo.INVALID) {
+ wps.setup = mWifiDisplayWpsConfig;
+ } else if (mConnectingDevice.wpsPbcSupported()) {
wps.setup = WpsInfo.PBC;
} else if (mConnectingDevice.wpsDisplaySupported()) {
// We do keypad if peer does display