author     Ricardo Cerqueira <cyanogenmod@cerqueira.org>  2013-02-13 14:24:59 +0000
committer  Ricardo Cerqueira <cyanogenmod@cerqueira.org>  2013-02-15 00:30:09 +0000
commit     4815d6e5a658bb1b301f7724c8e8fff6bc764bba (patch)
tree       66f6e6484d239545197d73bcca8ad2505246c981 /media
parent     eaa7d1835c8691dd656fea99ae3d61b039ebe1b7 (diff)
parent     763ef60466ac752a3031719fb86b08486c9946b1 (diff)
Merge commit 'android-4.2.2_r1' into mr1.1-staging

Conflicts:
    core/java/android/os/Trace.java
    core/java/android/widget/Toast.java
    core/res/res/values-cs/strings.xml
    core/res/res/values-el/strings.xml
    core/res/res/values-iw/strings.xml
    core/res/res/values/config.xml
    core/res/res/values/symbols.xml
    media/java/android/media/AudioService.java
    packages/SystemUI/res/values-sv/strings.xml
    packages/SystemUI/src/com/android/systemui/statusbar/phone/NotificationPanelView.java
    packages/SystemUI/src/com/android/systemui/statusbar/policy/BatteryController.java
    packages/SystemUI/src/com/android/systemui/usb/StorageNotification.java
    policy/src/com/android/internal/policy/impl/keyguard/KeyguardHostView.java
    policy/src/com/android/internal/policy/impl/keyguard/KeyguardMessageArea.java
    policy/src/com/android/internal/policy/impl/keyguard/KeyguardViewManager.java
    policy/src/com/android/internal/policy/impl/keyguard/KeyguardViewMediator.java
    services/java/com/android/server/NotificationManagerService.java
    services/java/com/android/server/power/ElectronBeam.java

Change-Id: I60b8ddf20a1d7bcf9dc7b1a4ed841aaa4d953294
Diffstat (limited to 'media')
-rwxr-xr-x  media/java/android/media/AudioService.java | 210
-rw-r--r--  media/java/android/media/MediaFormat.java | 8
-rw-r--r--  media/java/android/media/MediaRouter.java | 18
-rwxr-xr-x  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java | 58
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java | 189
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java | 341
6 files changed, 511 insertions, 313 deletions
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 1b0675a..5c4b315 100755
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -163,6 +163,7 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
private static final int MSG_BROADCAST_AUDIO_BECOMING_NOISY = 25;
private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME = 26;
private static final int MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED = 27;
+ private static final int MSG_PERSIST_SAFE_VOLUME_STATE = 28;
// flags for MSG_PERSIST_VOLUME indicating if current and/or last audible volume should be
// persisted
@@ -445,6 +446,8 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
private boolean mDockAudioMediaEnabled = true;
+ private int mDockState = Intent.EXTRA_DOCK_STATE_UNDOCKED;
+
///////////////////////////////////////////////////////////////////////////
// Construction
///////////////////////////////////////////////////////////////////////////
@@ -488,6 +491,14 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
null,
0);
+ mSafeMediaVolumeState = new Integer(Settings.Global.getInt(mContentResolver,
+ Settings.Global.AUDIO_SAFE_VOLUME_STATE,
+ SAFE_MEDIA_VOLUME_NOT_CONFIGURED));
+ // The default safe volume index read here will be replaced by the actual value when
+ // the mcc is read by onConfigureSafeVolume()
+ mSafeMediaVolumeIndex = mContext.getResources().getInteger(
+ com.android.internal.R.integer.config_safe_media_volume_index) * 10;
+
readPersistedSettings();
mSettingsObserver = new SettingsObserver();
updateStreamVolumeAlias(false /*updateVolumes*/);
@@ -495,8 +506,6 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
mMediaServerOk = true;
- mSafeMediaVolumeState = new Integer(SAFE_MEDIA_VOLUME_NOT_CONFIGURED);
-
// Call setRingerModeInt() to apply correct mute
// state on streams affected by ringer mode.
mRingerModeMutedStreams = 0;
@@ -862,70 +871,72 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
// convert one UI step (+/-1) into a number of internal units on the stream alias
int step = rescaleIndex(10, streamType, streamTypeAlias);
- if ((direction == AudioManager.ADJUST_RAISE) &&
- !checkSafeMediaVolume(streamTypeAlias, aliasIndex + step, device)) {
- return;
- }
-
int index;
int oldIndex;
- flags &= ~AudioManager.FLAG_FIXED_VOLUME;
- if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
- ((device & mFixedVolumeDevices) != 0)) {
- flags |= AudioManager.FLAG_FIXED_VOLUME;
- index = mStreamStates[streamType].getMaxIndex();
+ if ((direction == AudioManager.ADJUST_RAISE) &&
+ !checkSafeMediaVolume(streamTypeAlias, aliasIndex + step, device)) {
+ index = mStreamStates[streamType].getIndex(device,
+ (streamState.muteCount() != 0) /* lastAudible */);
oldIndex = index;
} else {
- // If either the client forces allowing ringer modes for this adjustment,
- // or the stream type is one that is affected by ringer modes
- if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
- (streamTypeAlias == getMasterStreamType())) {
- int ringerMode = getRingerMode();
- // do not vibrate if already in vibrate mode
- if (ringerMode == AudioManager.RINGER_MODE_VIBRATE) {
- flags &= ~AudioManager.FLAG_VIBRATE;
- }
- // Check if the ringer mode changes with this volume adjustment. If
- // it does, it will handle adjusting the volume, so we won't below
- adjustVolume = checkForRingerModeChange(aliasIndex, direction, step);
- if ((streamTypeAlias == getMasterStreamType()) &&
- (mRingerMode == AudioManager.RINGER_MODE_SILENT)) {
- streamState.setLastAudibleIndex(0, device);
- }
- }
-
- // If stream is muted, adjust last audible index only
- oldIndex = mStreamStates[streamType].getIndex(device,
- (mStreamStates[streamType].muteCount() != 0) /* lastAudible */);
-
- if (streamState.muteCount() != 0) {
- if (adjustVolume) {
- // Post a persist volume msg
- // no need to persist volume on all streams sharing the same alias
- streamState.adjustLastAudibleIndex(direction * step, device);
- sendMsg(mAudioHandler,
- MSG_PERSIST_VOLUME,
- SENDMSG_QUEUE,
- PERSIST_LAST_AUDIBLE,
- device,
- streamState,
- PERSIST_DELAY);
- }
- index = mStreamStates[streamType].getIndex(device, true /* lastAudible */);
+ flags &= ~AudioManager.FLAG_FIXED_VOLUME;
+ if ((streamTypeAlias == AudioSystem.STREAM_MUSIC) &&
+ ((device & mFixedVolumeDevices) != 0)) {
+ flags |= AudioManager.FLAG_FIXED_VOLUME;
+ index = mStreamStates[streamType].getMaxIndex();
+ oldIndex = index;
} else {
- if (adjustVolume && streamState.adjustIndex(direction * step, device)) {
- // Post message to set system volume (it in turn will post a message
- // to persist). Do not change volume if stream is muted.
- sendMsg(mAudioHandler,
- MSG_SET_DEVICE_VOLUME,
- SENDMSG_QUEUE,
- device,
- 0,
- streamState,
- 0);
+ // If either the client forces allowing ringer modes for this adjustment,
+ // or the stream type is one that is affected by ringer modes
+ if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
+ (streamTypeAlias == getMasterStreamType())) {
+ int ringerMode = getRingerMode();
+ // do not vibrate if already in vibrate mode
+ if (ringerMode == AudioManager.RINGER_MODE_VIBRATE) {
+ flags &= ~AudioManager.FLAG_VIBRATE;
+ }
+ // Check if the ringer mode changes with this volume adjustment. If
+ // it does, it will handle adjusting the volume, so we won't below
+ adjustVolume = checkForRingerModeChange(aliasIndex, direction, step);
+ if ((streamTypeAlias == getMasterStreamType()) &&
+ (mRingerMode == AudioManager.RINGER_MODE_SILENT)) {
+ streamState.setLastAudibleIndex(0, device);
+ }
+ }
+
+ // If stream is muted, adjust last audible index only
+ oldIndex = mStreamStates[streamType].getIndex(device,
+ (mStreamStates[streamType].muteCount() != 0) /* lastAudible */);
+
+ if (streamState.muteCount() != 0) {
+ if (adjustVolume) {
+ // Post a persist volume msg
+ // no need to persist volume on all streams sharing the same alias
+ streamState.adjustLastAudibleIndex(direction * step, device);
+ sendMsg(mAudioHandler,
+ MSG_PERSIST_VOLUME,
+ SENDMSG_QUEUE,
+ PERSIST_LAST_AUDIBLE,
+ device,
+ streamState,
+ PERSIST_DELAY);
+ }
+ index = mStreamStates[streamType].getIndex(device, true /* lastAudible */);
+ } else {
+ if (adjustVolume && streamState.adjustIndex(direction * step, device)) {
+ // Post message to set system volume (it in turn will post a message
+ // to persist). Do not change volume if stream is muted.
+ sendMsg(mAudioHandler,
+ MSG_SET_DEVICE_VOLUME,
+ SENDMSG_QUEUE,
+ device,
+ 0,
+ streamState,
+ 0);
+ }
+ index = mStreamStates[streamType].getIndex(device, false /* lastAudible */);
}
- index = mStreamStates[streamType].getIndex(device, false /* lastAudible */);
}
}
sendVolumeUpdate(streamType, oldIndex, index, flags);
@@ -2353,13 +2364,33 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
if ((mMcc != mcc) || ((mMcc == 0) && force)) {
mSafeMediaVolumeIndex = mContext.getResources().getInteger(
com.android.internal.R.integer.config_safe_media_volume_index) * 10;
- if (mSafeVolumeEnabled) {
- mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_ACTIVE;
- enforceSafeMediaVolume();
+ boolean safeMediaVolumeEnabled = mContext.getResources().getBoolean(
+ com.android.internal.R.bool.config_safe_media_volume_enabled);
+
+ // The persisted state is either "disabled" or "active": this is the state applied
+ // next time we boot and cannot be "inactive"
+ int persistedState;
+ if (safeMediaVolumeEnabled) {
+ persistedState = SAFE_MEDIA_VOLUME_ACTIVE;
+ // The state can already be "inactive" here if the user has forced it before
+ // the 30 seconds timeout for forced configuration. In this case we don't reset
+ // it to "active".
+ if (mSafeMediaVolumeState != SAFE_MEDIA_VOLUME_INACTIVE) {
+ mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_ACTIVE;
+ enforceSafeMediaVolume();
+ }
} else {
+ persistedState = SAFE_MEDIA_VOLUME_DISABLED;
mSafeMediaVolumeState = SAFE_MEDIA_VOLUME_DISABLED;
}
mMcc = mcc;
+ sendMsg(mAudioHandler,
+ MSG_PERSIST_SAFE_VOLUME_STATE,
+ SENDMSG_QUEUE,
+ persistedState,
+ 0,
+ null,
+ 0);
}
}
}
@@ -3278,6 +3309,12 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
AudioSystem.setForceUse(usage, config);
}
+ private void onPersistSafeVolumeState(int state) {
+ Settings.Global.putInt(mContentResolver,
+ Settings.Global.AUDIO_SAFE_VOLUME_STATE,
+ state);
+ }
+
@Override
public void handleMessage(Message msg) {
@@ -3380,6 +3417,13 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
mBluetoothA2dpEnabled ?
AudioSystem.FORCE_NONE : AudioSystem.FORCE_NO_BT_A2DP);
}
+
+ synchronized (mSettingsLock) {
+ AudioSystem.setForceUse(AudioSystem.FOR_DOCK,
+ mDockAudioMediaEnabled ?
+ AudioSystem.FORCE_ANALOG_DOCK : AudioSystem.FORCE_NONE);
+ }
+
// indicate the end of reconfiguration phase to audio HAL
AudioSystem.setParameters("restarting=false");
break;
@@ -3480,6 +3524,9 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
case MSG_CONFIGURE_SAFE_MEDIA_VOLUME:
onConfigureSafeVolume((msg.what == MSG_CONFIGURE_SAFE_MEDIA_VOLUME_FORCED));
break;
+ case MSG_PERSIST_SAFE_VOLUME_STATE:
+ onPersistSafeVolumeState(msg.arg1);
+ break;
}
}
}
@@ -3830,13 +3877,7 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
config = AudioSystem.FORCE_BT_CAR_DOCK;
break;
case Intent.EXTRA_DOCK_STATE_LE_DESK:
- synchronized (mSettingsLock) {
- if (mDockAudioMediaEnabled) {
- config = AudioSystem.FORCE_ANALOG_DOCK;
- } else {
- config = AudioSystem.FORCE_NONE;
- }
- }
+ config = AudioSystem.FORCE_ANALOG_DOCK;
break;
case Intent.EXTRA_DOCK_STATE_HE_DESK:
config = AudioSystem.FORCE_DIGITAL_DOCK;
@@ -3846,7 +3887,14 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
config = AudioSystem.FORCE_NONE;
}
- AudioSystem.setForceUse(AudioSystem.FOR_DOCK, config);
+ // Low end docks have a menu to enable or disable audio
+ // (see mDockAudioMediaEnabled)
+ if (!((dockState == Intent.EXTRA_DOCK_STATE_LE_DESK) ||
+ ((dockState == Intent.EXTRA_DOCK_STATE_UNDOCKED) &&
+ (mDockState == Intent.EXTRA_DOCK_STATE_LE_DESK)))) {
+ AudioSystem.setForceUse(AudioSystem.FOR_DOCK, config);
+ }
+ mDockState = dockState;
} else if (action.equals(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED) && noDelayInATwoDP) {
state = intent.getIntExtra(BluetoothProfile.EXTRA_STATE,
BluetoothProfile.STATE_DISCONNECTED);
@@ -5317,18 +5365,23 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
// top of the stack for the media button event receivers : simply using the top of the
// stack would make the entry disappear from the RemoteControlDisplay in conditions such as
// notifications playing during music playback.
- // crawl the AudioFocus stack until an entry is found with the following characteristics:
+ // Crawl the AudioFocus stack from the top until an entry is found with the following
+ // characteristics:
// - focus gain on STREAM_MUSIC stream
// - non-transient focus gain on a stream other than music
FocusStackEntry af = null;
- Iterator<FocusStackEntry> stackIterator = mFocusStack.iterator();
- while(stackIterator.hasNext()) {
- FocusStackEntry fse = (FocusStackEntry)stackIterator.next();
- if ((fse.mStreamType == AudioManager.STREAM_MUSIC)
- || (fse.mFocusChangeType == AudioManager.AUDIOFOCUS_GAIN)) {
- af = fse;
- break;
+ try {
+ for (int index = mFocusStack.size()-1; index >= 0; index--) {
+ FocusStackEntry fse = mFocusStack.elementAt(index);
+ if ((fse.mStreamType == AudioManager.STREAM_MUSIC)
+ || (fse.mFocusChangeType == AudioManager.AUDIOFOCUS_GAIN)) {
+ af = fse;
+ break;
+ }
}
+ } catch (ArrayIndexOutOfBoundsException e) {
+ Log.e(TAG, "Wrong index accessing audio focus stack when updating RCD: " + e);
+ af = null;
}
if (af == null) {
clearRemoteControlDisplay_syncAfRcs();
@@ -5349,6 +5402,7 @@ public class AudioService extends IAudioService.Stub implements OnFinished {
clearRemoteControlDisplay_syncAfRcs();
return;
}
+
// refresh conditions were verified: update the remote controls
// ok to call: synchronized mAudioFocusLock then on mRCStack, mRCStack is not empty
updateRemoteControlDisplay_syncAfRcs(infoChangedFlags);
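
The safe-volume changes above persist state asynchronously: onConfigureSafeVolume() queues MSG_PERSIST_SAFE_VOLUME_STATE and the handler later calls onPersistSafeVolumeState(), which writes Settings.Global.AUDIO_SAFE_VOLUME_STATE. A minimal standalone sketch of that handler-deferred persistence pattern, using hypothetical names (PersistHandler, MSG_PERSIST, writeState) rather than the AudioService ones:

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

class PersistHandler extends Handler {
    static final int MSG_PERSIST = 1;

    PersistHandler(Looper looper) {
        super(looper);
    }

    // Queue the write so it runs on the handler's thread instead of the caller's.
    void requestPersist(int state) {
        obtainMessage(MSG_PERSIST, state /* arg1 */, 0 /* arg2 */).sendToTarget();
    }

    @Override
    public void handleMessage(Message msg) {
        if (msg.what == MSG_PERSIST) {
            writeState(msg.arg1);
        }
    }

    private void writeState(int state) {
        // In AudioService this is Settings.Global.putInt(resolver, AUDIO_SAFE_VOLUME_STATE, state).
    }
}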
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 4414191..a2eb8d9 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -26,7 +26,7 @@ import java.util.Map;
*
* The format of the media data is specified as string/value pairs.
*
- * Keys common to all formats:
+ * Keys common to all formats, <b>all keys not marked optional are mandatory</b>:
*
* <table>
* <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
@@ -50,9 +50,9 @@ import java.util.Map;
* <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
* <tr><td>{@link #KEY_CHANNEL_COUNT}</td><td>Integer</td><td></td></tr>
* <tr><td>{@link #KEY_SAMPLE_RATE}</td><td>Integer</td><td></td></tr>
- * <tr><td>{@link #KEY_IS_ADTS}</td><td>Integer</td><td>optional, if content is AAC audio, setting this key to 1 indicates that each audio frame is prefixed by the ADTS header.</td></tr>
+ * <tr><td>{@link #KEY_IS_ADTS}</td><td>Integer</td><td>optional, if <em>decoding</em> AAC audio content, setting this key to 1 indicates that each audio frame is prefixed by the ADTS header.</td></tr>
* <tr><td>{@link #KEY_AAC_PROFILE}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is AAC audio, specifies the desired profile.</td></tr>
- * <tr><td>{@link #KEY_CHANNEL_MASK}</td><td>Integer</td><td>A mask of audio channel assignments</td></tr>
+ * <tr><td>{@link #KEY_CHANNEL_MASK}</td><td>Integer</td><td>optional, a mask of audio channel assignments</td></tr>
* <tr><td>{@link #KEY_FLAC_COMPRESSION_LEVEL}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is FLAC audio, specifies the desired compression level.</td></tr>
* </table>
*
@@ -140,6 +140,8 @@ public final class MediaFormat {
* A key mapping to a value of 1 if the content is AAC audio and
* audio frames are prefixed with an ADTS header.
* The associated value is an integer (0 or 1).
+ * This key is only supported when _decoding_ content, it cannot
+ * be used to configure an encoder to emit ADTS output.
*/
public static final String KEY_IS_ADTS = "is-adts";
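
The KEY_IS_ADTS documentation above makes the key decode-only. A hedged sketch of the configuration it describes, assuming an AAC-in-ADTS stream whose sample rate and channel count are already known (the 44100/2 values are placeholders):

import android.media.MediaCodec;
import android.media.MediaFormat;

// Decoder-side configuration for AAC carried in ADTS frames.
MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 2);
format.setInteger(MediaFormat.KEY_IS_ADTS, 1); // each queued input buffer starts with an ADTS header

MediaCodec decoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
decoder.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
decoder.start();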
diff --git a/media/java/android/media/MediaRouter.java b/media/java/android/media/MediaRouter.java
index 2a5a16e..8b489b1 100644
--- a/media/java/android/media/MediaRouter.java
+++ b/media/java/android/media/MediaRouter.java
@@ -313,13 +313,25 @@ public class MediaRouter {
}
/**
- * Return the currently selected route for the given types
+ * Return the currently selected route for any of the given types
*
* @param type route types
* @return the selected route
*/
public RouteInfo getSelectedRoute(int type) {
- return sStatic.mSelectedRoute;
+ if (sStatic.mSelectedRoute != null &&
+ (sStatic.mSelectedRoute.mSupportedTypes & type) != 0) {
+ // If the selected route supports any of the types supplied, it's still considered
+ // 'selected' for that type.
+ return sStatic.mSelectedRoute;
+ } else if (type == ROUTE_TYPE_USER) {
+ // The caller specifically asked for a user route and the currently selected route
+ // doesn't qualify.
+ return null;
+ }
+ // If the above didn't match and we're not specifically asking for a user route,
+ // consider the default selected.
+ return sStatic.mDefaultAudioVideo;
}
/**
@@ -862,7 +874,7 @@ public class MediaRouter {
private static WifiDisplay findMatchingDisplay(WifiDisplay d, WifiDisplay[] displays) {
for (int i = 0; i < displays.length; i++) {
final WifiDisplay other = displays[i];
- if (d.getDeviceAddress().equals(other.getDeviceAddress())) {
+ if (d.hasSameAddress(other)) {
return other;
}
}
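
With the getSelectedRoute() change above, a caller asking for a type the current selection does not support gets the default route back (or null for ROUTE_TYPE_USER) instead of an unrelated selection. A hedged usage sketch, assuming an available Context and a TAG constant:

import android.content.Context;
import android.media.MediaRouter;
import android.util.Log;

MediaRouter router = (MediaRouter) context.getSystemService(Context.MEDIA_ROUTER_SERVICE);
MediaRouter.RouteInfo route = router.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO);
if (route != null) {
    // After the patch this is either a route that actually supports live audio
    // or the default audio/video route, never a mismatched user route.
    Log.v(TAG, "Selected live-audio route: " + route.getName());
}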
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
index 95e7b5e..5c74552 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaRecorderStressTestRunner.java
@@ -33,18 +33,16 @@ public class MediaRecorderStressTestRunner extends InstrumentationTestRunner {
// the test must be supported by the corresponding camera.
public static int mCameraId = 0;
public static int mProfileQuality = CamcorderProfile.QUALITY_HIGH;
- public static CamcorderProfile profile =
- CamcorderProfile.get(mCameraId, mProfileQuality);
-
- public static int mIterations = 100;
+ public static CamcorderProfile profile = CamcorderProfile.get(mCameraId, mProfileQuality);
+ public static int mIterations = 15;
public static int mVideoEncoder = profile.videoCodec;
- public static int mAudioEncdoer = profile.audioCodec;
+ public static int mAudioEncoder = profile.audioCodec;
public static int mFrameRate = profile.videoFrameRate;
public static int mVideoWidth = profile.videoFrameWidth;
public static int mVideoHeight = profile.videoFrameHeight;
public static int mBitRate = profile.videoBitRate;
public static boolean mRemoveVideo = true;
- public static int mDuration = 10 * 1000; // 10 seconds
+ public static int mDuration = 60 * 1000; // 60 seconds
public static int mTimeLapseDuration = 180 * 1000; // 3 minutes
public static double mCaptureRate = 0.5; // 2 sec timelapse interval
@@ -64,41 +62,41 @@ public class MediaRecorderStressTestRunner extends InstrumentationTestRunner {
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
String iterations = (String) icicle.get("iterations");
- String video_encoder = (String) icicle.get("video_encoder");
- String audio_encoder = (String) icicle.get("audio_encoder");
- String frame_rate = (String) icicle.get("frame_rate");
- String video_width = (String) icicle.get("video_width");
- String video_height = (String) icicle.get("video_height");
- String bit_rate = (String) icicle.get("bit_rate");
- String record_duration = (String) icicle.get("record_duration");
- String remove_videos = (String) icicle.get("remove_videos");
+ String videoEncoder = (String) icicle.get("video_encoder");
+ String audioEncoder = (String) icicle.get("audio_encoder");
+ String frameRate = (String) icicle.get("frame_rate");
+ String videoWidth = (String) icicle.get("video_width");
+ String videoHeight = (String) icicle.get("video_height");
+ String bitRate = (String) icicle.get("bit_rate");
+ String recordDuration = (String) icicle.get("record_duration");
+ String removeVideos = (String) icicle.get("remove_videos");
if (iterations != null ) {
mIterations = Integer.parseInt(iterations);
}
- if ( video_encoder != null) {
- mVideoEncoder = Integer.parseInt(video_encoder);
+ if (videoEncoder != null) {
+ mVideoEncoder = Integer.parseInt(videoEncoder);
}
- if ( audio_encoder != null) {
- mAudioEncdoer = Integer.parseInt(audio_encoder);
+ if (audioEncoder != null) {
+ mAudioEncoder = Integer.parseInt(audioEncoder);
}
- if (frame_rate != null) {
- mFrameRate = Integer.parseInt(frame_rate);
+ if (frameRate != null) {
+ mFrameRate = Integer.parseInt(frameRate);
}
- if (video_width != null) {
- mVideoWidth = Integer.parseInt(video_width);
+ if (videoWidth != null) {
+ mVideoWidth = Integer.parseInt(videoWidth);
}
- if (video_height != null) {
- mVideoHeight = Integer.parseInt(video_height);
+ if (videoHeight != null) {
+ mVideoHeight = Integer.parseInt(videoHeight);
}
- if (bit_rate != null) {
- mBitRate = Integer.parseInt(bit_rate);
+ if (bitRate != null) {
+ mBitRate = Integer.parseInt(bitRate);
}
- if (record_duration != null) {
- mDuration = Integer.parseInt(record_duration);
+ if (recordDuration != null) {
+ mDuration = Integer.parseInt(recordDuration);
}
- if (remove_videos != null) {
- if (remove_videos.compareTo("true") == 0) {
+ if (removeVideos != null) {
+ if (removeVideos.compareTo("true") == 0) {
mRemoveVideo = true;
} else {
mRemoveVideo = false;
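
Note that the runner still reads its overrides from the instrumentation bundle under the original snake_case keys ("iterations", "video_encoder", "frame_rate", and so on); only the local variable names were switched to camelCase. A hypothetical invocation overriding a few of them (the component name is an assumption, not taken from this patch): adb shell am instrument -w -e iterations 5 -e video_width 1280 -e video_height 720 com.android.mediaframeworktest/.MediaRecorderStressTestRunner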
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
index ab9e36c..ed1d8fc 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/CameraStressTest.java
@@ -28,6 +28,7 @@ import java.io.IOException;
import java.io.Writer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
+import java.util.List;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
@@ -44,7 +45,7 @@ import com.android.mediaframeworktest.CameraStressTestRunner;
import junit.framework.Assert;
/**
- * Junit / Instrumentation test case for the camera zoom api
+ * Junit / Instrumentation test case for the camera zoom and scene mode APIs
*
* adb shell am instrument
* -e class com.android.mediaframeworktest.stress.CameraStressTest
@@ -54,18 +55,22 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
private String TAG = "CameraStressTest";
private Camera mCamera;
+ private static final int CAMERA_ID = 0;
private static final int NUMBER_OF_ZOOM_LOOPS = 100;
+ private static final int NUMBER_OF_SCENE_MODE_LOOPS = 10;
private static final long WAIT_GENERIC = 3 * 1000; // 3 seconds
private static final long WAIT_TIMEOUT = 10 * 1000; // 10 seconds
private static final long WAIT_ZOOM_ANIMATION = 5 * 1000; // 5 seconds
- private static final String CAMERA_STRESS_OUTPUT =
- "/sdcard/cameraStressOutput.txt";
- private static final int CAMERA_ID = 0;
+ private static final String CAMERA_STRESS_IMAGES_DIRECTORY = "cameraStressImages";
+ private static final String CAMERA_STRESS_IMAGES_PREFIX = "camera-stress-test";
+ private static final String CAMERA_STRESS_OUTPUT = "cameraStressOutput.txt";
private final CameraErrorCallback mCameraErrorCallback = new CameraErrorCallback();
private Thread mLooperThread;
private Handler mHandler;
+ private Writer mOutput;
+
public CameraStressTest() {
super("com.android.mediaframeworktest", MediaFrameworkTest.class);
}
@@ -89,6 +94,20 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
}
getActivity();
super.setUp();
+
+ File sdcard = Environment.getExternalStorageDirectory();
+
+ // Create the test images directory if it doesn't exist
+ File stressImagesDirectory = new File(String.format("%s/%s", sdcard,
+ CAMERA_STRESS_IMAGES_DIRECTORY));
+ if (!stressImagesDirectory.exists()) {
+ stressImagesDirectory.mkdir();
+ }
+
+ // Start writing output file
+ File stressOutFile = new File(String.format("%s/%s",sdcard, CAMERA_STRESS_OUTPUT));
+ mOutput = new BufferedWriter(new FileWriter(stressOutFile, true));
+ mOutput.write(this.getName() + ":\n");
}
@Override
@@ -105,6 +124,9 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
mLooperThread = null;
}
+ mOutput.write("\n\n");
+ mOutput.close();
+
super.tearDown();
}
@@ -127,9 +149,7 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
private final class CameraErrorCallback implements android.hardware.Camera.ErrorCallback {
public void onError(int error, android.hardware.Camera camera) {
- if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
- assertTrue("Camera test mediaserver died", false);
- }
+ fail(String.format("Camera error, code: %d", error));
}
}
@@ -154,49 +174,76 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
try {
Log.v(TAG, "JPEG picture taken");
- fos = new FileOutputStream(String.format("%s/zoom-test-%d.jpg",
- Environment.getExternalStorageDirectory(), System.currentTimeMillis()));
+ fos = new FileOutputStream(String.format("%s/%s/%s-%d.jpg",
+ Environment.getExternalStorageDirectory(), CAMERA_STRESS_IMAGES_DIRECTORY,
+ CAMERA_STRESS_IMAGES_PREFIX, System.currentTimeMillis()));
fos.write(data);
- }
- catch (FileNotFoundException e) {
- Log.v(TAG, "File not found: " + e.toString());
- }
- catch (IOException e) {
- Log.v(TAG, "Error accessing file: " + e.toString());
- }
- finally {
+ } catch (FileNotFoundException e) {
+ Log.e(TAG, "File not found: " + e.toString());
+ } catch (IOException e) {
+ Log.e(TAG, "Error accessing file: " + e.toString());
+ } finally {
try {
if (fos != null) {
fos.close();
}
- }
- catch (IOException e) {
- Log.v(TAG, "Error closing file: " + e.toString());
+ } catch (IOException e) {
+ Log.e(TAG, "Error closing file: " + e.toString());
}
}
}
};
// Helper method for cleaning up pics taken during testStressCameraZoom
- private void cleanupZoomImages() {
+ private void cleanupStressTestImages() {
try {
- File sdcard = Environment.getExternalStorageDirectory();
+ File stressImagesDirectory = new File(String.format("%s/%s",
+ Environment.getExternalStorageDirectory(), CAMERA_STRESS_IMAGES_DIRECTORY));
File[] zoomImages = null;
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir, String name) {
- return name.startsWith("zoom-test-");
+ return name.startsWith(CAMERA_STRESS_IMAGES_PREFIX);
}
};
- zoomImages = sdcard.listFiles(filter);
+ zoomImages = stressImagesDirectory.listFiles(filter);
for (File f : zoomImages) {
f.delete();
}
+ } catch (SecurityException e) {
+ Log.e(TAG, "Security manager access violation: " + e.toString());
}
- catch (SecurityException e) {
- Log.v(TAG, "Security manager access violation: " + e.toString());
+ }
+
+ // Helper method for starting up the camera preview
+ private void startCameraPreview(SurfaceHolder surfaceHolder) {
+ try {
+ mCamera.setErrorCallback(mCameraErrorCallback);
+ mCamera.setPreviewDisplay(surfaceHolder);
+ mCamera.startPreview();
+ Thread.sleep(WAIT_GENERIC);
+ } catch (IOException e) {
+ Log.e(TAG, "Error setting preview display: " + e.toString());
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Error waiting for preview to come up: " + e.toString());
+ } catch (Exception e) {
+ Log.e(TAG, "Error starting up camera preview: " + e.toString());
+ }
+ }
+
+ // Helper method for taking a photo
+ private void capturePhoto() {
+ try {
+ mCamera.takePicture(shutterCallback, rawCallback, jpegCallback);
+ Thread.sleep(WAIT_GENERIC);
+ mCamera.stopPreview();
+ mCamera.release();
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Error waiting for photo to be taken: " + e.toString());
+ } catch (Exception e) {
+ Log.e(TAG, "Error capturing photo: " + e.toString());
}
}
@@ -205,14 +252,11 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
public void testStressCameraZoom() throws Exception {
SurfaceHolder mSurfaceHolder;
mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- File stressOutFile = new File(CAMERA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(new FileWriter(stressOutFile, true));
- output.write("Camera zoom stress:\n");
- output.write("Total number of loops: " + NUMBER_OF_ZOOM_LOOPS + "\n");
+ mOutput.write("Total number of loops: " + NUMBER_OF_ZOOM_LOOPS + "\n");
try {
Log.v(TAG, "Start preview");
- output.write("No of loop: ");
+ mOutput.write("No of loop: ");
mCamera = Camera.open(CAMERA_ID);
Camera.Parameters params = mCamera.getParameters();
@@ -220,9 +264,8 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
if (!params.isSmoothZoomSupported() && !params.isZoomSupported()) {
Log.v(TAG, "Device camera does not support zoom");
- assertTrue("Camera zoom stress test", false);
- }
- else {
+ fail("Camera zoom stress test failed");
+ } else {
Log.v(TAG, "Device camera does support zoom");
int nextZoomLevel = 0;
@@ -235,11 +278,7 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
}
});
- mCamera.setErrorCallback(mCameraErrorCallback);
- mCamera.setPreviewDisplay(mSurfaceHolder);
- mCamera.startPreview();
- Thread.sleep(WAIT_GENERIC);
-
+ startCameraPreview(mSurfaceHolder);
params = mCamera.getParameters();
int currentZoomLevel = params.getZoom();
@@ -250,8 +289,7 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
if (params.isSmoothZoomSupported()) {
mCamera.startSmoothZoom(nextZoomLevel);
- }
- else {
+ } else {
params.setZoom(nextZoomLevel);
mCamera.setParameters(params);
}
@@ -259,23 +297,66 @@ public class CameraStressTest extends ActivityInstrumentationTestCase2<MediaFram
// sleep allows for zoom animation to finish
Thread.sleep(WAIT_ZOOM_ANIMATION);
+ capturePhoto();
- // take picture
- mCamera.takePicture(shutterCallback, rawCallback, jpegCallback);
- Thread.sleep(WAIT_GENERIC);
- mCamera.stopPreview();
- mCamera.release();
- output.write(" ," + i);
+ if (i == 0) {
+ mOutput.write(Integer.toString(i));
+ } else {
+ mOutput.write(", " + i);
+ }
}
}
-
- cleanupZoomImages();
+ cleanupStressTestImages();
+ } catch (Exception e) {
+ Log.e(TAG, e.toString());
+ fail("Camera zoom stress test Exception");
}
- catch (Exception e) {
- assertTrue("Camera zoom stress test Exception", false);
- Log.v(TAG, e.toString());
+ }
+
+ // Test case for stressing the camera scene mode feature
+ @LargeTest
+ public void testStressCameraSceneModes() throws Exception {
+ SurfaceHolder mSurfaceHolder;
+ mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
+
+ try {
+ mCamera = Camera.open(CAMERA_ID);
+ Camera.Parameters params = mCamera.getParameters();
+ mCamera.release();
+ List<String> supportedSceneModes = params.getSupportedSceneModes();
+ assertNotNull("No scene modes supported", supportedSceneModes);
+
+ mOutput.write("Total number of loops: " +
+ (NUMBER_OF_SCENE_MODE_LOOPS * supportedSceneModes.size()) + "\n");
+ Log.v(TAG, "Start preview");
+ mOutput.write("No of loop: ");
+
+ for (int i = 0; i < supportedSceneModes.size(); i++) {
+ for (int j = 0; j < NUMBER_OF_SCENE_MODE_LOOPS; j++) {
+ runOnLooper(new Runnable() {
+ @Override
+ public void run() {
+ mCamera = Camera.open(CAMERA_ID);
+ }
+ });
+
+ startCameraPreview(mSurfaceHolder);
+ Log.v(TAG, "Setting mode to " + supportedSceneModes.get(i));
+ params.setSceneMode(supportedSceneModes.get(i));
+ mCamera.setParameters(params);
+ capturePhoto();
+
+ if ((i == 0) && (j == 0)) {
+ mOutput.write(Integer.toString(j + i * NUMBER_OF_SCENE_MODE_LOOPS));
+ } else {
+ mOutput.write(", " + (j + i * NUMBER_OF_SCENE_MODE_LOOPS));
+ }
+ }
+ }
+ cleanupStressTestImages();
+ } catch (Exception e) {
+ Log.e(TAG, e.toString());
+ fail("Camera scene mode test Exception");
}
- output.write("\n\n");
- output.close();
}
}
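
The new testStressCameraSceneModes() cycles through every scene mode the device reports. A minimal standalone sketch of the Camera API calls it relies on, without the preview surface, looper plumbing, or picture callbacks the real test also needs:

import java.util.List;
import android.hardware.Camera;

Camera camera = Camera.open(0 /* camera id */);
Camera.Parameters params = camera.getParameters();
List<String> sceneModes = params.getSupportedSceneModes(); // may be null if unsupported
if (sceneModes != null) {
    for (String mode : sceneModes) {
        params.setSceneMode(mode);   // e.g. "auto", "night", "sports"
        camera.setParameters(params);
    }
}
camera.release();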
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java
index 6995c60..6eb9891 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java
@@ -31,6 +31,7 @@ import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
+import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.test.ActivityInstrumentationTestCase2;
@@ -48,26 +49,26 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
private MediaRecorder mRecorder;
private Camera mCamera;
+ private static final int CAMERA_ID = 0;
private static final int NUMBER_OF_CAMERA_STRESS_LOOPS = 100;
private static final int NUMBER_OF_RECORDER_STRESS_LOOPS = 100;
private static final int NUMBER_OF_RECORDERANDPLAY_STRESS_LOOPS = 50;
private static final int NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER = 200;
private static final int NUMBER_OF_TIME_LAPSE_LOOPS = 25;
private static final int TIME_LAPSE_PLAYBACK_WAIT_TIME = 5* 1000; // 5 seconds
+ private static final int USE_TEST_RUNNER_PROFILE = -1;
+ private static final long WAIT_TIMEOUT = 10 * 1000; // 10 seconds
private static final long WAIT_TIME_CAMERA_TEST = 3 * 1000; // 3 seconds
private static final long WAIT_TIME_RECORDER_TEST = 6 * 1000; // 6 seconds
- private static final String OUTPUT_FILE = "/sdcard/temp";
private static final String OUTPUT_FILE_EXT = ".3gp";
- private static final String MEDIA_STRESS_OUTPUT =
- "/sdcard/mediaStressOutput.txt";
- private static final int CAMERA_ID = 0;
+ private static final String MEDIA_STRESS_OUTPUT = "mediaStressOutput.txt";
private final CameraErrorCallback mCameraErrorCallback = new CameraErrorCallback();
private final RecorderErrorCallback mRecorderErrorCallback = new RecorderErrorCallback();
- private final static int WAIT_TIMEOUT = 10 * 1000; // 10 seconds
- private Thread mLooperThread;
private Handler mHandler;
+ private Thread mLooperThread;
+ private Writer mOutput;
public MediaRecorderStressTest() {
super("com.android.mediaframeworktest", MediaFrameworkTest.class);
@@ -95,6 +96,11 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
Thread.sleep(2000);
getActivity();
super.setUp();
+
+ File stressOutFile = new File(String.format("%s/%s",
+ Environment.getExternalStorageDirectory(), MEDIA_STRESS_OUTPUT));
+ mOutput = new BufferedWriter(new FileWriter(stressOutFile, true));
+ mOutput.write(this.getName() + "\n");
}
@Override
@@ -110,7 +116,8 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
}
mLooperThread = null;
}
-
+ mOutput.write("\n\n");
+ mOutput.close();
super.tearDown();
}
@@ -133,16 +140,13 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
private final class CameraErrorCallback implements android.hardware.Camera.ErrorCallback {
public void onError(int error, android.hardware.Camera camera) {
- if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
- assertTrue("Camera test mediaserver died", false);
- }
+ fail(String.format("Camera error, code: %d", error));
}
}
private final class RecorderErrorCallback implements MediaRecorder.OnErrorListener {
public void onError(MediaRecorder mr, int what, int extra) {
- // fail the test case no matter what error come up
- assertTrue("mediaRecorder error", false);
+ fail(String.format("Media recorder error, code: %d\textra: %d", what, extra));
}
}
@@ -151,14 +155,11 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
public void testStressCamera() throws Exception {
SurfaceHolder mSurfaceHolder;
mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- File stressOutFile = new File(MEDIA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(new FileWriter(stressOutFile, true));
- output.write("Camera start preview stress:\n");
- output.write("Total number of loops:" +
- NUMBER_OF_CAMERA_STRESS_LOOPS + "\n");
+ Log.v(TAG, "Camera start preview stress test");
+ mOutput.write("Total number of loops:" + NUMBER_OF_CAMERA_STRESS_LOOPS + "\n");
try {
Log.v(TAG, "Start preview");
- output.write("No of loop: ");
+ mOutput.write("No of loop: ");
for (int i = 0; i< NUMBER_OF_CAMERA_STRESS_LOOPS; i++) {
runOnLooper(new Runnable() {
@@ -173,29 +174,27 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
Thread.sleep(WAIT_TIME_CAMERA_TEST);
mCamera.stopPreview();
mCamera.release();
- output.write(" ," + i);
+ if (i == 0) {
+ mOutput.write(i + 1);
+ } else {
+ mOutput.write(String.format(", %d", (i + 1)));
+ }
}
} catch (Exception e) {
- assertTrue("CameraStressTest", false);
- Log.v(TAG, e.toString());
+ Log.e(TAG, e.toString());
+ fail("Camera startup preview stress test");
}
- output.write("\n\n");
- output.close();
}
//Test case for stressing the camera preview.
@LargeTest
public void testStressRecorder() throws Exception {
- String filename;
SurfaceHolder mSurfaceHolder;
mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- File stressOutFile = new File(MEDIA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(new FileWriter(stressOutFile, true));
- output.write("H263 video record- reset after prepare Stress test\n");
- output.write("Total number of loops:" +
- NUMBER_OF_RECORDER_STRESS_LOOPS + "\n");
+ Log.v(TAG, "H263 video record: reset after prepare Stress test");
+ mOutput.write("Total number of loops:" + NUMBER_OF_RECORDER_STRESS_LOOPS + "\n");
try {
- output.write("No of loop: ");
+ mOutput.write("No of loop: ");
Log.v(TAG, "Start preview");
for (int i = 0; i < NUMBER_OF_RECORDER_STRESS_LOOPS; i++) {
runOnLooper(new Runnable() {
@@ -205,12 +204,15 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
}
});
Log.v(TAG, "counter = " + i);
- filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT;
- Log.v(TAG, filename);
+ String fileName = String.format("%s/temp%d%s",
+ Environment.getExternalStorageDirectory(),
+ i, OUTPUT_FILE_EXT);
+
+ Log.v(TAG, fileName);
mRecorder.setOnErrorListener(mRecorderErrorCallback);
mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
- mRecorder.setOutputFile(filename);
+ mRecorder.setOutputFile(fileName);
mRecorder.setVideoFrameRate(MediaRecorderStressTestRunner.mFrameRate);
mRecorder.setVideoSize(176,144);
Log.v(TAG, "setEncoder");
@@ -224,30 +226,29 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
Thread.sleep(WAIT_TIME_RECORDER_TEST);
mRecorder.reset();
mRecorder.release();
- output.write(", " + i);
+ if (i == 0) {
+ mOutput.write(i + 1);
+ } else {
+ mOutput.write(String.format(", %d", (i + 1)));
+ }
}
} catch (Exception e) {
- assertTrue("Recorder Stress test", false);
- Log.v(TAG, e.toString());
+ Log.e(TAG, e.toString());
+ fail("H263 video recording stress test");
}
- output.write("\n\n");
- output.close();
}
//Stress test case for switching camera and video recorder preview.
@LargeTest
public void testStressCameraSwitchRecorder() throws Exception {
- String filename;
SurfaceHolder mSurfaceHolder;
mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- File stressOutFile = new File(MEDIA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(new FileWriter(stressOutFile, true));
- output.write("Camera and video recorder preview switching\n");
- output.write("Total number of loops:"
- + NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER + "\n");
+ Log.v(TAG, "Camera and video recorder preview switching");
+ mOutput.write("Total number of loops: " +
+ NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER + "\n");
try {
Log.v(TAG, "Start preview");
- output.write("No of loop: ");
+ mOutput.write("No of loop: ");
for (int i = 0; i < NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER; i++) {
runOnLooper(new Runnable() {
@Override
@@ -263,8 +264,10 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
mCamera.release();
mCamera = null;
Log.v(TAG, "release camera");
- filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT;
- Log.v(TAG, filename);
+ String fileName = String.format("%s/temp%d%s",
+ Environment.getExternalStorageDirectory(),
+ i, OUTPUT_FILE_EXT);
+ Log.v(TAG, fileName);
runOnLooper(new Runnable() {
@Override
public void run() {
@@ -274,7 +277,7 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
mRecorder.setOnErrorListener(mRecorderErrorCallback);
mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
- mRecorder.setOutputFile(filename);
+ mRecorder.setOutputFile(fileName);
mRecorder.setVideoFrameRate(MediaRecorderStressTestRunner.mFrameRate);
mRecorder.setVideoSize(176,144);
Log.v(TAG, "Media recorder setEncoder");
@@ -287,117 +290,167 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
Thread.sleep(WAIT_TIME_CAMERA_TEST);
mRecorder.release();
Log.v(TAG, "release video recorder");
- output.write(", " + i);
+ if (i == 0) {
+ mOutput.write(i + 1);
+ } else {
+ mOutput.write(String.format(", %d", (i + 1)));
+ }
}
} catch (Exception e) {
- assertTrue("Camer and recorder switch mode", false);
- Log.v(TAG, e.toString());
+ Log.e(TAG, e.toString());
+ fail("Camera and recorder switch mode");
}
- output.write("\n\n");
- output.close();
}
- public void validateRecordedVideo(String recorded_file) {
+ public void validateRecordedVideo(String recordedFile) {
try {
MediaPlayer mp = new MediaPlayer();
- mp.setDataSource(recorded_file);
+ mp.setDataSource(recordedFile);
mp.prepare();
int duration = mp.getDuration();
if (duration <= 0){
- assertTrue("stressRecordAndPlayback", false);
+ fail("stressRecordAndPlayback");
}
mp.release();
} catch (Exception e) {
- assertTrue("stressRecordAndPlayback", false);
+ fail("stressRecordAndPlayback");
}
}
- public void removeRecordedVideo(String filename){
- File video = new File(filename);
- Log.v(TAG, "remove recorded video " + filename);
+ public void removeRecordedVideo(String fileName){
+ File video = new File(fileName);
+ Log.v(TAG, "remove recorded video " + fileName);
video.delete();
}
- //Stress test case for record a video and play right away.
- @LargeTest
- public void testStressRecordVideoAndPlayback() throws Exception {
- int iterations = MediaRecorderStressTestRunner.mIterations;
- int video_encoder = MediaRecorderStressTestRunner.mVideoEncoder;
- int audio_encoder = MediaRecorderStressTestRunner.mAudioEncdoer;
- int frame_rate = MediaRecorderStressTestRunner.mFrameRate;
- int video_width = MediaRecorderStressTestRunner.mVideoWidth;
- int video_height = MediaRecorderStressTestRunner.mVideoHeight;
- int bit_rate = MediaRecorderStressTestRunner.mBitRate;
- boolean remove_video = MediaRecorderStressTestRunner.mRemoveVideo;
- int record_duration = MediaRecorderStressTestRunner.mDuration;
-
- String filename;
- SurfaceHolder mSurfaceHolder;
- mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- File stressOutFile = new File(MEDIA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(
- new FileWriter(stressOutFile, true));
- output.write("Video record and play back stress test:\n");
- output.write("Total number of loops:"
- + NUMBER_OF_RECORDERANDPLAY_STRESS_LOOPS + "\n");
+ // Helper method for record & playback testing with different camcorder profiles
+ private void recordVideoAndPlayback(int profile) throws Exception {
+ int iterations;
+ int recordDuration;
+ boolean removeVideo;
+
+ int videoEncoder;
+ int audioEncoder;
+ int frameRate;
+ int videoWidth;
+ int videoHeight;
+ int bitRate;
+
+ if (profile != USE_TEST_RUNNER_PROFILE) {
+ assertTrue(String.format("Camera doesn't support profile %d", profile),
+ CamcorderProfile.hasProfile(CAMERA_ID, profile));
+ CamcorderProfile camcorderProfile = CamcorderProfile.get(CAMERA_ID, profile);
+ videoEncoder = camcorderProfile.videoCodec;
+ audioEncoder = camcorderProfile.audioCodec;
+ frameRate = camcorderProfile.videoFrameRate;
+ videoWidth = camcorderProfile.videoFrameWidth;
+ videoHeight = camcorderProfile.videoFrameHeight;
+ bitRate = camcorderProfile.videoBitRate;
+ } else {
+ videoEncoder = MediaRecorderStressTestRunner.mVideoEncoder;
+ audioEncoder = MediaRecorderStressTestRunner.mAudioEncoder;
+ frameRate = MediaRecorderStressTestRunner.mFrameRate;
+ videoWidth = MediaRecorderStressTestRunner.mVideoWidth;
+ videoHeight = MediaRecorderStressTestRunner.mVideoHeight;
+ bitRate = MediaRecorderStressTestRunner.mBitRate;
+ }
+ iterations = MediaRecorderStressTestRunner.mIterations;
+ recordDuration = MediaRecorderStressTestRunner.mDuration;
+ removeVideo = MediaRecorderStressTestRunner.mRemoveVideo;
+
+ SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
+ mOutput.write("Total number of loops: " + iterations + "\n");
+
try {
- output.write("No of loop: ");
- for (int i = 0; i < iterations; i++){
- filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT;
- Log.v(TAG, filename);
+ mOutput.write("No of loop: ");
+ for (int i = 0; i < iterations; i++) {
+ String fileName = String.format("%s/temp%d%s",
+ Environment.getExternalStorageDirectory(), i, OUTPUT_FILE_EXT);
+ Log.v(TAG, fileName);
+
runOnLooper(new Runnable() {
@Override
public void run() {
mRecorder = new MediaRecorder();
}
});
+
Log.v(TAG, "iterations : " + iterations);
- Log.v(TAG, "video_encoder : " + video_encoder);
- Log.v(TAG, "audio_encoder : " + audio_encoder);
- Log.v(TAG, "frame_rate : " + frame_rate);
- Log.v(TAG, "video_width : " + video_width);
- Log.v(TAG, "video_height : " + video_height);
- Log.v(TAG, "bit rate : " + bit_rate);
- Log.v(TAG, "record_duration : " + record_duration);
+ Log.v(TAG, "video encoder : " + videoEncoder);
+ Log.v(TAG, "audio encoder : " + audioEncoder);
+ Log.v(TAG, "frame rate : " + frameRate);
+ Log.v(TAG, "video width : " + videoWidth);
+ Log.v(TAG, "video height : " + videoHeight);
+ Log.v(TAG, "bit rate : " + bitRate);
+ Log.v(TAG, "record duration : " + recordDuration);
mRecorder.setOnErrorListener(mRecorderErrorCallback);
mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
- mRecorder.setOutputFile(filename);
- mRecorder.setVideoFrameRate(frame_rate);
- mRecorder.setVideoSize(video_width, video_height);
- mRecorder.setVideoEncoder(video_encoder);
- mRecorder.setAudioEncoder(audio_encoder);
- mRecorder.setVideoEncodingBitRate(bit_rate);
+ mRecorder.setOutputFile(fileName);
+ mRecorder.setVideoFrameRate(frameRate);
+ mRecorder.setVideoSize(videoWidth, videoHeight);
+ mRecorder.setVideoEncoder(videoEncoder);
+ mRecorder.setAudioEncoder(audioEncoder);
+ mRecorder.setVideoEncodingBitRate(bitRate);
+
Log.v(TAG, "mediaRecorder setPreview");
- mRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
+ mRecorder.setPreviewDisplay(surfaceHolder.getSurface());
mRecorder.prepare();
mRecorder.start();
- Thread.sleep(record_duration);
+ Thread.sleep(recordDuration);
Log.v(TAG, "Before stop");
mRecorder.stop();
mRecorder.release();
+
//start the playback
MediaPlayer mp = new MediaPlayer();
- mp.setDataSource(filename);
+ mp.setDataSource(fileName);
mp.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder());
mp.prepare();
mp.start();
- Thread.sleep(record_duration);
+ Thread.sleep(recordDuration);
mp.release();
- validateRecordedVideo(filename);
- if (remove_video) {
- removeRecordedVideo(filename);
+ validateRecordedVideo(fileName);
+ if (removeVideo) {
+ removeRecordedVideo(fileName);
+ }
+ if (i == 0) {
+ mOutput.write(i + 1);
+ } else {
+ mOutput.write(String.format(", %d", (i + 1)));
}
- output.write(", " + i);
}
} catch (Exception e) {
- Log.v(TAG, e.toString());
- assertTrue("record and playback", false);
+ Log.e(TAG, e.toString());
+ fail("Record and playback");
}
- output.write("\n\n");
- output.close();
+ }
+
+ // Record and playback stress test @ 1080P quality
+ @LargeTest
+ public void testStressRecordVideoAndPlayback1080P() throws Exception {
+ recordVideoAndPlayback(CamcorderProfile.QUALITY_1080P);
+ }
+
+ // Record and playback stress test @ 720P quality
+ @LargeTest
+ public void testStressRecordVideoAndPlayback720P() throws Exception {
+ recordVideoAndPlayback(CamcorderProfile.QUALITY_720P);
+ }
+
+ // Record and playback stress test @ 480P quality
+ @LargeTest
+ public void testStressRecordVideoAndPlayback480P() throws Exception {
+ recordVideoAndPlayback(CamcorderProfile.QUALITY_480P);
+ }
+
+ // This test method uses the codec info from the test runner. Use this
+ // for more granular control of video encoding.
+ @LargeTest
+ public void defaultStressRecordVideoAndPlayback() throws Exception {
+ recordVideoAndPlayback(USE_TEST_RUNNER_PROFILE);
}
// Test case for stressing time lapse
@@ -405,21 +458,19 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
public void testStressTimeLapse() throws Exception {
SurfaceHolder mSurfaceHolder;
mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
- int record_duration = MediaRecorderStressTestRunner.mTimeLapseDuration;
- boolean remove_video = MediaRecorderStressTestRunner.mRemoveVideo;
+ int recordDuration = MediaRecorderStressTestRunner.mTimeLapseDuration;
+ boolean removeVideo = MediaRecorderStressTestRunner.mRemoveVideo;
double captureRate = MediaRecorderStressTestRunner.mCaptureRate;
- String filename;
- File stressOutFile = new File(MEDIA_STRESS_OUTPUT);
- Writer output = new BufferedWriter(new FileWriter(stressOutFile, true));
- output.write("Start camera time lapse stress:\n");
- output.write("Total number of loops: " + NUMBER_OF_TIME_LAPSE_LOOPS + "\n");
+ Log.v(TAG, "Start camera time lapse stress:");
+ mOutput.write("Total number of loops: " + NUMBER_OF_TIME_LAPSE_LOOPS + "\n");
try {
- for (int j = 0, n = Camera.getNumberOfCameras(); j < n; j++) {
- output.write("No of loop: camera " + j);
- for (int i = 0; i < NUMBER_OF_TIME_LAPSE_LOOPS; i++) {
- filename = OUTPUT_FILE + j + "_" + i + OUTPUT_FILE_EXT;
- Log.v(TAG, filename);
+ for (int i = 0, n = Camera.getNumberOfCameras(); i < n; i++) {
+ mOutput.write("No of loop: camera " + i);
+ for (int j = 0; j < NUMBER_OF_TIME_LAPSE_LOOPS; j++) {
+ String fileName = String.format("%s/temp%d_%d%s",
+ Environment.getExternalStorageDirectory(), i, j, OUTPUT_FILE_EXT);
+ Log.v(TAG, fileName);
runOnLooper(new Runnable() {
@Override
public void run() {
@@ -438,12 +489,12 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
CamcorderProfile.get(j, CamcorderProfile.QUALITY_TIME_LAPSE_HIGH);
mRecorder.setProfile(profile);
- // Set the timelapse setting; 0.1 = 10 sec timelapse, 0.5 = 2 sec timelapse, etc.
+ // Set the timelapse setting; 0.1 = 10 sec timelapse, 0.5 = 2 sec timelapse, etc
// http://developer.android.com/guide/topics/media/camera.html#time-lapse-video
mRecorder.setCaptureRate(captureRate);
// Set output file
- mRecorder.setOutputFile(filename);
+ mRecorder.setOutputFile(fileName);
// Set the preview display
Log.v(TAG, "mediaRecorder setPreviewDisplay");
@@ -451,40 +502,40 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me
mRecorder.prepare();
mRecorder.start();
- Thread.sleep(record_duration);
+ Thread.sleep(recordDuration);
Log.v(TAG, "Before stop");
mRecorder.stop();
mRecorder.release();
// Start the playback
MediaPlayer mp = new MediaPlayer();
- mp.setDataSource(filename);
+ mp.setDataSource(fileName);
mp.setDisplay(mSurfaceHolder);
mp.prepare();
mp.start();
Thread.sleep(TIME_LAPSE_PLAYBACK_WAIT_TIME);
mp.release();
- validateRecordedVideo(filename);
- if (remove_video) {
- removeRecordedVideo(filename);
+ validateRecordedVideo(fileName);
+ if (removeVideo) {
+ removeRecordedVideo(fileName);
+ }
+
+ if (j == 0) {
+ mOutput.write(j + 1);
+ } else {
+ mOutput.write(String.format(", %d", (j + 1)));
}
- output.write(", " + i);
}
}
+ } catch (IllegalStateException e) {
+ Log.e(TAG, e.toString());
+ fail("Camera time lapse stress test IllegalStateException");
+ } catch (IOException e) {
+ Log.e(TAG, e.toString());
+ fail("Camera time lapse stress test IOException");
+ } catch (Exception e) {
+ Log.e(TAG, e.toString());
+ fail("Camera time lapse stress test Exception");
}
- catch (IllegalStateException e) {
- assertTrue("Camera time lapse stress test IllegalStateException", false);
- Log.v(TAG, e.toString());
- }
- catch (IOException e) {
- assertTrue("Camera time lapse stress test IOException", false);
- Log.v(TAG, e.toString());
- }
- catch (Exception e) {
- assertTrue("Camera time lapse stress test Exception", false);
- Log.v(TAG, e.toString());
- }
- output.write("\n\n");
- output.close();
}
}
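
The reworked record-and-playback tests select a CamcorderProfile per quality level and fall back to the runner's values when USE_TEST_RUNNER_PROFILE is passed. A minimal sketch of the profile-driven MediaRecorder setup they build on, with error handling, output file, preview surface, and prepare()/start() omitted:

import android.media.CamcorderProfile;
import android.media.MediaRecorder;

int cameraId = 0;
if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
    CamcorderProfile profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_720P);
    MediaRecorder recorder = new MediaRecorder();
    // Sources must be set before the output format and encoders.
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setOutputFormat(profile.fileFormat);
    recorder.setVideoFrameRate(profile.videoFrameRate);
    recorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
    recorder.setVideoEncodingBitRate(profile.videoBitRate);
    recorder.setVideoEncoder(profile.videoCodec);
    recorder.setAudioEncoder(profile.audioCodec);
    // setOutputFile(), setPreviewDisplay(), prepare() and start() would follow here.
}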