Diffstat (limited to 'media')
-rw-r--r--  media/java/android/media/AudioFormat.java | 4
-rw-r--r--  media/java/android/media/AudioManager.java | 18
-rw-r--r--  media/java/android/media/AudioRecord.java | 10
-rw-r--r--  media/java/android/media/AudioService.java | 152
-rw-r--r--  media/java/android/media/AudioSystem.java | 4
-rw-r--r--  media/java/android/media/CamcorderProfile.java | 140
-rw-r--r--  media/java/android/media/MediaFile.java | 190
-rw-r--r--  media/java/android/media/MediaPlayer.java | 47
-rw-r--r--  media/java/android/media/MediaRecorder.java | 136
-rw-r--r--  media/java/android/media/MediaScanner.java | 356
-rw-r--r--  media/java/android/media/MtpClient.java | 108
-rw-r--r--  media/java/android/media/MtpConstants.java | 404
-rw-r--r--  media/java/android/media/MtpCursor.java | 218
-rw-r--r--  media/java/android/media/MtpDatabase.java | 916
-rw-r--r--  media/java/android/media/MtpServer.java | 76
-rwxr-xr-x  media/java/android/media/videoeditor/AudioTrack.java | 487
-rwxr-xr-x  media/java/android/media/videoeditor/Effect.java | 168
-rwxr-xr-x  media/java/android/media/videoeditor/EffectColor.java | 119
-rwxr-xr-x  media/java/android/media/videoeditor/EffectKenBurns.java | 88
-rw-r--r--  media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java | 37
-rwxr-xr-x  media/java/android/media/videoeditor/MediaImageItem.java | 272
-rwxr-xr-x  media/java/android/media/videoeditor/MediaItem.java | 568
-rwxr-xr-x  media/java/android/media/videoeditor/MediaProperties.java | 257
-rwxr-xr-x  media/java/android/media/videoeditor/MediaVideoItem.java | 426
-rwxr-xr-x  media/java/android/media/videoeditor/Overlay.java | 187
-rwxr-xr-x  media/java/android/media/videoeditor/OverlayFrame.java | 149
-rwxr-xr-x  media/java/android/media/videoeditor/Transition.java | 210
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionAlpha.java | 114
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionCrossfade.java | 60
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionFadeBlack.java | 60
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionSliding.java | 82
-rwxr-xr-x  media/java/android/media/videoeditor/VideoEditor.java | 493
-rwxr-xr-x  media/java/android/media/videoeditor/VideoEditorFactory.java | 122
-rw-r--r--  media/java/android/media/videoeditor/VideoEditorTestImpl.java | 1207
-rw-r--r--  media/java/android/media/videoeditor/WaveformData.java | 78
-rw-r--r--  media/jni/Android.mk | 14
-rw-r--r--  media/jni/android_media_MediaPlayer.cpp | 38
-rw-r--r--  media/jni/android_media_MediaProfiles.cpp | 20
-rw-r--r--  media/jni/android_media_MediaRecorder.cpp | 15
-rw-r--r--  media/jni/android_media_MediaScanner.cpp | 21
-rw-r--r--  media/jni/android_media_MtpClient.cpp | 296
-rw-r--r--  media/jni/android_media_MtpCursor.cpp | 136
-rw-r--r--  media/jni/android_media_MtpDatabase.cpp | 1021
-rw-r--r--  media/jni/android_media_MtpServer.cpp | 247
-rw-r--r--  media/libeffects/visualizer/Android.mk | 2
-rw-r--r--  media/libmedia/AudioSystem.cpp | 3
-rw-r--r--  media/libmedia/IMediaPlayer.cpp | 53
-rw-r--r--  media/libmedia/IMediaRecorder.cpp | 25
-rw-r--r--  media/libmedia/IOMX.cpp | 101
-rw-r--r--  media/libmedia/MediaProfiles.cpp | 139
-rw-r--r--  media/libmedia/MediaScanner.cpp | 50
-rw-r--r--  media/libmedia/mediaplayer.cpp | 23
-rw-r--r--  media/libmedia/mediarecorder.cpp | 30
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp | 24
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.h | 5
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.cpp | 14
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.h | 4
-rw-r--r--  media/libmediaplayerservice/MidiFile.h | 3
-rw-r--r--  media/libmediaplayerservice/StagefrightPlayer.cpp | 21
-rw-r--r--  media/libmediaplayerservice/StagefrightPlayer.h | 5
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp | 471
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.h | 54
-rw-r--r--  media/libmediaplayerservice/TestPlayerStub.h | 5
-rw-r--r--  media/libstagefright/Android.mk | 4
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 140
-rw-r--r--  media/libstagefright/CameraSource.cpp | 522
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp | 518
-rw-r--r--  media/libstagefright/FileSource.cpp | 2
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp | 9
-rw-r--r--  media/libstagefright/MediaBuffer.cpp | 29
-rw-r--r--  media/libstagefright/MediaExtractor.cpp | 2
-rw-r--r--  media/libstagefright/MediaSourceSplitter.cpp | 234
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 386
-rw-r--r--  media/libstagefright/SampleTable.cpp | 2
-rw-r--r--  media/libstagefright/StagefrightMediaScanner.cpp | 2
-rw-r--r--  media/libstagefright/VideoSourceDownSampler.cpp | 142
-rw-r--r--  media/libstagefright/codecs/avc/enc/AVCEncoder.cpp | 84
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp | 109
-rw-r--r--  media/libstagefright/colorconversion/Android.mk | 8
-rw-r--r--  media/libstagefright/colorconversion/SoftwareRenderer.cpp | 183
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h | 8
-rw-r--r--  media/libstagefright/include/OMX.h | 10
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 8
-rw-r--r--  media/libstagefright/include/SoftwareRenderer.h | 18
-rw-r--r--  media/libstagefright/matroska/MatroskaExtractor.cpp | 4
-rw-r--r--  media/libstagefright/matroska/mkvparser.cpp | 7614
-rw-r--r--  media/libstagefright/matroska/mkvparser.hpp | 982
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 23
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 130
-rw-r--r--  media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp | 384
-rw-r--r--  media/libstagefright/rtsp/AMPEG4AudioAssembler.h | 13
-rw-r--r--  media/libstagefright/rtsp/ARTPSource.cpp | 2
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.cpp | 263
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.h | 20
-rw-r--r--  media/libstagefright/rtsp/ASessionDescription.cpp | 19
-rw-r--r--  media/libstagefright/rtsp/Android.mk | 1
-rw-r--r--  media/libstagefright/rtsp/MyHandler.h | 25
-rw-r--r--  media/libstagefright/yuv/Android.mk | 13
-rw-r--r--  media/libstagefright/yuv/YUVCanvas.cpp | 111
-rw-r--r--  media/libstagefright/yuv/YUVImage.cpp | 413
-rw-r--r--  media/mtp/Android.mk | 78
-rw-r--r--  media/mtp/MtpClient.cpp | 262
-rw-r--r--  media/mtp/MtpClient.h | 68
-rw-r--r--  media/mtp/MtpCursor.cpp | 461
-rw-r--r--  media/mtp/MtpCursor.h | 78
-rw-r--r--  media/mtp/MtpDataPacket.cpp | 486
-rw-r--r--  media/mtp/MtpDataPacket.h | 118
-rw-r--r--  media/mtp/MtpDatabase.h | 104
-rw-r--r--  media/mtp/MtpDebug.cpp | 387
-rw-r--r--  media/mtp/MtpDebug.h | 37
-rw-r--r--  media/mtp/MtpDevice.cpp | 496
-rw-r--r--  media/mtp/MtpDevice.h | 105
-rw-r--r--  media/mtp/MtpDeviceInfo.cpp | 97
-rw-r--r--  media/mtp/MtpDeviceInfo.h | 54
-rw-r--r--  media/mtp/MtpEventPacket.cpp | 67
-rw-r--r--  media/mtp/MtpEventPacket.h | 48
-rw-r--r--  media/mtp/MtpObjectInfo.cpp | 108
-rw-r--r--  media/mtp/MtpObjectInfo.h | 60
-rw-r--r--  media/mtp/MtpPacket.cpp | 154
-rw-r--r--  media/mtp/MtpPacket.h | 69
-rw-r--r--  media/mtp/MtpProperty.cpp | 356
-rw-r--r--  media/mtp/MtpProperty.h | 108
-rw-r--r--  media/mtp/MtpRequestPacket.cpp | 56
-rw-r--r--  media/mtp/MtpRequestPacket.h | 48
-rw-r--r--  media/mtp/MtpResponsePacket.cpp | 57
-rw-r--r--  media/mtp/MtpResponsePacket.h | 48
-rw-r--r--  media/mtp/MtpServer.cpp | 797
-rw-r--r--  media/mtp/MtpServer.h | 107
-rw-r--r--  media/mtp/MtpStorage.cpp | 80
-rw-r--r--  media/mtp/MtpStorage.h | 50
-rw-r--r--  media/mtp/MtpStorageInfo.cpp | 72
-rw-r--r--  media/mtp/MtpStorageInfo.h | 49
-rw-r--r--  media/mtp/MtpStringBuffer.cpp | 171
-rw-r--r--  media/mtp/MtpStringBuffer.h | 57
-rw-r--r--  media/mtp/MtpTypes.h | 78
-rw-r--r--  media/mtp/MtpUtils.cpp | 78
-rw-r--r--  media/mtp/MtpUtils.h | 29
-rw-r--r--  media/mtp/mtp.h | 475
-rw-r--r--  media/tests/CameraBrowser/Android.mk | 10
-rw-r--r--  media/tests/CameraBrowser/AndroidManifest.xml | 28
-rw-r--r--  media/tests/CameraBrowser/res/layout/object_info.xml | 157
-rw-r--r--  media/tests/CameraBrowser/res/layout/object_list.xml | 33
-rw-r--r--  media/tests/CameraBrowser/res/menu/object_menu.xml | 23
-rw-r--r--  media/tests/CameraBrowser/res/values/strings.xml | 45
-rw-r--r--  media/tests/CameraBrowser/res/values/styles.xml | 34
-rw-r--r--  media/tests/CameraBrowser/src/com/android/camerabrowser/CameraBrowser.java | 103
-rw-r--r--  media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectBrowser.java | 147
-rw-r--r--  media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectViewer.java | 254
-rw-r--r--  media/tests/CameraBrowser/src/com/android/camerabrowser/StorageBrowser.java | 76
-rw-r--r--  media/tests/CameraBrowser/src/com/android/camerabrowser/UsbReceiver.java | 47
-rw-r--r--  media/tests/mtp/Android.mk | 61
-rw-r--r--  media/tests/mtp/MtpFile.cpp | 187
-rw-r--r--  media/tests/mtp/MtpFile.h | 57
-rw-r--r--  media/tests/mtp/mtp.cpp | 370
-rw-r--r--  media/tests/players/invoke_mock_media_player.cpp | 4
155 files changed, 26490 insertions, 4439 deletions
diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java
index f1fa1e8..31e4631 100644
--- a/media/java/android/media/AudioFormat.java
+++ b/media/java/android/media/AudioFormat.java
@@ -31,9 +31,9 @@ public class AudioFormat {
public static final int ENCODING_INVALID = 0;
/** Default audio data format */
public static final int ENCODING_DEFAULT = 1;
- /** Audio data format: PCM 16 bit per sample */
+ /** Audio data format: PCM 16 bit per sample. Guaranteed to be supported by devices. */
public static final int ENCODING_PCM_16BIT = 2; // accessed by native code
- /** Audio data format: PCM 8 bit per sample */
+ /** Audio data format: PCM 8 bit per sample. Not guaranteed to be supported by devices. */
public static final int ENCODING_PCM_8BIT = 3; // accessed by native code
/** Invalid audio channel configuration */
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index bbbba74..b23dcde 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -1525,4 +1525,22 @@ public class AudioManager {
* {@hide}
*/
private IBinder mICallBack = new Binder();
+
+ /**
+ * Checks whether the phone is in silent mode, with or without vibrate.
+ *
+ * @return true if phone is in silent mode, with or without vibrate.
+ *
+ * @see #getRingerMode()
+ *
+ * @hide pending API Council approval
+ */
+ public boolean isSilentMode() {
+ int ringerMode = getRingerMode();
+ boolean silentMode =
+ (ringerMode == RINGER_MODE_SILENT) ||
+ (ringerMode == RINGER_MODE_VIBRATE);
+ return silentMode;
+ }
+
}
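Note: isSilentMode() is hidden pending API Council approval, so a third-party application would derive the same answer from the public getRingerMode(). A minimal sketch of that equivalent check (the class name and Context plumbing are illustrative only):

    import android.content.Context;
    import android.media.AudioManager;

    public class RingerModeHelper {
        /** True when the device is silenced, with or without vibrate. */
        public static boolean isSilent(Context context) {
            AudioManager am =
                    (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
            int ringerMode = am.getRingerMode();
            return ringerMode == AudioManager.RINGER_MODE_SILENT
                    || ringerMode == AudioManager.RINGER_MODE_VIBRATE;
        }
    }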
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index c48eaad..c567a6e 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -194,11 +194,13 @@ public class AudioRecord
* Class constructor.
* @param audioSource the recording source. See {@link MediaRecorder.AudioSource} for
* recording source definitions.
- * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but
- * not limited to) 44100, 22050 and 11025.
+ * @param sampleRateInHz the sample rate expressed in Hertz. 44100Hz is currently the only
+ * rate that is guaranteed to work on all devices, but other rates such as 22050,
+ * 16000, and 11025 may work on some devices.
* @param channelConfig describes the configuration of the audio channels.
* See {@link AudioFormat#CHANNEL_IN_MONO} and
- * {@link AudioFormat#CHANNEL_IN_STEREO}
+ * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed
+ * to work on all devices.
* @param audioFormat the format in which the audio data is represented.
* See {@link AudioFormat#ENCODING_PCM_16BIT} and
* {@link AudioFormat#ENCODING_PCM_8BIT}
@@ -444,6 +446,8 @@ public class AudioRecord
* or {@link #ERROR} if the implementation was unable to query the hardware for its
* output properties,
* or the minimum buffer size expressed in bytes.
+ * @see #AudioRecord(int, int, int, int, int) for more information on valid
+ * configuration values.
*/
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
int channelCount = 0;
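Per the updated documentation, only 44100 Hz, CHANNEL_IN_MONO, and ENCODING_PCM_16BIT are guaranteed on all devices, and the record buffer should be sized from getMinBufferSize(). A minimal sketch under those assumptions (the 2x buffer headroom is a common convention, not an API requirement):

    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;

    public class SafeRecorderFactory {
        /** Builds an AudioRecord using only configuration values guaranteed by the docs above. */
        public static AudioRecord create() {
            int sampleRate = 44100;                        // only rate guaranteed on all devices
            int channel = AudioFormat.CHANNEL_IN_MONO;     // guaranteed channel configuration
            int encoding = AudioFormat.ENCODING_PCM_16BIT; // guaranteed encoding
            int minBuf = AudioRecord.getMinBufferSize(sampleRate, channel, encoding);
            if (minBuf == AudioRecord.ERROR || minBuf == AudioRecord.ERROR_BAD_VALUE) {
                throw new IllegalStateException("Could not query a minimum buffer size");
            }
            return new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleRate, channel, encoding, minBuf * 2);
        }
    }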
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 41d2cc5..a49bb37 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -16,18 +16,19 @@
package android.media;
-import java.util.NoSuchElementException;
import android.app.ActivityManagerNative;
+import android.bluetooth.BluetoothA2dp;
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothClass;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothHeadset;
+import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
-import android.bluetooth.BluetoothA2dp;
-import android.bluetooth.BluetoothClass;
-import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothHeadset;
import android.content.pm.PackageManager;
import android.database.ContentObserver;
import android.media.MediaPlayer.OnCompletionListener;
@@ -40,6 +41,7 @@ import android.os.Looper;
import android.os.Message;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.os.SystemProperties;
import android.provider.Settings;
import android.provider.Settings.System;
import android.telephony.PhoneStateListener;
@@ -47,7 +49,6 @@ import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.VolumePanel;
-import android.os.SystemProperties;
import com.android.internal.telephony.ITelephony;
@@ -57,7 +58,9 @@ import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
+import java.util.NoSuchElementException;
import java.util.Set;
import java.util.Stack;
@@ -258,8 +261,8 @@ public class AudioService extends IAudioService.Stub {
// BluetoothHeadset API to control SCO connection
private BluetoothHeadset mBluetoothHeadset;
- // Bluetooth headset connection state
- private boolean mBluetoothHeadsetConnected;
+ // Bluetooth headset device
+ private BluetoothDevice mBluetoothHeadsetDevice;
///////////////////////////////////////////////////////////////////////////
// Construction
@@ -294,17 +297,23 @@ public class AudioService extends IAudioService.Stub {
AudioSystem.setErrorCallback(mAudioSystemCallback);
loadSoundEffects();
- mBluetoothHeadsetConnected = false;
- mBluetoothHeadset = new BluetoothHeadset(context,
- mBluetoothHeadsetServiceListener);
+ mBluetoothHeadsetDevice = null;
+ BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
+ if (adapter != null) {
+ adapter.getProfileProxy(mContext, mBluetoothProfileServiceListener,
+ BluetoothProfile.HEADSET);
+ }
// Register for device connection intent broadcasts.
IntentFilter intentFilter =
new IntentFilter(Intent.ACTION_HEADSET_PLUG);
- intentFilter.addAction(BluetoothA2dp.ACTION_SINK_STATE_CHANGED);
- intentFilter.addAction(BluetoothHeadset.ACTION_STATE_CHANGED);
- intentFilter.addAction(Intent.ACTION_DOCK_EVENT);
+
+ intentFilter.addAction(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED);
intentFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
+ intentFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
+ intentFilter.addAction(Intent.ACTION_DOCK_EVENT);
+ intentFilter.addAction(Intent.ACTION_USB_ANLG_HEADSET_PLUG);
+ intentFilter.addAction(Intent.ACTION_USB_DGTL_HEADSET_PLUG);
context.registerReceiver(mReceiver, intentFilter);
// Register for media button intent broadcasts.
@@ -1000,7 +1009,7 @@ public class AudioService extends IAudioService.Stub {
public void incCount() {
synchronized(mScoClients) {
- requestScoState(BluetoothHeadset.AUDIO_STATE_CONNECTED);
+ requestScoState(BluetoothHeadset.STATE_AUDIO_CONNECTED);
if (mStartcount == 0) {
try {
mCb.linkToDeath(this, 0);
@@ -1026,7 +1035,7 @@ public class AudioService extends IAudioService.Stub {
Log.w(TAG, "decCount() going to 0 but not registered to binder");
}
}
- requestScoState(BluetoothHeadset.AUDIO_STATE_DISCONNECTED);
+ requestScoState(BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
}
}
}
@@ -1042,7 +1051,7 @@ public class AudioService extends IAudioService.Stub {
}
mStartcount = 0;
if (stopSco) {
- requestScoState(BluetoothHeadset.AUDIO_STATE_DISCONNECTED);
+ requestScoState(BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
}
}
}
@@ -1068,12 +1077,12 @@ public class AudioService extends IAudioService.Stub {
private void requestScoState(int state) {
if (totalCount() == 0 &&
- mBluetoothHeadsetConnected &&
+ mBluetoothHeadsetDevice != null &&
AudioService.this.mMode == AudioSystem.MODE_NORMAL) {
- if (state == BluetoothHeadset.AUDIO_STATE_CONNECTED) {
- mBluetoothHeadset.startVoiceRecognition();
+ if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
+ mBluetoothHeadset.startVoiceRecognition(mBluetoothHeadsetDevice);
} else {
- mBluetoothHeadset.stopVoiceRecognition();
+ mBluetoothHeadset.stopVoiceRecognition(mBluetoothHeadsetDevice);
}
}
}
@@ -1103,23 +1112,25 @@ public class AudioService extends IAudioService.Stub {
}
}
- private BluetoothHeadset.ServiceListener mBluetoothHeadsetServiceListener =
- new BluetoothHeadset.ServiceListener() {
- public void onServiceConnected() {
- if (mBluetoothHeadset != null) {
- BluetoothDevice device = mBluetoothHeadset.getCurrentHeadset();
- if (mBluetoothHeadset.getState(device) == BluetoothHeadset.STATE_CONNECTED) {
- mBluetoothHeadsetConnected = true;
- }
+ private BluetoothProfile.ServiceListener mBluetoothProfileServiceListener =
+ new BluetoothProfile.ServiceListener() {
+ public void onServiceConnected(int profile, BluetoothProfile proxy) {
+ mBluetoothHeadset = (BluetoothHeadset) proxy;
+ List<BluetoothDevice> deviceList = mBluetoothHeadset.getConnectedDevices();
+ if (deviceList.size() > 0) {
+ mBluetoothHeadsetDevice = deviceList.get(0);
+ } else {
+ mBluetoothHeadsetDevice = null;
}
}
- public void onServiceDisconnected() {
+ public void onServiceDisconnected(int profile) {
if (mBluetoothHeadset != null) {
- BluetoothDevice device = mBluetoothHeadset.getCurrentHeadset();
- if (mBluetoothHeadset.getState(device) == BluetoothHeadset.STATE_DISCONNECTED) {
- mBluetoothHeadsetConnected = false;
+ List<BluetoothDevice> devices = mBluetoothHeadset.getConnectedDevices();
+ if (devices.size() == 0) {
+ mBluetoothHeadsetDevice = null;
clearAllScoClients();
}
+ mBluetoothHeadset = null;
}
}
};
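The listener above replaces the old direct BluetoothHeadset construction with the BluetoothProfile proxy pattern: ask the default adapter for a HEADSET proxy, then cache the connected device when the service binds. A minimal stand-alone sketch of the same pattern (class and field names are illustrative, not the AudioService code):

    import android.bluetooth.BluetoothAdapter;
    import android.bluetooth.BluetoothHeadset;
    import android.bluetooth.BluetoothProfile;
    import android.content.Context;

    public class HeadsetProxyClient {
        private BluetoothHeadset mHeadset;

        public void bind(Context context) {
            BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
            if (adapter == null) return;  // no Bluetooth hardware on this device
            adapter.getProfileProxy(context, new BluetoothProfile.ServiceListener() {
                public void onServiceConnected(int profile, BluetoothProfile proxy) {
                    mHeadset = (BluetoothHeadset) proxy;  // safe: we requested HEADSET
                }
                public void onServiceDisconnected(int profile) {
                    mHeadset = null;
                }
            }, BluetoothProfile.HEADSET);
        }

        public void unbind(BluetoothAdapter adapter) {
            if (mHeadset != null) {
                adapter.closeProfileProxy(BluetoothProfile.HEADSET, mHeadset);
                mHeadset = null;
            }
        }
    }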
@@ -1808,23 +1819,29 @@ public class AudioService extends IAudioService.Stub {
case Intent.EXTRA_DOCK_STATE_CAR:
config = AudioSystem.FORCE_BT_CAR_DOCK;
break;
+ case Intent.EXTRA_DOCK_STATE_LE_DESK:
+ config = AudioSystem.FORCE_ANALOG_DOCK;
+ break;
+ case Intent.EXTRA_DOCK_STATE_HE_DESK:
+ config = AudioSystem.FORCE_DIGITAL_DOCK;
+ break;
case Intent.EXTRA_DOCK_STATE_UNDOCKED:
default:
config = AudioSystem.FORCE_NONE;
}
AudioSystem.setForceUse(AudioSystem.FOR_DOCK, config);
- } else if (action.equals(BluetoothA2dp.ACTION_SINK_STATE_CHANGED)) {
- int state = intent.getIntExtra(BluetoothA2dp.EXTRA_SINK_STATE,
- BluetoothA2dp.STATE_DISCONNECTED);
+ } else if (action.equals(BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED)) {
+ int state = intent.getIntExtra(BluetoothProfile.EXTRA_STATE,
+ BluetoothProfile.STATE_DISCONNECTED);
BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
String address = btDevice.getAddress();
- boolean isConnected = (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) &&
- ((String)mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)).equals(address));
+ boolean isConnected =
+ (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) &&
+ mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP).equals(address));
- if (isConnected &&
- state != BluetoothA2dp.STATE_CONNECTED && state != BluetoothA2dp.STATE_PLAYING) {
+ if (isConnected && state != BluetoothProfile.STATE_CONNECTED) {
if (btDevice.isBluetoothDock()) {
- if (state == BluetoothA2dp.STATE_DISCONNECTED) {
+ if (state == BluetoothProfile.STATE_DISCONNECTED) {
// introduction of a delay for transient disconnections of docks when
// power is rapidly turned off/on, this message will be canceled if
// we reconnect the dock under a preset delay
@@ -1834,9 +1851,7 @@ public class AudioService extends IAudioService.Stub {
} else {
makeA2dpDeviceUnavailableNow(address);
}
- } else if (!isConnected &&
- (state == BluetoothA2dp.STATE_CONNECTED ||
- state == BluetoothA2dp.STATE_PLAYING)) {
+ } else if (!isConnected && state == BluetoothProfile.STATE_CONNECTED) {
if (btDevice.isBluetoothDock()) {
// this could be a reconnection after a transient disconnection
cancelA2dpDeviceTimeout();
@@ -1851,9 +1866,9 @@ public class AudioService extends IAudioService.Stub {
}
makeA2dpDeviceAvailable(address);
}
- } else if (action.equals(BluetoothHeadset.ACTION_STATE_CHANGED)) {
- int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE,
- BluetoothHeadset.STATE_ERROR);
+ } else if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
+ int state = intent.getIntExtra(BluetoothProfile.EXTRA_STATE,
+ BluetoothProfile.STATE_DISCONNECTED);
int device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO;
BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
String address = null;
@@ -1874,21 +1889,21 @@ public class AudioService extends IAudioService.Stub {
}
boolean isConnected = (mConnectedDevices.containsKey(device) &&
- ((String)mConnectedDevices.get(device)).equals(address));
+ mConnectedDevices.get(device).equals(address));
- if (isConnected && state != BluetoothHeadset.STATE_CONNECTED) {
+ if (isConnected && state != BluetoothProfile.STATE_CONNECTED) {
AudioSystem.setDeviceConnectionState(device,
AudioSystem.DEVICE_STATE_UNAVAILABLE,
address);
mConnectedDevices.remove(device);
- mBluetoothHeadsetConnected = false;
+ mBluetoothHeadsetDevice = null;
clearAllScoClients();
- } else if (!isConnected && state == BluetoothHeadset.STATE_CONNECTED) {
+ } else if (!isConnected && state == BluetoothProfile.STATE_CONNECTED) {
AudioSystem.setDeviceConnectionState(device,
AudioSystem.DEVICE_STATE_AVAILABLE,
address);
mConnectedDevices.put(new Integer(device), address);
- mBluetoothHeadsetConnected = true;
+ mBluetoothHeadsetDevice = btDevice;
}
} else if (action.equals(Intent.ACTION_HEADSET_PLUG)) {
int state = intent.getIntExtra("state", 0);
@@ -1921,16 +1936,41 @@ public class AudioService extends IAudioService.Stub {
mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE), "");
}
}
+ } else if (action.equals(Intent.ACTION_USB_ANLG_HEADSET_PLUG)) {
+ int state = intent.getIntExtra("state", 0);
+ Log.v(TAG, "Broadcast Receiver: Got ACTION_USB_ANLG_HEADSET_PLUG, state = "+state);
+ boolean isConnected = mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET);
+ if (state == 0 && isConnected) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE, "");
+ mConnectedDevices.remove(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET);
+ } else if (state == 1 && !isConnected) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET,
+ AudioSystem.DEVICE_STATE_AVAILABLE, "");
+ mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET), "");
+ }
+ } else if (action.equals(Intent.ACTION_USB_DGTL_HEADSET_PLUG)) {
+ int state = intent.getIntExtra("state", 0);
+ Log.v(TAG, "Broadcast Receiver: Got ACTION_USB_DGTL_HEADSET_PLUG, state = "+state);
+ boolean isConnected = mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET);
+ if (state == 0 && isConnected) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET,
+ AudioSystem.DEVICE_STATE_UNAVAILABLE, "");
+ mConnectedDevices.remove(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET);
+ } else if (state == 1 && !isConnected) {
+ AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET,
+ AudioSystem.DEVICE_STATE_AVAILABLE, "");
+ mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET), "");
+ }
} else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
- int state = intent.getIntExtra(BluetoothHeadset.EXTRA_AUDIO_STATE,
- BluetoothHeadset.STATE_ERROR);
+ int state = intent.getIntExtra(BluetoothProfile.EXTRA_STATE, -1);
synchronized (mScoClients) {
if (!mScoClients.isEmpty()) {
switch (state) {
- case BluetoothHeadset.AUDIO_STATE_CONNECTED:
+ case BluetoothHeadset.STATE_AUDIO_CONNECTED:
state = AudioManager.SCO_AUDIO_STATE_CONNECTED;
break;
- case BluetoothHeadset.AUDIO_STATE_DISCONNECTED:
+ case BluetoothHeadset.STATE_AUDIO_DISCONNECTED:
state = AudioManager.SCO_AUDIO_STATE_DISCONNECTED;
break;
default:
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index a4818ff..5442791 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -243,6 +243,8 @@ public class AudioSystem
public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100;
public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200;
public static final int DEVICE_OUT_AUX_DIGITAL = 0x400;
+ public static final int DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800;
+ public static final int DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000;
public static final int DEVICE_OUT_DEFAULT = 0x8000;
// input devices
public static final int DEVICE_IN_COMMUNICATION = 0x10000;
@@ -273,6 +275,8 @@ public class AudioSystem
public static final int FORCE_WIRED_ACCESSORY = 5;
public static final int FORCE_BT_CAR_DOCK = 6;
public static final int FORCE_BT_DESK_DOCK = 7;
+ public static final int FORCE_ANALOG_DOCK = 8;
+ public static final int FORCE_DIGITAL_DOCK = 9;
public static final int FORCE_DEFAULT = FORCE_NONE;
// usage for setForceUse
diff --git a/media/java/android/media/CamcorderProfile.java b/media/java/android/media/CamcorderProfile.java
index a27df57..3e9429d 100644
--- a/media/java/android/media/CamcorderProfile.java
+++ b/media/java/android/media/CamcorderProfile.java
@@ -39,22 +39,80 @@ package android.media;
*/
public class CamcorderProfile
{
+ // Do not change these values/ordinals without updating their counterpart
+ // in include/media/MediaProfiles.h!
+
/**
- * The output from camcorder recording sessions can have different quality levels.
- *
- * Currently, we define two quality levels: high quality and low quality.
- * A camcorder recording session with high quality level usually has higher output bit
- * rate, better video and/or audio recording quality, larger video frame
- * resolution and higher audio sampling rate, etc, than those with low quality
- * level.
- *
- * Do not change these values/ordinals without updating their counterpart
- * in include/media/MediaProfiles.h!
+ * Quality level corresponding to the lowest available resolution.
*/
public static final int QUALITY_LOW = 0;
+
+ /**
+ * Quality level corresponding to the highest available resolution.
+ */
public static final int QUALITY_HIGH = 1;
/**
+ * Quality level corresponding to the qcif (176 x 144) resolution.
+ */
+ public static final int QUALITY_QCIF = 2;
+
+ /**
+ * Quality level corresponding to the cif (352 x 288) resolution.
+ */
+ public static final int QUALITY_CIF = 3;
+
+ /**
+ * Quality level corresponding to the 480p (720 x 480) resolution.
+ */
+ public static final int QUALITY_480P = 4;
+
+ /**
+ * Quality level corresponding to the 720p (1280 x 720) resolution.
+ */
+ public static final int QUALITY_720P = 5;
+
+ /**
+ * Quality level corresponding to the 1080p (1920 x 1088) resolution.
+ */
+ public static final int QUALITY_1080P = 6;
+
+ /**
+ * Time lapse quality level corresponding to the lowest available resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_LOW = 1000;
+
+ /**
+ * Time lapse quality level corresponding to the highest available resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_HIGH = 1001;
+
+ /**
+ * Time lapse quality level corresponding to the qcif (176 x 144) resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_QCIF = 1002;
+
+ /**
+ * Time lapse quality level corresponding to the cif (352 x 288) resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_CIF = 1003;
+
+ /**
+ * Time lapse quality level corresponding to the 480p (720 x 480) resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_480P = 1004;
+
+ /**
+ * Time lapse quality level corresponding to the 720p (1280 x 720) resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_720P = 1005;
+
+ /**
+ * Time lapse quality level corresponding to the 1080p (1920 x 1088) resolution.
+ */
+ public static final int QUALITY_TIME_LAPSE_1080P = 1006;
+
+ /**
* Default recording duration in seconds before the session is terminated.
* This is useful for applications like MMS that have a limited file size requirement.
*/
@@ -122,25 +180,79 @@ public class CamcorderProfile
* Returns the camcorder profile for the default camera at the given
* quality level.
* @param quality the target quality level for the camcorder profile
+ * @see #get(int, int)
*/
public static CamcorderProfile get(int quality) {
- return get(0, quality);
+ return get(android.hardware.Camera.CAMERA_ID_DEFAULT, quality);
}
/**
* Returns the camcorder profile for the given camera at the given
* quality level.
+ *
+ * Quality levels QUALITY_LOW, QUALITY_HIGH are guaranteed to be supported, while
+ * other levels may or may not be supported. The supported levels can be checked using
+ * {@link #hasProfile(int, int)}.
+ * QUALITY_LOW refers to the lowest quality available, while QUALITY_HIGH refers to
+ * the highest quality available.
+ * QUALITY_LOW/QUALITY_HIGH have to match one of qcif, cif, 480p, 720p, or 1080p.
+ * E.g. if the device supports 480p, 720p, and 1080p, then low is 480p and high is
+ * 1080p.
+ *
+ * The same is true for time lapse quality levels, i.e. QUALITY_TIME_LAPSE_LOW,
+ * QUALITY_TIME_LAPSE_HIGH are guaranteed to be supported and have to match one of
+ * qcif, cif, 480p, 720p, or 1080p.
+ *
+ * A camcorder recording session with higher quality level usually has higher output
+ * bit rate, better video and/or audio recording quality, larger video frame
+ * resolution and higher audio sampling rate, etc, than those with lower quality
+ * level.
+ *
* @param cameraId the id for the camera
- * @param quality the target quality level for the camcorder profile
+ * @param quality the target quality level for the camcorder profile.
+ * @see #QUALITY_LOW
+ * @see #QUALITY_HIGH
+ * @see #QUALITY_QCIF
+ * @see #QUALITY_CIF
+ * @see #QUALITY_480P
+ * @see #QUALITY_720P
+ * @see #QUALITY_1080P
+ * @see #QUALITY_TIME_LAPSE_LOW
+ * @see #QUALITY_TIME_LAPSE_HIGH
+ * @see #QUALITY_TIME_LAPSE_QCIF
+ * @see #QUALITY_TIME_LAPSE_CIF
+ * @see #QUALITY_TIME_LAPSE_480P
+ * @see #QUALITY_TIME_LAPSE_720P
+ * @see #QUALITY_TIME_LAPSE_1080P
*/
public static CamcorderProfile get(int cameraId, int quality) {
- if (quality < QUALITY_LOW || quality > QUALITY_HIGH) {
+ if (!((quality >= QUALITY_LOW && quality <= QUALITY_1080P) ||
+ (quality >= QUALITY_TIME_LAPSE_LOW && quality <= QUALITY_TIME_LAPSE_1080P))) {
String errMessage = "Unsupported quality level: " + quality;
throw new IllegalArgumentException(errMessage);
}
return native_get_camcorder_profile(cameraId, quality);
}
+ /**
+ * Returns true if camcorder profile exists for the default camera at
+ * the given quality level.
+ * @param quality the target quality level for the camcorder profile
+ */
+ public static boolean hasProfile(int quality) {
+ return hasProfile(android.hardware.Camera.CAMERA_ID_DEFAULT, quality);
+ }
+
+ /**
+ * Returns true if camcorder profile exists for the given camera at
+ * the given quality level.
+ * @param cameraId the id for the camera
+ * @param quality the target quality level for the camcorder profile
+ */
+ public static boolean hasProfile(int cameraId, int quality) {
+ return native_has_camcorder_profile(cameraId, quality);
+ }
+
static {
System.loadLibrary("media_jni");
native_init();
@@ -178,4 +290,6 @@ public class CamcorderProfile
private static native final void native_init();
private static native final CamcorderProfile native_get_camcorder_profile(
int cameraId, int quality);
+ private static native final boolean native_has_camcorder_profile(
+ int cameraId, int quality);
}
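Because only the LOW and HIGH levels are guaranteed, the documented pattern is to probe a specific level with hasProfile() before calling get(). A minimal sketch that prefers 720p on the default camera and falls back to the guaranteed high profile (the 720p preference is an arbitrary example):

    import android.media.CamcorderProfile;

    public class ProfilePicker {
        /** Prefer 720p when the device advertises it; otherwise use the guaranteed high profile. */
        public static CamcorderProfile pick() {
            if (CamcorderProfile.hasProfile(CamcorderProfile.QUALITY_720P)) {
                return CamcorderProfile.get(CamcorderProfile.QUALITY_720P);
            }
            return CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
        }
    }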
diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java
index 6e527d9..532a2df 100644
--- a/media/java/android/media/MediaFile.java
+++ b/media/java/android/media/MediaFile.java
@@ -34,8 +34,6 @@ import java.util.List;
* {@hide}
*/
public class MediaFile {
- // comma separated list of all file extensions supported by the media scanner
- public final static String sFileExtensions;
// Audio file types
public static final int FILE_TYPE_MP3 = 1;
@@ -84,6 +82,22 @@ public class MediaFile {
public static final int FILE_TYPE_WPL = 43;
private static final int FIRST_PLAYLIST_FILE_TYPE = FILE_TYPE_M3U;
private static final int LAST_PLAYLIST_FILE_TYPE = FILE_TYPE_WPL;
+
+ // Drm file types
+ public static final int FILE_TYPE_FL = 51;
+ private static final int FIRST_DRM_FILE_TYPE = FILE_TYPE_FL;
+ private static final int LAST_DRM_FILE_TYPE = FILE_TYPE_FL;
+
+ // Other popular file types
+ public static final int FILE_TYPE_TEXT = 100;
+ public static final int FILE_TYPE_HTML = 101;
+ public static final int FILE_TYPE_PDF = 102;
+ public static final int FILE_TYPE_XML = 103;
+ public static final int FILE_TYPE_MS_WORD = 104;
+ public static final int FILE_TYPE_MS_EXCEL = 105;
+ public static final int FILE_TYPE_MS_POWERPOINT = 106;
+ public static final int FILE_TYPE_FLAC = 107;
+ public static final int FILE_TYPE_ZIP = 108;
static class MediaFileType {
@@ -96,15 +110,32 @@ public class MediaFile {
}
}
- private static HashMap<String, MediaFileType> sFileTypeMap
+ private static HashMap<String, MediaFileType> sFileTypeMap
= new HashMap<String, MediaFileType>();
- private static HashMap<String, Integer> sMimeTypeMap
- = new HashMap<String, Integer>();
+ private static HashMap<String, Integer> sMimeTypeMap
+ = new HashMap<String, Integer>();
+ // maps file extension to MTP format code
+ private static HashMap<String, Integer> sFileTypeToFormatMap
+ = new HashMap<String, Integer>();
+ // maps mime type to MTP format code
+ private static HashMap<String, Integer> sMimeTypeToFormatMap
+ = new HashMap<String, Integer>();
+ // maps MTP format code to mime type
+ private static HashMap<Integer, String> sFormatToMimeTypeMap
+ = new HashMap<Integer, String>();
+
static void addFileType(String extension, int fileType, String mimeType) {
sFileTypeMap.put(extension, new MediaFileType(fileType, mimeType));
sMimeTypeMap.put(mimeType, Integer.valueOf(fileType));
}
+ static void addFileType(String extension, int fileType, String mimeType, int mtpFormatCode) {
+ addFileType(extension, fileType, mimeType);
+ sFileTypeToFormatMap.put(extension, Integer.valueOf(mtpFormatCode));
+ sMimeTypeToFormatMap.put(mimeType, Integer.valueOf(mtpFormatCode));
+ sFormatToMimeTypeMap.put(mtpFormatCode, mimeType);
+ }
+
private static boolean isWMAEnabled() {
List<AudioDecoder> decoders = DecoderCapabilities.getAudioDecoders();
for (AudioDecoder decoder: decoders) {
@@ -115,28 +146,18 @@ public class MediaFile {
return false;
}
- private static boolean isWMVEnabled() {
- List<VideoDecoder> decoders = DecoderCapabilities.getVideoDecoders();
- for (VideoDecoder decoder: decoders) {
- if (decoder == VideoDecoder.VIDEO_DECODER_WMV) {
- return true;
- }
- }
- return false;
- }
-
static {
- addFileType("MP3", FILE_TYPE_MP3, "audio/mpeg");
- addFileType("M4A", FILE_TYPE_M4A, "audio/mp4");
- addFileType("WAV", FILE_TYPE_WAV, "audio/x-wav");
+ addFileType("MP3", FILE_TYPE_MP3, "audio/mpeg", MtpConstants.FORMAT_MP3);
+ addFileType("M4A", FILE_TYPE_M4A, "audio/mp4", MtpConstants.FORMAT_MPEG);
+ addFileType("WAV", FILE_TYPE_WAV, "audio/x-wav", MtpConstants.FORMAT_WAV);
addFileType("AMR", FILE_TYPE_AMR, "audio/amr");
addFileType("AWB", FILE_TYPE_AWB, "audio/amr-wb");
if (isWMAEnabled()) {
- addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma");
+ addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma", MtpConstants.FORMAT_WMA);
}
- addFileType("OGG", FILE_TYPE_OGG, "application/ogg");
- addFileType("OGA", FILE_TYPE_OGG, "application/ogg");
- addFileType("AAC", FILE_TYPE_AAC, "audio/aac");
+ addFileType("OGG", FILE_TYPE_OGG, "application/ogg", MtpConstants.FORMAT_OGG);
+ addFileType("OGA", FILE_TYPE_OGG, "application/ogg", MtpConstants.FORMAT_OGG);
+ addFileType("AAC", FILE_TYPE_AAC, "audio/aac", MtpConstants.FORMAT_AAC);
addFileType("MKA", FILE_TYPE_MKA, "audio/x-matroska");
addFileType("MID", FILE_TYPE_MID, "audio/midi");
@@ -148,78 +169,131 @@ public class MediaFile {
addFileType("RTX", FILE_TYPE_MID, "audio/midi");
addFileType("OTA", FILE_TYPE_MID, "audio/midi");
- addFileType("MPEG", FILE_TYPE_MP4, "video/mpeg");
- addFileType("MP4", FILE_TYPE_MP4, "video/mp4");
- addFileType("M4V", FILE_TYPE_M4V, "video/mp4");
- addFileType("3GP", FILE_TYPE_3GPP, "video/3gpp");
- addFileType("3GPP", FILE_TYPE_3GPP, "video/3gpp");
- addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2");
- addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2");
+ addFileType("MPEG", FILE_TYPE_MP4, "video/mpeg", MtpConstants.FORMAT_MPEG);
+ addFileType("MP4", FILE_TYPE_MP4, "video/mp4", MtpConstants.FORMAT_MPEG);
+ addFileType("M4V", FILE_TYPE_M4V, "video/mp4", MtpConstants.FORMAT_MPEG);
+ addFileType("3GP", FILE_TYPE_3GPP, "video/3gpp", MtpConstants.FORMAT_3GP_CONTAINER);
+ addFileType("3GPP", FILE_TYPE_3GPP, "video/3gpp", MtpConstants.FORMAT_3GP_CONTAINER);
+ addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2", MtpConstants.FORMAT_3GP_CONTAINER);
+ addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2", MtpConstants.FORMAT_3GP_CONTAINER);
addFileType("MKV", FILE_TYPE_MKV, "video/x-matroska");
addFileType("WEBM", FILE_TYPE_MKV, "video/x-matroska");
addFileType("TS", FILE_TYPE_MP2TS, "video/mp2ts");
- if (isWMVEnabled()) {
- addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv");
- addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf");
- }
+ addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv", MtpConstants.FORMAT_WMV);
+ addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf");
- addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg");
- addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg");
- addFileType("GIF", FILE_TYPE_GIF, "image/gif");
- addFileType("PNG", FILE_TYPE_PNG, "image/png");
- addFileType("BMP", FILE_TYPE_BMP, "image/x-ms-bmp");
+ addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg", MtpConstants.FORMAT_EXIF_JPEG);
+ addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg", MtpConstants.FORMAT_EXIF_JPEG);
+ addFileType("GIF", FILE_TYPE_GIF, "image/gif", MtpConstants.FORMAT_GIF);
+ addFileType("PNG", FILE_TYPE_PNG, "image/png", MtpConstants.FORMAT_PNG);
+ addFileType("BMP", FILE_TYPE_BMP, "image/x-ms-bmp", MtpConstants.FORMAT_BMP);
addFileType("WBMP", FILE_TYPE_WBMP, "image/vnd.wap.wbmp");
- addFileType("M3U", FILE_TYPE_M3U, "audio/x-mpegurl");
- addFileType("PLS", FILE_TYPE_PLS, "audio/x-scpls");
- addFileType("WPL", FILE_TYPE_WPL, "application/vnd.ms-wpl");
+ addFileType("M3U", FILE_TYPE_M3U, "audio/x-mpegurl", MtpConstants.FORMAT_M3U_PLAYLIST);
+ addFileType("PLS", FILE_TYPE_PLS, "audio/x-scpls", MtpConstants.FORMAT_PLS_PLAYLIST);
+ addFileType("WPL", FILE_TYPE_WPL, "application/vnd.ms-wpl", MtpConstants.FORMAT_WPL_PLAYLIST);
- // compute file extensions list for native Media Scanner
- StringBuilder builder = new StringBuilder();
- Iterator<String> iterator = sFileTypeMap.keySet().iterator();
-
- while (iterator.hasNext()) {
- if (builder.length() > 0) {
- builder.append(',');
- }
- builder.append(iterator.next());
- }
- sFileExtensions = builder.toString();
+ addFileType("FL", FILE_TYPE_FL, "application/x-android-drm-fl");
+
+ addFileType("TXT", FILE_TYPE_TEXT, "text/plain", MtpConstants.FORMAT_TEXT);
+ addFileType("HTM", FILE_TYPE_HTML, "text/html", MtpConstants.FORMAT_HTML);
+ addFileType("HTML", FILE_TYPE_HTML, "text/html", MtpConstants.FORMAT_HTML);
+ addFileType("PDF", FILE_TYPE_PDF, "application/pdf");
+ addFileType("DOC", FILE_TYPE_MS_WORD, "application/msword", MtpConstants.FORMAT_MS_WORD_DOCUMENT);
+ addFileType("XLS", FILE_TYPE_MS_EXCEL, "application/vnd.ms-excel", MtpConstants.FORMAT_MS_EXCEL_SPREADSHEET);
+ addFileType("PPT", FILE_TYPE_MS_POWERPOINT, "application/mspowerpoint", MtpConstants.FORMAT_MS_POWERPOINT_PRESENTATION);
+ addFileType("FLAC", FILE_TYPE_FLAC, "audio/flac", MtpConstants.FORMAT_FLAC);
+ addFileType("ZIP", FILE_TYPE_ZIP, "application/zip");
}
-
+
public static boolean isAudioFileType(int fileType) {
return ((fileType >= FIRST_AUDIO_FILE_TYPE &&
fileType <= LAST_AUDIO_FILE_TYPE) ||
(fileType >= FIRST_MIDI_FILE_TYPE &&
fileType <= LAST_MIDI_FILE_TYPE));
}
-
+
public static boolean isVideoFileType(int fileType) {
return (fileType >= FIRST_VIDEO_FILE_TYPE &&
fileType <= LAST_VIDEO_FILE_TYPE);
}
-
+
public static boolean isImageFileType(int fileType) {
return (fileType >= FIRST_IMAGE_FILE_TYPE &&
fileType <= LAST_IMAGE_FILE_TYPE);
}
-
+
public static boolean isPlayListFileType(int fileType) {
return (fileType >= FIRST_PLAYLIST_FILE_TYPE &&
fileType <= LAST_PLAYLIST_FILE_TYPE);
}
-
+
+ public static boolean isDrmFileType(int fileType) {
+ return (fileType >= FIRST_DRM_FILE_TYPE &&
+ fileType <= LAST_DRM_FILE_TYPE);
+ }
+
public static MediaFileType getFileType(String path) {
int lastDot = path.lastIndexOf(".");
if (lastDot < 0)
return null;
return sFileTypeMap.get(path.substring(lastDot + 1).toUpperCase());
}
-
+
+ public static boolean isMimeTypeMedia(String mimeType) {
+ int fileType = getFileTypeForMimeType(mimeType);
+ return isAudioFileType(fileType) || isVideoFileType(fileType)
+ || isImageFileType(fileType) || isPlayListFileType(fileType);
+ }
+
+ // generates a title based on file name
+ public static String getFileTitle(String path) {
+ // extract file name after last slash
+ int lastSlash = path.lastIndexOf('/');
+ if (lastSlash >= 0) {
+ lastSlash++;
+ if (lastSlash < path.length()) {
+ path = path.substring(lastSlash);
+ }
+ }
+ // truncate the file extension (if any)
+ int lastDot = path.lastIndexOf('.');
+ if (lastDot > 0) {
+ path = path.substring(0, lastDot);
+ }
+ return path;
+ }
+
public static int getFileTypeForMimeType(String mimeType) {
Integer value = sMimeTypeMap.get(mimeType);
return (value == null ? 0 : value.intValue());
}
+ public static String getMimeTypeForFile(String path) {
+ MediaFileType mediaFileType = getFileType(path);
+ return (mediaFileType == null ? null : mediaFileType.mimeType);
+ }
+
+ public static int getFormatCode(String fileName, String mimeType) {
+ if (mimeType != null) {
+ Integer value = sMimeTypeToFormatMap.get(mimeType);
+ if (value != null) {
+ return value.intValue();
+ }
+ }
+ int lastDot = fileName.lastIndexOf('.');
+ if (lastDot > 0) {
+ String extension = fileName.substring(lastDot + 1);
+ Integer value = sFileTypeToFormatMap.get(extension);
+ if (value != null) {
+ return value.intValue();
+ }
+ }
+ return MtpConstants.FORMAT_UNDEFINED;
+ }
+
+ public static String getMimeTypeForFormatCode(int formatCode) {
+ return sFormatToMimeTypeMap.get(formatCode);
+ }
}
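MediaFile is a hidden framework class, so the helpers above are not public API; the interesting part is the lookup order in getFormatCode(): the MIME type wins when known, otherwise the file extension is tried, otherwise the undefined MTP format code is returned. A hypothetical stand-alone sketch of that same fallback logic (class name and maps are illustrative; 0x3000 is the MTP "undefined object" format code):

    import java.util.HashMap;

    /** Hypothetical illustration of the lookup order used by MediaFile.getFormatCode(). */
    public class FormatCodeLookup {
        public static final int FORMAT_UNDEFINED = 0x3000;  // MTP "undefined object" format code

        private final HashMap<String, Integer> mimeToFormat = new HashMap<String, Integer>();
        private final HashMap<String, Integer> extensionToFormat = new HashMap<String, Integer>();

        public void add(String extension, String mimeType, int formatCode) {
            extensionToFormat.put(extension.toUpperCase(), formatCode);
            mimeToFormat.put(mimeType, formatCode);
        }

        /** The MIME type wins when known; otherwise fall back to the file extension. */
        public int getFormatCode(String fileName, String mimeType) {
            if (mimeType != null && mimeToFormat.containsKey(mimeType)) {
                return mimeToFormat.get(mimeType);
            }
            int lastDot = fileName.lastIndexOf('.');
            if (lastDot > 0) {
                // normalize case; extensions are stored upper-case in this sketch
                Integer value = extensionToFormat.get(
                        fileName.substring(lastDot + 1).toUpperCase());
                if (value != null) return value;
            }
            return FORMAT_UNDEFINED;
        }
    }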
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index cb46a29..01134f2 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -1087,53 +1087,6 @@ public class MediaPlayer
private native void _reset();
/**
- * Suspends the MediaPlayer. The only methods that may be called while
- * suspended are {@link #reset()}, {@link #release()} and {@link #resume()}.
- * MediaPlayer will release its hardware resources as far as
- * possible and reasonable. A successfully suspended MediaPlayer will
- * cease sending events.
- * If suspension is successful, this method returns true, otherwise
- * false is returned and the player's state is not affected.
- * @hide
- */
- public boolean suspend() {
- if (native_suspend_resume(true) < 0) {
- return false;
- }
-
- stayAwake(false);
-
- // make sure none of the listeners get called anymore
- mEventHandler.removeCallbacksAndMessages(null);
-
- return true;
- }
-
- /**
- * Resumes the MediaPlayer. Only to be called after a previous (successful)
- * call to {@link #suspend()}.
- * MediaPlayer will return to a state close to what it was in before
- * suspension.
- * @hide
- */
- public boolean resume() {
- if (native_suspend_resume(false) < 0) {
- return false;
- }
-
- if (isPlaying()) {
- stayAwake(true);
- }
-
- return true;
- }
-
- /**
- * @hide
- */
- private native int native_suspend_resume(boolean isSuspend);
-
- /**
* Sets the audio stream type for this MediaPlayer. See {@link AudioManager}
* for a list of stream types. Must call this method before prepare() or
* prepareAsync() in order for the target stream type to become effective
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index b38124e..fcf6510 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -72,6 +72,9 @@ public class MediaRecorder
private String mPath;
private FileDescriptor mFd;
+ private boolean mPrepareAuxiliaryFile = false;
+ private String mPathAux;
+ private FileDescriptor mFdAux;
private EventHandler mEventHandler;
private OnErrorListener mOnErrorListener;
private OnInfoListener mOnInfoListener;
@@ -277,11 +280,37 @@ public class MediaRecorder
setVideoFrameRate(profile.videoFrameRate);
setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
setVideoEncodingBitRate(profile.videoBitRate);
- setAudioEncodingBitRate(profile.audioBitRate);
- setAudioChannels(profile.audioChannels);
- setAudioSamplingRate(profile.audioSampleRate);
setVideoEncoder(profile.videoCodec);
- setAudioEncoder(profile.audioCodec);
+ if (profile.quality >= CamcorderProfile.QUALITY_TIME_LAPSE_LOW &&
+ profile.quality <= CamcorderProfile.QUALITY_TIME_LAPSE_1080P) {
+ // Enable time lapse. Also don't set audio for time lapse.
+ setParameter(String.format("time-lapse-enable=1"));
+ } else {
+ setAudioEncodingBitRate(profile.audioBitRate);
+ setAudioChannels(profile.audioChannels);
+ setAudioSamplingRate(profile.audioSampleRate);
+ setAudioEncoder(profile.audioCodec);
+ }
+ }
+
+ /**
+ * Set video frame capture rate. This can be used to set a different video frame capture
+ * rate than the recorded video's playback rate. Currently this works only for time lapse mode.
+ *
+ * @param fps Rate at which frames should be captured in frames per second.
+ * The fps can go as low as desired. However the fastest fps will be limited by the hardware.
+ * For resolutions that can be captured by the video camera, the fastest fps can be computed using
+ * {@link android.hardware.Camera.Parameters#getPreviewFpsRange(int[])}. For higher
+ * resolutions the fastest fps may be more restrictive.
+ * Note that the recorder cannot guarantee that frames will be captured at the
+ * given rate due to camera/encoder limitations. However it tries to be as close as
+ * possible.
+ */
+ public void setCaptureRate(double fps) {
+ double timeBetweenFrameCapture = 1 / fps;
+ int timeBetweenFrameCaptureMs = (int) (1000 * timeBetweenFrameCapture);
+ setParameter(String.format("time-between-time-lapse-frame-capture=%d",
+ timeBetweenFrameCaptureMs));
}
/**
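setProfile() with a time-lapse quality level enables time lapse and skips the audio settings; setCaptureRate() then fixes the interval between captured frames. A minimal sketch wiring the two together (camera and preview-surface setup, permissions, and error handling are omitted; the 480p level and 0.5 fps rate are arbitrary choices):

    import java.io.IOException;

    import android.media.CamcorderProfile;
    import android.media.MediaRecorder;

    public class TimeLapseRecorderSketch {
        /** Configures a recorder that captures one frame every two seconds. */
        public static MediaRecorder configure(String outputPath) throws IOException {
            MediaRecorder recorder = new MediaRecorder();
            recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
            // A QUALITY_TIME_LAPSE_* profile makes setProfile() enable time lapse
            // and skip the audio parameters entirely.
            recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_TIME_LAPSE_480P));
            recorder.setCaptureRate(0.5);        // 0.5 fps = one captured frame every 2 seconds
            recorder.setOutputFile(outputPath);
            recorder.prepare();
            return recorder;
        }
    }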
@@ -451,6 +480,87 @@ public class MediaRecorder
}
/**
+ * Sets the auxiliary time lapse video's resolution and bitrate.
+ *
+ * The auxiliary video's resolution and bitrate are determined by the CamcorderProfile
+ * quality level {@link android.media.CamcorderProfile#QUALITY_HIGH}.
+ */
+ private void setAuxVideoParameters() {
+ CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
+ setParameter(String.format("video-aux-param-width=%d", profile.videoFrameWidth));
+ setParameter(String.format("video-aux-param-height=%d", profile.videoFrameHeight));
+ setParameter(String.format("video-aux-param-encoding-bitrate=%d", profile.videoBitRate));
+ }
+
+ /**
+ * Pass in the file descriptor for the auxiliary time lapse video. Call this before
+ * prepare().
+ *
+ * Sets file descriptor and parameters for auxiliary time lapse video. Time lapse mode
+     * can capture video (using the still camera) at resolutions higher than can be
+     * played back on the device. This function or
+     * {@link #setAuxiliaryOutputFile(String)} enables capture of a smaller video in
+ * parallel with the main time lapse video, which can be used to play back on the
+ * device. The smaller video is created by downsampling the main video. This call is
+ * optional and does not have to be called if parallel capture of a downsampled video
+ * is not desired.
+ *
+ * Note that while the main video resolution and bitrate is determined from the
+ * CamcorderProfile in {@link #setProfile(CamcorderProfile)}, the auxiliary video's
+ * resolution and bitrate are determined by the CamcorderProfile quality level
+ * {@link android.media.CamcorderProfile#QUALITY_HIGH}. All other encoding parameters
+ * remain the same for the main video and the auxiliary video.
+ *
+ * E.g. if the device supports the time lapse profile quality level
+ * {@link android.media.CamcorderProfile#QUALITY_TIME_LAPSE_1080P} but can playback at
+ * most 480p, the application might want to capture an auxiliary video of resolution
+ * 480p using this call.
+ *
+ * @param fd an open file descriptor to be written into.
+ */
+ public void setAuxiliaryOutputFile(FileDescriptor fd)
+ {
+ mPrepareAuxiliaryFile = true;
+ mPathAux = null;
+ mFdAux = fd;
+ setAuxVideoParameters();
+ }
+
+ /**
+ * Pass in the file path for the auxiliary time lapse video. Call this before
+ * prepare().
+ *
+ * Sets file path and parameters for auxiliary time lapse video. Time lapse mode can
+     * capture video (using the still camera) at resolutions higher than can be
+     * played back on the device. This function or
+     * {@link #setAuxiliaryOutputFile(FileDescriptor)} enables capture of a smaller
+ * video in parallel with the main time lapse video, which can be used to play back on
+ * the device. The smaller video is created by downsampling the main video. This call
+ * is optional and does not have to be called if parallel capture of a downsampled
+ * video is not desired.
+ *
+ * Note that while the main video resolution and bitrate is determined from the
+ * CamcorderProfile in {@link #setProfile(CamcorderProfile)}, the auxiliary video's
+ * resolution and bitrate are determined by the CamcorderProfile quality level
+ * {@link android.media.CamcorderProfile#QUALITY_HIGH}. All other encoding parameters
+ * remain the same for the main video and the auxiliary video.
+ *
+ * E.g. if the device supports the time lapse profile quality level
+ * {@link android.media.CamcorderProfile#QUALITY_TIME_LAPSE_1080P} but can playback at
+ * most 480p, the application might want to capture an auxiliary video of resolution
+ * 480p using this call.
+ *
+ * @param path The pathname to use.
+ */
+ public void setAuxiliaryOutputFile(String path)
+ {
+ mPrepareAuxiliaryFile = true;
+ mFdAux = null;
+ mPathAux = path;
+ setAuxVideoParameters();
+ }
+
+ /**
* Pass in the file descriptor of the file to be written. Call this after
* setOutputFormat() but before prepare().
*
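When the main time-lapse resolution is higher than the device can play back, setAuxiliaryOutputFile() requests a downsampled companion file whose resolution and bitrate come from the QUALITY_HIGH profile. A minimal sketch extending the previous one (paths and quality level are arbitrary; camera setup is again omitted):

    import java.io.IOException;

    import android.media.CamcorderProfile;
    import android.media.MediaRecorder;

    public class TimeLapseWithAuxOutputSketch {
        /** Main 1080p time-lapse output plus a downsampled copy the device can play back. */
        public static MediaRecorder configure(String mainPath, String auxPath) throws IOException {
            MediaRecorder recorder = new MediaRecorder();
            recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
            recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_TIME_LAPSE_1080P));
            recorder.setCaptureRate(1.0);                  // one captured frame per second
            recorder.setOutputFile(mainPath);              // high-resolution main file
            recorder.setAuxiliaryOutputFile(auxPath);      // downsampled copy, sized by QUALITY_HIGH
            recorder.prepare();
            return recorder;
        }
    }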
@@ -481,6 +591,8 @@ public class MediaRecorder
// native implementation
private native void _setOutputFile(FileDescriptor fd, long offset, long length)
throws IllegalStateException, IOException;
+ private native void _setOutputFileAux(FileDescriptor fd)
+ throws IllegalStateException, IOException;
private native void _prepare() throws IllegalStateException, IOException;
/**
@@ -506,6 +618,22 @@ public class MediaRecorder
} else {
throw new IOException("No valid output file");
}
+
+ if (mPrepareAuxiliaryFile) {
+ if (mPathAux != null) {
+ FileOutputStream fos = new FileOutputStream(mPathAux);
+ try {
+ _setOutputFileAux(fos.getFD());
+ } finally {
+ fos.close();
+ }
+ } else if (mFdAux != null) {
+ _setOutputFileAux(mFdAux);
+ } else {
+ throw new IOException("No valid output file");
+ }
+ }
+
_prepare();
}
diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java
index 3333268..3f6b7a2 100644
--- a/media/java/android/media/MediaScanner.java
+++ b/media/java/android/media/MediaScanner.java
@@ -26,6 +26,7 @@ import android.content.Context;
import android.content.IContentProvider;
import android.database.Cursor;
import android.database.SQLException;
+import android.drm.DrmManagerClient;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Process;
@@ -34,6 +35,7 @@ import android.os.SystemProperties;
import android.provider.MediaStore;
import android.provider.Settings;
import android.provider.MediaStore.Audio;
+import android.provider.MediaStore.Files;
import android.provider.MediaStore.Images;
import android.provider.MediaStore.Video;
import android.provider.MediaStore.Audio.Genres;
@@ -109,41 +111,32 @@ public class MediaScanner
private final static String TAG = "MediaScanner";
- private static final String[] AUDIO_PROJECTION = new String[] {
- Audio.Media._ID, // 0
- Audio.Media.DATA, // 1
- Audio.Media.DATE_MODIFIED, // 2
+ private static final String[] FILES_PRESCAN_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ Files.FileColumns.DATA, // 1
+ Files.FileColumns.FORMAT, // 2
+ Files.FileColumns.DATE_MODIFIED, // 3
};
- private static final int ID_AUDIO_COLUMN_INDEX = 0;
- private static final int PATH_AUDIO_COLUMN_INDEX = 1;
- private static final int DATE_MODIFIED_AUDIO_COLUMN_INDEX = 2;
-
- private static final String[] VIDEO_PROJECTION = new String[] {
- Video.Media._ID, // 0
- Video.Media.DATA, // 1
- Video.Media.DATE_MODIFIED, // 2
+ private static final String[] ID_PROJECTION = new String[] {
+ Files.FileColumns._ID,
};
- private static final int ID_VIDEO_COLUMN_INDEX = 0;
- private static final int PATH_VIDEO_COLUMN_INDEX = 1;
- private static final int DATE_MODIFIED_VIDEO_COLUMN_INDEX = 2;
+ private static final int FILES_PRESCAN_ID_COLUMN_INDEX = 0;
+ private static final int FILES_PRESCAN_PATH_COLUMN_INDEX = 1;
+ private static final int FILES_PRESCAN_FORMAT_COLUMN_INDEX = 2;
+ private static final int FILES_PRESCAN_DATE_MODIFIED_COLUMN_INDEX = 3;
- private static final String[] IMAGES_PROJECTION = new String[] {
- Images.Media._ID, // 0
- Images.Media.DATA, // 1
- Images.Media.DATE_MODIFIED, // 2
+ private static final String[] MEDIA_PRESCAN_PROJECTION = new String[] {
+ MediaStore.MediaColumns._ID, // 0
+ MediaStore.MediaColumns.DATA, // 1
+ MediaStore.MediaColumns.DATE_MODIFIED, // 2
};
- private static final int ID_IMAGES_COLUMN_INDEX = 0;
- private static final int PATH_IMAGES_COLUMN_INDEX = 1;
- private static final int DATE_MODIFIED_IMAGES_COLUMN_INDEX = 2;
+ private static final int MEDIA_PRESCAN_ID_COLUMN_INDEX = 0;
+ private static final int MEDIA_PRESCAN_PATH_COLUMN_INDEX = 1;
+ private static final int MEDIA_PRESCAN_DATE_MODIFIED_COLUMN_INDEX = 2;
- private static final String[] PLAYLISTS_PROJECTION = new String[] {
- Audio.Playlists._ID, // 0
- Audio.Playlists.DATA, // 1
- Audio.Playlists.DATE_MODIFIED, // 2
- };
private static final String[] PLAYLIST_MEMBERS_PROJECTION = new String[] {
Audio.Playlists.Members.PLAYLIST_ID, // 0
@@ -304,7 +297,9 @@ public class MediaScanner
private Uri mThumbsUri;
private Uri mGenresUri;
private Uri mPlaylistsUri;
+ private Uri mFilesUri;
private boolean mProcessPlaylists, mProcessGenres;
+ private int mMtpObjectHandle;
// used when scanning the image database so we know whether we have to prune
// old thumbnail files
@@ -339,21 +334,23 @@ public class MediaScanner
long mRowId;
String mPath;
long mLastModified;
+ int mFormat;
boolean mSeenInFileSystem;
boolean mLastModifiedChanged;
- FileCacheEntry(Uri tableUri, long rowId, String path, long lastModified) {
+ FileCacheEntry(Uri tableUri, long rowId, String path, long lastModified, int format) {
mTableUri = tableUri;
mRowId = rowId;
mPath = path;
mLastModified = lastModified;
+ mFormat = format;
mSeenInFileSystem = false;
mLastModifiedChanged = false;
}
@Override
public String toString() {
- return mPath;
+ return mPath + " mTableUri: " + mTableUri + " mRowId: " + mRowId;
}
}
@@ -364,6 +361,7 @@ public class MediaScanner
private ArrayList<FileCacheEntry> mPlayLists;
private HashMap<String, Uri> mGenreCache;
+ private DrmManagerClient mDrmManagerClient = null;
public MediaScanner(Context c) {
native_setup();
@@ -385,6 +383,11 @@ public class MediaScanner
private MyMediaScannerClient mClient = new MyMediaScannerClient();
+ private boolean isDrmEnabled() {
+ String prop = SystemProperties.get("drm.service.enabled");
+ return prop != null && prop.equals("true");
+ }
+
private class MyMediaScannerClient implements MediaScannerClient {
private String mArtist;
@@ -431,6 +434,9 @@ public class MediaScanner
}
mMimeType = null;
+ mFileType = 0;
+ mFileSize = fileSize;
+
// try mimeType first, if it is specified
if (mimeType != null) {
mFileType = MediaFile.getFileTypeForMimeType(mimeType);
@@ -438,7 +444,6 @@ public class MediaScanner
mMimeType = mimeType;
}
}
- mFileSize = fileSize;
// if mimeType was not specified, compute file type based on file extension.
if (mMimeType == null) {
@@ -449,13 +454,27 @@ public class MediaScanner
}
}
+ if (isDrmEnabled() && MediaFile.isDrmFileType(mFileType)) {
+ mFileType = getFileTypeFromDrm(path);
+ }
+
String key = path;
if (mCaseInsensitivePaths) {
key = path.toLowerCase();
}
FileCacheEntry entry = mFileCache.get(key);
if (entry == null) {
- entry = new FileCacheEntry(null, 0, path, 0);
+ Uri tableUri;
+ if (MediaFile.isVideoFileType(mFileType)) {
+ tableUri = mVideoUri;
+ } else if (MediaFile.isImageFileType(mFileType)) {
+ tableUri = mImagesUri;
+ } else if (MediaFile.isAudioFileType(mFileType)) {
+ tableUri = mAudioUri;
+ } else {
+ tableUri = mFilesUri;
+ }
+ entry = new FileCacheEntry(tableUri, 0, path, 0, 0);
mFileCache.put(key, entry);
}
entry.mSeenInFileSystem = true;
@@ -500,7 +519,8 @@ public class MediaScanner
doScanFile(path, mimeType, lastModified, fileSize, false);
}
- public Uri doScanFile(String path, String mimeType, long lastModified, long fileSize, boolean scanAlways) {
+ public Uri doScanFile(String path, String mimeType, long lastModified,
+ long fileSize, boolean scanAlways) {
Uri result = null;
// long t1 = System.currentTimeMillis();
try {
@@ -515,7 +535,9 @@ public class MediaScanner
boolean music = (lowpath.indexOf(MUSIC_DIR) > 0) ||
(!ringtones && !notifications && !alarms && !podcasts);
- if (!MediaFile.isImageFileType(mFileType)) {
+ // we only extract metadata for audio and video files
+ if (MediaFile.isAudioFileType(mFileType)
+ || MediaFile.isVideoFileType(mFileType)) {
processFile(path, mimeType, this);
}
@@ -555,7 +577,8 @@ public class MediaScanner
mTitle = value;
} else if (name.equalsIgnoreCase("artist") || name.startsWith("artist;")) {
mArtist = value.trim();
- } else if (name.equalsIgnoreCase("albumartist") || name.startsWith("albumartist;")) {
+ } else if (name.equalsIgnoreCase("albumartist") || name.startsWith("albumartist;")
+ || name.equalsIgnoreCase("band") || name.startsWith("band;")) {
mAlbumArtist = value.trim();
} else if (name.equalsIgnoreCase("album") || name.startsWith("album;")) {
mAlbum = value.trim();
@@ -654,21 +677,6 @@ public class MediaScanner
boolean alarms, boolean music, boolean podcasts)
throws RemoteException {
// update database
- Uri tableUri;
- boolean isAudio = MediaFile.isAudioFileType(mFileType);
- boolean isVideo = MediaFile.isVideoFileType(mFileType);
- boolean isImage = MediaFile.isImageFileType(mFileType);
- if (isVideo) {
- tableUri = mVideoUri;
- } else if (isImage) {
- tableUri = mImagesUri;
- } else if (isAudio) {
- tableUri = mAudioUri;
- } else {
- // don't add file to database if not audio, video or image
- return null;
- }
- entry.mTableUri = tableUri;
// use album artist if artist is missing
if (mArtist == null || mArtist.length() == 0) {
@@ -678,20 +686,7 @@ public class MediaScanner
ContentValues values = toValues();
String title = values.getAsString(MediaStore.MediaColumns.TITLE);
if (title == null || TextUtils.isEmpty(title.trim())) {
- title = values.getAsString(MediaStore.MediaColumns.DATA);
- // extract file name after last slash
- int lastSlash = title.lastIndexOf('/');
- if (lastSlash >= 0) {
- lastSlash++;
- if (lastSlash < title.length()) {
- title = title.substring(lastSlash);
- }
- }
- // truncate the file extension (if any)
- int lastDot = title.lastIndexOf('.');
- if (lastDot > 0) {
- title = title.substring(0, lastDot);
- }
+ title = MediaFile.getFileTitle(values.getAsString(MediaStore.MediaColumns.DATA));
values.put(MediaStore.MediaColumns.TITLE, title);
}
String album = values.getAsString(Audio.Media.ALBUM);
@@ -715,7 +710,7 @@ public class MediaScanner
}
}
long rowId = entry.mRowId;
- if (isAudio && rowId == 0) {
+ if (MediaFile.isAudioFileType(mFileType) && (rowId == 0 || mMtpObjectHandle != 0)) {
// Only set these for new entries. For existing entries, they
// may have been modified later, and we want to keep the current
// values so that custom ringtones still show up in the ringtone
@@ -768,8 +763,15 @@ public class MediaScanner
}
}
+ Uri tableUri = entry.mTableUri;
Uri result = null;
if (rowId == 0) {
+ if (mMtpObjectHandle != 0) {
+ values.put(MediaStore.MediaColumns.MEDIA_SCANNER_NEW_OBJECT_ID, mMtpObjectHandle);
+ }
+ if (tableUri == mFilesUri) {
+ values.put(Files.FileColumns.FORMAT, MediaFile.getFormatCode(entry.mPath, mMimeType));
+ }
// new file, insert it
result = mMediaProvider.insert(tableUri, values);
if (result != null) {
@@ -883,9 +885,30 @@ public class MediaScanner
}
}
+ private int getFileTypeFromDrm(String path) {
+ if (!isDrmEnabled()) {
+ return 0;
+ }
+
+ int resultFileType = 0;
+
+ if (mDrmManagerClient == null) {
+ mDrmManagerClient = new DrmManagerClient(mContext);
+ }
+
+ if (mDrmManagerClient.canHandle(path, null)) {
+ String drmMimetype = mDrmManagerClient.getOriginalMimeType(path);
+ if (drmMimetype != null) {
+ mMimeType = drmMimetype;
+ resultFileType = MediaFile.getFileTypeForMimeType(drmMimetype);
+ }
+ }
+ return resultFileType;
+ }
+
}; // end of anonymous MediaScannerClient instance
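The DRM branch above defers to the DRM framework for the real MIME type of a protected file. Below is a minimal standalone sketch of the same lookup, using only the public android.drm.DrmManagerClient calls that appear in the hunk; the class and method names in the sketch are illustrative and not part of this patch.

    import android.content.Context;
    import android.drm.DrmManagerClient;

    public class DrmTypeHelper {
        // Returns the original MIME type wrapped inside a DRM file,
        // or null if the DRM framework cannot handle the path.
        public static String resolveDrmMimeType(Context context, String path) {
            DrmManagerClient client = new DrmManagerClient(context);
            if (client.canHandle(path, null)) {
                return client.getOriginalMimeType(path);
            }
            return null;
        }
    }

The scanner then maps that MIME type back to an internal file type with MediaFile.getFileTypeForMimeType(), as getFileTypeFromDrm() does above.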
- private void prescan(String filePath) throws RemoteException {
+ private void prescan(String filePath, boolean prescanFiles) throws RemoteException {
Cursor c = null;
String where = null;
String[] selectionArgs = null;
@@ -901,54 +924,26 @@ public class MediaScanner
mPlayLists.clear();
}
+ if (filePath != null) {
+ // query for only one file
+ where = Files.FileColumns.DATA + "=?";
+ selectionArgs = new String[] { filePath };
+ }
+
// Build the list of files from the content provider
try {
- // Read existing files from the audio table
- if (filePath != null) {
- where = MediaStore.Audio.Media.DATA + "=?";
- selectionArgs = new String[] { filePath };
- }
- c = mMediaProvider.query(mAudioUri, AUDIO_PROJECTION, where, selectionArgs, null);
-
- if (c != null) {
- try {
- while (c.moveToNext()) {
- long rowId = c.getLong(ID_AUDIO_COLUMN_INDEX);
- String path = c.getString(PATH_AUDIO_COLUMN_INDEX);
- long lastModified = c.getLong(DATE_MODIFIED_AUDIO_COLUMN_INDEX);
-
- // Only consider entries with absolute path names.
- // This allows storing URIs in the database without the
- // media scanner removing them.
- if (path.startsWith("/")) {
- String key = path;
- if (mCaseInsensitivePaths) {
- key = path.toLowerCase();
- }
- mFileCache.put(key, new FileCacheEntry(mAudioUri, rowId, path,
- lastModified));
- }
- }
- } finally {
- c.close();
- c = null;
- }
- }
+ if (prescanFiles) {
+ // First read existing files from the files table
- // Read existing files from the video table
- if (filePath != null) {
- where = MediaStore.Video.Media.DATA + "=?";
- } else {
- where = null;
- }
- c = mMediaProvider.query(mVideoUri, VIDEO_PROJECTION, where, selectionArgs, null);
+ c = mMediaProvider.query(mFilesUri, FILES_PRESCAN_PROJECTION,
+ where, selectionArgs, null);
- if (c != null) {
- try {
+ if (c != null) {
while (c.moveToNext()) {
- long rowId = c.getLong(ID_VIDEO_COLUMN_INDEX);
- String path = c.getString(PATH_VIDEO_COLUMN_INDEX);
- long lastModified = c.getLong(DATE_MODIFIED_VIDEO_COLUMN_INDEX);
+ long rowId = c.getLong(FILES_PRESCAN_ID_COLUMN_INDEX);
+ String path = c.getString(FILES_PRESCAN_PATH_COLUMN_INDEX);
+ int format = c.getInt(FILES_PRESCAN_FORMAT_COLUMN_INDEX);
+ long lastModified = c.getLong(FILES_PRESCAN_DATE_MODIFIED_COLUMN_INDEX);
// Only consider entries with absolute path names.
// This allows storing URIs in the database without the
@@ -958,89 +953,30 @@ public class MediaScanner
if (mCaseInsensitivePaths) {
key = path.toLowerCase();
}
- mFileCache.put(key, new FileCacheEntry(mVideoUri, rowId, path,
- lastModified));
- }
- }
- } finally {
- c.close();
- c = null;
- }
- }
-
- // Read existing files from the images table
- if (filePath != null) {
- where = MediaStore.Images.Media.DATA + "=?";
- } else {
- where = null;
- }
- mOriginalCount = 0;
- c = mMediaProvider.query(mImagesUri, IMAGES_PROJECTION, where, selectionArgs, null);
- if (c != null) {
- try {
- mOriginalCount = c.getCount();
- while (c.moveToNext()) {
- long rowId = c.getLong(ID_IMAGES_COLUMN_INDEX);
- String path = c.getString(PATH_IMAGES_COLUMN_INDEX);
- long lastModified = c.getLong(DATE_MODIFIED_IMAGES_COLUMN_INDEX);
-
- // Only consider entries with absolute path names.
- // This allows storing URIs in the database without the
- // media scanner removing them.
- if (path.startsWith("/")) {
- String key = path;
- if (mCaseInsensitivePaths) {
- key = path.toLowerCase();
- }
- mFileCache.put(key, new FileCacheEntry(mImagesUri, rowId, path,
- lastModified));
- }
+ FileCacheEntry entry = new FileCacheEntry(mFilesUri, rowId, path,
+ lastModified, format);
+ mFileCache.put(key, entry);
+ }
}
- } finally {
c.close();
c = null;
}
}
-
- if (mProcessPlaylists) {
- // Read existing files from the playlists table
- if (filePath != null) {
- where = MediaStore.Audio.Playlists.DATA + "=?";
- } else {
- where = null;
- }
- c = mMediaProvider.query(mPlaylistsUri, PLAYLISTS_PROJECTION, where, selectionArgs, null);
-
- if (c != null) {
- try {
- while (c.moveToNext()) {
- String path = c.getString(PATH_PLAYLISTS_COLUMN_INDEX);
-
- if (path != null && path.length() > 0) {
- long rowId = c.getLong(ID_PLAYLISTS_COLUMN_INDEX);
- long lastModified = c.getLong(DATE_MODIFIED_PLAYLISTS_COLUMN_INDEX);
-
- String key = path;
- if (mCaseInsensitivePaths) {
- key = path.toLowerCase();
- }
- mFileCache.put(key, new FileCacheEntry(mPlaylistsUri, rowId, path,
- lastModified));
- }
- }
- } finally {
- c.close();
- c = null;
- }
- }
- }
}
finally {
if (c != null) {
c.close();
}
}
+
+ // compute original number of images
+ mOriginalCount = 0;
+ c = mMediaProvider.query(mImagesUri, ID_PROJECTION, null, null, null);
+ if (c != null) {
+ mOriginalCount = c.getCount();
+ c.close();
+ }
}
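With this change prescan() walks a single files table instead of separate audio, video, image and playlist tables. A rough sketch of the equivalent query issued through a ContentResolver rather than the scanner's IContentProvider follows; it assumes a Context named context is in scope, and note that Files.FileColumns.FORMAT is used by this patch itself and may not be part of the public SDK.

    Cursor c = context.getContentResolver().query(
            MediaStore.Files.getContentUri("external"),
            new String[] {
                MediaStore.Files.FileColumns._ID,           // 0
                MediaStore.Files.FileColumns.DATA,          // 1
                MediaStore.Files.FileColumns.FORMAT,        // 2
                MediaStore.Files.FileColumns.DATE_MODIFIED  // 3
            },
            null, null, null);
    try {
        while (c != null && c.moveToNext()) {
            long rowId = c.getLong(0);
            String path = c.getString(1);
            int format = c.getInt(2);
            long lastModified = c.getLong(3);
            // build an in-memory cache keyed by path, as mFileCache is built above
        }
    } finally {
        if (c != null) {
            c.close();
        }
    }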
private boolean inScanDirectory(String path, String[] directories) {
@@ -1107,12 +1043,14 @@ public class MediaScanner
// remove database entries for files that no longer exist.
boolean fileMissing = false;
- if (!entry.mSeenInFileSystem) {
- if (inScanDirectory(path, directories)) {
+ if (!entry.mSeenInFileSystem && !MtpConstants.isAbstractObject(entry.mFormat)) {
+ if (entry.mFormat != MtpConstants.FORMAT_ASSOCIATION &&
+ inScanDirectory(path, directories)) {
// we didn't see this file in the scan directory.
fileMissing = true;
} else {
- // the file is outside of our scan directory,
+ // the file is actually a directory or other abstract object
+ // or is outside of our scan directory,
// so we need to check for file existence here.
File testFile = new File(path);
if (!testFile.exists()) {
@@ -1132,9 +1070,11 @@ public class MediaScanner
ContentValues values = new ContentValues();
values.put(MediaStore.Audio.Playlists.DATA, "");
values.put(MediaStore.Audio.Playlists.DATE_MODIFIED, 0);
- mMediaProvider.update(ContentUris.withAppendedId(mPlaylistsUri, entry.mRowId), values, null, null);
+ mMediaProvider.update(ContentUris.withAppendedId(mPlaylistsUri, entry.mRowId),
+ values, null, null);
} else {
- mMediaProvider.delete(ContentUris.withAppendedId(entry.mTableUri, entry.mRowId), null, null);
+ mMediaProvider.delete(ContentUris.withAppendedId(mFilesUri, entry.mRowId),
+ null, null);
iterator.remove();
}
}
@@ -1162,6 +1102,7 @@ public class MediaScanner
mVideoUri = Video.Media.getContentUri(volumeName);
mImagesUri = Images.Media.getContentUri(volumeName);
mThumbsUri = Images.Thumbnails.getContentUri(volumeName);
+ mFilesUri = Files.getContentUri(volumeName);
if (!volumeName.equals("internal")) {
// we only support playlists on external media
@@ -1170,9 +1111,12 @@ public class MediaScanner
mGenreCache = new HashMap<String, Uri>();
mGenresUri = Genres.getContentUri(volumeName);
mPlaylistsUri = Playlists.getContentUri(volumeName);
- // assuming external storage is FAT (case insensitive), except on the simulator.
- if ( Process.supportsProcesses()) {
- mCaseInsensitivePaths = true;
+
+ mCaseInsensitivePaths = !mContext.getResources().getBoolean(
+ com.android.internal.R.bool.config_caseSensitiveExternalStorage);
+ if (!Process.supportsProcesses()) {
+ // Simulator uses host file system, so it should be case sensitive.
+ mCaseInsensitivePaths = false;
}
}
}
@@ -1181,11 +1125,11 @@ public class MediaScanner
try {
long start = System.currentTimeMillis();
initialize(volumeName);
- prescan(null);
+ prescan(null, true);
long prescan = System.currentTimeMillis();
for (int i = 0; i < directories.length; i++) {
- processDirectory(directories[i], MediaFile.sFileExtensions, mClient);
+ processDirectory(directories[i], mClient);
}
long scan = System.currentTimeMillis();
postscan(directories);
@@ -1212,7 +1156,7 @@ public class MediaScanner
public Uri scanSingleFile(String path, String volumeName, String mimeType) {
try {
initialize(volumeName);
- prescan(path);
+ prescan(path, true);
File file = new File(path);
@@ -1227,6 +1171,36 @@ public class MediaScanner
}
}
+ public void scanMtpFile(String path, String volumeName, int objectHandle, int format) {
+ MediaFile.MediaFileType mediaFileType = MediaFile.getFileType(path);
+ int fileType = (mediaFileType == null ? 0 : mediaFileType.fileType);
+
+ if (!MediaFile.isAudioFileType(fileType) && !MediaFile.isVideoFileType(fileType) &&
+ !MediaFile.isImageFileType(fileType) && !MediaFile.isPlayListFileType(fileType)) {
+ // nothing to do
+ return;
+ }
+
+ mMtpObjectHandle = objectHandle;
+ try {
+ initialize(volumeName);
+ // MTP will create a file entry for us so we don't want to do it in prescan
+ prescan(path, false);
+
+ File file = new File(path);
+
+ // File.lastModified() returns milliseconds, but the database stores seconds.
+ long lastModifiedSeconds = file.lastModified() / 1000;
+
+ // always scan the file, so we can return the content://media Uri for existing files
+ mClient.doScanFile(path, mediaFileType.mimeType, lastModifiedSeconds, file.length(), true);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in MediaScanner.scanFile()", e);
+ } finally {
+ mMtpObjectHandle = 0;
+ }
+ }
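scanMtpFile() is the entry point MtpDatabase uses after an incoming MTP transfer finishes (see endSendObject() in MtpDatabase.java below). A hedged usage sketch, with the path and handle values purely illustrative:

    // After the MTP stack has written the file and assigned it a handle:
    MediaScanner scanner = new MediaScanner(context);   // context: a valid android Context
    scanner.scanMtpFile("/mnt/sdcard/Music/track.mp3",  // hypothetical path written by MTP
            "external",                                  // volume name
            objectHandle,                                // handle chosen by the MTP stack
            MtpConstants.FORMAT_MP3);                    // MTP format code of the transfer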
+
// returns the number of matching file/directory names, starting from the right
private int matchPaths(String path1, String path2) {
int result = 0;
@@ -1506,7 +1480,7 @@ public class MediaScanner
}
}
- private native void processDirectory(String path, String extensions, MediaScannerClient client);
+ private native void processDirectory(String path, MediaScannerClient client);
private native void processFile(String path, String mimeType, MediaScannerClient client);
public native void setLocale(String locale);
diff --git a/media/java/android/media/MtpClient.java b/media/java/android/media/MtpClient.java
new file mode 100644
index 0000000..98da1f6
--- /dev/null
+++ b/media/java/android/media/MtpClient.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+/**
+ * {@hide}
+ */
+public class MtpClient {
+
+ private static final String TAG = "MtpClient";
+
+ private final Listener mListener;
+
+ static {
+ System.loadLibrary("media_jni");
+ }
+
+ public MtpClient(Listener listener) {
+ if (listener == null) {
+ throw new NullPointerException("MtpClient: listener is null");
+ }
+ mListener = listener;
+ native_setup();
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ native_finalize();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ public boolean start() {
+ return native_start();
+ }
+
+ public void stop() {
+ native_stop();
+ }
+
+ public boolean deleteObject(int deviceID, long objectID) {
+ return native_delete_object(deviceID, objectID);
+ }
+
+ public long getParent(int deviceID, long objectID) {
+ return native_get_parent(deviceID, objectID);
+ }
+
+ public long getStorageID(int deviceID, long objectID) {
+ return native_get_storage_id(deviceID, objectID);
+ }
+
+ // create a file descriptor for reading the contents of an object over MTP
+ public ParcelFileDescriptor openFile(int deviceID, long objectID) {
+ return native_open_file(deviceID, objectID);
+ }
+
+ public interface Listener {
+ // called when a new MTP device has been discovered
+ void deviceAdded(int id);
+
+ // called when an MTP device has been removed
+ void deviceRemoved(int id);
+ }
+
+ // called from native code
+ private void deviceAdded(int id) {
+ Log.d(TAG, "deviceAdded " + id);
+ mListener.deviceAdded(id);
+ }
+
+ // called from native code
+ private void deviceRemoved(int id) {
+ Log.d(TAG, "deviceRemoved " + id);
+ mListener.deviceRemoved(id);
+ }
+
+ // used by the JNI code
+ private int mNativeContext;
+
+ private native final void native_setup();
+ private native final void native_finalize();
+ private native boolean native_start();
+ private native void native_stop();
+ private native boolean native_delete_object(int deviceID, long objectID);
+ private native long native_get_parent(int deviceID, long objectID);
+ private native long native_get_storage_id(int deviceID, long objectID);
+ private native ParcelFileDescriptor native_open_file(int deviceID, long objectID);
+}
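MtpClient is a hidden (@hide) host-side API, so the following is only a sketch of how a system component might drive it; the listener callbacks arrive from the native USB host code once start() succeeds:

    MtpClient client = new MtpClient(new MtpClient.Listener() {
        public void deviceAdded(int id) {
            Log.i("MtpDemo", "MTP device attached: " + id);
        }
        public void deviceRemoved(int id) {
            Log.i("MtpDemo", "MTP device detached: " + id);
        }
    });
    if (client.start()) {
        // ... query storages/objects, openFile() to read object contents ...
        client.stop();
    }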
diff --git a/media/java/android/media/MtpConstants.java b/media/java/android/media/MtpConstants.java
new file mode 100644
index 0000000..a7d33ce
--- /dev/null
+++ b/media/java/android/media/MtpConstants.java
@@ -0,0 +1,404 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+public final class MtpConstants {
+
+// MTP Response Codes
+ public static final int RESPONSE_UNDEFINED = 0x2000;
+ public static final int RESPONSE_OK = 0x2001;
+ public static final int RESPONSE_GENERAL_ERROR = 0x2002;
+ public static final int RESPONSE_SESSION_NOT_OPEN = 0x2003;
+ public static final int RESPONSE_INVALID_TRANSACTION_ID = 0x2004;
+ public static final int RESPONSE_OPERATION_NOT_SUPPORTED = 0x2005;
+ public static final int RESPONSE_PARAMETER_NOT_SUPPORTED = 0x2006;
+ public static final int RESPONSE_INCOMPLETE_TRANSFER = 0x2007;
+ public static final int RESPONSE_INVALID_STORAGE_ID = 0x2008;
+ public static final int RESPONSE_INVALID_OBJECT_HANDLE = 0x2009;
+ public static final int RESPONSE_DEVICE_PROP_NOT_SUPPORTED = 0x200A;
+ public static final int RESPONSE_INVALID_OBJECT_FORMAT_CODE = 0x200B;
+ public static final int RESPONSE_STORAGE_FULL = 0x200C;
+ public static final int RESPONSE_OBJECT_WRITE_PROTECTED = 0x200D;
+ public static final int RESPONSE_STORE_READ_ONLY = 0x200E;
+ public static final int RESPONSE_ACCESS_DENIED = 0x200F;
+ public static final int RESPONSE_NO_THUMBNAIL_PRESENT = 0x2010;
+ public static final int RESPONSE_SELF_TEST_FAILED = 0x2011;
+ public static final int RESPONSE_PARTIAL_DELETION = 0x2012;
+ public static final int RESPONSE_STORE_NOT_AVAILABLE = 0x2013;
+ public static final int RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED = 0x2014;
+ public static final int RESPONSE_NO_VALID_OBJECT_INFO = 0x2015;
+ public static final int RESPONSE_INVALID_CODE_FORMAT = 0x2016;
+ public static final int RESPONSE_UNKNOWN_VENDOR_CODE = 0x2017;
+ public static final int RESPONSE_CAPTURE_ALREADY_TERMINATED = 0x2018;
+ public static final int RESPONSE_DEVICE_BUSY = 0x2019;
+ public static final int RESPONSE_INVALID_PARENT_OBJECT = 0x201A;
+ public static final int RESPONSE_INVALID_DEVICE_PROP_FORMAT = 0x201B;
+ public static final int RESPONSE_INVALID_DEVICE_PROP_VALUE = 0x201C;
+ public static final int RESPONSE_INVALID_PARAMETER = 0x201D;
+ public static final int RESPONSE_SESSION_ALREADY_OPEN = 0x201E;
+ public static final int RESPONSE_TRANSACTION_CANCELLED = 0x201F;
+ public static final int RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED = 0x2020;
+ public static final int RESPONSE_INVALID_OBJECT_PROP_CODE = 0xA801;
+ public static final int RESPONSE_INVALID_OBJECT_PROP_FORMAT = 0xA802;
+ public static final int RESPONSE_INVALID_OBJECT_PROP_VALUE = 0xA803;
+ public static final int RESPONSE_INVALID_OBJECT_REFERENCE = 0xA804;
+ public static final int RESPONSE_GROUP_NOT_SUPPORTED = 0xA805;
+ public static final int RESPONSE_INVALID_DATASET = 0xA806;
+ public static final int RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED = 0xA807;
+ public static final int RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED = 0xA808;
+ public static final int RESPONSE_OBJECT_TOO_LARGE = 0xA809;
+ public static final int RESPONSE_OBJECT_PROP_NOT_SUPPORTED = 0xA80A;
+
+ // MTP format codes
+ public static final int FORMAT_UNDEFINED = 0x3000;
+ public static final int FORMAT_ASSOCIATION = 0x3001;
+ public static final int FORMAT_SCRIPT = 0x3002;
+ public static final int FORMAT_EXECUTABLE = 0x3003;
+ public static final int FORMAT_TEXT = 0x3004;
+ public static final int FORMAT_HTML = 0x3005;
+ public static final int FORMAT_DPOF = 0x3006;
+ public static final int FORMAT_AIFF = 0x3007;
+ public static final int FORMAT_WAV = 0x3008;
+ public static final int FORMAT_MP3 = 0x3009;
+ public static final int FORMAT_AVI = 0x300A;
+ public static final int FORMAT_MPEG = 0x300B;
+ public static final int FORMAT_ASF = 0x300C;
+ public static final int FORMAT_DEFINED = 0x3800;
+ public static final int FORMAT_EXIF_JPEG = 0x3801;
+ public static final int FORMAT_TIFF_EP = 0x3802;
+ public static final int FORMAT_FLASHPIX = 0x3803;
+ public static final int FORMAT_BMP = 0x3804;
+ public static final int FORMAT_CIFF = 0x3805;
+ public static final int FORMAT_GIF = 0x3807;
+ public static final int FORMAT_JFIF = 0x3808;
+ public static final int FORMAT_CD = 0x3809;
+ public static final int FORMAT_PICT = 0x380A;
+ public static final int FORMAT_PNG = 0x380B;
+ public static final int FORMAT_TIFF = 0x380D;
+ public static final int FORMAT_TIFF_IT = 0x380E;
+ public static final int FORMAT_JP2 = 0x380F;
+ public static final int FORMAT_JPX = 0x3810;
+ public static final int FORMAT_UNDEFINED_FIRMWARE = 0xB802;
+ public static final int FORMAT_WINDOWS_IMAGE_FORMAT = 0xB881;
+ public static final int FORMAT_UNDEFINED_AUDIO = 0xB900;
+ public static final int FORMAT_WMA = 0xB901;
+ public static final int FORMAT_OGG = 0xB902;
+ public static final int FORMAT_AAC = 0xB903;
+ public static final int FORMAT_AUDIBLE = 0xB904;
+ public static final int FORMAT_FLAC = 0xB906;
+ public static final int FORMAT_UNDEFINED_VIDEO = 0xB980;
+ public static final int FORMAT_WMV = 0xB981;
+ public static final int FORMAT_MP4_CONTAINER = 0xB982;
+ public static final int FORMAT_MP2 = 0xB983;
+ public static final int FORMAT_3GP_CONTAINER = 0xB984;
+ public static final int FORMAT_UNDEFINED_COLLECTION = 0xBA00;
+ public static final int FORMAT_ABSTRACT_MULTIMEDIA_ALBUM = 0xBA01;
+ public static final int FORMAT_ABSTRACT_IMAGE_ALBUM = 0xBA02;
+ public static final int FORMAT_ABSTRACT_AUDIO_ALBUM = 0xBA03;
+ public static final int FORMAT_ABSTRACT_VIDEO_ALBUM = 0xBA04;
+ public static final int FORMAT_ABSTRACT_AV_PLAYLIST = 0xBA05;
+ public static final int FORMAT_ABSTRACT_CONTACT_GROUP = 0xBA06;
+ public static final int FORMAT_ABSTRACT_MESSAGE_FOLDER = 0xBA07;
+ public static final int FORMAT_ABSTRACT_CHAPTERED_PRODUCTION = 0xBA08;
+ public static final int FORMAT_ABSTRACT_AUDIO_PLAYLIST = 0xBA09;
+ public static final int FORMAT_ABSTRACT_VIDEO_PLAYLIST = 0xBA0A;
+ public static final int FORMAT_ABSTRACT_MEDIACAST = 0xBA0B;
+ public static final int FORMAT_WPL_PLAYLIST = 0xBA10;
+ public static final int FORMAT_M3U_PLAYLIST = 0xBA11;
+ public static final int FORMAT_MPL_PLAYLIST = 0xBA12;
+ public static final int FORMAT_ASX_PLAYLIST = 0xBA13;
+ public static final int FORMAT_PLS_PLAYLIST = 0xBA14;
+ public static final int FORMAT_UNDEFINED_DOCUMENT = 0xBA80;
+ public static final int FORMAT_ABSTRACT_DOCUMENT = 0xBA81;
+ public static final int FORMAT_XML_DOCUMENT = 0xBA82;
+ public static final int FORMAT_MS_WORD_DOCUMENT = 0xBA83;
+ public static final int FORMAT_MHT_COMPILED_HTML_DOCUMENT = 0xBA84;
+ public static final int FORMAT_MS_EXCEL_SPREADSHEET = 0xBA85;
+ public static final int FORMAT_MS_POWERPOINT_PRESENTATION = 0xBA86;
+ public static final int FORMAT_UNDEFINED_MESSAGE = 0xBB00;
+ public static final int FORMAT_ABSTRACT_MESSSAGE = 0xBB01;
+ public static final int FORMAT_UNDEFINED_CONTACT = 0xBB80;
+ public static final int FORMAT_ABSTRACT_CONTACT = 0xBB81;
+ public static final int FORMAT_VCARD_2 = 0xBB82;
+
+ public static boolean isAbstractObject(int format) {
+ switch (format) {
+ case FORMAT_ABSTRACT_MULTIMEDIA_ALBUM:
+ case FORMAT_ABSTRACT_IMAGE_ALBUM:
+ case FORMAT_ABSTRACT_AUDIO_ALBUM:
+ case FORMAT_ABSTRACT_VIDEO_ALBUM:
+ case FORMAT_ABSTRACT_AV_PLAYLIST:
+ case FORMAT_ABSTRACT_CONTACT_GROUP:
+ case FORMAT_ABSTRACT_MESSAGE_FOLDER:
+ case FORMAT_ABSTRACT_CHAPTERED_PRODUCTION:
+ case FORMAT_ABSTRACT_AUDIO_PLAYLIST:
+ case FORMAT_ABSTRACT_VIDEO_PLAYLIST:
+ case FORMAT_ABSTRACT_MEDIACAST:
+ case FORMAT_ABSTRACT_DOCUMENT:
+ case FORMAT_ABSTRACT_MESSSAGE:
+ case FORMAT_ABSTRACT_CONTACT:
+ return true;
+ default:
+ return false;
+ }
+ }
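isAbstractObject() is what the MediaScanner pruning change earlier in this patch relies on: rows whose format is an abstract playlist or album have no backing file and must not be deleted just because the directory walk never saw them. A one-line sketch of that check:

    // Prune a stale row only if it is a real file, not a folder or an
    // MTP-only abstraction such as a playlist or album.
    boolean prunable = !MtpConstants.isAbstractObject(format)
            && format != MtpConstants.FORMAT_ASSOCIATION;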
+
+ // MTP object properties
+ public static final int PROPERTY_STORAGE_ID = 0xDC01;
+ public static final int PROPERTY_OBJECT_FORMAT = 0xDC02;
+ public static final int PROPERTY_PROTECTION_STATUS = 0xDC03;
+ public static final int PROPERTY_OBJECT_SIZE = 0xDC04;
+ public static final int PROPERTY_ASSOCIATION_TYPE = 0xDC05;
+ public static final int PROPERTY_ASSOCIATION_DESC = 0xDC06;
+ public static final int PROPERTY_OBJECT_FILE_NAME = 0xDC07;
+ public static final int PROPERTY_DATE_CREATED = 0xDC08;
+ public static final int PROPERTY_DATE_MODIFIED = 0xDC09;
+ public static final int PROPERTY_KEYWORDS = 0xDC0A;
+ public static final int PROPERTY_PARENT_OBJECT = 0xDC0B;
+ public static final int PROPERTY_ALLOWED_FOLDER_CONTENTS = 0xDC0C;
+ public static final int PROPERTY_HIDDEN = 0xDC0D;
+ public static final int PROPERTY_SYSTEM_OBJECT = 0xDC0E;
+ public static final int PROPERTY_PERSISTENT_UID = 0xDC41;
+ public static final int PROPERTY_SYNC_ID = 0xDC42;
+ public static final int PROPERTY_PROPERTY_BAG = 0xDC43;
+ public static final int PROPERTY_NAME = 0xDC44;
+ public static final int PROPERTY_CREATED_BY = 0xDC45;
+ public static final int PROPERTY_ARTIST = 0xDC46;
+ public static final int PROPERTY_DATE_AUTHORED = 0xDC47;
+ public static final int PROPERTY_DESCRIPTION = 0xDC48;
+ public static final int PROPERTY_URL_REFERENCE = 0xDC49;
+ public static final int PROPERTY_LANGUAGE_LOCALE = 0xDC4A;
+ public static final int PROPERTY_COPYRIGHT_INFORMATION = 0xDC4B;
+ public static final int PROPERTY_SOURCE = 0xDC4C;
+ public static final int PROPERTY_ORIGIN_LOCATION = 0xDC4D;
+ public static final int PROPERTY_DATE_ADDED = 0xDC4E;
+ public static final int PROPERTY_NON_CONSUMABLE = 0xDC4F;
+ public static final int PROPERTY_CORRUPT_UNPLAYABLE = 0xDC50;
+ public static final int PROPERTY_PRODUCER_SERIAL_NUMBER = 0xDC51;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT = 0xDC81;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_SIZE = 0xDC82;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT = 0xDC83;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH = 0xDC84;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_DURATION = 0xDC85;
+ public static final int PROPERTY_REPRESENTATIVE_SAMPLE_DATA = 0xDC86;
+ public static final int PROPERTY_WIDTH = 0xDC87;
+ public static final int PROPERTY_HEIGHT = 0xDC88;
+ public static final int PROPERTY_DURATION = 0xDC89;
+ public static final int PROPERTY_RATING = 0xDC8A;
+ public static final int PROPERTY_TRACK = 0xDC8B;
+ public static final int PROPERTY_GENRE = 0xDC8C;
+ public static final int PROPERTY_CREDITS = 0xDC8D;
+ public static final int PROPERTY_LYRICS = 0xDC8E;
+ public static final int PROPERTY_SUBSCRIPTION_CONTENT_ID = 0xDC8F;
+ public static final int PROPERTY_PRODUCED_BY = 0xDC90;
+ public static final int PROPERTY_USE_COUNT = 0xDC91;
+ public static final int PROPERTY_SKIP_COUNT = 0xDC92;
+ public static final int PROPERTY_LAST_ACCESSED = 0xDC93;
+ public static final int PROPERTY_PARENTAL_RATING = 0xDC94;
+ public static final int PROPERTY_META_GENRE = 0xDC95;
+ public static final int PROPERTY_COMPOSER = 0xDC96;
+ public static final int PROPERTY_EFFECTIVE_RATING = 0xDC97;
+ public static final int PROPERTY_SUBTITLE = 0xDC98;
+ public static final int PROPERTY_ORIGINAL_RELEASE_DATE = 0xDC99;
+ public static final int PROPERTY_ALBUM_NAME = 0xDC9A;
+ public static final int PROPERTY_ALBUM_ARTIST = 0xDC9B;
+ public static final int PROPERTY_MOOD = 0xDC9C;
+ public static final int PROPERTY_DRM_STATUS = 0xDC9D;
+ public static final int PROPERTY_SUB_DESCRIPTION = 0xDC9E;
+ public static final int PROPERTY_IS_CROPPED = 0xDCD1;
+ public static final int PROPERTY_IS_COLOUR_CORRECTED = 0xDCD2;
+ public static final int PROPERTY_IMAGE_BIT_DEPTH = 0xDCD3;
+ public static final int PROPERTY_F_NUMBER = 0xDCD4;
+ public static final int PROPERTY_EXPOSURE_TIME = 0xDCD5;
+ public static final int PROPERTY_EXPOSURE_INDEX = 0xDCD6;
+ public static final int PROPERTY_TOTAL_BITRATE = 0xDE91;
+ public static final int PROPERTY_BITRATE_TYPE = 0xDE92;
+ public static final int PROPERTY_SAMPLE_RATE = 0xDE93;
+ public static final int PROPERTY_NUMBER_OF_CHANNELS = 0xDE94;
+ public static final int PROPERTY_AUDIO_BIT_DEPTH = 0xDE95;
+ public static final int PROPERTY_SCAN_TYPE = 0xDE97;
+ public static final int PROPERTY_AUDIO_WAVE_CODEC = 0xDE99;
+ public static final int PROPERTY_AUDIO_BITRATE = 0xDE9A;
+ public static final int PROPERTY_VIDEO_FOURCC_CODEC = 0xDE9B;
+ public static final int PROPERTY_VIDEO_BITRATE = 0xDE9C;
+ public static final int PROPERTY_FRAMES_PER_THOUSAND_SECONDS = 0xDE9D;
+ public static final int PROPERTY_KEYFRAME_DISTANCE = 0xDE9E;
+ public static final int PROPERTY_BUFFER_SIZE = 0xDE9F;
+ public static final int PROPERTY_ENCODING_QUALITY = 0xDEA0;
+ public static final int PROPERTY_ENCODING_PROFILE = 0xDEA1;
+ public static final int PROPERTY_DISPLAY_NAME = 0xDCE0;
+ public static final int PROPERTY_BODY_TEXT = 0xDCE1;
+ public static final int PROPERTY_SUBJECT = 0xDCE2;
+ public static final int PROPERTY_PRIORITY = 0xDCE3;
+ public static final int PROPERTY_GIVEN_NAME = 0xDD00;
+ public static final int PROPERTY_MIDDLE_NAMES = 0xDD01;
+ public static final int PROPERTY_FAMILY_NAME = 0xDD02;
+ public static final int PROPERTY_PREFIX = 0xDD03;
+ public static final int PROPERTY_SUFFIX = 0xDD04;
+ public static final int PROPERTY_PHONETIC_GIVEN_NAME = 0xDD05;
+ public static final int PROPERTY_PHONETIC_FAMILY_NAME = 0xDD06;
+ public static final int PROPERTY_EMAIL_PRIMARY = 0xDD07;
+ public static final int PROPERTY_EMAIL_PERSONAL_1 = 0xDD08;
+ public static final int PROPERTY_EMAIL_PERSONAL_2 = 0xDD09;
+ public static final int PROPERTY_EMAIL_BUSINESS_1 = 0xDD0A;
+ public static final int PROPERTY_EMAIL_BUSINESS_2 = 0xDD0B;
+ public static final int PROPERTY_EMAIL_OTHERS = 0xDD0C;
+ public static final int PROPERTY_PHONE_NUMBER_PRIMARY = 0xDD0D;
+ public static final int PROPERTY_PHONE_NUMBER_PERSONAL = 0xDD0E;
+ public static final int PROPERTY_PHONE_NUMBER_PERSONAL_2 = 0xDD0F;
+ public static final int PROPERTY_PHONE_NUMBER_BUSINESS = 0xDD10;
+ public static final int PROPERTY_PHONE_NUMBER_BUSINESS_2 = 0xDD11;
+ public static final int PROPERTY_PHONE_NUMBER_MOBILE = 0xDD12;
+ public static final int PROPERTY_PHONE_NUMBER_MOBILE_2 = 0xDD13;
+ public static final int PROPERTY_FAX_NUMBER_PRIMARY = 0xDD14;
+ public static final int PROPERTY_FAX_NUMBER_PERSONAL = 0xDD15;
+ public static final int PROPERTY_FAX_NUMBER_BUSINESS = 0xDD16;
+ public static final int PROPERTY_PAGER_NUMBER = 0xDD17;
+ public static final int PROPERTY_PHONE_NUMBER_OTHERS = 0xDD18;
+ public static final int PROPERTY_PRIMARY_WEB_ADDRESS = 0xDD19;
+ public static final int PROPERTY_PERSONAL_WEB_ADDRESS = 0xDD1A;
+ public static final int PROPERTY_BUSINESS_WEB_ADDRESS = 0xDD1B;
+ public static final int PROPERTY_INSTANT_MESSANGER_ADDRESS = 0xDD1C;
+ public static final int PROPERTY_INSTANT_MESSANGER_ADDRESS_2 = 0xDD1D;
+ public static final int PROPERTY_INSTANT_MESSANGER_ADDRESS_3 = 0xDD1E;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL = 0xDD1F;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1 = 0xDD20;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2 = 0xDD21;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY = 0xDD22;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION = 0xDD23;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE = 0xDD24;
+ public static final int PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY = 0xDD25;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL = 0xDD26;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1 = 0xDD27;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2 = 0xDD28;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY = 0xDD29;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION = 0xDD2A;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE = 0xDD2B;
+ public static final int PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY = 0xDD2C;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_FULL = 0xDD2D;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1 = 0xDD2E;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2 = 0xDD2F;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_CITY = 0xDD30;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_REGION = 0xDD31;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE = 0xDD32;
+ public static final int PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY = 0xDD33;
+ public static final int PROPERTY_ORGANIZATION_NAME = 0xDD34;
+ public static final int PROPERTY_PHONETIC_ORGANIZATION_NAME = 0xDD35;
+ public static final int PROPERTY_ROLE = 0xDD36;
+ public static final int PROPERTY_BIRTHDATE = 0xDD37;
+ public static final int PROPERTY_MESSAGE_TO = 0xDD40;
+ public static final int PROPERTY_MESSAGE_CC = 0xDD41;
+ public static final int PROPERTY_MESSAGE_BCC = 0xDD42;
+ public static final int PROPERTY_MESSAGE_READ = 0xDD43;
+ public static final int PROPERTY_MESSAGE_RECEIVED_TIME = 0xDD44;
+ public static final int PROPERTY_MESSAGE_SENDER = 0xDD45;
+ public static final int PROPERTY_ACTIVITY_BEGIN_TIME = 0xDD50;
+ public static final int PROPERTY_ACTIVITY_END_TIME = 0xDD51;
+ public static final int PROPERTY_ACTIVITY_LOCATION = 0xDD52;
+ public static final int PROPERTY_ACTIVITY_REQUIRED_ATTENDEES = 0xDD54;
+ public static final int PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES = 0xDD55;
+ public static final int PROPERTY_ACTIVITY_RESOURCES = 0xDD56;
+ public static final int PROPERTY_ACTIVITY_ACCEPTED = 0xDD57;
+ public static final int PROPERTY_ACTIVITY_TENTATIVE = 0xDD58;
+ public static final int PROPERTY_ACTIVITY_DECLINED = 0xDD59;
+ public static final int PROPERTY_ACTIVITY_REMAINDER_TIME = 0xDD5A;
+ public static final int PROPERTY_ACTIVITY_OWNER = 0xDD5B;
+ public static final int PROPERTY_ACTIVITY_STATUS = 0xDD5C;
+ public static final int PROPERTY_OWNER = 0xDD5D;
+ public static final int PROPERTY_EDITOR = 0xDD5E;
+ public static final int PROPERTY_WEBMASTER = 0xDD5F;
+ public static final int PROPERTY_URL_SOURCE = 0xDD60;
+ public static final int PROPERTY_URL_DESTINATION = 0xDD61;
+ public static final int PROPERTY_TIME_BOOKMARK = 0xDD62;
+ public static final int PROPERTY_OBJECT_BOOKMARK = 0xDD63;
+ public static final int PROPERTY_BYTE_BOOKMARK = 0xDD64;
+ public static final int PROPERTY_LAST_BUILD_DATE = 0xDD70;
+ public static final int PROPERTY_TIME_TO_LIVE = 0xDD71;
+ public static final int PROPERTY_MEDIA_GUID = 0xDD72;
+
+ // MTP device properties
+ public static final int DEVICE_PROPERTY_UNDEFINED = 0x5000;
+ public static final int DEVICE_PROPERTY_BATTERY_LEVEL = 0x5001;
+ public static final int DEVICE_PROPERTY_FUNCTIONAL_MODE = 0x5002;
+ public static final int DEVICE_PROPERTY_IMAGE_SIZE = 0x5003;
+ public static final int DEVICE_PROPERTY_COMPRESSION_SETTING = 0x5004;
+ public static final int DEVICE_PROPERTY_WHITE_BALANCE = 0x5005;
+ public static final int DEVICE_PROPERTY_RGB_GAIN = 0x5006;
+ public static final int DEVICE_PROPERTY_F_NUMBER = 0x5007;
+ public static final int DEVICE_PROPERTY_FOCAL_LENGTH = 0x5008;
+ public static final int DEVICE_PROPERTY_FOCUS_DISTANCE = 0x5009;
+ public static final int DEVICE_PROPERTY_FOCUS_MODE = 0x500A;
+ public static final int DEVICE_PROPERTY_EXPOSURE_METERING_MODE = 0x500B;
+ public static final int DEVICE_PROPERTY_FLASH_MODE = 0x500C;
+ public static final int DEVICE_PROPERTY_EXPOSURE_TIME = 0x500D;
+ public static final int DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE = 0x500E;
+ public static final int DEVICE_PROPERTY_EXPOSURE_INDEX = 0x500F;
+ public static final int DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION = 0x5010;
+ public static final int DEVICE_PROPERTY_DATETIME = 0x5011;
+ public static final int DEVICE_PROPERTY_CAPTURE_DELAY = 0x5012;
+ public static final int DEVICE_PROPERTY_STILL_CAPTURE_MODE = 0x5013;
+ public static final int DEVICE_PROPERTY_CONTRAST = 0x5014;
+ public static final int DEVICE_PROPERTY_SHARPNESS = 0x5015;
+ public static final int DEVICE_PROPERTY_DIGITAL_ZOOM = 0x5016;
+ public static final int DEVICE_PROPERTY_EFFECT_MODE = 0x5017;
+ public static final int DEVICE_PROPERTY_BURST_NUMBER = 0x5018;
+ public static final int DEVICE_PROPERTY_BURST_INTERVAL = 0x5019;
+ public static final int DEVICE_PROPERTY_TIMELAPSE_NUMBER = 0x501A;
+ public static final int DEVICE_PROPERTY_TIMELAPSE_INTERVAL = 0x501B;
+ public static final int DEVICE_PROPERTY_FOCUS_METERING_MODE = 0x501C;
+ public static final int DEVICE_PROPERTY_UPLOAD_URL = 0x501D;
+ public static final int DEVICE_PROPERTY_ARTIST = 0x501E;
+ public static final int DEVICE_PROPERTY_COPYRIGHT_INFO = 0x501F;
+ public static final int DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER = 0xD401;
+ public static final int DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME = 0xD402;
+ public static final int DEVICE_PROPERTY_VOLUME = 0xD403;
+ public static final int DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED = 0xD404;
+ public static final int DEVICE_PROPERTY_DEVICE_ICON = 0xD405;
+ public static final int DEVICE_PROPERTY_PLAYBACK_RATE = 0xD410;
+ public static final int DEVICE_PROPERTY_PLAYBACK_OBJECT = 0xD411;
+ public static final int DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX = 0xD412;
+ public static final int DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO = 0xD406;
+ public static final int DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE = 0xD407;
+
+
+ /**
+ * Object is not protected. It may be modified and deleted, and its properties
+ * may be modified.
+ */
+ public static final int PROTECTION_STATUS_NONE = 0;
+
+ /**
+ * Object can not be modified or deleted and its properties can not be modified.
+ */
+ public static final int PROTECTION_STATUS_READ_ONLY = 0x8001;
+
+ /**
+ * Object can not be modified or deleted but its properties are modifiable.
+ */
+ public static final int PROTECTION_STATUS_READ_ONLY_DATA = 0x8002;
+
+ /**
+ * Object's contents can not be transferred from the device, but the object
+ * may be moved or deleted and its properties may be modified.
+ */
+ public static final int PROTECTION_STATUS_NON_TRANSFERABLE_DATA = 0x8003;
+
+ public static final int ASSOCIATION_TYPE_GENERIC_FOLDER = 0x0001;
+}
diff --git a/media/java/android/media/MtpCursor.java b/media/java/android/media/MtpCursor.java
new file mode 100644
index 0000000..9b5ab95
--- /dev/null
+++ b/media/java/android/media/MtpCursor.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.database.AbstractWindowedCursor;
+import android.database.CursorWindow;
+import android.provider.Mtp;
+import android.util.Log;
+
+import java.util.HashMap;
+
+/**
+ * Cursor class for MTP content provider
+ * @hide
+ */
+public final class MtpCursor extends AbstractWindowedCursor {
+ static final String TAG = "MtpCursor";
+ static final int NO_COUNT = -1;
+
+ /* constants for queryType */
+ public static final int DEVICE = 1;
+ public static final int DEVICE_ID = 2;
+ public static final int STORAGE = 3;
+ public static final int STORAGE_ID = 4;
+ public static final int OBJECT = 5;
+ public static final int OBJECT_ID = 6;
+ public static final int STORAGE_CHILDREN = 7;
+ public static final int OBJECT_CHILDREN = 8;
+
+ /** The names of the columns in the projection */
+ private String[] mColumns;
+
+ /** The number of rows in the cursor */
+ private int mCount = NO_COUNT;
+
+
+ public MtpCursor(MtpClient client, int queryType, int deviceID, long storageID, long objectID,
+ String[] projection) {
+ if (client == null) {
+ throw new NullPointerException("client null in MtpCursor constructor");
+ }
+ mColumns = projection;
+
+ HashMap<String, Integer> map;
+ switch (queryType) {
+ case DEVICE:
+ case DEVICE_ID:
+ map = sDeviceProjectionMap;
+ break;
+ case STORAGE:
+ case STORAGE_ID:
+ map = sStorageProjectionMap;
+ break;
+ case OBJECT:
+ case OBJECT_ID:
+ case STORAGE_CHILDREN:
+ case OBJECT_CHILDREN:
+ map = sObjectProjectionMap;
+ break;
+ default:
+ throw new IllegalArgumentException("unknown query type " + queryType);
+ }
+
+ int[] columns = new int[projection.length];
+ for (int i = 0; i < projection.length; i++) {
+ Integer id = map.get(projection[i]);
+ if (id == null) {
+ throw new IllegalArgumentException("unknown column " + projection[i]);
+ }
+ columns[i] = id.intValue();
+ }
+ native_setup(client, queryType, deviceID, storageID, objectID, columns);
+ }
+
+ @Override
+ protected void finalize() {
+ try {
+ native_finalize();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ @Override
+ public int getCount() {
+ if (mCount == NO_COUNT) {
+ fillWindow(0);
+ }
+ return mCount;
+ }
+
+ @Override
+ public boolean requery() {
+ Log.d(TAG, "requery");
+ mCount = NO_COUNT;
+ if (mWindow != null) {
+ mWindow.clear();
+ }
+ return super.requery();
+ }
+
+ private void fillWindow(int startPos) {
+ if (mWindow == null) {
+ // If there isn't a window set already it will only be accessed locally
+ mWindow = new CursorWindow(true /* the window is local only */);
+ } else {
+ mWindow.clear();
+ }
+ mWindow.setStartPosition(startPos);
+ mCount = native_fill_window(mWindow, startPos);
+ }
+
+ @Override
+ public String[] getColumnNames() {
+ Log.d(TAG, "getColumnNames returning " + mColumns);
+ return mColumns;
+ }
+
+ /* Device Column IDs */
+ /* These must match the values in MtpCursor.cpp */
+ private static final int DEVICE_ROW_ID = 1;
+ private static final int DEVICE_MANUFACTURER = 2;
+ private static final int DEVICE_MODEL = 3;
+
+ /* Storage Column IDs */
+ /* These must match the values in MtpCursor.cpp */
+ private static final int STORAGE_ROW_ID = 101;
+ private static final int STORAGE_IDENTIFIER = 102;
+ private static final int STORAGE_DESCRIPTION = 103;
+
+ /* Object Column IDs */
+ /* These must match the values in MtpCursor.cpp */
+ private static final int OBJECT_ROW_ID = 201;
+ private static final int OBJECT_STORAGE_ID = 202;
+ private static final int OBJECT_FORMAT = 203;
+ private static final int OBJECT_PROTECTION_STATUS = 204;
+ private static final int OBJECT_SIZE = 205;
+ private static final int OBJECT_THUMB_FORMAT = 206;
+ private static final int OBJECT_THUMB_SIZE = 207;
+ private static final int OBJECT_THUMB_WIDTH = 208;
+ private static final int OBJECT_THUMB_HEIGHT = 209;
+ private static final int OBJECT_IMAGE_WIDTH = 210;
+ private static final int OBJECT_IMAGE_HEIGHT = 211;
+ private static final int OBJECT_IMAGE_DEPTH = 212;
+ private static final int OBJECT_PARENT = 213;
+ private static final int OBJECT_ASSOCIATION_TYPE = 214;
+ private static final int OBJECT_ASSOCIATION_DESC = 215;
+ private static final int OBJECT_SEQUENCE_NUMBER = 216;
+ private static final int OBJECT_NAME = 217;
+ private static final int OBJECT_DATE_CREATED = 218;
+ private static final int OBJECT_DATE_MODIFIED = 219;
+ private static final int OBJECT_KEYWORDS = 220;
+ private static final int OBJECT_THUMB = 221;
+
+ private static HashMap<String, Integer> sDeviceProjectionMap;
+ private static HashMap<String, Integer> sStorageProjectionMap;
+ private static HashMap<String, Integer> sObjectProjectionMap;
+
+ static {
+ sDeviceProjectionMap = new HashMap<String, Integer>();
+ sDeviceProjectionMap.put(Mtp.Device._ID, new Integer(DEVICE_ROW_ID));
+ sDeviceProjectionMap.put(Mtp.Device.MANUFACTURER, new Integer(DEVICE_MANUFACTURER));
+ sDeviceProjectionMap.put(Mtp.Device.MODEL, new Integer(DEVICE_MODEL));
+
+ sStorageProjectionMap = new HashMap<String, Integer>();
+ sStorageProjectionMap.put(Mtp.Storage._ID, new Integer(STORAGE_ROW_ID));
+ sStorageProjectionMap.put(Mtp.Storage.IDENTIFIER, new Integer(STORAGE_IDENTIFIER));
+ sStorageProjectionMap.put(Mtp.Storage.DESCRIPTION, new Integer(STORAGE_DESCRIPTION));
+
+ sObjectProjectionMap = new HashMap<String, Integer>();
+ sObjectProjectionMap.put(Mtp.Object._ID, new Integer(OBJECT_ROW_ID));
+ sObjectProjectionMap.put(Mtp.Object.STORAGE_ID, new Integer(OBJECT_STORAGE_ID));
+ sObjectProjectionMap.put(Mtp.Object.FORMAT, new Integer(OBJECT_FORMAT));
+ sObjectProjectionMap.put(Mtp.Object.PROTECTION_STATUS, new Integer(OBJECT_PROTECTION_STATUS));
+ sObjectProjectionMap.put(Mtp.Object.SIZE, new Integer(OBJECT_SIZE));
+ sObjectProjectionMap.put(Mtp.Object.THUMB_FORMAT, new Integer(OBJECT_THUMB_FORMAT));
+ sObjectProjectionMap.put(Mtp.Object.THUMB_SIZE, new Integer(OBJECT_THUMB_SIZE));
+ sObjectProjectionMap.put(Mtp.Object.THUMB_WIDTH, new Integer(OBJECT_THUMB_WIDTH));
+ sObjectProjectionMap.put(Mtp.Object.THUMB_HEIGHT, new Integer(OBJECT_THUMB_HEIGHT));
+ sObjectProjectionMap.put(Mtp.Object.IMAGE_WIDTH, new Integer(OBJECT_IMAGE_WIDTH));
+ sObjectProjectionMap.put(Mtp.Object.IMAGE_HEIGHT, new Integer(OBJECT_IMAGE_HEIGHT));
+ sObjectProjectionMap.put(Mtp.Object.IMAGE_DEPTH, new Integer(OBJECT_IMAGE_DEPTH));
+ sObjectProjectionMap.put(Mtp.Object.PARENT, new Integer(OBJECT_PARENT));
+ sObjectProjectionMap.put(Mtp.Object.ASSOCIATION_TYPE, new Integer(OBJECT_ASSOCIATION_TYPE));
+ sObjectProjectionMap.put(Mtp.Object.ASSOCIATION_DESC, new Integer(OBJECT_ASSOCIATION_DESC));
+ sObjectProjectionMap.put(Mtp.Object.SEQUENCE_NUMBER, new Integer(OBJECT_SEQUENCE_NUMBER));
+ sObjectProjectionMap.put(Mtp.Object.NAME, new Integer(OBJECT_NAME));
+ sObjectProjectionMap.put(Mtp.Object.DATE_CREATED, new Integer(OBJECT_DATE_CREATED));
+ sObjectProjectionMap.put(Mtp.Object.DATE_MODIFIED, new Integer(OBJECT_DATE_MODIFIED));
+ sObjectProjectionMap.put(Mtp.Object.KEYWORDS, new Integer(OBJECT_KEYWORDS));
+ sObjectProjectionMap.put(Mtp.Object.THUMB, new Integer(OBJECT_THUMB));
+
+ }
+
+ // used by the JNI code
+ private int mNativeContext;
+
+ private native final void native_setup(MtpClient client, int queryType,
+ int deviceID, long storageID, long objectID, int[] columns);
+ private native final void native_finalize();
+ private native void native_wait_for_event();
+ private native int native_fill_window(CursorWindow window, int startPos);
+}
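A hedged sketch of driving MtpCursor directly; deviceId and storageId are assumed to have come from earlier DEVICE and STORAGE queries, client is a started MtpClient, and the objectID argument is presumably ignored for this query type:

    String[] projection = { Mtp.Object._ID, Mtp.Object.NAME, Mtp.Object.FORMAT };
    MtpCursor cursor = new MtpCursor(client, MtpCursor.STORAGE_CHILDREN,
            deviceId, storageId, 0, projection);
    try {
        while (cursor.moveToNext()) {
            long objectId = cursor.getLong(0);
            String name = cursor.getString(1);
            int format = cursor.getInt(2);
        }
    } finally {
        cursor.close();
    }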
diff --git a/media/java/android/media/MtpDatabase.java b/media/java/android/media/MtpDatabase.java
new file mode 100644
index 0000000..57ab3a1
--- /dev/null
+++ b/media/java/android/media/MtpDatabase.java
@@ -0,0 +1,916 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.content.ContentValues;
+import android.content.IContentProvider;
+import android.content.Intent;
+import android.database.Cursor;
+import android.database.sqlite.SQLiteDatabase;
+import android.net.Uri;
+import android.os.Environment;
+import android.os.RemoteException;
+import android.provider.MediaStore.Audio;
+import android.provider.MediaStore.Files;
+import android.provider.MediaStore.Images;
+import android.provider.MediaStore.MediaColumns;
+import android.provider.Mtp;
+import android.util.Log;
+
+import java.io.File;
+
+/**
+ * {@hide}
+ */
+public class MtpDatabase {
+
+ private static final String TAG = "MtpDatabase";
+
+ private final Context mContext;
+ private final IContentProvider mMediaProvider;
+ private final String mVolumeName;
+ private final Uri mObjectsUri;
+ private final String mMediaStoragePath;
+ private final String mExternalStoragePath;
+
+ // true if the database has been modified in the current MTP session
+ private boolean mDatabaseModified;
+
+ // database for writable MTP device properties
+ private SQLiteDatabase mDevicePropDb;
+ private static final int DEVICE_PROPERTIES_DATABASE_VERSION = 1;
+
+ // FIXME - this should be passed in via the constructor
+ private final int mStorageID = 0x00010001;
+
+ private static final String[] ID_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ };
+ private static final String[] PATH_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ Files.FileColumns.DATA, // 1
+ };
+ private static final String[] PATH_SIZE_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ Files.FileColumns.DATA, // 1
+ Files.FileColumns.SIZE, // 2
+ };
+ private static final String[] OBJECT_INFO_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ Files.FileColumns.DATA, // 1
+ Files.FileColumns.FORMAT, // 2
+ Files.FileColumns.PARENT, // 3
+ Files.FileColumns.SIZE, // 4
+ Files.FileColumns.DATE_MODIFIED, // 5
+ };
+ private static final String ID_WHERE = Files.FileColumns._ID + "=?";
+ private static final String PARENT_WHERE = Files.FileColumns.PARENT + "=?";
+ private static final String PARENT_FORMAT_WHERE = PARENT_WHERE + " AND "
+ + Files.FileColumns.FORMAT + "=?";
+
+ private static final String[] DEVICE_PROPERTY_PROJECTION = new String[] { "_id", "value" };
+ private static final String DEVICE_PROPERTY_WHERE = "code=?";
+
+ private final MediaScanner mMediaScanner;
+
+ static {
+ System.loadLibrary("media_jni");
+ }
+
+ public MtpDatabase(Context context, String volumeName, String storagePath) {
+ native_setup();
+
+ mContext = context;
+ mMediaProvider = context.getContentResolver().acquireProvider("media");
+ mVolumeName = volumeName;
+ mMediaStoragePath = storagePath;
+ mExternalStoragePath = Environment.getExternalStorageDirectory().getAbsolutePath();
+ mObjectsUri = Files.getMtpObjectsUri(volumeName);
+ mMediaScanner = new MediaScanner(context);
+ openDevicePropertiesDatabase(context);
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ native_finalize();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private String externalToMediaPath(String path) {
+ // convert external storage path to media path
+ if (path != null && mMediaStoragePath != null
+ && mExternalStoragePath != null
+ && path.startsWith(mExternalStoragePath)) {
+ path = mMediaStoragePath + path.substring(mExternalStoragePath.length());
+ }
+ return path;
+ }
+
+ private void openDevicePropertiesDatabase(Context context) {
+ mDevicePropDb = context.openOrCreateDatabase("device-properties", Context.MODE_PRIVATE, null);
+ int version = mDevicePropDb.getVersion();
+
+ // initialize if necessary
+ if (version != DEVICE_PROPERTIES_DATABASE_VERSION) {
+ mDevicePropDb.execSQL("CREATE TABLE properties (" +
+ "_id INTEGER PRIMARY KEY AUTOINCREMENT," +
+ "code INTEGER UNIQUE ON CONFLICT REPLACE," +
+ "value TEXT" +
+ ");");
+ mDevicePropDb.execSQL("CREATE INDEX property_index ON properties (code);");
+ mDevicePropDb.setVersion(DEVICE_PROPERTIES_DATABASE_VERSION);
+ }
+ }
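The properties table created here backs the writable MTP device properties (friendly name, synchronization partner). Below is a sketch of how a getter and setter inside this class could use it; the actual property accessors are not part of this hunk, so the code is only an assumption about their shape.

    // Upsert a property; the UNIQUE ON CONFLICT REPLACE constraint on "code"
    // makes a plain insert behave like an update.
    ContentValues values = new ContentValues();
    values.put("code", MtpConstants.DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME);
    values.put("value", "My Android Device");
    mDevicePropDb.insert("properties", null, values);

    // Read it back using the projection and where clause defined above.
    Cursor c = mDevicePropDb.query("properties", DEVICE_PROPERTY_PROJECTION,
            DEVICE_PROPERTY_WHERE,
            new String[] { Integer.toString(MtpConstants.DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME) },
            null, null, null);
    try {
        String friendlyName = (c.moveToFirst() ? c.getString(1) : null);
    } finally {
        c.close();
    }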
+
+ private int beginSendObject(String path, int format, int parent,
+ int storage, long size, long modified) {
+ mDatabaseModified = true;
+ ContentValues values = new ContentValues();
+ values.put(Files.FileColumns.DATA, path);
+ values.put(Files.FileColumns.FORMAT, format);
+ values.put(Files.FileColumns.PARENT, parent);
+ // storage is ignored for now
+ values.put(Files.FileColumns.SIZE, size);
+ values.put(Files.FileColumns.DATE_MODIFIED, modified);
+
+ try {
+ Uri uri = mMediaProvider.insert(mObjectsUri, values);
+ if (uri != null) {
+ return Integer.parseInt(uri.getPathSegments().get(2));
+ } else {
+ return -1;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in beginSendObject", e);
+ return -1;
+ }
+ }
+
+ private void endSendObject(String path, int handle, int format, boolean succeeded) {
+ if (succeeded) {
+ // handle abstract playlists separately
+ // they do not exist in the file system so don't use the media scanner here
+ if (format == MtpConstants.FORMAT_ABSTRACT_AV_PLAYLIST) {
+ // Strip Windows Media Player file extension
+ if (path.endsWith(".pla")) {
+ path = path.substring(0, path.length() - 4);
+ }
+
+ // extract name from path
+ String name = path;
+ int lastSlash = name.lastIndexOf('/');
+ if (lastSlash >= 0) {
+ name = name.substring(lastSlash + 1);
+ }
+
+ ContentValues values = new ContentValues(1);
+ values.put(Audio.Playlists.DATA, path);
+ values.put(Audio.Playlists.NAME, name);
+ values.put(MediaColumns.MEDIA_SCANNER_NEW_OBJECT_ID, handle);
+ try {
+ Uri uri = mMediaProvider.insert(Audio.Playlists.EXTERNAL_CONTENT_URI, values);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in endSendObject", e);
+ }
+ } else {
+ mMediaScanner.scanMtpFile(path, mVolumeName, handle, format);
+ }
+ } else {
+ deleteFile(handle);
+ }
+ }
+
+ private int[] getObjectList(int storageID, int format, int parent) {
+ // we can ignore storageID until we support multiple storages
+ Log.d(TAG, "getObjectList parent: " + parent);
+ Cursor c = null;
+ try {
+ if (format != 0) {
+ c = mMediaProvider.query(mObjectsUri, ID_PROJECTION,
+ PARENT_FORMAT_WHERE,
+ new String[] { Integer.toString(parent), Integer.toString(format) },
+ null);
+ } else {
+ c = mMediaProvider.query(mObjectsUri, ID_PROJECTION,
+ PARENT_WHERE, new String[] { Integer.toString(parent) }, null);
+ }
+ if (c == null) {
+ Log.d(TAG, "null cursor");
+ return null;
+ }
+ int count = c.getCount();
+ if (count > 0) {
+ int[] result = new int[count];
+ for (int i = 0; i < count; i++) {
+ c.moveToNext();
+ result[i] = c.getInt(0);
+ }
+ Log.d(TAG, "returning " + result);
+ return result;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getObjectList", e);
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ return null;
+ }
+
+ private int getNumObjects(int storageID, int format, int parent) {
+ // we can ignore storageID until we support multiple storages
+ Log.d(TAG, "getObjectList parent: " + parent);
+ Cursor c = null;
+ try {
+ if (format != 0) {
+ c = mMediaProvider.query(mObjectsUri, ID_PROJECTION,
+ PARENT_FORMAT_WHERE,
+ new String[] { Integer.toString(parent), Integer.toString(format) },
+ null);
+ } else {
+ c = mMediaProvider.query(mObjectsUri, ID_PROJECTION,
+ PARENT_WHERE, new String[] { Integer.toString(parent) }, null);
+ }
+ if (c != null) {
+ return c.getCount();
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getNumObjects", e);
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ return -1;
+ }
+
+ private int[] getSupportedPlaybackFormats() {
+ return new int[] {
+ // allow transferring arbitrary files
+ MtpConstants.FORMAT_UNDEFINED,
+
+ MtpConstants.FORMAT_ASSOCIATION,
+ MtpConstants.FORMAT_TEXT,
+ MtpConstants.FORMAT_HTML,
+ MtpConstants.FORMAT_WAV,
+ MtpConstants.FORMAT_MP3,
+ MtpConstants.FORMAT_MPEG,
+ MtpConstants.FORMAT_EXIF_JPEG,
+ MtpConstants.FORMAT_TIFF_EP,
+ MtpConstants.FORMAT_GIF,
+ MtpConstants.FORMAT_JFIF,
+ MtpConstants.FORMAT_PNG,
+ MtpConstants.FORMAT_TIFF,
+ MtpConstants.FORMAT_WMA,
+ MtpConstants.FORMAT_OGG,
+ MtpConstants.FORMAT_AAC,
+ MtpConstants.FORMAT_MP4_CONTAINER,
+ MtpConstants.FORMAT_MP2,
+ MtpConstants.FORMAT_3GP_CONTAINER,
+ MtpConstants.FORMAT_ABSTRACT_AV_PLAYLIST,
+ MtpConstants.FORMAT_WPL_PLAYLIST,
+ MtpConstants.FORMAT_M3U_PLAYLIST,
+ MtpConstants.FORMAT_PLS_PLAYLIST,
+ MtpConstants.FORMAT_XML_DOCUMENT,
+ };
+ }
+
+ private int[] getSupportedCaptureFormats() {
+ // no capture formats yet
+ return null;
+ }
+
+ static final int[] FILE_PROPERTIES = {
+ // NOTE must match beginning of AUDIO_PROPERTIES, VIDEO_PROPERTIES
+ // and IMAGE_PROPERTIES below
+ MtpConstants.PROPERTY_STORAGE_ID,
+ MtpConstants.PROPERTY_OBJECT_FORMAT,
+ MtpConstants.PROPERTY_PROTECTION_STATUS,
+ MtpConstants.PROPERTY_OBJECT_SIZE,
+ MtpConstants.PROPERTY_OBJECT_FILE_NAME,
+ MtpConstants.PROPERTY_DATE_MODIFIED,
+ MtpConstants.PROPERTY_PARENT_OBJECT,
+ MtpConstants.PROPERTY_PERSISTENT_UID,
+ MtpConstants.PROPERTY_NAME,
+ MtpConstants.PROPERTY_DATE_ADDED,
+ };
+
+ static final int[] AUDIO_PROPERTIES = {
+ // NOTE must match FILE_PROPERTIES above
+ MtpConstants.PROPERTY_STORAGE_ID,
+ MtpConstants.PROPERTY_OBJECT_FORMAT,
+ MtpConstants.PROPERTY_PROTECTION_STATUS,
+ MtpConstants.PROPERTY_OBJECT_SIZE,
+ MtpConstants.PROPERTY_OBJECT_FILE_NAME,
+ MtpConstants.PROPERTY_DATE_MODIFIED,
+ MtpConstants.PROPERTY_PARENT_OBJECT,
+ MtpConstants.PROPERTY_PERSISTENT_UID,
+ MtpConstants.PROPERTY_NAME,
+ MtpConstants.PROPERTY_DISPLAY_NAME,
+ MtpConstants.PROPERTY_DATE_ADDED,
+
+ // audio specific properties
+ MtpConstants.PROPERTY_ARTIST,
+ MtpConstants.PROPERTY_ALBUM_NAME,
+ MtpConstants.PROPERTY_ALBUM_ARTIST,
+ MtpConstants.PROPERTY_TRACK,
+ MtpConstants.PROPERTY_ORIGINAL_RELEASE_DATE,
+ MtpConstants.PROPERTY_DURATION,
+ MtpConstants.PROPERTY_GENRE,
+ MtpConstants.PROPERTY_COMPOSER,
+ };
+
+ static final int[] VIDEO_PROPERTIES = {
+ // NOTE must match FILE_PROPERTIES above
+ MtpConstants.PROPERTY_STORAGE_ID,
+ MtpConstants.PROPERTY_OBJECT_FORMAT,
+ MtpConstants.PROPERTY_PROTECTION_STATUS,
+ MtpConstants.PROPERTY_OBJECT_SIZE,
+ MtpConstants.PROPERTY_OBJECT_FILE_NAME,
+ MtpConstants.PROPERTY_DATE_MODIFIED,
+ MtpConstants.PROPERTY_PARENT_OBJECT,
+ MtpConstants.PROPERTY_PERSISTENT_UID,
+ MtpConstants.PROPERTY_NAME,
+ MtpConstants.PROPERTY_DISPLAY_NAME,
+ MtpConstants.PROPERTY_DATE_ADDED,
+
+ // video specific properties
+ MtpConstants.PROPERTY_ARTIST,
+ MtpConstants.PROPERTY_ALBUM_NAME,
+ MtpConstants.PROPERTY_DURATION,
+ MtpConstants.PROPERTY_DESCRIPTION,
+ };
+
+ static final int[] IMAGE_PROPERTIES = {
+ // NOTE must match FILE_PROPERTIES above
+ MtpConstants.PROPERTY_STORAGE_ID,
+ MtpConstants.PROPERTY_OBJECT_FORMAT,
+ MtpConstants.PROPERTY_PROTECTION_STATUS,
+ MtpConstants.PROPERTY_OBJECT_SIZE,
+ MtpConstants.PROPERTY_OBJECT_FILE_NAME,
+ MtpConstants.PROPERTY_DATE_MODIFIED,
+ MtpConstants.PROPERTY_PARENT_OBJECT,
+ MtpConstants.PROPERTY_PERSISTENT_UID,
+ MtpConstants.PROPERTY_NAME,
+ MtpConstants.PROPERTY_DISPLAY_NAME,
+ MtpConstants.PROPERTY_DATE_ADDED,
+
+ // image specific properties
+ MtpConstants.PROPERTY_DESCRIPTION,
+ };
+
+ private int[] getSupportedObjectProperties(int format) {
+ switch (format) {
+ case MtpConstants.FORMAT_MP3:
+ case MtpConstants.FORMAT_WAV:
+ case MtpConstants.FORMAT_WMA:
+ case MtpConstants.FORMAT_OGG:
+ case MtpConstants.FORMAT_AAC:
+ return AUDIO_PROPERTIES;
+ case MtpConstants.FORMAT_MPEG:
+ case MtpConstants.FORMAT_3GP_CONTAINER:
+ case MtpConstants.FORMAT_WMV:
+ return VIDEO_PROPERTIES;
+ case MtpConstants.FORMAT_EXIF_JPEG:
+ case MtpConstants.FORMAT_GIF:
+ case MtpConstants.FORMAT_PNG:
+ case MtpConstants.FORMAT_BMP:
+ return IMAGE_PROPERTIES;
+ default:
+ return FILE_PROPERTIES;
+ }
+ }
+
+ private int[] getSupportedDeviceProperties() {
+ return new int[] {
+ MtpConstants.DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER,
+ MtpConstants.DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME,
+ };
+ }
+
+ private String queryString(int id, String column) {
+ Cursor c = null;
+ try {
+ // for now we are only reading properties from the "objects" table
+ c = mMediaProvider.query(mObjectsUri,
+ new String [] { Files.FileColumns._ID, column },
+ ID_WHERE, new String[] { Integer.toString(id) }, null);
+ if (c != null && c.moveToNext()) {
+ return c.getString(1);
+ } else {
+ return "";
+ }
+ } catch (Exception e) {
+ return null;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ }
+
+ private String queryGenre(int id) {
+ Cursor c = null;
+ try {
+ Uri uri = Audio.Genres.getContentUriForAudioId(mVolumeName, id);
+ c = mMediaProvider.query(uri,
+ new String [] { Files.FileColumns._ID, Audio.GenresColumns.NAME },
+ null, null, null);
+ if (c != null && c.moveToNext()) {
+ return c.getString(1);
+ } else {
+ return "";
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "queryGenre exception", e);
+ return null;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ }
+
+ private boolean queryInt(int id, String column, long[] outValue) {
+ Cursor c = null;
+ try {
+ // for now we are only reading properties from the "objects" table
+ c = mMediaProvider.query(mObjectsUri,
+ new String [] { Files.FileColumns._ID, column },
+ ID_WHERE, new String[] { Integer.toString(id) }, null);
+ if (c != null && c.moveToNext()) {
+ outValue[0] = c.getLong(1);
+ return true;
+ }
+ return false;
+ } catch (Exception e) {
+ return false;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ }
+
+ private String nameFromPath(String path) {
+ // extract name from full path
+ int start = 0;
+ int lastSlash = path.lastIndexOf('/');
+ if (lastSlash >= 0) {
+ start = lastSlash + 1;
+ }
+ int end = path.length();
+ if (end - start > 255) {
+ end = start + 255;
+ }
+ return path.substring(start, end);
+ }
+
+ private int renameFile(int handle, String newName) {
+ Cursor c = null;
+
+ // first compute current path
+ String path = null;
+ String[] whereArgs = new String[] { Integer.toString(handle) };
+ try {
+ c = mMediaProvider.query(mObjectsUri, PATH_PROJECTION, ID_WHERE, whereArgs, null);
+ if (c != null && c.moveToNext()) {
+ path = externalToMediaPath(c.getString(1));
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getObjectFilePath", e);
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ if (path == null) {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+
+ // now rename the file. make sure this succeeds before updating database
+ File oldFile = new File(path);
+ int lastSlash = path.lastIndexOf('/');
+ if (lastSlash <= 1) {
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+ String newPath = path.substring(0, lastSlash + 1) + newName;
+ File newFile = new File(newPath);
+ boolean success = oldFile.renameTo(newFile);
+ Log.d(TAG, "renaming "+ path + " to " + newPath + (success ? " succeeded" : " failed"));
+ if (!success) {
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+
+ // finally update database
+ ContentValues values = new ContentValues();
+ values.put(Files.FileColumns.DATA, newPath);
+ int updated = 0;
+ try {
+ // note - we are relying on a special case in MediaProvider.update() to update
+ // the paths for all children in the case where this is a directory.
+ updated = mMediaProvider.update(mObjectsUri, values, ID_WHERE, whereArgs);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in mMediaProvider.update", e);
+ }
+ if (updated == 0) {
+ Log.e(TAG, "Unable to update path for " + path + " to " + newPath);
+ // this shouldn't happen, but if it does we need to rename the file to its original name
+ newFile.renameTo(oldFile);
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+
+ return MtpConstants.RESPONSE_OK;
+ }
+
+ private int getObjectProperty(int handle, int property,
+ long[] outIntValue, char[] outStringValue) {
+ Log.d(TAG, "getObjectProperty: " + property);
+ String column = null;
+ boolean isString = false;
+
+ switch (property) {
+ case MtpConstants.PROPERTY_STORAGE_ID:
+ outIntValue[0] = mStorageID;
+ return MtpConstants.RESPONSE_OK;
+ case MtpConstants.PROPERTY_OBJECT_FORMAT:
+ column = Files.FileColumns.FORMAT;
+ break;
+ case MtpConstants.PROPERTY_PROTECTION_STATUS:
+ // protection status is always 0
+ outIntValue[0] = 0;
+ return MtpConstants.RESPONSE_OK;
+ case MtpConstants.PROPERTY_OBJECT_SIZE:
+ column = Files.FileColumns.SIZE;
+ break;
+ case MtpConstants.PROPERTY_OBJECT_FILE_NAME:
+ // special case - need to extract file name from full path
+ String value = queryString(handle, Files.FileColumns.DATA);
+ if (value != null) {
+ value = nameFromPath(value);
+ value.getChars(0, value.length(), outStringValue, 0);
+ outStringValue[value.length()] = 0;
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ case MtpConstants.PROPERTY_NAME:
+ // first try title
+ String name = queryString(handle, MediaColumns.TITLE);
+ // then try name
+ if (name == null) {
+ name = queryString(handle, Audio.PlaylistsColumns.NAME);
+ }
+ // if title and name fail, extract name from full path
+ if (name == null) {
+ name = queryString(handle, Files.FileColumns.DATA);
+ if (name != null) {
+ name = nameFromPath(name);
+ }
+ }
+ if (name != null) {
+ name.getChars(0, name.length(), outStringValue, 0);
+ outStringValue[name.length()] = 0;
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ case MtpConstants.PROPERTY_DATE_MODIFIED:
+ column = Files.FileColumns.DATE_MODIFIED;
+ break;
+ case MtpConstants.PROPERTY_DATE_ADDED:
+ column = Files.FileColumns.DATE_ADDED;
+ break;
+ case MtpConstants.PROPERTY_ORIGINAL_RELEASE_DATE:
+ column = Audio.AudioColumns.YEAR;
+ break;
+ case MtpConstants.PROPERTY_PARENT_OBJECT:
+ column = Files.FileColumns.PARENT;
+ break;
+ case MtpConstants.PROPERTY_PERSISTENT_UID:
+ // PUID is concatenation of storageID and object handle
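+ // (storage ID in the upper 32 bits, object handle in the lower 32 bits)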
+ long puid = mStorageID;
+ puid <<= 32;
+ puid += handle;
+ outIntValue[0] = puid;
+ return MtpConstants.RESPONSE_OK;
+ case MtpConstants.PROPERTY_DURATION:
+ column = Audio.AudioColumns.DURATION;
+ break;
+ case MtpConstants.PROPERTY_TRACK:
+ if (queryInt(handle, Audio.AudioColumns.TRACK, outIntValue)) {
+ // track is stored in lower 3 decimal digits
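+ // e.g. a stored value of 2005 (disc 2, track 5) is reported as track 5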
+ outIntValue[0] %= 1000;
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ case MtpConstants.PROPERTY_DISPLAY_NAME:
+ column = MediaColumns.DISPLAY_NAME;
+ isString = true;
+ break;
+ case MtpConstants.PROPERTY_ARTIST:
+ column = Audio.AudioColumns.ARTIST;
+ isString = true;
+ break;
+ case MtpConstants.PROPERTY_ALBUM_NAME:
+ column = Audio.AudioColumns.ALBUM;
+ isString = true;
+ break;
+ case MtpConstants.PROPERTY_ALBUM_ARTIST:
+ column = Audio.AudioColumns.ALBUM_ARTIST;
+ isString = true;
+ break;
+ case MtpConstants.PROPERTY_GENRE:
+ String genre = queryGenre(handle);
+ if (genre != null) {
+ genre.getChars(0, genre.length(), outStringValue, 0);
+ outStringValue[genre.length()] = 0;
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ case MtpConstants.PROPERTY_COMPOSER:
+ column = Audio.AudioColumns.COMPOSER;
+ isString = true;
+ break;
+ case MtpConstants.PROPERTY_DESCRIPTION:
+ column = Images.ImageColumns.DESCRIPTION;
+ isString = true;
+ break;
+ default:
+ return MtpConstants.RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+ }
+
+ if (isString) {
+ String value = queryString(handle, column);
+ if (value != null) {
+ value.getChars(0, value.length(), outStringValue, 0);
+ outStringValue[value.length()] = 0;
+ return MtpConstants.RESPONSE_OK;
+ }
+ } else {
+ if (queryInt(handle, column, outIntValue)) {
+ return MtpConstants.RESPONSE_OK;
+ }
+ }
+ // query failed if we get here
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+
+ private int setObjectProperty(int handle, int property,
+ long intValue, String stringValue) {
+ Log.d(TAG, "setObjectProperty: " + property);
+
+ switch (property) {
+ case MtpConstants.PROPERTY_OBJECT_FILE_NAME:
+ return renameFile(handle, stringValue);
+
+ default:
+ return MtpConstants.RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+ }
+ }
+
+ private int getDeviceProperty(int property, long[] outIntValue, char[] outStringValue) {
+ Log.d(TAG, "getDeviceProperty: " + property);
+
+ switch (property) {
+ case MtpConstants.DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER:
+ case MtpConstants.DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME:
+ // writable string properties kept in our device property database
+ Cursor c = null;
+ try {
+ c = mDevicePropDb.query("properties", DEVICE_PROPERTY_PROJECTION,
+ DEVICE_PROPERTY_WHERE, new String[] { Integer.toString(property) },
+ null, null, null);
+
+ if (c != null && c.moveToNext()) {
+ String value = c.getString(1);
+ int length = value.length();
+ if (length > 255) {
+ length = 255;
+ }
+ value.getChars(0, length, outStringValue, 0);
+ outStringValue[length] = 0;
+ } else {
+ outStringValue[0] = 0;
+ }
+ return MtpConstants.RESPONSE_OK;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ }
+
+ return MtpConstants.RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+ }
+
+ private int setDeviceProperty(int property, long intValue, String stringValue) {
+ Log.d(TAG, "setDeviceProperty: " + property + " : " + stringValue);
+
+ switch (property) {
+ case MtpConstants.DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER:
+ case MtpConstants.DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME:
+ // writable string properties kept in our device property database
+ try {
+ ContentValues values = new ContentValues();
+ values.put("code", property);
+ values.put("value", stringValue);
+ mDevicePropDb.insert("properties", "code", values);
+ return MtpConstants.RESPONSE_OK;
+ } catch (Exception e) {
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+ }
+
+ return MtpConstants.RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+ }
+
+ private boolean getObjectInfo(int handle, int[] outStorageFormatParent,
+ char[] outName, long[] outSizeModified) {
+ Log.d(TAG, "getObjectInfo: " + handle);
+ Cursor c = null;
+ try {
+ c = mMediaProvider.query(mObjectsUri, OBJECT_INFO_PROJECTION,
+ ID_WHERE, new String[] { Integer.toString(handle) }, null);
+ if (c != null && c.moveToNext()) {
+ outStorageFormatParent[0] = mStorageID;
+ outStorageFormatParent[1] = c.getInt(2);
+ outStorageFormatParent[2] = c.getInt(3);
+
+ // extract name from path
+ String path = c.getString(1);
+ int lastSlash = path.lastIndexOf('/');
+ int start = (lastSlash >= 0 ? lastSlash + 1 : 0);
+ int end = path.length();
+ if (end - start > 255) {
+ end = start + 255;
+ }
+ path.getChars(start, end, outName, 0);
+ outName[end - start] = 0;
+
+ outSizeModified[0] = c.getLong(4);
+ outSizeModified[1] = c.getLong(5);
+ return true;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getObjectInfo", e);
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ return false;
+ }
+
+ private int getObjectFilePath(int handle, char[] outFilePath, long[] outFileLength) {
+ Log.d(TAG, "getObjectFilePath: " + handle);
+ if (handle == 0) {
+ // special case root directory
+ mMediaStoragePath.getChars(0, mMediaStoragePath.length(), outFilePath, 0);
+ outFilePath[mMediaStoragePath.length()] = 0;
+ outFileLength[0] = 0;
+ return MtpConstants.RESPONSE_OK;
+ }
+ Cursor c = null;
+ try {
+ c = mMediaProvider.query(mObjectsUri, PATH_SIZE_PROJECTION,
+ ID_WHERE, new String[] { Integer.toString(handle) }, null);
+ if (c != null && c.moveToNext()) {
+ String path = externalToMediaPath(c.getString(1));
+ path.getChars(0, path.length(), outFilePath, 0);
+ outFilePath[path.length()] = 0;
+ outFileLength[0] = c.getLong(2);
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getObjectFilePath", e);
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ }
+
+ private int deleteRecursive(int handle) throws RemoteException {
+ int[] children = getObjectList(0 /* storageID */, 0 /* format */, handle);
+ Uri uri = Files.getMtpObjectsUri(mVolumeName, handle);
+ // delete parent first, to avoid potential infinite recursion
+ int count = mMediaProvider.delete(uri, null, null);
+ if (count == 1) {
+ if (children != null) {
+ for (int i = 0; i < children.length; i++) {
+ count += deleteRecursive(children[i]);
+ }
+ }
+ }
+ return count;
+ }
+
+ private int deleteFile(int handle) {
+ Log.d(TAG, "deleteFile: " + handle);
+ mDatabaseModified = true;
+ try {
+ if (deleteRecursive(handle) > 0) {
+ return MtpConstants.RESPONSE_OK;
+ } else {
+ return MtpConstants.RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in deleteFile", e);
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+ }
+
+ private int[] getObjectReferences(int handle) {
+ Log.d(TAG, "getObjectReferences for: " + handle);
+ Uri uri = Files.getMtpReferencesUri(mVolumeName, handle);
+ Cursor c = null;
+ try {
+ c = mMediaProvider.query(uri, ID_PROJECTION, null, null, null);
+ if (c == null) {
+ return null;
+ }
+ int count = c.getCount();
+ if (count > 0) {
+ int[] result = new int[count];
+ for (int i = 0; i < count; i++) {
+ c.moveToNext();
+ result[i] = c.getInt(0);
+ }
+ return result;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in getObjectList", e);
+ } finally {
+ if (c != null) {
+ c.close();
+ }
+ }
+ return null;
+ }
+
+ private int setObjectReferences(int handle, int[] references) {
+ mDatabaseModified = true;
+ Uri uri = Files.getMtpReferencesUri(mVolumeName, handle);
+ int count = references.length;
+ ContentValues[] valuesList = new ContentValues[count];
+ for (int i = 0; i < count; i++) {
+ ContentValues values = new ContentValues();
+ values.put(Files.FileColumns._ID, references[i]);
+ valuesList[i] = values;
+ }
+ try {
+ if (count == mMediaProvider.bulkInsert(uri, valuesList)) {
+ return MtpConstants.RESPONSE_OK;
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in setObjectReferences", e);
+ }
+ return MtpConstants.RESPONSE_GENERAL_ERROR;
+ }
+
+ private void sessionStarted() {
+ Log.d(TAG, "sessionStarted");
+ mDatabaseModified = false;
+ }
+
+ private void sessionEnded() {
+ Log.d(TAG, "sessionEnded");
+ if (mDatabaseModified) {
+ Log.d(TAG, "sending ACTION_MTP_SESSION_END");
+ mContext.sendBroadcast(new Intent(Mtp.ACTION_MTP_SESSION_END));
+ mDatabaseModified = false;
+ }
+ }
+
+ // used by the JNI code
+ private int mNativeContext;
+
+ private native final void native_setup();
+ private native final void native_finalize();
+}
diff --git a/media/java/android/media/MtpServer.java b/media/java/android/media/MtpServer.java
new file mode 100644
index 0000000..7f15276
--- /dev/null
+++ b/media/java/android/media/MtpServer.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.util.Log;
+
+/**
+ * Java wrapper for MTP/PTP support as USB responder.
+ * {@hide}
+ */
+public class MtpServer {
+
+ private static final String TAG = "MtpServer";
+
+ static {
+ System.loadLibrary("media_jni");
+ }
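+
+ // Illustrative lifecycle sketch (assuming a single storage volume and an
+ // already-initialized MtpDatabase; the storage path below is only an example):
+ //
+ //   MtpServer server = new MtpServer(database, "/mnt/sdcard");
+ //   server.setPtpMode(false);   // expose the full MTP feature set rather than PTP
+ //   server.start();             // begin servicing the USB session
+ //   ...
+ //   server.stop();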
+
+ public MtpServer(MtpDatabase database, String storagePath) {
+ native_setup(database, storagePath);
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ native_finalize();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ public void start() {
+ native_start();
+ }
+
+ public void stop() {
+ native_stop();
+ }
+
+ public void sendObjectAdded(int handle) {
+ native_send_object_added(handle);
+ }
+
+ public void sendObjectRemoved(int handle) {
+ native_send_object_removed(handle);
+ }
+
+ public void setPtpMode(boolean usePtp) {
+ native_set_ptp_mode(usePtp);
+ }
+
+ // used by the JNI code
+ private int mNativeContext;
+
+ private native final void native_setup(MtpDatabase database, String storagePath);
+ private native final void native_finalize();
+ private native final void native_start();
+ private native final void native_stop();
+ private native final void native_send_object_added(int handle);
+ private native final void native_send_object_removed(int handle);
+ private native final void native_set_ptp_mode(boolean usePtp);
+}
diff --git a/media/java/android/media/videoeditor/AudioTrack.java b/media/java/android/media/videoeditor/AudioTrack.java
new file mode 100755
index 0000000..d02709e
--- /dev/null
+++ b/media/java/android/media/videoeditor/AudioTrack.java
@@ -0,0 +1,487 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+import java.lang.ref.SoftReference;
+
+/**
+ * This class represents an audio track. The audio file is mixed with the
+ * audio samples of the media items.
+ * {@hide}
+ */
+public class AudioTrack {
+ // Instance variables
+ private final String mUniqueId;
+ private final String mFilename;
+ private long mStartTimeMs;
+ private long mTimelineDurationMs;
+ private int mVolumePercent;
+ private long mBeginBoundaryTimeMs;
+ private long mEndBoundaryTimeMs;
+ private boolean mLoop;
+ private boolean mMuted;
+
+ private final long mDurationMs;
+ private final int mAudioChannels;
+ private final int mAudioType;
+ private final int mAudioBitrate;
+ private final int mAudioSamplingFrequency;
+
+ // Ducking variables
+ private int mDuckingThreshold;
+ private int mDuckedTrackVolume;
+ private boolean mIsDuckingEnabled;
+
+ // The audio waveform filename
+ private String mAudioWaveformFilename;
+ // The audio waveform data
+ private SoftReference<WaveformData> mWaveformData;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private AudioTrack() throws IOException {
+ this(null, null, null);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param audioTrackId The audio track id
+ * @param filename The absolute file name
+ *
+ * @throws IOException if file is not found
+ * @throws IllegalArgumentException if file format is not supported or if
+ * the codec is not supported
+ */
+ public AudioTrack(VideoEditor editor, String audioTrackId, String filename)
+ throws IOException {
+ mUniqueId = audioTrackId;
+ mFilename = filename;
+ mStartTimeMs = 0;
+ // TODO: This value should be the duration of the audio file
+ mDurationMs = 300000;
+ // TODO: This value needs to be read from the audio track of the source
+ // file
+ mAudioChannels = 2;
+ mAudioType = MediaProperties.ACODEC_AAC_LC;
+ mAudioBitrate = 128000;
+ mAudioSamplingFrequency = 44100;
+
+ mTimelineDurationMs = mDurationMs;
+ mVolumePercent = 100;
+
+ // Play the entire audio track
+ mBeginBoundaryTimeMs = 0;
+ mEndBoundaryTimeMs = mDurationMs;
+
+ // By default loop is disabled
+ mLoop = false;
+
+ // By default the audio track is not muted
+ mMuted = false;
+
+ // Ducking is enabled by default
+ mDuckingThreshold = 0;
+ mDuckedTrackVolume = 0;
+ mIsDuckingEnabled = true;
+
+ // The audio waveform file is generated later
+ mAudioWaveformFilename = null;
+ mWaveformData = null;
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param audioTrackId The audio track id
+ * @param filename The audio filename
+ * @param startTimeMs the start time in milliseconds (relative to the
+ * timeline)
+ * @param beginMs start time in the audio track in milliseconds (relative to
+ * the beginning of the audio track)
+ * @param endMs end time in the audio track in milliseconds (relative to the
+ * beginning of the audio track)
+ * @param loop true to loop the audio track
+ * @param volume The volume in percentage
+ * @param muted true if the audio track is muted
+ * @param threshold Ducking will be activated when the relative energy in
+ * the media item's audio signal goes above this value. The valid
+ * range of values is 0 to 100.
+ * @param duckedTrackVolume The relative volume of the audio track when ducking
+ * is active. The valid range of values is 0 to 100.
+ * @param audioWaveformFilename The name of the waveform file
+ *
+ * @throws IOException if file is not found
+ */
+ AudioTrack(VideoEditor editor, String audioTrackId, String filename, long startTimeMs,
+ long beginMs, long endMs, boolean loop, int volume, boolean muted,
+ boolean duckingEnabled, int duckThreshold, int duckedTrackVolume,
+ String audioWaveformFilename) throws IOException {
+ mUniqueId = audioTrackId;
+ mFilename = filename;
+ mStartTimeMs = startTimeMs;
+
+ // TODO: This value should be the duration of the audio file
+ mDurationMs = 300000;
+
+ // TODO: This value needs to be read from the audio track of the source
+ // file
+ mAudioChannels = 2;
+ mAudioType = MediaProperties.ACODEC_AAC_LC;
+ mAudioBitrate = 128000;
+ mAudioSamplingFrequency = 44100;
+
+ mTimelineDurationMs = endMs - beginMs;
+ mVolumePercent = volume;
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+
+ mLoop = loop;
+ mMuted = muted;
+
+ mIsDuckingEnabled = duckingEnabled;
+ mDuckingThreshold = duckThreshold;
+ mDuckedTrackVolume = duckedTrackVolume;
+
+ mAudioWaveformFilename = audioWaveformFilename;
+ if (audioWaveformFilename != null) {
+ mWaveformData =
+ new SoftReference<WaveformData>(new WaveformData(audioWaveformFilename));
+ } else {
+ mWaveformData = null;
+ }
+ }
+
+ /**
+ * @return The id of the audio track
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Get the filename source for this audio track.
+ *
+ * @return The filename as an absolute file name
+ */
+ public String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * @return The number of audio channels in the source of this audio track
+ */
+ public int getAudioChannels() {
+ return mAudioChannels;
+ }
+
+ /**
+ * @return The audio codec of the source of this audio track
+ */
+ public int getAudioType() {
+ return mAudioType;
+ }
+
+ /**
+ * @return The audio sample frequency of the audio track
+ */
+ public int getAudioSamplingFrequency() {
+ return mAudioSamplingFrequency;
+ }
+
+ /**
+ * @return The audio bitrate of the audio track
+ */
+ public int getAudioBitrate() {
+ return mAudioBitrate;
+ }
+
+ /**
+ * Set the volume of this audio track as percentage of the volume in the
+ * original audio source file.
+ *
+ * @param volumePercent Percentage of the volume to apply. If it is set to
+ * 0, the track is muted. If it is set to 100, the volume
+ * is the same as the original volume. If it is set to 200, the volume
+ * is doubled (provided that volume amplification is supported)
+ *
+ * @throws UnsupportedOperationException if volume amplification is
+ * requested and is not supported.
+ */
+ public void setVolume(int volumePercent) {
+ mVolumePercent = volumePercent;
+ }
+
+ /**
+ * Get the volume of the audio track as percentage of the volume in the
+ * original audio source file.
+ *
+ * @return The volume in percentage
+ */
+ public int getVolume() {
+ return mVolumePercent;
+ }
+
+ /**
+ * @param muted true to mute the audio track
+ */
+ public void setMute(boolean muted) {
+ mMuted = muted;
+ }
+
+ /**
+ * @return true if the audio track is muted
+ */
+ public boolean isMuted() {
+ return mMuted;
+ }
+
+ /**
+ * Set the start time of this audio track relative to the storyboard
+ * timeline. Default value is 0.
+ *
+ * @param startTimeMs the start time in milliseconds
+ */
+ public void setStartTime(long startTimeMs) {
+ mStartTimeMs = startTimeMs;
+ }
+
+ /**
+ * Get the start time of this audio track relative to the storyboard
+ * timeline.
+ *
+ * @return The start time in milliseconds
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * @return The duration in milliseconds. This value represents the audio
+ * track duration (not looped)
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * @return The timeline duration. If looping is enabled this value
+ * represents the duration of the looped audio track, otherwise it
+ * is the duration of the audio track (mDurationMs).
+ */
+ public long getTimelineDuration() {
+ return mTimelineDurationMs;
+ }
+
+ /**
+ * Sets the start and end marks for trimming an audio track
+ *
+ * @param beginMs start time in the audio track in milliseconds (relative to
+ * the beginning of the audio track)
+ * @param endMs end time in the audio track in milliseconds (relative to the
+ * beginning of the audio track)
+ */
+ public void setExtractBoundaries(long beginMs, long endMs) {
+ if (beginMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid start time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid end time");
+ }
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+ if (mLoop) {
+ // TODO: Compute mDurationMs (from the beginning of the loop until
+ // the end of all the loops).
+ mTimelineDurationMs = mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
+ } else {
+ mTimelineDurationMs = mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
+ }
+ }
+
+ /**
+ * @return The boundary begin time
+ */
+ public long getBoundaryBeginTime() {
+ return mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * @return The boundary end time
+ */
+ public long getBoundaryEndTime() {
+ return mEndBoundaryTimeMs;
+ }
+
+ /**
+ * Enable the loop mode for this audio track. Note that only one of the
+ * audio tracks in the timeline can have the loop mode enabled. When looping
+ * is enabled the samples between mBeginBoundaryTimeMs and
+ * mEndBoundaryTimeMs are looped.
+ */
+ public void enableLoop() {
+ mLoop = true;
+ }
+
+ /**
+ * Disable the loop mode
+ */
+ public void disableLoop() {
+ mLoop = false;
+ }
+
+ /**
+ * @return true if looping is enabled
+ */
+ public boolean isLooping() {
+ return mLoop;
+ }
+
+ /**
+ * Disable the audio duck effect
+ */
+ public void disableDucking() {
+ mIsDuckingEnabled = false;
+ }
+
+ /**
+ * Enable ducking by specifying the required parameters
+ *
+ * @param threshold Ducking will be activated when the relative energy in
+ * the media item's audio signal goes above this value. The valid
+ * range of values is 0 to 100.
+ * @param duckedTrackVolume The relative volume of the audio track when ducking
+ * is active. The valid range of values is 0 to 100.
+ */
+ public void enableDucking(int threshold, int duckedTrackVolume) {
+ if (threshold < 0 || threshold > 100) {
+ throw new IllegalArgumentException("Invalid threshold value: " + threshold);
+ }
+
+ if (duckedTrackVolume < 0 || duckedTrackVolume > 100) {
+ throw new IllegalArgumentException("Invalid duckedTrackVolume value: "
+ + duckedTrackVolume);
+ }
+
+ mDuckingThreshold = threshold;
+ mDuckedTrackVolume = duckedTrackVolume;
+ mIsDuckingEnabled = true;
+ }
+
+ /**
+ * @return true if ducking is enabled
+ */
+ public boolean isDuckingEnabled() {
+ return mIsDuckingEnabled;
+ }
+
+ /**
+ * @return The ducking threshold
+ */
+ public int getDuckingThreshhold() {
+ return mDuckingThreshold;
+ }
+
+ /**
+ * @return The ducked track volume
+ */
+ public int getDuckedTrackVolume() {
+ return mDuckedTrackVolume;
+ }
+
+ /**
+ * This API generates a file containing the sample volume levels of
+ * this audio track object. This function may take significant time and is
+ * blocking. The filename can be retrieved using getAudioWaveformFilename().
+ *
+ * @param listener The progress listener
+ *
+ * @throws IOException if the output file cannot be created
+ * @throws IllegalArgumentException if the audio file does not have a valid
+ * audio track
+ */
+ public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
+ throws IOException {
+ // TODO: Set mAudioWaveformFilename at the end once the extract is
+ // complete
+ mWaveformData = new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
+ }
+
+ /**
+ * Get the audio waveform file name if extractAudioWaveform was successful.
+ * The file format is as follows (a minimal reading sketch appears after this method):
+ * <ul>
+ * <li>first 4 bytes provide the number of samples for each value, as
+ * big-endian signed</li>
+ * <li>4 following bytes is the total number of values in the file, as
+ * big-endian signed</li>
+ * <li>then, all values follow as bytes</li>
+ * </ul>
+ *
+ * @return the name of the file, null if the file does not exist
+ */
+ String getAudioWaveformFilename() {
+ return mAudioWaveformFilename;
+ }
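+
+ // Minimal sketch of reading the waveform file described above, assuming the layout
+ // is exactly as documented (DataInputStream reads the counts as big-endian ints):
+ //
+ //   DataInputStream in = new DataInputStream(new FileInputStream(waveformFilename));
+ //   int framesPerValue = in.readInt();  // samples represented by each value
+ //   int valueCount = in.readInt();      // total number of values in the file
+ //   byte[] values = new byte[valueCount];
+ //   in.readFully(values);               // one volume level per value
+ //   in.close();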
+
+ /**
+ * @return The waveform data
+ */
+ public WaveformData getWaveformData() {
+ if (mWaveformData == null) {
+ return null;
+ }
+
+ WaveformData waveformData = mWaveformData.get();
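+ // The SoftReference may have been cleared under memory pressure; if so, re-read
+ // the waveform data from the file and cache a fresh reference.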
+ if (waveformData != null) {
+ return waveformData;
+ } else if (mAudioWaveformFilename != null) {
+ waveformData = new WaveformData(mAudioWaveformFilename);
+ mWaveformData = new SoftReference<WaveformData>(waveformData);
+ return waveformData;
+ } else {
+ return null;
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof AudioTrack)) {
+ return false;
+ }
+ return mUniqueId.equals(((AudioTrack)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
diff --git a/media/java/android/media/videoeditor/Effect.java b/media/java/android/media/videoeditor/Effect.java
new file mode 100755
index 0000000..8547e13
--- /dev/null
+++ b/media/java/android/media/videoeditor/Effect.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+/**
+ * This is the super class for all effects. An effect can only be applied to a
+ * single media item. If one wants to apply the same effect to multiple media
+ * items, {@link MediaItem#addEffect(Effect)} must be invoked on each
+ * of the MediaItem objects.
+ * {@hide}
+ */
+public abstract class Effect {
+ // Instance variables
+ private final String mUniqueId;
+ // The effect owner
+ private final MediaItem mMediaItem;
+ protected long mDurationMs;
+ // The start time of the effect relative to the media item timeline
+ protected long mStartTimeMs;
+
+ /**
+ * Default constructor
+ */
+ @SuppressWarnings("unused")
+ private Effect() {
+ mMediaItem = null;
+ mUniqueId = null;
+ mStartTimeMs = 0;
+ mDurationMs = 0;
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startTimeMs The start time relative to the media item to which it
+ * is applied
+ * @param durationMs The effect duration in milliseconds
+ */
+ public Effect(MediaItem mediaItem, String effectId, long startTimeMs, long durationMs) {
+ if (mediaItem == null) {
+ throw new IllegalArgumentException("Media item cannot be null");
+ }
+
+ if (startTimeMs + durationMs > mediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time and duration");
+ }
+
+ mMediaItem = mediaItem;
+ mUniqueId = effectId;
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+ }
+
+ /**
+ * @return The id of the effect
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Set the duration of the effect. If a preview or export is in progress,
+ * then this change takes effect in the next preview or export session.
+ *
+ * @param durationMs of the effect in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Duration is too large");
+ }
+
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * Get the duration of the effect
+ *
+ * @return The duration of the effect in milliseconds
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * Set the start time of the effect. If a preview or export is in progress, then
+ * this change takes effect in the next preview or export session.
+ *
+ * @param startTimeMs The start time of the effect relative to the beginning
+ * of the media item in milliseconds
+ */
+ public void setStartTime(long startTimeMs) {
+ if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Start time is too large");
+ }
+
+ mStartTimeMs = startTimeMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * @return The start time in milliseconds
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * Set the start time and duration
+ *
+ * @param startTimeMs start time in milliseconds
+ * @param durationMs The duration in milliseconds
+ */
+ public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
+ if (startTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time or duration");
+ }
+
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * @return The media item owner
+ */
+ public MediaItem getMediaItem() {
+ return mMediaItem;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Effect)) {
+ return false;
+ }
+ return mUniqueId.equals(((Effect)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
diff --git a/media/java/android/media/videoeditor/EffectColor.java b/media/java/android/media/videoeditor/EffectColor.java
new file mode 100755
index 0000000..ac48e37
--- /dev/null
+++ b/media/java/android/media/videoeditor/EffectColor.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+/**
+ * This class allows applying a color effect to a media item.
+ * {@hide}
+ */
+public class EffectColor extends Effect {
+
+ /**
+ * Change the video frame color to the RGB color value provided
+ */
+ public static final int TYPE_COLOR = 1;
+ /**
+ * Change the video frame color to a gradient from the given RGB color (at the top of
+ * the frame) to black (at the bottom of the frame).
+ */
+ public static final int TYPE_GRADIENT = 2;
+ /**
+ * Change the video frame color to sepia
+ */
+ public static final int TYPE_SEPIA = 3;
+ /**
+ * Invert the video frame color
+ */
+ public static final int TYPE_NEGATIVE = 4;
+ /**
+ * Make the video look as if it was recorded in the 1950s
+ */
+ public static final int TYPE_FIFTIES = 5;
+
+ // Predefined colors
+ public static final int GREEN = 0x0000ff00;
+ public static final int PINK = 0x00ff66cc;
+ public static final int GRAY = 0x007f7f7f;
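+ // The predefined colors above are packed as 0x00RRGGBB (RGB 888, no alpha).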
+
+ // The effect type
+ private final int mType;
+
+ // The effect color
+ private final int mColor;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private EffectColor() {
+ this(null, null, 0, 0, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startTimeMs The start time relative to the media item to which it
+ * is applied
+ * @param durationMs The duration of this effect in milliseconds
+ * @param type type of the effect. type is one of: TYPE_COLOR,
+ * TYPE_GRADIENT, TYPE_SEPIA, TYPE_NEGATIVE, TYPE_FIFTIES.
+ * @param color If type is TYPE_COLOR, color is the RGB color as 888.
+ * If type is TYPE_GRADIENT, color is the RGB color at the
+ * top of the frame. Otherwise, color is ignored
+ */
+ public EffectColor(MediaItem mediaItem, String effectId, long startTimeMs, long durationMs,
+ int type, int color) {
+ super(mediaItem, effectId, startTimeMs, durationMs);
+ switch (type) {
+ case TYPE_COLOR:
+ case TYPE_GRADIENT: {
+ mColor = color;
+ break;
+ }
+
+ case TYPE_SEPIA:
+ case TYPE_NEGATIVE:
+ case TYPE_FIFTIES: {
+ mColor = -1;
+ break;
+ }
+
+ default: {
+ throw new IllegalArgumentException("Invalid type: " + type);
+ }
+ }
+
+ mType = type;
+ }
+
+ /**
+ * @return The effect type
+ */
+ public int getType() {
+ return mType;
+ }
+
+ /**
+ * @return the color as RGB 888 if type is TYPE_COLOR or TYPE_GRADIENT.
+ */
+ public int getColor() {
+ return mColor;
+ }
+}
diff --git a/media/java/android/media/videoeditor/EffectKenBurns.java b/media/java/android/media/videoeditor/EffectKenBurns.java
new file mode 100755
index 0000000..ae2e70d
--- /dev/null
+++ b/media/java/android/media/videoeditor/EffectKenBurns.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import android.graphics.Rect;
+
+/**
+ * This class represents a Ken Burns effect.
+ * {@hide}
+ */
+public class EffectKenBurns extends Effect {
+ // Instance variables
+ private Rect mStartRect;
+ private Rect mEndRect;
+
+ /**
+ * Objects of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private EffectKenBurns() {
+ this(null, null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startRect The start rectangle
+ * @param endRect The end rectangle
+ * @param startTimeMs The start time
+ * @param durationMs The duration of the Ken Burns effect in milliseconds
+ */
+ public EffectKenBurns(MediaItem mediaItem, String effectId, Rect startRect, Rect endRect,
+ long startTimeMs, long durationMs) {
+ super(mediaItem, effectId, startTimeMs, durationMs);
+
+ mStartRect = startRect;
+ mEndRect = endRect;
+ }
+
+ /**
+ * @param startRect The start rectangle
+ *
+ * @throws IllegalArgumentException if start rectangle is incorrectly set.
+ */
+ public void setStartRect(Rect startRect) {
+ mStartRect = startRect;
+ }
+
+ /**
+ * @return The start rectangle
+ */
+ public Rect getStartRect() {
+ return mStartRect;
+ }
+
+ /**
+ * @param endRect The end rectangle
+ *
+ * @throws IllegalArgumentException if end rectangle is incorrectly set.
+ */
+ public void setEndRect(Rect endRect) {
+ mEndRect = endRect;
+ }
+
+ /**
+ * @return The end rectangle
+ */
+ public Rect getEndRect() {
+ return mEndRect;
+ }
+}
diff --git a/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java b/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java
new file mode 100644
index 0000000..1cce148
--- /dev/null
+++ b/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+/**
+ * This listener interface is used by
+ * {@link MediaVideoItem#extractAudioWaveform(ExtractAudioWaveformProgressListener listener)}
+ * or
+ * {@link AudioTrack#extractAudioWaveform(ExtractAudioWaveformProgressListener listener)}
+ * {@hide}
+ */
+public interface ExtractAudioWaveformProgressListener {
+ /**
+ * This method notifies the listener of the progress status of
+ * an extractAudioWaveform operation.
+ * This method may be called up to 100 times for one operation.
+ *
+ * @param progress The progress in %. At the beginning of the operation,
+ * this value is set to 0; at the end, the value is set to 100.
+ */
+ public void onProgress(int progress);
+}
+
diff --git a/media/java/android/media/videoeditor/MediaImageItem.java b/media/java/android/media/videoeditor/MediaImageItem.java
new file mode 100755
index 0000000..6dc36c2
--- /dev/null
+++ b/media/java/android/media/videoeditor/MediaImageItem.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.util.Log;
+import android.util.Pair;
+
+/**
+ * This class represents an image item on the storyboard. Note that images are
+ * scaled down to the maximum supported resolution by preserving the native
+ * aspect ratio. To learn the scaled image dimensions use
+ * {@link #getScaledWidth()} and {@link #getScaledHeight()} respectively.
+ *
+ * {@hide}
+ */
+public class MediaImageItem extends MediaItem {
+ // Logging
+ private static final String TAG = "MediaImageItem";
+
+ // The resize paint
+ private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
+
+ // Instance variables
+ private final int mWidth;
+ private final int mHeight;
+ private final int mAspectRatio;
+ private long mDurationMs;
+ private int mScaledWidth, mScaledHeight;
+
+ /**
+ * This class cannot be instantiated by using the default constructor
+ */
+ @SuppressWarnings("unused")
+ private MediaImageItem() throws IOException {
+ this(null, null, null, 0, RENDERING_MODE_BLACK_BORDER);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The media item id
+ * @param filename The image file name
+ * @param durationMs The duration of the image on the storyboard
+ * @param renderingMode The rendering mode
+ *
+ * @throws IOException
+ */
+ public MediaImageItem(VideoEditor editor, String mediaItemId, String filename, long durationMs,
+ int renderingMode)
+ throws IOException {
+ super(editor, mediaItemId, filename, renderingMode);
+
+ // Determine the dimensions of the image
+ final BitmapFactory.Options dbo = new BitmapFactory.Options();
+ dbo.inJustDecodeBounds = true;
+ BitmapFactory.decodeFile(filename, dbo);
+
+ mWidth = dbo.outWidth;
+ mHeight = dbo.outHeight;
+ mDurationMs = durationMs;
+
+ // TODO: Determine the aspect ratio from the width and height
+ mAspectRatio = MediaProperties.ASPECT_RATIO_4_3;
+
+ // Images are stored in memory scaled to the maximum resolution to
+ // save memory.
+ final Pair<Integer, Integer>[] resolutions =
+ MediaProperties.getSupportedResolutions(mAspectRatio);
+ // Get the highest resolution
+ final Pair<Integer, Integer> maxResolution = resolutions[resolutions.length - 1];
+ if (mHeight > maxResolution.second) {
+ // We need to scale the image
+ scaleImage(filename, maxResolution.first, maxResolution.second);
+ mScaledWidth = maxResolution.first;
+ mScaledHeight = maxResolution.second;
+ } else {
+ mScaledWidth = mWidth;
+ mScaledHeight = mHeight;
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getFileType() {
+ if (mFilename.endsWith(".jpg") || mFilename.endsWith(".jpeg")) {
+ return MediaProperties.FILE_JPEG;
+ } else if (mFilename.endsWith(".png")) {
+ return MediaProperties.FILE_PNG;
+ } else {
+ return MediaProperties.FILE_UNSUPPORTED;
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * @return The scaled width of the image.
+ */
+ public int getScaledWidth() {
+ return mScaledWidth;
+ }
+
+ /**
+ * @return The scaled height of the image.
+ */
+ public int getScaledHeight() {
+ return mScaledHeight;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
+
+ /**
+ * This method will adjust the duration of bounding transitions, effects
+ * and overlays if the current duration of the transitions becomes greater
+ * than the maximum allowable duration.
+ *
+ * @param durationMs The duration of the image in the storyboard timeline
+ */
+ public void setDuration(long durationMs) {
+ mDurationMs = durationMs;
+
+ adjustTransitions();
+ adjustOverlays();
+ adjustEffects();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getTimelineDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap getThumbnail(int width, int height, long timeMs) throws IOException {
+ return scaleImage(mFilename, width, height);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
+ int thumbnailCount) throws IOException {
+ final Bitmap thumbnail = scaleImage(mFilename, width, height);
+ final Bitmap[] thumbnailArray = new Bitmap[thumbnailCount];
+ for (int i = 0; i < thumbnailCount; i++) {
+ thumbnailArray[i] = thumbnail;
+ }
+ return thumbnailArray;
+ }
+
+ /**
+ * Resize a bitmap to the specified width and height
+ *
+ * @param filename The filename
+ * @param width The thumbnail width
+ * @param height The thumbnail height
+ *
+ * @return The resized bitmap
+ */
+ private Bitmap scaleImage(String filename, int width, int height)
+ throws IOException {
+ final BitmapFactory.Options dbo = new BitmapFactory.Options();
+ dbo.inJustDecodeBounds = true;
+ BitmapFactory.decodeFile(filename, dbo);
+
+ final int nativeWidth = dbo.outWidth;
+ final int nativeHeight = dbo.outHeight;
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "generateThumbnail: Input: " + nativeWidth + "x" + nativeHeight
+ + ", resize to: " + width + "x" + height);
+ }
+
+ final Bitmap srcBitmap;
+ float bitmapWidth, bitmapHeight;
+ if (nativeWidth > width || nativeHeight > height) {
+ float dx = ((float)nativeWidth) / ((float)width);
+ float dy = ((float)nativeHeight) / ((float)height);
+ if (dx > dy) {
+ bitmapWidth = width;
+ bitmapHeight = nativeHeight / dx;
+ } else {
+ bitmapWidth = nativeWidth / dy;
+ bitmapHeight = height;
+ }
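+ // Example: fitting a 1920x1080 source into a 640x480 box gives dx = 3.0 and
+ // dy = 2.25; since dx > dy the result is 640x360, preserving the aspect ratio.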
+ // Create the bitmap from file
+ if (nativeWidth / bitmapWidth > 1) {
+ final BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inSampleSize = nativeWidth / (int)bitmapWidth;
+ srcBitmap = BitmapFactory.decodeFile(filename, options);
+ } else {
+ srcBitmap = BitmapFactory.decodeFile(filename);
+ }
+ } else {
+ bitmapWidth = width;
+ bitmapHeight = height;
+ srcBitmap = BitmapFactory.decodeFile(filename);
+ }
+
+ if (srcBitmap == null) {
+ Log.e(TAG, "generateThumbnail: Cannot decode image bytes");
+ throw new IOException("Cannot decode file: " + mFilename);
+ }
+
+ // Create the canvas bitmap
+ final Bitmap bitmap = Bitmap.createBitmap((int)bitmapWidth, (int)bitmapHeight,
+ Bitmap.Config.ARGB_8888);
+ final Canvas canvas = new Canvas(bitmap);
+ canvas.drawBitmap(srcBitmap, new Rect(0, 0, srcBitmap.getWidth(), srcBitmap.getHeight()),
+ new Rect(0, 0, (int)bitmapWidth, (int)bitmapHeight), sResizePaint);
+ // Release the source bitmap
+ srcBitmap.recycle();
+ return bitmap;
+ }
+}
diff --git a/media/java/android/media/videoeditor/MediaItem.java b/media/java/android/media/videoeditor/MediaItem.java
new file mode 100755
index 0000000..04878f7
--- /dev/null
+++ b/media/java/android/media/videoeditor/MediaItem.java
@@ -0,0 +1,568 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import android.graphics.Bitmap;
+
+/**
+ * This abstract class describes the base class for any MediaItem. Objects are
+ * defined with a file path as the source data.
+ * {@hide}
+ */
+public abstract class MediaItem {
+ // A constant which can be used to specify the end of the file (instead of
+ // providing the actual duration of the media item).
+ public final static int END_OF_FILE = -1;
+
+ // Rendering modes
+ /**
+ * When using the RENDERING_MODE_BLACK_BORDER rendering mode video frames
+ * are resized by preserving the aspect ratio until the movie matches one of
+ * the dimensions of the output movie. The areas outside the resized video
+ * clip are rendered black.
+ */
+ public static final int RENDERING_MODE_BLACK_BORDER = 0;
+
+ /**
+ * When using the RENDERING_MODE_STRETCH rendering mode video frames are
+ * stretched horizontally or vertically to match the current aspect ratio of
+ * the video editor.
+ */
+ public static final int RENDERING_MODE_STRETCH = 1;
+
+ /**
+ * When using the RENDERING_MODE_CROPPING rendering mode video frames are
+ * scaled horizontally or vertically by preserving the original aspect
+ * ratio of the media item.
+ */
+ public static final int RENDERING_MODE_CROPPING = 2;
+
+
+ // The unique id of the MediaItem
+ private final String mUniqueId;
+
+ // The name of the file associated with the MediaItem
+ protected final String mFilename;
+
+ // List of effects
+ private final List<Effect> mEffects;
+
+ // List of overlays
+ private final List<Overlay> mOverlays;
+
+ // The rendering mode
+ private int mRenderingMode;
+
+ // Beginning and end transitions
+ protected Transition mBeginTransition;
+ protected Transition mEndTransition;
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+     * @param filename The name of the media file
+     * @param renderingMode The rendering mode
+     *
+     * @throws IOException if the file is not found
+     * @throws IllegalArgumentException if a capability such as the file format is not
+     *             supported; the exception object contains the unsupported
+     *             capability
+ */
+ protected MediaItem(VideoEditor editor, String mediaItemId, String filename,
+ int renderingMode) throws IOException {
+ mUniqueId = mediaItemId;
+ mFilename = filename;
+ mRenderingMode = renderingMode;
+ mEffects = new ArrayList<Effect>();
+ mOverlays = new ArrayList<Overlay>();
+ mBeginTransition = null;
+ mEndTransition = null;
+ }
+
+ /**
+ * @return The id of the media item
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * @return The media source file name
+ */
+ public String getFilename() {
+ return mFilename;
+ }
+
+ /**
+     * If the aspect ratio of the MediaItem is different from the aspect ratio of
+     * the editor then this API controls the rendering mode.
+     *
+     * @param renderingMode The rendering mode. It is one of:
+     *            {@link #RENDERING_MODE_BLACK_BORDER},
+     *            {@link #RENDERING_MODE_STRETCH},
+     *            {@link #RENDERING_MODE_CROPPING}
+ */
+ public void setRenderingMode(int renderingMode) {
+ mRenderingMode = renderingMode;
+ if (mBeginTransition != null) {
+ mBeginTransition.invalidate();
+ }
+
+ if (mEndTransition != null) {
+ mEndTransition.invalidate();
+ }
+ }
+
+ /**
+ * @return The rendering mode
+ */
+ public int getRenderingMode() {
+ return mRenderingMode;
+ }
+
+ /**
+ * @param transition The beginning transition
+ */
+ void setBeginTransition(Transition transition) {
+ mBeginTransition = transition;
+ }
+
+ /**
+ * @return The begin transition
+ */
+ public Transition getBeginTransition() {
+ return mBeginTransition;
+ }
+
+ /**
+ * @param transition The end transition
+ */
+ void setEndTransition(Transition transition) {
+ mEndTransition = transition;
+ }
+
+ /**
+ * @return The end transition
+ */
+ public Transition getEndTransition() {
+ return mEndTransition;
+ }
+
+ /**
+ * @return The timeline duration. This is the actual duration in the
+ * timeline (trimmed duration)
+ */
+ public abstract long getTimelineDuration();
+
+ /**
+     * @return The full duration of the media item (not trimmed)
+ */
+ public abstract long getDuration();
+
+ /**
+ * @return The source file type
+ */
+ public abstract int getFileType();
+
+ /**
+     * @return The native width of the media item
+ */
+ public abstract int getWidth();
+
+ /**
+     * @return The native height of the media item
+ */
+ public abstract int getHeight();
+
+ /**
+     * Get the aspect ratio of the source media item.
+     *
+     * @return The aspect ratio as described in MediaProperties;
+     *  MediaProperties.ASPECT_RATIO_UNDEFINED if the aspect ratio is not
+     *  one of the values defined in MediaProperties
+ */
+ public abstract int getAspectRatio();
+
+ /**
+ * Add the specified effect to this media item.
+ *
+     * Note that certain types of effects cannot be applied to both video and
+     * image media items. For example, in certain implementations a Ken Burns
+     * effect cannot be applied to a video media item.
+ *
+ * This method invalidates transition video clips if the
+ * effect overlaps with the beginning and/or the end transition.
+ *
+ * @param effect The effect to apply
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the effect start and/or duration are
+ * invalid or if the effect cannot be applied to this type of media
+ * item or if the effect id is not unique across all the Effects
+ * added.
+ */
+ public void addEffect(Effect effect) {
+ if (effect.getMediaItem() != this) {
+ throw new IllegalArgumentException("Media item mismatch");
+ }
+
+ if (mEffects.contains(effect)) {
+ throw new IllegalArgumentException("Effect already exists: " + effect.getId());
+ }
+
+ if (effect.getStartTime() + effect.getDuration() > getDuration()) {
+ throw new IllegalArgumentException(
+ "Effect start time + effect duration > media clip duration");
+ }
+
+ mEffects.add(effect);
+ invalidateTransitions(effect);
+ }
+
+ /**
+ * Remove the effect with the specified id.
+ *
+ * This method invalidates a transition video clip if the effect overlaps
+ * with a transition.
+ *
+ * @param effectId The id of the effect to be removed
+ *
+ * @return The effect that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public Effect removeEffect(String effectId) {
+ for (Effect effect : mEffects) {
+ if (effect.getId().equals(effectId)) {
+ mEffects.remove(effect);
+ invalidateTransitions(effect);
+ return effect;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Find the effect with the specified id
+ *
+ * @param effectId The effect id
+ *
+ * @return The effect with the specified id (null if it does not exist)
+ */
+ public Effect getEffect(String effectId) {
+ for (Effect effect : mEffects) {
+ if (effect.getId().equals(effectId)) {
+ return effect;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Get the list of effects.
+ *
+ * @return the effects list. If no effects exist an empty list will be returned.
+ */
+ public List<Effect> getAllEffects() {
+ return mEffects;
+ }
+
+ /**
+ * Add an overlay to the storyboard. This method invalidates a transition
+ * video clip if the overlay overlaps with a transition.
+ *
+ * @param overlay The overlay to add
+     * @throws IllegalStateException if a preview or an export is in progress
+     * @throws IllegalArgumentException if the overlay id is not unique across all the
+     *             overlays added, if the bitmap is not specified, or if the dimensions
+     *             of the bitmap do not match the dimensions of the media item
+ */
+ public void addOverlay(Overlay overlay) {
+ if (overlay.getMediaItem() != this) {
+ throw new IllegalArgumentException("Media item mismatch");
+ }
+
+ if (mOverlays.contains(overlay)) {
+ throw new IllegalArgumentException("Overlay already exists: " + overlay.getId());
+ }
+
+ if (overlay.getStartTime() + overlay.getDuration() > getDuration()) {
+ throw new IllegalArgumentException(
+ "Overlay start time + overlay duration > media clip duration");
+ }
+
+ if (overlay instanceof OverlayFrame) {
+ final OverlayFrame frame = (OverlayFrame)overlay;
+ final Bitmap bitmap = frame.getBitmap();
+ if (bitmap == null) {
+ throw new IllegalArgumentException("Overlay bitmap not specified");
+ }
+
+ final int scaledWidth, scaledHeight;
+ if (this instanceof MediaVideoItem) {
+ scaledWidth = getWidth();
+ scaledHeight = getHeight();
+ } else {
+ scaledWidth = ((MediaImageItem)this).getScaledWidth();
+ scaledHeight = ((MediaImageItem)this).getScaledHeight();
+ }
+
+ // The dimensions of the overlay bitmap must be the same as the
+ // media item dimensions
+ if (bitmap.getWidth() != scaledWidth || bitmap.getHeight() != scaledHeight) {
+ throw new IllegalArgumentException(
+ "Bitmap dimensions must match media item dimensions");
+ }
+ } else {
+ throw new IllegalArgumentException("Overlay not supported");
+ }
+
+ mOverlays.add(overlay);
+ invalidateTransitions(overlay);
+ }
+
+ /**
+ * Remove the overlay with the specified id.
+ *
+ * This method invalidates a transition video clip if the overlay overlaps
+ * with a transition.
+ *
+ * @param overlayId The id of the overlay to be removed
+ *
+ * @return The overlay that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public Overlay removeOverlay(String overlayId) {
+ for (Overlay overlay : mOverlays) {
+ if (overlay.getId().equals(overlayId)) {
+ mOverlays.remove(overlay);
+ if (overlay instanceof OverlayFrame) {
+ ((OverlayFrame)overlay).invalidate();
+ }
+ invalidateTransitions(overlay);
+ return overlay;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Find the overlay with the specified id
+ *
+ * @param overlayId The overlay id
+ *
+ * @return The overlay with the specified id (null if it does not exist)
+ */
+ public Overlay getOverlay(String overlayId) {
+ for (Overlay overlay : mOverlays) {
+ if (overlay.getId().equals(overlayId)) {
+ return overlay;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Get the list of overlays associated with this media item
+ *
+ * Note that if any overlay source files are not accessible anymore,
+ * this method will still provide the full list of overlays.
+ *
+ * @return The list of overlays. If no overlays exist an empty list will
+ * be returned.
+ */
+ public List<Overlay> getAllOverlays() {
+ return mOverlays;
+ }
+
+ /**
+     * Create a thumbnail at the specified time in the media stream, in Bitmap format
+ *
+ * @param width width of the thumbnail in pixels
+ * @param height height of the thumbnail in pixels
+ * @param timeMs The time in the source video file at which the thumbnail is
+ * requested (even if trimmed).
+ *
+ * @return The thumbnail as a Bitmap.
+ *
+ * @throws IOException if a file error occurs
+ * @throws IllegalArgumentException if time is out of video duration
+ */
+ public abstract Bitmap getThumbnail(int width, int height, long timeMs) throws IOException;
+
+ /**
+ * Get the array of Bitmap thumbnails between start and end.
+ *
+ * @param width width of the thumbnail in pixels
+ * @param height height of the thumbnail in pixels
+ * @param startMs The start of time range in milliseconds
+ * @param endMs The end of the time range in milliseconds
+ * @param thumbnailCount The thumbnail count
+ *
+ * @return The array of Bitmaps
+ *
+ * @throws IOException if a file error occurs
+ */
+ public abstract Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
+ int thumbnailCount) throws IOException;
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof MediaItem)) {
+ return false;
+ }
+ return mUniqueId.equals(((MediaItem)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+
+ /**
+ * Invalidate the start and end transitions if necessary
+ *
+ * @param effect The effect that was added or removed
+ */
+ void invalidateTransitions(Effect effect) {
+ // Check if the effect overlaps with the beginning and end transitions
+ if (mBeginTransition != null) {
+ if (effect.getStartTime() < mBeginTransition.getDuration()) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ if (effect.getStartTime() + effect.getDuration() > getDuration()
+ - mEndTransition.getDuration()) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /**
+ * Invalidate the start and end transitions if necessary
+ *
+     * @param overlay The overlay that was added or removed
+ */
+ void invalidateTransitions(Overlay overlay) {
+ // Check if the overlay overlaps with the beginning and end transitions
+ if (mBeginTransition != null) {
+ if (overlay.getStartTime() < mBeginTransition.getDuration()) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ if (overlay.getStartTime() + overlay.getDuration() > getDuration()
+ - mEndTransition.getDuration()) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /**
+     * Adjust the duration of the transitions.
+ */
+ protected void adjustTransitions() {
+ // Check if the duration of transitions need to be adjusted
+ if (mBeginTransition != null) {
+ final long maxDurationMs = mBeginTransition.getMaximumDuration();
+ if (mBeginTransition.getDuration() > maxDurationMs) {
+ mBeginTransition.setDuration(maxDurationMs);
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long maxDurationMs = mEndTransition.getMaximumDuration();
+ if (mEndTransition.getDuration() > maxDurationMs) {
+ mEndTransition.setDuration(maxDurationMs);
+ }
+ }
+ }
+
+ /**
+ * Adjust the start time and/or duration of effects.
+ */
+ protected void adjustEffects() {
+ final List<Effect> effects = getAllEffects();
+ for (Effect effect : effects) {
+ // Adjust the start time if necessary
+ final long effectStartTimeMs;
+ if (effect.getStartTime() > getDuration()) {
+ effectStartTimeMs = 0;
+ } else {
+ effectStartTimeMs = effect.getStartTime();
+ }
+
+ // Adjust the duration if necessary
+ final long effectDurationMs;
+ if (effectStartTimeMs + effect.getDuration() > getDuration()) {
+ effectDurationMs = getDuration() - effectStartTimeMs;
+ } else {
+ effectDurationMs = effect.getDuration();
+ }
+
+ if (effectStartTimeMs != effect.getStartTime() ||
+ effectDurationMs != effect.getDuration()) {
+ effect.setStartTimeAndDuration(effectStartTimeMs, effectDurationMs);
+ }
+ }
+ }
+
+ /**
+ * Adjust the start time and/or duration of overlays.
+ */
+ protected void adjustOverlays() {
+ final List<Overlay> overlays = getAllOverlays();
+ for (Overlay overlay : overlays) {
+ // Adjust the start time if necessary
+ final long overlayStartTimeMs;
+ if (overlay.getStartTime() > getDuration()) {
+ overlayStartTimeMs = 0;
+ } else {
+ overlayStartTimeMs = overlay.getStartTime();
+ }
+
+ // Adjust the duration if necessary
+ final long overlayDurationMs;
+ if (overlayStartTimeMs + overlay.getDuration() > getDuration()) {
+ overlayDurationMs = getDuration() - overlayStartTimeMs;
+ } else {
+ overlayDurationMs = overlay.getDuration();
+ }
+
+ if (overlayStartTimeMs != overlay.getStartTime() ||
+ overlayDurationMs != overlay.getDuration()) {
+ overlay.setStartTimeAndDuration(overlayStartTimeMs, overlayDurationMs);
+ }
+ }
+ }
+}
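
The invalidateTransitions() logic above reduces to two interval checks: an effect or overlay touches the begin transition if it starts before that transition ends, and touches the end transition if it extends past the point where that transition starts. A hedged standalone restatement (all names hypothetical, not part of the patch):

    // Illustrative sketch of the two interval tests performed by
    // MediaItem.invalidateTransitions() above.
    final class TransitionOverlap {
        // Overlaps the begin transition when it starts before the transition ends.
        static boolean overlapsBeginTransition(long startTimeMs, long beginTransitionDurationMs) {
            return startTimeMs < beginTransitionDurationMs;
        }

        // Overlaps the end transition when it extends past the point where the
        // end transition starts (media duration minus transition duration).
        static boolean overlapsEndTransition(long startTimeMs, long durationMs,
                long mediaItemDurationMs, long endTransitionDurationMs) {
            return startTimeMs + durationMs > mediaItemDurationMs - endTransitionDurationMs;
        }
    }
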
diff --git a/media/java/android/media/videoeditor/MediaProperties.java b/media/java/android/media/videoeditor/MediaProperties.java
new file mode 100755
index 0000000..34088fc
--- /dev/null
+++ b/media/java/android/media/videoeditor/MediaProperties.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import android.util.Pair;
+
+/**
+ * This class defines all properties of a media file, such as the supported heights,
+ * aspect ratios, and bitrates for the export function.
+ * {@hide}
+ */
+public class MediaProperties {
+ // Supported heights
+ public static final int HEIGHT_144 = 144;
+ public static final int HEIGHT_360 = 360;
+ public static final int HEIGHT_480 = 480;
+ public static final int HEIGHT_720 = 720;
+ public static final int HEIGHT_1080 = 1080;
+
+ // Supported aspect ratios
+ public static final int ASPECT_RATIO_UNDEFINED = 0;
+ public static final int ASPECT_RATIO_3_2 = 1;
+ public static final int ASPECT_RATIO_16_9 = 2;
+ public static final int ASPECT_RATIO_4_3 = 3;
+ public static final int ASPECT_RATIO_5_3 = 4;
+ public static final int ASPECT_RATIO_11_9 = 5;
+
+ // The array of supported aspect ratios
+ private static final int[] ASPECT_RATIOS = new int[] {
+ ASPECT_RATIO_3_2,
+ ASPECT_RATIO_16_9,
+ ASPECT_RATIO_4_3,
+ ASPECT_RATIO_5_3,
+ ASPECT_RATIO_11_9
+ };
+
+ // Supported resolutions for specific aspect ratios
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_3_2_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(720, HEIGHT_480),
+ new Pair<Integer, Integer>(1080, HEIGHT_720)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_4_3_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(640, HEIGHT_480),
+ new Pair<Integer, Integer>(960, HEIGHT_720)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_5_3_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(800, HEIGHT_480)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_11_9_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(176, HEIGHT_144)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_16_9_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(640, HEIGHT_360),
+ new Pair<Integer, Integer>(854, HEIGHT_480),
+ new Pair<Integer, Integer>(1280, HEIGHT_720),
+ };
+
+
+ // Bitrate values (in bits per second)
+ public static final int BITRATE_28K = 28000;
+ public static final int BITRATE_40K = 40000;
+ public static final int BITRATE_64K = 64000;
+ public static final int BITRATE_96K = 96000;
+ public static final int BITRATE_128K = 128000;
+ public static final int BITRATE_192K = 192000;
+ public static final int BITRATE_256K = 256000;
+ public static final int BITRATE_384K = 384000;
+ public static final int BITRATE_512K = 512000;
+ public static final int BITRATE_800K = 800000;
+
+ // The array of supported bitrates
+ private static final int[] SUPPORTED_BITRATES = new int[] {
+ BITRATE_28K,
+ BITRATE_40K,
+ BITRATE_64K,
+ BITRATE_96K,
+ BITRATE_128K,
+ BITRATE_192K,
+ BITRATE_256K,
+ BITRATE_384K,
+ BITRATE_512K,
+ BITRATE_800K
+ };
+
+ // Video codec types
+ public static final int VCODEC_H264BP = 1;
+ public static final int VCODEC_H264MP = 2;
+ public static final int VCODEC_H263 = 3;
+ public static final int VCODEC_MPEG4 = 4;
+
+ // The array of supported video codecs
+ private static final int[] SUPPORTED_VCODECS = new int[] {
+ VCODEC_H264BP,
+ VCODEC_H263,
+ VCODEC_MPEG4,
+ };
+
+ // Audio codec types
+ public static final int ACODEC_AAC_LC = 1;
+ public static final int ACODEC_AMRNB = 2;
+ public static final int ACODEC_AMRWB = 3;
+ public static final int ACODEC_MP3 = 4;
+ public static final int ACODEC_OGG = 5;
+
+    // The array of supported audio codecs
+ private static final int[] SUPPORTED_ACODECS = new int[] {
+ ACODEC_AAC_LC,
+ ACODEC_AMRNB,
+ ACODEC_AMRWB
+ };
+
+ // File format types
+ public static final int FILE_UNSUPPORTED = 0;
+ public static final int FILE_3GP = 1;
+ public static final int FILE_MP4 = 2;
+ public static final int FILE_JPEG = 3;
+ public static final int FILE_PNG = 4;
+
+ // The array of the supported file formats
+ private static final int[] SUPPORTED_VIDEO_FILE_FORMATS = new int[] {
+ FILE_3GP,
+ FILE_MP4
+ };
+
+ // The maximum count of audio tracks supported
+ public static final int AUDIO_MAX_TRACK_COUNT = 1;
+
+ // The maximum volume supported (100 means that no amplification is
+ // supported, i.e. attenuation only)
+ public static final int AUDIO_MAX_VOLUME_PERCENT = 100;
+
+ /**
+ * This class cannot be instantiated
+ */
+ private MediaProperties() {
+ }
+
+ /**
+ * @return The array of supported aspect ratios
+ */
+ public static int[] getAllSupportedAspectRatios() {
+ return ASPECT_RATIOS;
+ }
+
+ /**
+ * Get the supported resolutions for the specified aspect ratio.
+ *
+ * @param aspectRatio The aspect ratio for which the resolutions are requested
+ *
+ * @return The array of width and height pairs
+ */
+ public static Pair<Integer, Integer>[] getSupportedResolutions(int aspectRatio) {
+ final Pair<Integer, Integer>[] resolutions;
+ switch(aspectRatio) {
+ case ASPECT_RATIO_3_2: {
+ resolutions = ASPECT_RATIO_3_2_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_4_3: {
+ resolutions = ASPECT_RATIO_4_3_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_5_3: {
+ resolutions = ASPECT_RATIO_5_3_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_11_9: {
+ resolutions = ASPECT_RATIO_11_9_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_16_9: {
+ resolutions = ASPECT_RATIO_16_9_RESOLUTIONS;
+ break;
+ }
+
+ default: {
+ throw new IllegalArgumentException("Unknown aspect ratio: " + aspectRatio);
+ }
+ }
+
+ return resolutions;
+ }
+
+ /**
+ * @return The array of supported video codecs
+ */
+ public static int[] getSupportedVideoCodecs() {
+ return SUPPORTED_VCODECS;
+ }
+
+ /**
+ * @return The array of supported audio codecs
+ */
+ public static int[] getSupportedAudioCodecs() {
+ return SUPPORTED_ACODECS;
+ }
+
+ /**
+ * @return The array of supported file formats
+ */
+ public static int[] getSupportedVideoFileFormat() {
+ return SUPPORTED_VIDEO_FILE_FORMATS;
+ }
+
+ /**
+ * @return The array of supported video bitrates
+ */
+ public static int[] getSupportedVideoBitrates() {
+ return SUPPORTED_BITRATES;
+ }
+
+ /**
+ * @return The maximum value for the audio volume
+ */
+ public static int getSupportedMaxVolume() {
+ return MediaProperties.AUDIO_MAX_VOLUME_PERCENT;
+ }
+
+ /**
+ * @return The maximum number of audio tracks supported
+ */
+ public static int getSupportedAudioTrackCount() {
+ return MediaProperties.AUDIO_MAX_TRACK_COUNT;
+ }
+}
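
A minimal usage sketch of the lookup API above, assuming the hidden MediaProperties class is reachable by the caller; it simply prints the export resolutions defined for 16:9 content:

    import android.media.videoeditor.MediaProperties;
    import android.util.Pair;

    // Illustrative only: list the export resolutions supported for 16:9 content.
    public class SupportedResolutionsDump {
        public static void main(String[] args) {
            final Pair<Integer, Integer>[] resolutions =
                    MediaProperties.getSupportedResolutions(MediaProperties.ASPECT_RATIO_16_9);
            for (Pair<Integer, Integer> res : resolutions) {
                // Each pair is (width, height), e.g. 1280x720.
                System.out.println(res.first + "x" + res.second);
            }
        }
    }
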
diff --git a/media/java/android/media/videoeditor/MediaVideoItem.java b/media/java/android/media/videoeditor/MediaVideoItem.java
new file mode 100755
index 0000000..c1abf78
--- /dev/null
+++ b/media/java/android/media/videoeditor/MediaVideoItem.java
@@ -0,0 +1,426 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+import java.lang.ref.SoftReference;
+
+import android.graphics.Bitmap;
+import android.view.SurfaceHolder;
+
+/**
+ * This class represents a video clip item on the storyboard
+ * {@hide}
+ */
+public class MediaVideoItem extends MediaItem {
+ // Instance variables
+ private final int mWidth;
+ private final int mHeight;
+ private final int mAspectRatio;
+ private final int mFileType;
+ private final int mVideoType;
+ private final int mVideoProfile;
+ private final int mVideoBitrate;
+ private final long mDurationMs;
+ private final int mAudioBitrate;
+ private final int mFps;
+ private final int mAudioType;
+ private final int mAudioChannels;
+ private final int mAudioSamplingFrequency;
+
+ private long mBeginBoundaryTimeMs;
+ private long mEndBoundaryTimeMs;
+ private int mVolumePercentage;
+ private boolean mMuted;
+ private String mAudioWaveformFilename;
+ // The audio waveform data
+ private SoftReference<WaveformData> mWaveformData;
+
+ /**
+ * An object of this type cannot be instantiated with a default constructor
+ */
+ @SuppressWarnings("unused")
+ private MediaVideoItem() throws IOException {
+ this(null, null, null, RENDERING_MODE_BLACK_BORDER);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+     * @param filename The video file name
+ * @param renderingMode The rendering mode
+ *
+ * @throws IOException if the file cannot be opened for reading
+ */
+ public MediaVideoItem(VideoEditor editor, String mediaItemId, String filename,
+ int renderingMode)
+ throws IOException {
+ this(editor, mediaItemId, filename, renderingMode, 0, END_OF_FILE, 100, false, null);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+     * @param filename The video file name
+ * @param renderingMode The rendering mode
+ * @param beginMs Start time in milliseconds. Set to 0 to extract from the
+ * beginning
+ * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
+ * extract until the end
+     * @param volumePercent The volume as a percentage. 100 means no change; 50 means
+     *            half the original value, 200 means double, 0 means silent.
+ * @param muted true if the audio is muted
+ * @param audioWaveformFilename The name of the audio waveform file
+ *
+ * @throws IOException if the file cannot be opened for reading
+ */
+ MediaVideoItem(VideoEditor editor, String mediaItemId, String filename, int renderingMode,
+ long beginMs, long endMs, int volumePercent, boolean muted,
+ String audioWaveformFilename) throws IOException {
+ super(editor, mediaItemId, filename, renderingMode);
+ // TODO: Set these variables correctly
+ mWidth = 1080;
+ mHeight = 720;
+ mAspectRatio = MediaProperties.ASPECT_RATIO_3_2;
+ mFileType = MediaProperties.FILE_MP4;
+ mVideoType = MediaProperties.VCODEC_H264BP;
+ // Do we have predefined values for this variable?
+ mVideoProfile = 0;
+ // Can video and audio duration be different?
+ mDurationMs = 10000;
+ mVideoBitrate = 800000;
+ mAudioBitrate = 30000;
+ mFps = 30;
+ mAudioType = MediaProperties.ACODEC_AAC_LC;
+ mAudioChannels = 2;
+ mAudioSamplingFrequency = 16000;
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs == END_OF_FILE ? mDurationMs : endMs;
+ mVolumePercentage = volumePercent;
+ mMuted = muted;
+ mAudioWaveformFilename = audioWaveformFilename;
+ if (audioWaveformFilename != null) {
+ mWaveformData =
+ new SoftReference<WaveformData>(new WaveformData(audioWaveformFilename));
+ } else {
+ mWaveformData = null;
+ }
+ }
+
+ /**
+ * Sets the start and end marks for trimming a video media item.
+     * This method will adjust the duration of bounding transitions, effects
+     * and overlays if their current duration becomes greater than the
+     * maximum allowable duration.
+ *
+ * @param beginMs Start time in milliseconds. Set to 0 to extract from the
+ * beginning
+ * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
+ * extract until the end
+ *
+     * @throws IllegalArgumentException if the start time is greater than or equal to
+     *           the end time, the end time is beyond the file duration, or the start
+     *           time is negative
+ */
+ public void setExtractBoundaries(long beginMs, long endMs) {
+ if (beginMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid start time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid end time");
+ }
+
+ if (beginMs != mBeginBoundaryTimeMs) {
+ if (mBeginTransition != null) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (endMs != mEndBoundaryTimeMs) {
+ if (mEndTransition != null) {
+ mEndTransition.invalidate();
+ }
+ }
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+
+ adjustTransitions();
+
+ // Note that the start and duration of any effects and overlays are
+ // not adjusted nor are they automatically removed if they fall
+ // outside the new boundaries.
+ }
+
+ /**
+ * @return The boundary begin time
+ */
+ public long getBoundaryBeginTime() {
+ return mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * @return The boundary end time
+ */
+ public long getBoundaryEndTime() {
+ return mEndBoundaryTimeMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public void addEffect(Effect effect) {
+ if (effect instanceof EffectKenBurns) {
+ throw new IllegalArgumentException("Ken Burns effects cannot be applied to MediaVideoItem");
+ }
+ super.addEffect(effect);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap getThumbnail(int width, int height, long timeMs) {
+ return null;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
+ int thumbnailCount) throws IOException {
+ return null;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getFileType() {
+ return mFileType;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getTimelineDuration() {
+ return mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * Render a frame according to the playback (in the native aspect ratio) for
+ * the specified media item. All effects and overlays applied to the media
+ * item are ignored. The extract boundaries are also ignored. This method
+ * can be used to playback frames when implementing trimming functionality.
+ *
+ * @param surfaceHolder SurfaceHolder used by the application
+     * @param timeMs time corresponding to the frame to display (relative to
+     *            the beginning of the media item).
+     * @return The accurate time stamp of the frame that is rendered.
+ * @throws IllegalStateException if a playback, preview or an export is
+ * already in progress
+ * @throws IllegalArgumentException if time is negative or greater than the
+ * media item duration
+ */
+ public long renderFrame(SurfaceHolder surfaceHolder, long timeMs) {
+ return timeMs;
+ }
+
+ /**
+     * This API generates a file containing the sample volume levels of
+     * the audio track of this media item. This function may take a significant
+     * amount of time and is blocking. The file can be retrieved using
+ * getAudioWaveformFilename().
+ *
+ * @param listener The progress listener
+ *
+ * @throws IOException if the output file cannot be created
+ * @throws IllegalArgumentException if the mediaItem does not have a valid
+ * Audio track
+ */
+ public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
+ throws IOException {
+ // TODO: Set mAudioWaveformFilename at the end once the export is complete
+ mWaveformData = new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
+ }
+
+ /**
+     * Get the audio waveform file name if
+     * {@link #extractAudioWaveform(ExtractAudioWaveformProgressListener)} was
+     * successful. The file format is as follows:
+     * <ul>
+     * <li>the first 4 bytes provide the number of samples for each value, as big-endian signed</li>
+     * <li>the following 4 bytes provide the total number of values in the file, as big-endian signed</li>
+     * <li>all values follow, one byte each</li>
+     * </ul>
+ * @return the name of the file, null if the file has not been computed or
+ * if there is no Audio track in the mediaItem
+ */
+ String getAudioWaveformFilename() {
+ return mAudioWaveformFilename;
+ }
+
+ /**
+ * @return The waveform data
+ */
+ public WaveformData getWaveformData() {
+ if (mWaveformData == null) {
+ return null;
+ }
+
+ WaveformData waveformData = mWaveformData.get();
+ if (waveformData != null) {
+ return waveformData;
+ } else if (mAudioWaveformFilename != null) {
+ waveformData = new WaveformData(mAudioWaveformFilename);
+ mWaveformData = new SoftReference<WaveformData>(waveformData);
+ return waveformData;
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Set volume of the Audio track of this mediaItem
+ *
+ * @param volumePercent in %/. 100% means no change; 50% means half value, 200%
+ * means double, 0% means silent.
+     * @throws UnsupportedOperationException if the volume value is not supported
+ */
+ public void setVolume(int volumePercent) {
+ mVolumePercentage = volumePercent;
+ }
+
+ /**
+     * Get the volume value of the audio track as a percentage. Calling this
+     * method before calling setVolume will always return 100%.
+ *
+ * @return the volume in percentage
+ */
+ public int getVolume() {
+ return mVolumePercentage;
+ }
+
+ /**
+ * @param muted true to mute the media item
+ */
+ public void setMute(boolean muted) {
+ mMuted = muted;
+ }
+
+ /**
+ * @return true if the media item is muted
+ */
+ public boolean isMuted() {
+ return mMuted;
+ }
+
+ /**
+ * @return The video type
+ */
+ public int getVideoType() {
+ return mVideoType;
+ }
+
+ /**
+ * @return The video profile
+ */
+ public int getVideoProfile() {
+ return mVideoProfile;
+ }
+
+ /**
+ * @return The video bitrate
+ */
+ public int getVideoBitrate() {
+ return mVideoBitrate;
+ }
+
+ /**
+ * @return The audio bitrate
+ */
+ public int getAudioBitrate() {
+ return mAudioBitrate;
+ }
+
+ /**
+ * @return The number of frames per second
+ */
+ public int getFps() {
+ return mFps;
+ }
+
+ /**
+ * @return The audio codec
+ */
+ public int getAudioType() {
+ return mAudioType;
+ }
+
+ /**
+ * @return The number of audio channels
+ */
+ public int getAudioChannels() {
+ return mAudioChannels;
+ }
+
+ /**
+ * @return The audio sample frequency
+ */
+ public int getAudioSamplingFrequency() {
+ return mAudioSamplingFrequency;
+ }
+}
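
The waveform file layout documented on getAudioWaveformFilename() above (4 big-endian bytes giving the samples-per-value count, 4 big-endian bytes giving the number of values, then one byte per value) can be read back with ordinary stream code; a hypothetical reader sketch, assuming the description in the javadoc is accurate:

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    // Illustrative reader for the waveform file layout documented on
    // getAudioWaveformFilename() above; not part of the patch.
    final class WaveformFileReader {
        static byte[] readValues(String filename) throws IOException {
            final DataInputStream in = new DataInputStream(new FileInputStream(filename));
            try {
                final int samplesPerValue = in.readInt(); // first 4 bytes, big-endian
                final int valueCount = in.readInt();      // next 4 bytes, big-endian
                final byte[] values = new byte[valueCount];
                in.readFully(values);                     // one byte per value
                return values;
            } finally {
                in.close();
            }
        }
    }
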
diff --git a/media/java/android/media/videoeditor/Overlay.java b/media/java/android/media/videoeditor/Overlay.java
new file mode 100755
index 0000000..fff9ca2
--- /dev/null
+++ b/media/java/android/media/videoeditor/Overlay.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.util.HashMap;
+import java.util.Map;
+
+
+/**
+ * This is the super class for all Overlay classes.
+ * {@hide}
+ */
+public abstract class Overlay {
+ // Instance variables
+ private final String mUniqueId;
+ // The overlay owner
+ private final MediaItem mMediaItem;
+ // user attributes
+ private final Map<String, String> mUserAttributes;
+
+ protected long mStartTimeMs;
+ protected long mDurationMs;
+
+
+ /**
+ * Default constructor
+ */
+ @SuppressWarnings("unused")
+ private Overlay() {
+ this(null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param startTimeMs The start time relative to the media item start time
+ * @param durationMs The duration
+ *
+     * @throws IllegalArgumentException if the media item is null or the
+     *             startTimeMs and durationMs are incorrect.
+ */
+ public Overlay(MediaItem mediaItem, String overlayId, long startTimeMs, long durationMs) {
+ if (mediaItem == null) {
+ throw new IllegalArgumentException("Media item cannot be null");
+ }
+
+ if (startTimeMs + durationMs > mediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time and duration");
+ }
+
+ mMediaItem = mediaItem;
+ mUniqueId = overlayId;
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+ mUserAttributes = new HashMap<String, String>();
+ }
+
+ /**
+     * @return The id of the overlay
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * @return The duration of the overlay effect
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+     * Set the duration of the overlay. If a preview or export is in progress,
+     * then this change is effective for the next preview or export session.
+ *
+ * @param durationMs The duration in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Duration is too large");
+ }
+
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * @return the start time of the overlay
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * Set the start time for the overlay. If a preview or export is in
+     * progress, then this change is effective for the next preview or export
+ * session.
+ *
+ * @param startTimeMs start time in milliseconds
+ */
+ public void setStartTime(long startTimeMs) {
+ if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Start time is too large");
+ }
+
+ mStartTimeMs = startTimeMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * Set the start time and duration
+ *
+ * @param startTimeMs start time in milliseconds
+ * @param durationMs The duration in milliseconds
+ */
+ public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
+ if (startTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time or duration");
+ }
+
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(this);
+ }
+
+ /**
+ * @return The media item owner
+ */
+ public MediaItem getMediaItem() {
+ return mMediaItem;
+ }
+
+ /**
+ * Set a user attribute
+ *
+ * @param name The attribute name
+ * @param value The attribute value
+ */
+ public void setUserAttribute(String name, String value) {
+ mUserAttributes.put(name, value);
+ }
+
+ /**
+ * @return The user attributes
+ */
+ public Map<String, String> getUserAttributes() {
+ return mUserAttributes;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Overlay)) {
+ return false;
+ }
+ return mUniqueId.equals(((Overlay)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
diff --git a/media/java/android/media/videoeditor/OverlayFrame.java b/media/java/android/media/videoeditor/OverlayFrame.java
new file mode 100755
index 0000000..dcac4ba
--- /dev/null
+++ b/media/java/android/media/videoeditor/OverlayFrame.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Bitmap.CompressFormat;
+
+
+/**
+ * This class is used to overlay an image on top of a media item.
+ * {@hide}
+ */
+public class OverlayFrame extends Overlay {
+ // Instance variables
+ private Bitmap mBitmap;
+ private String mFilename;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private OverlayFrame() {
+ this(null, null, (String)null, 0, 0);
+ }
+
+ /**
+ * Constructor for an OverlayFrame
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param bitmap The bitmap to be used as an overlay. The size of the
+     *            bitmap must be equal to the size of the media item to which it is
+ * added. The bitmap is typically a decoded PNG file.
+ * @param startTimeMs The overlay start time in milliseconds
+ * @param durationMs The overlay duration in milliseconds
+ *
+ * @throws IllegalArgumentException if the file type is not PNG or the
+ * startTimeMs and durationMs are incorrect.
+ */
+ public OverlayFrame(MediaItem mediaItem, String overlayId, Bitmap bitmap, long startTimeMs,
+ long durationMs) {
+ super(mediaItem, overlayId, startTimeMs, durationMs);
+ mBitmap = bitmap;
+ mFilename = null;
+ }
+
+ /**
+ * Constructor for an OverlayFrame. This constructor can be used to
+ * restore the overlay after it was saved internally by the video editor.
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param filename The file name that contains the overlay.
+ * @param startTimeMs The overlay start time in milliseconds
+ * @param durationMs The overlay duration in milliseconds
+ *
+ * @throws IllegalArgumentException if the file type is not PNG or the
+ * startTimeMs and durationMs are incorrect.
+ */
+ OverlayFrame(MediaItem mediaItem, String overlayId, String filename, long startTimeMs,
+ long durationMs) {
+ super(mediaItem, overlayId, startTimeMs, durationMs);
+ mFilename = filename;
+ mBitmap = BitmapFactory.decodeFile(mFilename);
+ }
+
+ /**
+ * @return Get the overlay bitmap
+ */
+ public Bitmap getBitmap() {
+ return mBitmap;
+ }
+
+ /**
+ * @param bitmap The overlay bitmap
+ */
+ public void setBitmap(Bitmap bitmap) {
+ mBitmap = bitmap;
+ if (mFilename != null) {
+ // Delete the file
+ new File(mFilename).delete();
+ // Invalidate the filename
+ mFilename = null;
+ }
+
+ // Invalidate the transitions if necessary
+ getMediaItem().invalidateTransitions(this);
+ }
+
+ /**
+ * Get the file name of this overlay
+ */
+ String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * Save the overlay to the project folder
+ *
+ * @param path The path where the overlay will be saved
+ *
+ * @return The filename
+ * @throws FileNotFoundException if the bitmap cannot be saved
+ * @throws IOException if the bitmap file cannot be saved
+ */
+ String save(String path) throws FileNotFoundException, IOException {
+ if (mFilename != null) {
+ return mFilename;
+ }
+
+ mFilename = path + "/" + getId() + ".png";
+ // Save the image to a local file
+ final FileOutputStream out = new FileOutputStream(mFilename);
+ mBitmap.compress(CompressFormat.PNG, 100, out);
+ out.flush();
+ out.close();
+ return mFilename;
+ }
+
+ /**
+ * Delete the overlay file
+ */
+ void invalidate() {
+ if (mFilename != null) {
+ new File(mFilename).delete();
+ }
+ }
+}
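
MediaItem.addOverlay() rejects bitmaps whose dimensions differ from the media item, so callers typically scale the source first; a hedged usage sketch for a video media item (the overlay id and timing values are made up, not part of the patch):

    import android.graphics.Bitmap;
    import android.media.videoeditor.MediaItem;
    import android.media.videoeditor.OverlayFrame;

    // Illustrative only: scale an overlay bitmap to the media item dimensions,
    // as MediaItem.addOverlay() requires for video items.
    final class OverlayHelper {
        static OverlayFrame addFullFrameOverlay(MediaItem item, Bitmap source) {
            final Bitmap scaled =
                    Bitmap.createScaledBitmap(source, item.getWidth(), item.getHeight(), true);
            final OverlayFrame overlay = new OverlayFrame(item, "overlay1", scaled, 0, 3000);
            item.addOverlay(overlay);
            return overlay;
        }
    }
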
diff --git a/media/java/android/media/videoeditor/Transition.java b/media/java/android/media/videoeditor/Transition.java
new file mode 100755
index 0000000..1c82742
--- /dev/null
+++ b/media/java/android/media/videoeditor/Transition.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.File;
+
+/**
+ * This class is the super class for all transitions. Transitions (with the
+ * exception of TransitionAtStart and TransitionAtEnd) can only be inserted
+ * between media items.
+ *
+ * Adding a transition between MediaItems makes the
+ * duration of the storyboard shorter by the duration of the Transition itself.
+ * As a result, if the duration of the transition is larger than the smaller
+ * duration of the two MediaItems associated with the Transition, an exception
+ * will be thrown.
+ *
+ * During a transition, the audio tracks are cross-faded
+ * automatically. {@hide}
+ */
+public abstract class Transition {
+ // The transition behavior
+ private static final int BEHAVIOR_MIN_VALUE = 0;
+    /** The transition starts slowly and speeds up */
+    public static final int BEHAVIOR_SPEED_UP = 0;
+    /** The transition starts fast and slows down */
+ public static final int BEHAVIOR_SPEED_DOWN = 1;
+ /** The transition speed is constant */
+ public static final int BEHAVIOR_LINEAR = 2;
+ /** The transition starts fast and ends fast with a slow middle */
+ public static final int BEHAVIOR_MIDDLE_SLOW = 3;
+ /** The transition starts slowly and ends slowly with a fast middle */
+ public static final int BEHAVIOR_MIDDLE_FAST = 4;
+
+ private static final int BEHAVIOR_MAX_VALUE = 4;
+
+ // The unique id of the transition
+ private final String mUniqueId;
+
+ // The transition is applied at the end of this media item
+ private final MediaItem mAfterMediaItem;
+ // The transition is applied at the beginning of this media item
+ private final MediaItem mBeforeMediaItem;
+
+ // The transition behavior
+ protected final int mBehavior;
+
+ // The transition duration
+ protected long mDurationMs;
+
+ // The transition filename
+ protected String mFilename;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private Transition() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs The duration of the transition in milliseconds
+ * @param behavior The transition behavior
+ */
+ protected Transition(String transitionId, MediaItem afterMediaItem, MediaItem beforeMediaItem,
+ long durationMs, int behavior) {
+ if (behavior < BEHAVIOR_MIN_VALUE || behavior > BEHAVIOR_MAX_VALUE) {
+ throw new IllegalArgumentException("Invalid behavior: " + behavior);
+ }
+ mUniqueId = transitionId;
+ mAfterMediaItem = afterMediaItem;
+ mBeforeMediaItem = beforeMediaItem;
+ mDurationMs = durationMs;
+ mBehavior = behavior;
+ }
+
+ /**
+     * @return The id of the transition
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * @return The media item at the end of which the transition is applied
+ */
+ public MediaItem getAfterMediaItem() {
+ return mAfterMediaItem;
+ }
+
+ /**
+ * @return The media item at the beginning of which the transition is applied
+ */
+ public MediaItem getBeforeMediaItem() {
+ return mBeforeMediaItem;
+ }
+
+ /**
+ * Set the duration of the transition.
+ *
+ * @param durationMs the duration of the transition in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (durationMs > getMaximumDuration()) {
+ throw new IllegalArgumentException("The duration is too large");
+ }
+
+ mDurationMs = durationMs;
+ invalidate();
+ }
+
+ /**
+ * @return the duration of the transition in milliseconds
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * The duration of a transition cannot be greater than half of the minimum
+ * duration of the bounding media items.
+ *
+ * @return The maximum duration of this transition
+ */
+ public long getMaximumDuration() {
+ if (mAfterMediaItem == null) {
+ return mBeforeMediaItem.getTimelineDuration() / 2;
+ } else if (mBeforeMediaItem == null) {
+ return mAfterMediaItem.getTimelineDuration() / 2;
+ } else {
+ return (Math.min(mAfterMediaItem.getTimelineDuration(),
+ mBeforeMediaItem.getTimelineDuration()) / 2);
+ }
+ }
+
+ /**
+ * @return The behavior
+ */
+ public int getBehavior() {
+ return mBehavior;
+ }
+
+ /**
+ * Generate the video clip for the specified transition.
+ * This method may block for a significant amount of time.
+ *
+ * Before the method completes execution it sets the mFilename to
+ * the name of the newly generated transition video clip file.
+ */
+ abstract void generate();
+
+ /**
+ * Remove any resources associated with this transition
+ */
+ void invalidate() {
+ if (mFilename != null) {
+ new File(mFilename).delete();
+ mFilename = null;
+ }
+ }
+
+ /**
+ * @return true if the transition is generated
+ */
+ boolean isGenerated() {
+ return (mFilename != null);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Transition)) {
+ return false;
+ }
+ return mUniqueId.equals(((Transition)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
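
getMaximumDuration() above caps a transition at half of the shorter bounding clip, and setDuration() throws beyond that cap; a hedged usage sketch of a caller respecting the cap (TransitionCrossfade appears later in this patch; the id and the 1 s request are illustrative):

    import android.media.videoeditor.MediaItem;
    import android.media.videoeditor.Transition;
    import android.media.videoeditor.TransitionCrossfade;

    // Illustrative only: request a 1 second crossfade but clamp it to the
    // documented cap (half of the shorter bounding clip).
    final class TransitionHelper {
        static Transition crossfadeBetween(MediaItem after, MediaItem before) {
            final long maxMs = Math.min(after.getTimelineDuration(),
                    before.getTimelineDuration()) / 2;
            final long durationMs = Math.min(1000, maxMs);
            return new TransitionCrossfade("transition1", after, before,
                    durationMs, Transition.BEHAVIOR_LINEAR);
        }
    }
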
diff --git a/media/java/android/media/videoeditor/TransitionAlpha.java b/media/java/android/media/videoeditor/TransitionAlpha.java
new file mode 100755
index 0000000..30e66fc
--- /dev/null
+++ b/media/java/android/media/videoeditor/TransitionAlpha.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+
+/**
+ * This class renders an "alpha blending" transition according to a
+ * bitmap mask. The mask shows the shape of the transition all along the
+ * duration of the transition: just before the transition, video 1 is fully
+ * displayed. When the transition starts, as the time goes on, pixels of video 2
+ * replace pixels of video 1 according to the gray scale pixel value of the
+ * mask.
+ * {@hide}
+ */
+public class TransitionAlpha extends Transition {
+ /** This is the input JPEG file for the mask */
+ private final String mMaskFilename;
+
+ /**
+     * This is the percentage (between 0 and 100) of blending between video 1 and
+     * video 2. If this value equals 0, the mask is strictly applied; if this
+     * value equals 100, the mask is not applied at all (no transition
+     * effect).
+ */
+ private final int mBlendingPercent;
+
+ /**
+ * If true, this value inverts the direction of the mask: white pixels of
+     * the mask show video 2 pixels first; black pixels of the mask show video 2
+ * pixels last.
+ */
+ private final boolean mIsInvert;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionAlpha() {
+ this(null, null, null, 0, 0, null, 0, false);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this media
+ * item
+ * @param beforeMediaItem The transition is applied to the beginning of this
+ * media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behavior defined in Transition
+ * class
+ * @param maskFilename JPEG file name. The dimension of the image
+ * corresponds to 720p (16:9 aspect ratio). Mask files are
+ * shared between video editors and can be created in the
+ * projects folder (the parent folder for all projects).
+ * @param blendingPercent The blending percent applied
+ * @param invert true to invert the direction of the alpha blending
+     * @throws IllegalArgumentException if the behavior is not supported.
+ */
+ public TransitionAlpha(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior, String maskFilename,
+ int blendingPercent, boolean invert) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+
+ mMaskFilename = maskFilename;
+ mBlendingPercent = blendingPercent;
+ mIsInvert = invert;
+ }
+
+ /**
+ * @return The blending percentage
+ */
+ public int getBlendingPercent() {
+ return mBlendingPercent;
+ }
+
+ /**
+ * @return The mask filename
+ */
+ public String getMaskFilename() {
+ return mMaskFilename;
+ }
+
+ /**
+ * @return true if the direction of the alpha blending is inverted
+ */
+ public boolean isInvert() {
+ return mIsInvert;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public void generate() {
+ }
+}
diff --git a/media/java/android/media/videoeditor/TransitionCrossfade.java b/media/java/android/media/videoeditor/TransitionCrossfade.java
new file mode 100755
index 0000000..f8223e8
--- /dev/null
+++ b/media/java/android/media/videoeditor/TransitionCrossfade.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+
+/**
+ * This class renders a crossfade (dissolve) transition between
+ * two videos.
+ * {@hide}
+ */
+public class TransitionCrossfade extends Transition {
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionCrossfade() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behavior defined in Transition
+ * class
+ *
+ * @throws IllegalArgumentException if behavior is not supported.
+ */
+ public TransitionCrossfade(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ }
+}
diff --git a/media/java/android/media/videoeditor/TransitionFadeBlack.java b/media/java/android/media/videoeditor/TransitionFadeBlack.java
new file mode 100755
index 0000000..a9bf4ce
--- /dev/null
+++ b/media/java/android/media/videoeditor/TransitionFadeBlack.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+
+/**
+ * This class is used to render a fade to black and fade from black transition
+ * between two media items.
+ * {@hide}
+ */
+public class TransitionFadeBlack extends Transition {
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionFadeBlack() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition
+ * @param behavior behavior is one of the behavior defined in Transition
+ * class
+ *
+ * @throws IllegalArgumentException if behavior is not supported.
+ */
+ public TransitionFadeBlack(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ }
+}
diff --git a/media/java/android/media/videoeditor/TransitionSliding.java b/media/java/android/media/videoeditor/TransitionSliding.java
new file mode 100755
index 0000000..cc9f4b2
--- /dev/null
+++ b/media/java/android/media/videoeditor/TransitionSliding.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+/**
+ * This class is used to create sliding transitions
+ * {@hide}
+ */
+public class TransitionSliding extends Transition {
+
+ /** Video 1 is pushed to the right while video 2 is coming from left */
+ public static final int DIRECTION_RIGHT_OUT_LEFT_IN = 0;
+ /** Video 1 is pushed to the left while video 2 is coming from right */
+ public static final int DIRECTION_LEFT_OUT_RIGHT_IN = 1;
+ /** Video 1 is pushed to the top while video 2 is coming from bottom */
+ public static final int DIRECTION_TOP_OUT_BOTTOM_IN = 2;
+ /** Video 1 is pushed to the bottom while video 2 is coming from top */
+ public static final int DIRECTION_BOTTOM_OUT_TOP_IN = 3;
+
+ // The sliding direction
+ private final int mSlidingDirection;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionSliding() {
+ this(null, null, null, 0, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior The transition behavior; one of the behaviors defined in
+ *            the Transition class
+ * @param direction The sliding direction; one of the supported directions,
+ *            for example DIRECTION_RIGHT_OUT_LEFT_IN
+ *
+ * @throws IllegalArgumentException if behavior is not supported.
+ */
+ public TransitionSliding(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior, int direction) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ mSlidingDirection = direction;
+ }
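+
+ // An illustrative sketch (not part of the original change): a sliding
+ // transition between two hypothetical adjacent media items item1 and item2;
+ // "behavior" stands for one of the behavior constants defined in Transition,
+ // and "editor" for a VideoEditor instance created by the caller.
+ //
+ // Transition slide = new TransitionSliding("t2", item1, item2, 1000 /* ms */,
+ //         behavior, TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN);
+ // editor.addTransition(slide);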
+
+ /**
+ * @return The sliding direction
+ */
+ public int getDirection() {
+ return mSlidingDirection;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ }
+}
diff --git a/media/java/android/media/videoeditor/VideoEditor.java b/media/java/android/media/videoeditor/VideoEditor.java
new file mode 100755
index 0000000..aa8f2cb
--- /dev/null
+++ b/media/java/android/media/videoeditor/VideoEditor.java
@@ -0,0 +1,493 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.CancellationException;
+
+import android.view.SurfaceHolder;
+
+/**
+ * This is the interface implemented by classes which provide video editing
+ * functionality. The VideoEditor implementation class manages all input and
+ * output files. Unless specifically mentioned, methods are blocking. A
+ * typical editing session may consist of the following sequence of operations:
+ *
+ * <ul>
+ * <li>Add a set of MediaItems</li>
+ * <li>Apply a set of Transitions between MediaItems</li>
+ * <li>Add Effects and Overlays to media items</li>
+ * <li>Preview the movie at any time</li>
+ * <li>Save the VideoEditor implementation class internal state</li>
+ * <li>Release the VideoEditor implementation class instance by invoking
+ * {@link #release()}</li>
+ * </ul>
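+ *
+ * <p>A minimal, illustrative sketch of such a session; the project path and
+ * the way the MediaItem instance is obtained are hypothetical placeholders,
+ * not part of this API:
+ * <pre>
+ *   VideoEditor editor = VideoEditorFactory.create(projectPath,
+ *           VideoEditorFactory.DEFAULT_CLASS_IMPLEMENTATION);
+ *   editor.addMediaItem(mediaItem);   // a MediaItem created by the caller
+ *   editor.setAspectRatio(MediaProperties.ASPECT_RATIO_16_9);
+ *   editor.save();                    // persist the project state
+ *   editor.release();                 // free in-memory resources
+ * </pre>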
+ * The internal VideoEditor state consists of the following elements:
+ * <ul>
+ * <li>Ordered & trimmed MediaItems</li>
+ * <li>Transition video clips</li>
+ * <li>Overlays</li>
+ * <li>Effects</li>
+ * <li>Audio waveform for the background audio and MediaItems</li>
+ * <li>Project thumbnail</li>
+ * <li>Last exported movie.</li>
+ * <li>Other project specific data such as the current aspect ratio.</li>
+ * </ul>
+ * {@hide}
+ */
+public interface VideoEditor {
+ // The file name of the project thumbnail
+ public static final String THUMBNAIL_FILENAME = "thumbnail.jpg";
+
+ // Use this value to refer to the end of the storyboard timeline instead of
+ // a specific time value.
+ public static final int DURATION_OF_STORYBOARD = -1;
+
+ /**
+ * This listener interface is used by the VideoEditor to emit preview
+ * progress notifications. This callback should be invoked after the
+ * number of frames specified by
+ * {@link #startPreview(SurfaceHolder, long, long, boolean, int,
+ * PreviewProgressListener)}
+ */
+ public interface PreviewProgressListener {
+ /**
+ * This method notifies the listener of the current time position while
+ * previewing a project.
+ *
+ * @param videoEditor The VideoEditor instance
+ * @param timeMs The current preview position (expressed in milliseconds
+ * since the beginning of the storyboard timeline).
+ * @param end true if the end of the timeline was reached
+ */
+ public void onProgress(VideoEditor videoEditor, long timeMs, boolean end);
+ }
+
+ /**
+ * This listener interface is used by the VideoEditor to emit export status
+ * notifications while exporting a movie with
+ * {@link #export(String, int, int, ExportProgressListener)}.
+ */
+ public interface ExportProgressListener {
+ /**
+ * This method notifies the listener of the progress status of an export
+ * operation.
+ *
+ * @param videoEditor The VideoEditor instance
+ * @param filename The name of the file which is in the process of being
+ * exported.
+ * @param progress The progress in %. At the beginning of the export, this
+ * value is set to 0; at the end, the value is set to 100.
+ */
+ public void onProgress(VideoEditor videoEditor, String filename, int progress);
+ }
+
+ /**
+ * @return The path where the VideoEditor stores all files related to the
+ * project
+ */
+ public String getPath();
+
+ /**
+ * This method releases all in-memory resources used by the VideoEditor
+ * instance. All pending operations such as preview, export and extract
+ * audio waveform must be canceled.
+ */
+ public void release();
+
+ /**
+ * Persist the current internal state of VideoEditor to the project path.
+ * The VideoEditor state may be restored by invoking the
+ * {@link VideoEditorFactory#load(String)} method. This method does not
+ * release the internal in-memory state of the VideoEditor. To release
+ * the in-memory state of the VideoEditor the {@link #release()} method
+ * must be invoked.
+ *
+ * Pending transition generations must be allowed to complete before the
+ * state is saved.
+ * Pending audio waveform generations must be allowed to complete.
+ * Pending export operations must be allowed to continue.
+ */
+ public void save() throws IOException;
+
+ /**
+ * Create the output movie based on all media items added and the applied
+ * storyboard items. This method can take a long time to execute and is
+ * blocking. The application will receive progress notifications via the
+ * ExportProgressListener. Specific implementations may not support multiple
+ * simultaneous export operations.
+ *
+ * Note that invoking methods which would change the contents of the output
+ * movie will throw an IllegalStateException while an export operation is
+ * pending.
+ *
+ * @param filename The output file name (including the full path)
+ * @param height The height of the output video file. The supported values
+ * for height are described in the MediaProperties class, for
+ * example: HEIGHT_480. The width will be automatically
+ * computed according to the aspect ratio provided by
+ * {@link #setAspectRatio(int)}
+ * @param bitrate The bitrate of the output video file. This is an approximate
+ * value for the output movie. Supported bitrate values are
+ * described in the MediaProperties class for example:
+ * BITRATE_384K
+ * @param listener The listener for progress notifications. Use null if
+ * export progress notifications are not needed.
+ *
+ * @throws IllegalArgumentException if height or bitrate are not supported.
+ * @throws IOException if output file cannot be created
+ * @throws IllegalStateException if a preview or an export is in progress or
+ * if no MediaItem has been added
+ * @throws CancellationException if export is canceled by calling
+ * {@link #cancelExport()}
+ * @throws UnsupportedOperationException if multiple simultaneous export
+ * operations are not supported
+ */
+ public void export(String filename, int height, int bitrate, ExportProgressListener listener)
+ throws IOException;
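+
+ // An illustrative usage sketch (not part of the original change). The output
+ // path is a hypothetical placeholder; HEIGHT_480 and BITRATE_384K are the
+ // MediaProperties constants referenced in the documentation above.
+ //
+ // editor.export("/sdcard/output.mp4", MediaProperties.HEIGHT_480,
+ //         MediaProperties.BITRATE_384K, new ExportProgressListener() {
+ //             public void onProgress(VideoEditor ve, String file, int progress) {
+ //                 // e.g. update a progress bar with "progress" (0..100)
+ //             }
+ //         });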
+
+ /**
+ * Cancel the running export operation. This method blocks until the
+ * export is canceled and the exported file (if any) is deleted. If the
+ * export completed by the time this method is invoked, the export file
+ * will be deleted.
+ *
+ * @param filename The filename which identifies the export operation to be
+ * canceled.
+ */
+ public void cancelExport(String filename);
+
+ /**
+ * Add a media item at the end of the storyboard.
+ *
+ * @param mediaItem The media item object to add
+ * @throws IllegalStateException if a preview or an export is in progress or
+ * if the media item id is not unique across all the media items
+ * added.
+ */
+ public void addMediaItem(MediaItem mediaItem);
+
+ /**
+ * Insert a media item after the media item with the specified id.
+ *
+ * @param mediaItem The media item object to insert
+ * @param afterMediaItemId Insert the mediaItem after the media item
+ * identified by this id. If this parameter is null, the media
+ * item is inserted at the beginning of the timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if media item with the specified id does
+ * not exist (null is a valid value) or if the media item id is
+ * not unique across all the media items added.
+ */
+ public void insertMediaItem(MediaItem mediaItem, String afterMediaItemId);
+
+ /**
+ * Move a media item after the media item with the specified id.
+ *
+ * Note: The project thumbnail is regenerated if the media item is or
+ * becomes the first media item in the storyboard timeline.
+ *
+ * @param mediaItemId The id of the media item to move
+ * @param afterMediaItemId Move the media item identified by mediaItemId after
+ * the media item identified by this parameter. If this parameter
+ * is null, the media item is moved to the beginning of the
+ * timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if one of the media item ids is invalid
+ * (null is a valid value)
+ */
+ public void moveMediaItem(String mediaItemId, String afterMediaItemId);
+
+ /**
+ * Remove the media item with the specified id. If there are transitions
+ * before or after this media item, those transitions are removed from the
+ * storyboard. If the extraction of the audio waveform is
+ * in progress, the extraction is canceled and the file is deleted.
+ *
+ * Effects and overlays associated with the media item will also be
+ * removed.
+ *
+ * Note: The project thumbnail is regenerated if the media item which
+ * is removed is the first media item in the storyboard or if the
+ * media item is the only one in the storyboard. If the
+ * media item is the only one in the storyboard, the project thumbnail
+ * will be set to a black frame and the aspect ratio will revert to the
+ * default aspect ratio, and this method is equivalent to
+ * removeAllMediaItems() in this case.
+ *
+ * @param mediaItemId The unique id of the media item to be removed
+ *
+ * @return The media item that was removed
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if media item with the specified id
+ * does not exist
+ */
+ public MediaItem removeMediaItem(String mediaItemId);
+
+ /**
+ * Remove all media items in the storyboard. All effects, overlays and all
+ * transitions are also removed.
+ *
+ * Note: The project thumbnail will be set to a black frame and the aspect
+ * ratio will revert to the default aspect ratio.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public void removeAllMediaItems();
+
+ /**
+ * Get the list of media items in the order in which they appear in the
+ * storyboard timeline.
+ *
+ * Note that if any media item source files are no longer
+ * accessible, this method will still provide the full list of media items.
+ *
+ * @return The list of media items. If no media items exist an empty list
+ * will be returned.
+ */
+ public List<MediaItem> getAllMediaItems();
+
+ /**
+ * Find the media item with the specified id
+ *
+ * @param mediaItemId The media item id
+ *
+ * @return The media item with the specified id (null if it does not exist)
+ */
+ public MediaItem getMediaItem(String mediaItemId);
+
+ /**
+ * Add a transition between the media items specified by the transition.
+ * If a transition already exists at the same position, it is invalidated and
+ * the transition is replaced. Note that the new transition video clip is
+ * not automatically generated by this method. The
+ * {@link Transition#generate()} method must be invoked to generate
+ * the transition video clip.
+ *
+ * Note that the TransitionAtEnd and TransitionAtStart are special kinds
+ * that cannot be applied between two media items.
+ *
+ * A crossfade audio transition will be automatically applied regardless of
+ * the video transition.
+ *
+ * @param transition The transition to apply
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the transition duration is larger
+ * than the smallest duration of the two media item files or
+ * if the two media items specified in the transition are not
+ * adjacent
+ */
+ public void addTransition(Transition transition);
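+
+ // An illustrative sketch (not part of the original change): applying a
+ // crossfade between two hypothetical adjacent media items, item1 and item2.
+ // "behavior" stands for one of the behavior constants defined in Transition.
+ // Per the documentation above, the transition video clip still has to be
+ // generated after the transition is added.
+ //
+ // Transition crossfade = new TransitionCrossfade("t1", item1, item2,
+ //         500 /* ms */, behavior);
+ // editor.addTransition(crossfade);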
+
+ /**
+ * Remove the transition with the specified id.
+ *
+ * @param transitionId The id of the transition to be removed
+ *
+ * @return The transition that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if transition with the specified id does
+ * not exist
+ */
+ public Transition removeTransition(String transitionId);
+
+ /**
+ * Get the list of transitions
+ *
+ * @return The list of transitions. If no transitions exist an empty list
+ * will be returned.
+ */
+ public List<Transition> getAllTransitions();
+
+ /**
+ * Find the transition with the specified transition id.
+ *
+ * @param transitionId The transition id
+ *
+ * @return The transition
+ */
+ public Transition getTransition(String transitionId);
+
+ /**
+ * Add the specified AudioTrack to the storyboard. Note: Specific
+ * implementations may support a limited number of audio tracks (e.g. only
+ * one audio track)
+ *
+ * @param audioTrack The AudioTrack to add
+ * @throws UnsupportedOperationException if the implementation supports a
+ * limited number of audio tracks.
+ * @throws IllegalArgumentException if the audio track id is not unique
+ * across all the audio tracks already added.
+ */
+ public void addAudioTrack(AudioTrack audioTrack);
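+
+ // An illustrative sketch (not part of the original change); backgroundTrack
+ // stands for an AudioTrack instance created elsewhere by the caller:
+ //
+ // editor.addAudioTrack(backgroundTrack); // may throw if the implementation
+ //                                        // supports only one audio track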
+
+ /**
+ * Insert an audio track after the audio track with the specified id. Use
+ * addAudioTrack to add an audio track at the end of the storyboard
+ * timeline.
+ *
+ * @param audioTrack The audio track object to insert
+ * @param afterAudioTrackId Insert the audio track after the audio track
+ * identified by this parameter. If this parameter is null the
+ * audio track is added at the beginning of the timeline.
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the audio track with the specified id
+ * does not exist (null is a valid value) or if the audio track id
+ * is not unique across all the audio tracks already added.
+ * @throws UnsupportedOperationException if the implementation supports a
+ * limited number of audio tracks
+ */
+ public void insertAudioTrack(AudioTrack audioTrack, String afterAudioTrackId);
+
+ /**
+ * Move an AudioTrack after the AudioTrack with the specified id.
+ *
+ * @param audioTrackId The id of the AudioTrack to move
+ * @param afterAudioTrackId Move the AudioTrack identified by audioTrackId
+ * after the AudioTrack identified by this parameter. If this
+ * parameter is null the audio track is added at the beginning of
+ * the timeline.
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if one of the audio track ids is invalid
+ * (null is a valid value)
+ */
+ public void moveAudioTrack(String audioTrackId, String afterAudioTrackId);
+
+ /**
+ * Remove the audio track with the specified id. If the extraction of the
+ * audio waveform is in progress, the extraction is canceled and the file is
+ * deleted.
+ *
+ * @param audioTrackId The id of the audio track to be removed
+ *
+ * @return The audio track that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public AudioTrack removeAudioTrack(String audioTrackId);
+
+ /**
+ * Get the list of AudioTracks in the order in which they appear in the storyboard.
+ *
+ * Note that if any AudioTrack source files are no longer accessible,
+ * this method will still provide the full list of audio tracks.
+ *
+ * @return The list of AudioTracks. If no audio tracks exist an empty list
+ * will be returned.
+ */
+ public List<AudioTrack> getAllAudioTracks();
+
+ /**
+ * Find the AudioTrack with the specified id
+ *
+ * @param audioTrackId The AudioTrack id
+ *
+ * @return The AudioTrack with the specified id (null if it does not exist)
+ */
+ public AudioTrack getAudioTrack(String audioTrackId);
+
+ /**
+ * Set the aspect ratio used in the preview and the export movie.
+ *
+ * The default aspect ratio is ASPECT_RATIO_16_9 (16:9).
+ *
+ * @param aspectRatio The aspect ratio to apply. If aspectRatio is the same
+ * as the current aspect ratio, then this method just returns.
+ * The supported aspect ratios are defined in the MediaProperties
+ * class, for example: ASPECT_RATIO_16_9
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if aspect ratio is not supported
+ */
+ public void setAspectRatio(int aspectRatio);
+
+ /**
+ * Get current aspect ratio.
+ *
+ * @return The aspect ratio as described in MediaProperties
+ */
+ public int getAspectRatio();
+
+ /**
+ * Get the preview (and output movie) duration.
+ *
+ * @return The duration of the preview (and output movie)
+ */
+ public long getDuration();
+
+ /**
+ * Render a frame according to the preview aspect ratio, activating all
+ * storyboard items relative to the specified time.
+ *
+ * @param surfaceHolder SurfaceHolder used by the application
+ * @param timeMs time corresponding to the frame to display
+ *
+ * @return The accurate time stamp of the frame that is rendered.
+ * @throws IllegalStateException if a preview or an export is already
+ * in progress
+ * @throws IllegalArgumentException if time is negative or beyond the
+ * preview duration
+ */
+ public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs);
+
+ /**
+ * This method must be called after the aspect ratio of the project changes
+ * and before startPreview is called. Note that this method may block for
+ * an extensive period of time.
+ */
+ public void generatePreview();
+
+ /**
+ * Start the preview of all the storyboard items applied on all MediaItems.
+ * This method does not block (does not wait for the preview to complete).
+ * The PreviewProgressListener allows the application to track the progress
+ * at the time
+ * interval determined by the callbackAfterFrameCount parameter. The
+ * SurfaceHolder has to be created and ready for use before calling this
+ * method. The method is a no-op if there are no MediaItems in the
+ * storyboard.
+ *
+ * @param surfaceHolder SurfaceHolder where the preview is rendered.
+ * @param fromMs The time (relative to the timeline) at which the preview
+ * will start
+ * @param toMs The time (relative to the timeline) at which the preview will
+ * stop. Use -1 to play to the end of the timeline
+ * @param loop true if the preview should be looped once it reaches the end
+ * @param callbackAfterFrameCount The listener interface should be invoked
+ * after the number of frames specified by this parameter.
+ * @param listener The listener which will be notified of the preview
+ * progress
+ * @throws IllegalArgumentException if fromMs is beyond the preview duration
+ * @throws IllegalStateException if a preview or an export is already in
+ * progress
+ */
+ public void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs, boolean loop,
+ int callbackAfterFrameCount, PreviewProgressListener listener);
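+
+ // An illustrative sketch (not part of the original change): previewing the
+ // whole timeline on a SurfaceHolder ("holder") created by the caller, with
+ // a progress callback roughly every 30 frames:
+ //
+ // editor.startPreview(holder, 0, -1, false, 30,
+ //         new PreviewProgressListener() {
+ //             public void onProgress(VideoEditor ve, long timeMs, boolean end) {
+ //                 // move the playhead to timeMs; "end" is true at the end
+ //             }
+ //         });
+ // ...
+ // long stoppedAtMs = editor.stopPreview();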
+
+ /**
+ * Stop the current preview. This method blocks until ongoing preview is
+ * stopped. Ignored if there is no preview running.
+ *
+ * @return The accurate current time when stop is effective expressed in
+ * milliseconds
+ */
+ public long stopPreview();
+}
diff --git a/media/java/android/media/videoeditor/VideoEditorFactory.java b/media/java/android/media/videoeditor/VideoEditorFactory.java
new file mode 100755
index 0000000..41eed16
--- /dev/null
+++ b/media/java/android/media/videoeditor/VideoEditorFactory.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+
+
+/**
+ * The VideoEditorFactory class must be used to instantiate VideoEditor objects
+ * by creating a new project with {@link #create(String, String)} or by
+ * loading an existing project with {@link #load(String, String, boolean)}.
+ * {@hide}
+ */
+public class VideoEditorFactory {
+ // VideoEditor implementation classes
+ public static final String TEST_CLASS_IMPLEMENTATION
+ = "android.media.videoeditor.VideoEditorTestImpl";
+ public static final String DEFAULT_CLASS_IMPLEMENTATION
+ = "android.media.videoeditor.VideoEditorImpl";
+
+ /**
+ * This is the factory method for creating a new VideoEditor instance.
+ *
+ * @param projectPath The path where all VideoEditor internal
+ * files are stored. When a project is deleted the application is
+ * responsible for deleting the path and its contents.
+ * @param className The implementation class name
+ *
+ * @return The VideoEditor instance
+ *
+ * @throws IOException if the path does not exist or if the path cannot
+ * be accessed in read/write mode
+ * @throws IllegalStateException if a previous VideoEditor instance has not
+ * been released
+ * @throws ClassNotFoundException, NoSuchMethodException,
+ * InvocationTargetException, IllegalAccessException,
+ * InstantiationException if the implementation class cannot
+ * be instantiated.
+ */
+ public static VideoEditor create(String projectPath, String className) throws IOException,
+ ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
+ IllegalAccessException, InstantiationException {
+ // If the project path does not exist create it
+ final File dir = new File(projectPath);
+ if (!dir.exists()) {
+ if (!dir.mkdirs()) {
+ throw new FileNotFoundException("Cannot create project path: " + projectPath);
+ } else {
+ // Create the file which hides the media files
+ // from the media scanner
+ if (!new File(dir, ".nomedia").createNewFile()) {
+ throw new FileNotFoundException("Cannot create file .nomedia");
+ }
+ }
+ }
+
+ final Class<?> cls = Class.forName(className);
+ final Class<?> partypes[] = new Class[1];
+ partypes[0] = String.class;
+ final Constructor<?> ct = cls.getConstructor(partypes);
+ final Object arglist[] = new Object[1];
+ arglist[0] = projectPath;
+
+ return (VideoEditor)ct.newInstance(arglist);
+ }
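+
+ // An illustrative usage sketch (not part of the original change); the
+ // project path is a hypothetical placeholder:
+ //
+ // VideoEditor editor;
+ // try {
+ //     editor = VideoEditorFactory.create("/sdcard/project1",
+ //             VideoEditorFactory.TEST_CLASS_IMPLEMENTATION);
+ // } catch (IOException e) {
+ //     throw e; // the project path could not be created or accessed
+ // } catch (Exception e) {
+ //     // reflection failures: ClassNotFoundException, NoSuchMethodException, ...
+ //     throw new IllegalStateException("Cannot instantiate VideoEditor", e);
+ // }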
+
+ /**
+ * This is the factory method for instantiating a VideoEditor from the
+ * internal state previously saved with the
+ * {@link VideoEditor#save()} method.
+ *
+ * @param projectPath The path where all VideoEditor internal files
+ * are stored. When a project is deleted the application is
+ * responsible for deleting the path and its contents.
+ * @param className The implementation class name
+ * @param generatePreview if set to true the
+ * {@link VideoEditor#generatePreview()} will be called internally to
+ * generate any needed transitions.
+ *
+ * @return The VideoEditor instance
+ *
+ * @throws IOException if the path does not exist or if the path cannot
+ * be accessed in read/write mode or if one of the resource
+ * media files cannot be retrieved
+ * @throws IllegalStateException if a previous VideoEditor instance has not
+ * been released
+ */
+ public static VideoEditor load(String projectPath, String className, boolean generatePreview)
+ throws IOException, ClassNotFoundException, NoSuchMethodException,
+ InvocationTargetException, IllegalAccessException, InstantiationException {
+ final Class<?> cls = Class.forName(className);
+ final Class<?> partypes[] = new Class[1];
+ partypes[0] = String.class;
+ final Constructor<?> ct = cls.getConstructor(partypes);
+ final Object arglist[] = new Object[1];
+ arglist[0] = projectPath;
+
+ final VideoEditor videoEditor = (VideoEditor)ct.newInstance(arglist);
+ if (generatePreview) {
+ videoEditor.generatePreview();
+ }
+ return videoEditor;
+ }
+}
diff --git a/media/java/android/media/videoeditor/VideoEditorTestImpl.java b/media/java/android/media/videoeditor/VideoEditorTestImpl.java
new file mode 100644
index 0000000..ba84f49
--- /dev/null
+++ b/media/java/android/media/videoeditor/VideoEditorTestImpl.java
@@ -0,0 +1,1207 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.xmlpull.v1.XmlPullParser;
+import org.xmlpull.v1.XmlPullParserException;
+import org.xmlpull.v1.XmlSerializer;
+
+import android.graphics.Rect;
+import android.util.Log;
+import android.util.Xml;
+import android.view.SurfaceHolder;
+
+/**
+ * The VideoEditor implementation {@hide}
+ */
+public class VideoEditorTestImpl implements VideoEditor {
+ // Logging
+ private static final String TAG = "VideoEditorImpl";
+
+ // The project filename
+ private static final String PROJECT_FILENAME = "videoeditor.xml";
+
+ // XML tags
+ private static final String TAG_PROJECT = "project";
+ private static final String TAG_MEDIA_ITEMS = "media_items";
+ private static final String TAG_MEDIA_ITEM = "media_item";
+ private static final String TAG_TRANSITIONS = "transitions";
+ private static final String TAG_TRANSITION = "transition";
+ private static final String TAG_OVERLAYS = "overlays";
+ private static final String TAG_OVERLAY = "overlay";
+ private static final String TAG_OVERLAY_USER_ATTRIBUTES = "overlay_user_attributes";
+ private static final String TAG_EFFECTS = "effects";
+ private static final String TAG_EFFECT = "effect";
+ private static final String TAG_AUDIO_TRACKS = "audio_tracks";
+ private static final String TAG_AUDIO_TRACK = "audio_track";
+
+ private static final String ATTR_ID = "id";
+ private static final String ATTR_FILENAME = "filename";
+ private static final String ATTR_AUDIO_WAVEFORM_FILENAME = "waveform";
+ private static final String ATTR_RENDERING_MODE = "rendering_mode";
+ private static final String ATTR_ASPECT_RATIO = "aspect_ratio";
+ private static final String ATTR_TYPE = "type";
+ private static final String ATTR_DURATION = "duration";
+ private static final String ATTR_START_TIME = "start_time";
+ private static final String ATTR_BEGIN_TIME = "begin_time";
+ private static final String ATTR_END_TIME = "end_time";
+ private static final String ATTR_VOLUME = "volume";
+ private static final String ATTR_BEHAVIOR = "behavior";
+ private static final String ATTR_DIRECTION = "direction";
+ private static final String ATTR_BLENDING = "blending";
+ private static final String ATTR_INVERT = "invert";
+ private static final String ATTR_MASK = "mask";
+ private static final String ATTR_BEFORE_MEDIA_ITEM_ID = "before_media_item";
+ private static final String ATTR_AFTER_MEDIA_ITEM_ID = "after_media_item";
+ private static final String ATTR_COLOR_EFFECT_TYPE = "color_type";
+ private static final String ATTR_COLOR_EFFECT_VALUE = "color_value";
+ private static final String ATTR_START_RECT_L = "start_l";
+ private static final String ATTR_START_RECT_T = "start_t";
+ private static final String ATTR_START_RECT_R = "start_r";
+ private static final String ATTR_START_RECT_B = "start_b";
+ private static final String ATTR_END_RECT_L = "end_l";
+ private static final String ATTR_END_RECT_T = "end_t";
+ private static final String ATTR_END_RECT_R = "end_r";
+ private static final String ATTR_END_RECT_B = "end_b";
+ private static final String ATTR_LOOP = "loop";
+ private static final String ATTR_MUTED = "muted";
+ private static final String ATTR_DUCK_ENABLED = "ducking_enabled";
+ private static final String ATTR_DUCK_THRESHOLD = "ducking_threshold";
+ private static final String ATTR_DUCKED_TRACK_VOLUME = "ducking_volume";
+
+ // Instance variables
+ private long mDurationMs;
+ private final String mProjectPath;
+ private final List<MediaItem> mMediaItems = new ArrayList<MediaItem>();
+ private final List<AudioTrack> mAudioTracks = new ArrayList<AudioTrack>();
+ private final List<Transition> mTransitions = new ArrayList<Transition>();
+ private PreviewThread mPreviewThread;
+ private int mAspectRatio;
+
+ /**
+ * The preview thread
+ */
+ private class PreviewThread extends Thread {
+ // The duration of one preview frame in milliseconds (approximately 30 fps)
+ private static final long FRAME_DURATION = 33;
+
+ // Instance variables
+ private final PreviewProgressListener mListener;
+ private final int mCallbackAfterFrameCount;
+ private final long mFromMs, mToMs;
+ private boolean mRun, mLoop;
+ private long mPositionMs;
+
+ /**
+ * Constructor
+ *
+ * @param fromMs Start preview at this position
+ * @param toMs The time (relative to the timeline) at which the preview
+ * will stop. Use -1 to play to the end of the timeline
+ * @param callbackAfterFrameCount The listener interface should be
+ * invoked after the number of frames specified by this
+ * parameter.
+ * @param loop true if the preview should be looped once it reaches the
+ * end
+ * @param listener The listener
+ */
+ public PreviewThread(long fromMs, long toMs, boolean loop, int callbackAfterFrameCount,
+ PreviewProgressListener listener) {
+ mPositionMs = mFromMs = fromMs;
+ if (toMs < 0) {
+ mToMs = mDurationMs;
+ } else {
+ mToMs = toMs;
+ }
+ mLoop = loop;
+ mCallbackAfterFrameCount = callbackAfterFrameCount;
+ mListener = listener;
+ mRun = true;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public void run() {
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "===> PreviewThread.run enter");
+ }
+ int frameCount = 0;
+ while (mRun) {
+ try {
+ sleep(FRAME_DURATION);
+ } catch (InterruptedException ex) {
+ break;
+ }
+ frameCount++;
+ mPositionMs += FRAME_DURATION;
+
+ if (mPositionMs >= mToMs) {
+ if (!mLoop) {
+ if (mListener != null) {
+ mListener.onProgress(VideoEditorTestImpl.this, mPositionMs, true);
+ }
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "PreviewThread.run playback complete");
+ }
+ break;
+ } else {
+ // Fire a notification for the end of the clip
+ if (mListener != null) {
+ mListener.onProgress(VideoEditorTestImpl.this, mToMs, false);
+ }
+
+ // Rewind
+ mPositionMs = mFromMs;
+ if (mListener != null) {
+ mListener.onProgress(VideoEditorTestImpl.this, mPositionMs, false);
+ }
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "PreviewThread.run playback complete");
+ }
+ frameCount = 0;
+ }
+ } else {
+ if (frameCount == mCallbackAfterFrameCount) {
+ if (mListener != null) {
+ mListener.onProgress(VideoEditorTestImpl.this, mPositionMs, false);
+ }
+ frameCount = 0;
+ }
+ }
+ }
+
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "===> PreviewThread.run exit");
+ }
+ }
+
+ /**
+ * Stop the preview
+ *
+ * @return The stop position
+ */
+ public long stopPreview() {
+ mRun = false;
+ try {
+ join();
+ } catch (InterruptedException ex) {
+ }
+ return mPositionMs;
+ }
+ };
+
+ /**
+ * Constructor
+ *
+ * @param projectPath
+ */
+ public VideoEditorTestImpl(String projectPath) throws IOException {
+ mProjectPath = projectPath;
+ final File projectXml = new File(projectPath, PROJECT_FILENAME);
+ if (projectXml.exists()) {
+ try {
+ load();
+ } catch (Exception ex) {
+ throw new IOException(ex);
+ }
+ } else {
+ mAspectRatio = MediaProperties.ASPECT_RATIO_16_9;
+ mDurationMs = 0;
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public String getPath() {
+ return mProjectPath;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void addMediaItem(MediaItem mediaItem) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ if (mMediaItems.contains(mediaItem)) {
+ throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
+ }
+
+ // Invalidate the end transition if necessary
+ final int mediaItemsCount = mMediaItems.size();
+ if (mediaItemsCount > 0) {
+ removeTransitionAfter(mediaItemsCount - 1);
+ }
+
+ // Add the new media item
+ mMediaItems.add(mediaItem);
+
+ computeTimelineDuration();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void insertMediaItem(MediaItem mediaItem, String afterMediaItemId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ if (mMediaItems.contains(mediaItem)) {
+ throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
+ }
+
+ if (afterMediaItemId == null) {
+ if (mMediaItems.size() > 0) {
+ // Invalidate the transition at the beginning of the timeline
+ removeTransitionBefore(0);
+ }
+ mMediaItems.add(0, mediaItem);
+ computeTimelineDuration();
+ } else {
+ final int mediaItemCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemCount; i++) {
+ final MediaItem mi = mMediaItems.get(i);
+ if (mi.getId().equals(afterMediaItemId)) {
+ // Invalidate the transition at this position
+ removeTransitionAfter(i);
+ // Insert the new media item
+ mMediaItems.add(i + 1, mediaItem);
+ computeTimelineDuration();
+ return;
+ }
+ }
+ throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void moveMediaItem(String mediaItemId, String afterMediaItemId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ final MediaItem moveMediaItem = removeMediaItem(mediaItemId);
+ if (moveMediaItem == null) {
+ throw new IllegalArgumentException("Target MediaItem not found: " + mediaItemId);
+ }
+
+ if (afterMediaItemId == null) {
+ if (mMediaItems.size() > 0) {
+ // Invalidate adjacent transitions at the insertion point
+ removeTransitionBefore(0);
+
+ // Insert the media item at the new position
+ mMediaItems.add(0, moveMediaItem);
+ computeTimelineDuration();
+ } else {
+ throw new IllegalStateException("Cannot move media item (it is the only item)");
+ }
+ } else {
+ final int mediaItemCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemCount; i++) {
+ final MediaItem mi = mMediaItems.get(i);
+ if (mi.getId().equals(afterMediaItemId)) {
+ // Invalidate adjacent transitions at the insertion point
+ removeTransitionAfter(i);
+ // Insert the media item at the new position
+ mMediaItems.add(i + 1, moveMediaItem);
+ computeTimelineDuration();
+ return;
+ }
+ }
+
+ throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized MediaItem removeMediaItem(String mediaItemId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ final MediaItem mediaItem = getMediaItem(mediaItemId);
+ if (mediaItem != null) {
+ // Remove the media item
+ mMediaItems.remove(mediaItem);
+ // Remove the adjacent transitions
+ removeAdjacentTransitions(mediaItem);
+ computeTimelineDuration();
+ }
+
+ return mediaItem;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized MediaItem getMediaItem(String mediaItemId) {
+ for (MediaItem mediaItem : mMediaItems) {
+ if (mediaItem.getId().equals(mediaItemId)) {
+ return mediaItem;
+ }
+ }
+
+ return null;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized List<MediaItem> getAllMediaItems() {
+ return mMediaItems;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void removeAllMediaItems() {
+ mMediaItems.clear();
+
+ // Invalidate all transitions
+ for (Transition transition : mTransitions) {
+ transition.invalidate();
+ }
+ mTransitions.clear();
+
+ mDurationMs = 0;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void addTransition(Transition transition) {
+ mTransitions.add(transition);
+
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+
+ // Cross reference the transitions
+ if (afterMediaItem != null) {
+ // If a transition already exists at the specified position then
+ // invalidate it.
+ if (afterMediaItem.getEndTransition() != null) {
+ afterMediaItem.getEndTransition().invalidate();
+ }
+ afterMediaItem.setEndTransition(transition);
+ }
+
+ if (beforeMediaItem != null) {
+ // If a transition already exists at the specified position then
+ // invalidate it.
+ if (beforeMediaItem.getBeginTransition() != null) {
+ beforeMediaItem.getBeginTransition().invalidate();
+ }
+ beforeMediaItem.setBeginTransition(transition);
+ }
+
+ computeTimelineDuration();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized Transition removeTransition(String transitionId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ final Transition transition = getTransition(transitionId);
+ if (transition == null) {
+ throw new IllegalStateException("Transition not found: " + transitionId);
+ }
+
+ // Remove the transition references
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ if (afterMediaItem != null) {
+ afterMediaItem.setEndTransition(null);
+ }
+
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ if (beforeMediaItem != null) {
+ beforeMediaItem.setBeginTransition(null);
+ }
+
+ mTransitions.remove(transition);
+ transition.invalidate();
+ computeTimelineDuration();
+
+ return transition;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public List<Transition> getAllTransitions() {
+ return mTransitions;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public Transition getTransition(String transitionId) {
+ for (Transition transition : mTransitions) {
+ if (transition.getId().equals(transitionId)) {
+ return transition;
+ }
+ }
+
+ return null;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void addAudioTrack(AudioTrack audioTrack) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ mAudioTracks.add(audioTrack);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void insertAudioTrack(AudioTrack audioTrack, String afterAudioTrackId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ if (afterAudioTrackId == null) {
+ mAudioTracks.add(0, audioTrack);
+ } else {
+ final int audioTrackCount = mAudioTracks.size();
+ for (int i = 0; i < audioTrackCount; i++) {
+ AudioTrack at = mAudioTracks.get(i);
+ if (at.getId().equals(afterAudioTrackId)) {
+ mAudioTracks.add(i + 1, audioTrack);
+ return;
+ }
+ }
+
+ throw new IllegalArgumentException("AudioTrack not found: " + afterAudioTrackId);
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void moveAudioTrack(String audioTrackId, String afterAudioTrackId) {
+ throw new IllegalStateException("Not supported");
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized AudioTrack removeAudioTrack(String audioTrackId) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+
+ final AudioTrack audioTrack = getAudioTrack(audioTrackId);
+ if (audioTrack != null) {
+ mAudioTracks.remove(audioTrack);
+ }
+
+ return audioTrack;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public AudioTrack getAudioTrack(String audioTrackId) {
+ for (AudioTrack at : mAudioTracks) {
+ if (at.getId().equals(audioTrackId)) {
+ return at;
+ }
+ }
+
+ return null;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public List<AudioTrack> getAllAudioTracks() {
+ return mAudioTracks;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public void save() throws IOException {
+ final XmlSerializer serializer = Xml.newSerializer();
+ final StringWriter writer = new StringWriter();
+ serializer.setOutput(writer);
+ serializer.startDocument("UTF-8", true);
+ serializer.startTag("", TAG_PROJECT);
+ serializer.attribute("", ATTR_ASPECT_RATIO, Integer.toString(mAspectRatio));
+
+ serializer.startTag("", TAG_MEDIA_ITEMS);
+ for (MediaItem mediaItem : mMediaItems) {
+ serializer.startTag("", TAG_MEDIA_ITEM);
+ serializer.attribute("", ATTR_ID, mediaItem.getId());
+ serializer.attribute("", ATTR_TYPE, mediaItem.getClass().getSimpleName());
+ serializer.attribute("", ATTR_FILENAME, mediaItem.getFilename());
+ serializer.attribute("", ATTR_RENDERING_MODE, Integer.toString(
+ mediaItem.getRenderingMode()));
+ if (mediaItem instanceof MediaVideoItem) {
+ final MediaVideoItem mvi = (MediaVideoItem)mediaItem;
+ serializer
+ .attribute("", ATTR_BEGIN_TIME, Long.toString(mvi.getBoundaryBeginTime()));
+ serializer.attribute("", ATTR_END_TIME, Long.toString(mvi.getBoundaryEndTime()));
+ serializer.attribute("", ATTR_VOLUME, Integer.toString(mvi.getVolume()));
+ serializer.attribute("", ATTR_MUTED, Boolean.toString(mvi.isMuted()));
+ if (mvi.getAudioWaveformFilename() != null) {
+ serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
+ mvi.getAudioWaveformFilename());
+ }
+ } else if (mediaItem instanceof MediaImageItem) {
+ serializer.attribute("", ATTR_DURATION,
+ Long.toString(mediaItem.getTimelineDuration()));
+ }
+
+ final List<Overlay> overlays = mediaItem.getAllOverlays();
+ if (overlays.size() > 0) {
+ serializer.startTag("", TAG_OVERLAYS);
+ for (Overlay overlay : overlays) {
+ serializer.startTag("", TAG_OVERLAY);
+ serializer.attribute("", ATTR_ID, overlay.getId());
+ serializer.attribute("", ATTR_TYPE, overlay.getClass().getSimpleName());
+ serializer.attribute("", ATTR_BEGIN_TIME,
+ Long.toString(overlay.getStartTime()));
+ serializer.attribute("", ATTR_DURATION, Long.toString(overlay.getDuration()));
+ if (overlay instanceof OverlayFrame) {
+ final OverlayFrame overlayFrame = (OverlayFrame)overlay;
+ overlayFrame.save(getPath());
+ if (overlayFrame.getFilename() != null) {
+ serializer.attribute("", ATTR_FILENAME, overlayFrame.getFilename());
+ }
+ }
+
+ // Save the user attributes
+ serializer.startTag("", TAG_OVERLAY_USER_ATTRIBUTES);
+ final Map<String, String> userAttributes = overlay.getUserAttributes();
+ for (String name : userAttributes.keySet()) {
+ final String value = userAttributes.get(name);
+ if (value != null) {
+ serializer.attribute("", name, value);
+ }
+ }
+ serializer.endTag("", TAG_OVERLAY_USER_ATTRIBUTES);
+
+ serializer.endTag("", TAG_OVERLAY);
+ }
+ serializer.endTag("", TAG_OVERLAYS);
+ }
+
+ final List<Effect> effects = mediaItem.getAllEffects();
+ if (effects.size() > 0) {
+ serializer.startTag("", TAG_EFFECTS);
+ for (Effect effect : effects) {
+ serializer.startTag("", TAG_EFFECT);
+ serializer.attribute("", ATTR_ID, effect.getId());
+ serializer.attribute("", ATTR_TYPE, effect.getClass().getSimpleName());
+ serializer.attribute("", ATTR_BEGIN_TIME,
+ Long.toString(effect.getStartTime()));
+ serializer.attribute("", ATTR_DURATION, Long.toString(effect.getDuration()));
+ if (effect instanceof EffectColor) {
+ final EffectColor colorEffect = (EffectColor)effect;
+ serializer.attribute("", ATTR_COLOR_EFFECT_TYPE,
+ Integer.toString(colorEffect.getType()));
+ if (colorEffect.getType() == EffectColor.TYPE_COLOR ||
+ colorEffect.getType() == EffectColor.TYPE_GRADIENT) {
+ serializer.attribute("", ATTR_COLOR_EFFECT_VALUE,
+ Integer.toString(colorEffect.getColor()));
+ }
+ } else if (effect instanceof EffectKenBurns) {
+ final Rect startRect = ((EffectKenBurns)effect).getStartRect();
+ serializer.attribute("", ATTR_START_RECT_L,
+ Integer.toString(startRect.left));
+ serializer.attribute("", ATTR_START_RECT_T,
+ Integer.toString(startRect.top));
+ serializer.attribute("", ATTR_START_RECT_R,
+ Integer.toString(startRect.right));
+ serializer.attribute("", ATTR_START_RECT_B,
+ Integer.toString(startRect.bottom));
+
+ final Rect endRect = ((EffectKenBurns)effect).getEndRect();
+ serializer.attribute("", ATTR_END_RECT_L, Integer.toString(endRect.left));
+ serializer.attribute("", ATTR_END_RECT_T, Integer.toString(endRect.top));
+ serializer.attribute("", ATTR_END_RECT_R, Integer.toString(endRect.right));
+ serializer.attribute("", ATTR_END_RECT_B,
+ Integer.toString(endRect.bottom));
+ }
+
+ serializer.endTag("", TAG_EFFECT);
+ }
+ serializer.endTag("", TAG_EFFECTS);
+ }
+
+ serializer.endTag("", TAG_MEDIA_ITEM);
+ }
+ serializer.endTag("", TAG_MEDIA_ITEMS);
+
+ serializer.startTag("", TAG_TRANSITIONS);
+
+ for (Transition transition : mTransitions) {
+ serializer.startTag("", TAG_TRANSITION);
+ serializer.attribute("", ATTR_ID, transition.getId());
+ serializer.attribute("", ATTR_TYPE, transition.getClass().getSimpleName());
+ serializer.attribute("", ATTR_DURATION, Long.toString(transition.getDuration()));
+ serializer.attribute("", ATTR_BEHAVIOR, Integer.toString(transition.getBehavior()));
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ if (afterMediaItem != null) {
+ serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID, afterMediaItem.getId());
+ }
+
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ if (beforeMediaItem != null) {
+ serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID, beforeMediaItem.getId());
+ }
+
+ if (transition instanceof TransitionSliding) {
+ serializer.attribute("", ATTR_DIRECTION,
+ Integer.toString(((TransitionSliding)transition).getDirection()));
+ } else if (transition instanceof TransitionAlpha) {
+ TransitionAlpha ta = (TransitionAlpha)transition;
+ serializer.attribute("", ATTR_BLENDING, Integer.toString(ta.getBlendingPercent()));
+ serializer.attribute("", ATTR_INVERT, Boolean.toString(ta.isInvert()));
+ if (ta.getMaskFilename() != null) {
+ serializer.attribute("", ATTR_MASK, ta.getMaskFilename());
+ }
+ }
+ serializer.endTag("", TAG_TRANSITION);
+ }
+ serializer.endTag("", TAG_TRANSITIONS);
+
+ serializer.startTag("", TAG_AUDIO_TRACKS);
+ for (AudioTrack at : mAudioTracks) {
+ serializer.startTag("", TAG_AUDIO_TRACK);
+ serializer.attribute("", ATTR_ID, at.getId());
+ serializer.attribute("", ATTR_FILENAME, at.getFilename());
+ serializer.attribute("", ATTR_START_TIME, Long.toString(at.getStartTime()));
+ serializer.attribute("", ATTR_BEGIN_TIME, Long.toString(at.getBoundaryBeginTime()));
+ serializer.attribute("", ATTR_END_TIME, Long.toString(at.getBoundaryEndTime()));
+ serializer.attribute("", ATTR_VOLUME, Integer.toString(at.getVolume()));
+ serializer.attribute("", ATTR_DUCK_ENABLED, Boolean.toString(at.isDuckingEnabled()));
+ serializer.attribute("", ATTR_DUCKED_TRACK_VOLUME, Integer.toString(at.getDuckedTrackVolume()));
+ serializer.attribute("", ATTR_DUCK_THRESHOLD, Integer.toString(at.getDuckingThreshhold()));
+ serializer.attribute("", ATTR_MUTED, Boolean.toString(at.isMuted()));
+ serializer.attribute("", ATTR_LOOP, Boolean.toString(at.isLooping()));
+ if (at.getAudioWaveformFilename() != null) {
+ serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
+ at.getAudioWaveformFilename());
+ }
+
+ serializer.endTag("", TAG_AUDIO_TRACK);
+ }
+ serializer.endTag("", TAG_AUDIO_TRACKS);
+
+ serializer.endTag("", TAG_PROJECT);
+ serializer.endDocument();
+
+ // Save the metadata XML file
+ final FileOutputStream out = new FileOutputStream(new File(getPath(), PROJECT_FILENAME));
+ out.write(writer.toString().getBytes());
+ out.flush();
+ out.close();
+ }
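+
+ // For reference (not part of the original change), the document written by
+ // save() is shaped roughly as follows (attributes omitted):
+ //
+ //   <project>
+ //     <media_items>
+ //       <media_item>
+ //         <overlays><overlay><overlay_user_attributes/></overlay></overlays>
+ //         <effects><effect/></effects>
+ //       </media_item>
+ //     </media_items>
+ //     <transitions><transition/></transitions>
+ //     <audio_tracks><audio_track/></audio_tracks>
+ //   </project>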
+
+ /**
+ * Load the project from XML
+ */
+ private void load() throws FileNotFoundException, XmlPullParserException, IOException {
+ final File file = new File(mProjectPath, PROJECT_FILENAME);
+ // Load the metadata
+ final XmlPullParser parser = Xml.newPullParser();
+ parser.setInput(new FileInputStream(file), "UTF-8");
+ int eventType = parser.getEventType();
+ String name;
+ MediaItem currentMediaItem = null;
+ Overlay currentOverlay = null;
+ while (eventType != XmlPullParser.END_DOCUMENT) {
+ switch (eventType) {
+ case XmlPullParser.START_TAG: {
+ name = parser.getName();
+ if (TAG_PROJECT.equals(name)) {
+ mAspectRatio = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_ASPECT_RATIO));
+ } else if (TAG_MEDIA_ITEM.equals(name)) {
+ final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
+ final String type = parser.getAttributeValue("", ATTR_TYPE);
+ final String filename = parser.getAttributeValue("", ATTR_FILENAME);
+ final int renderingMode = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_RENDERING_MODE));
+
+ if (MediaImageItem.class.getSimpleName().equals(type)) {
+ final long durationMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_DURATION));
+ currentMediaItem = new MediaImageItem(this, mediaItemId, filename,
+ durationMs, renderingMode);
+ } else if (MediaVideoItem.class.getSimpleName().equals(type)) {
+ final long beginMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_MUTED));
+ final String audioWaveformFilename = parser.getAttributeValue("",
+ ATTR_AUDIO_WAVEFORM_FILENAME);
+ currentMediaItem = new MediaVideoItem(this, mediaItemId, filename,
+ renderingMode, beginMs, endMs, volume, muted,
+ audioWaveformFilename);
+
+ final long beginTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
+ final long endTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_END_TIME));
+ ((MediaVideoItem)currentMediaItem).setExtractBoundaries(beginTimeMs,
+ endTimeMs);
+
+ final int volumePercent = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_VOLUME));
+ ((MediaVideoItem)currentMediaItem).setVolume(volumePercent);
+ } else {
+ Log.e(TAG, "Unknown media item type: " + type);
+ currentMediaItem = null;
+ }
+
+ if (currentMediaItem != null) {
+ mMediaItems.add(currentMediaItem);
+ }
+ } else if (TAG_TRANSITION.equals(name)) {
+ final Transition transition = parseTransition(parser);
+ if (transition != null) {
+ mTransitions.add(transition);
+ }
+ } else if (TAG_OVERLAY.equals(name)) {
+ if (currentMediaItem != null) {
+ currentOverlay = parseOverlay(parser, currentMediaItem);
+ if (currentOverlay != null) {
+ currentMediaItem.addOverlay(currentOverlay);
+ }
+ }
+ } else if (TAG_OVERLAY_USER_ATTRIBUTES.equals(name)) {
+ if (currentOverlay != null) {
+ final int attributesCount = parser.getAttributeCount();
+ for (int i = 0; i < attributesCount; i++) {
+ currentOverlay.setUserAttribute(parser.getAttributeName(i),
+ parser.getAttributeValue(i));
+ }
+ }
+ } else if (TAG_EFFECT.equals(name)) {
+ if (currentMediaItem != null) {
+ final Effect effect = parseEffect(parser, currentMediaItem);
+ if (effect != null) {
+ currentMediaItem.addEffect(effect);
+ }
+ }
+ } else if (TAG_AUDIO_TRACK.equals(name)) {
+ final AudioTrack audioTrack = parseAudioTrack(parser);
+ if (audioTrack != null) {
+ addAudioTrack(audioTrack);
+ }
+ }
+ break;
+ }
+
+ case XmlPullParser.END_TAG: {
+ name = parser.getName();
+ if (TAG_MEDIA_ITEM.equals(name)) {
+ currentMediaItem = null;
+ } else if (TAG_OVERLAY.equals(name)) {
+ currentOverlay = null;
+ }
+ break;
+ }
+
+ default: {
+ break;
+ }
+ }
+ eventType = parser.next();
+ }
+
+ computeTimelineDuration();
+ }
+
+ /**
+ * Parse the transition
+ *
+ * @param parser The parser
+ * @return The transition
+ */
+ private Transition parseTransition(XmlPullParser parser) {
+ final String transitionId = parser.getAttributeValue("", ATTR_ID);
+ final String type = parser.getAttributeValue("", ATTR_TYPE);
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final int behavior = Integer.parseInt(parser.getAttributeValue("", ATTR_BEHAVIOR));
+
+ final String beforeMediaItemId = parser.getAttributeValue("", ATTR_BEFORE_MEDIA_ITEM_ID);
+ final MediaItem beforeMediaItem;
+ if (beforeMediaItemId != null) {
+ beforeMediaItem = getMediaItem(beforeMediaItemId);
+ } else {
+ beforeMediaItem = null;
+ }
+
+ final String afterMediaItemId = parser.getAttributeValue("", ATTR_AFTER_MEDIA_ITEM_ID);
+ final MediaItem afterMediaItem;
+ if (afterMediaItemId != null) {
+ afterMediaItem = getMediaItem(afterMediaItemId);
+ } else {
+ afterMediaItem = null;
+ }
+
+ final Transition transition;
+ if (TransitionAlpha.class.getSimpleName().equals(type)) {
+ final int blending = Integer.parseInt(parser.getAttributeValue("", ATTR_BLENDING));
+ final String maskFilename = parser.getAttributeValue("", ATTR_MASK);
+ final boolean invert = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_INVERT));
+ transition = new TransitionAlpha(transitionId, afterMediaItem, beforeMediaItem,
+ durationMs, behavior, maskFilename, blending, invert);
+ } else if (TransitionCrossfade.class.getSimpleName().equals(type)) {
+ transition = new TransitionCrossfade(transitionId, afterMediaItem, beforeMediaItem,
+ durationMs, behavior);
+ } else if (TransitionSliding.class.getSimpleName().equals(type)) {
+ final int direction = Integer.parseInt(parser.getAttributeValue("", ATTR_DIRECTION));
+ transition = new TransitionSliding(transitionId, afterMediaItem, beforeMediaItem,
+ durationMs, behavior, direction);
+ } else if (TransitionFadeBlack.class.getSimpleName().equals(type)) {
+ transition = new TransitionFadeBlack(transitionId, afterMediaItem, beforeMediaItem,
+ durationMs, behavior);
+ } else {
+ transition = null;
+ }
+
+ if (beforeMediaItem != null) {
+ beforeMediaItem.setBeginTransition(transition);
+ }
+
+ if (afterMediaItem != null) {
+ afterMediaItem.setEndTransition(transition);
+ }
+
+ return transition;
+ }
+
+ /**
+ * Parse the overlay
+ *
+ * @param parser The parser
+ * @param mediaItem The media item owner
+ *
+ * @return The overlay
+ */
+ private Overlay parseOverlay(XmlPullParser parser, MediaItem mediaItem) {
+ final String overlayId = parser.getAttributeValue("", ATTR_ID);
+ final String type = parser.getAttributeValue("", ATTR_TYPE);
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+
+ final Overlay overlay;
+ if (OverlayFrame.class.getSimpleName().equals(type)) {
+ final String filename = parser.getAttributeValue("", ATTR_FILENAME);
+ overlay = new OverlayFrame(mediaItem, overlayId, filename, startTimeMs, durationMs);
+ } else {
+ overlay = null;
+ }
+
+ return overlay;
+ }
+
+ /**
+ * Parse the effect
+ *
+ * @param parser The parser
+ * @param mediaItem The media item owner
+ *
+ * @return The effect
+ */
+ private Effect parseEffect(XmlPullParser parser, MediaItem mediaItem) {
+ final String effectId = parser.getAttributeValue("", ATTR_ID);
+ final String type = parser.getAttributeValue("", ATTR_TYPE);
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+
+ final Effect effect;
+ if (EffectColor.class.getSimpleName().equals(type)) {
+ final int colorEffectType =
+ Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_TYPE));
+ final int color;
+ if (colorEffectType == EffectColor.TYPE_COLOR
+ || colorEffectType == EffectColor.TYPE_GRADIENT) {
+ color = Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_VALUE));
+ } else {
+ color = 0;
+ }
+ effect = new EffectColor(mediaItem, effectId, startTimeMs, durationMs,
+ colorEffectType, color);
+ } else if (EffectKenBurns.class.getSimpleName().equals(type)) {
+ final Rect startRect = new Rect(
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_L)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_T)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_R)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_B)));
+ final Rect endRect = new Rect(
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_L)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_T)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_R)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_B)));
+ effect = new EffectKenBurns(mediaItem, effectId, startRect, endRect, startTimeMs,
+ durationMs);
+ } else {
+ effect = null;
+ }
+
+ return effect;
+ }
+
+ /**
+ * Parse the audio track
+ *
+ * @param parser The parser
+ *
+ * @return The audio track
+ */
+ private AudioTrack parseAudioTrack(XmlPullParser parser) {
+ final String audioTrackId = parser.getAttributeValue("", ATTR_ID);
+ final String filename = parser.getAttributeValue("", ATTR_FILENAME);
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_START_TIME));
+ final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
+ final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_LOOP));
+ final boolean duckingEnabled = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_DUCK_ENABLED));
+ final int duckThreshold = Integer.parseInt(parser.getAttributeValue("", ATTR_DUCK_THRESHOLD));
+ final int duckedTrackVolume = Integer.parseInt(parser.getAttributeValue("", ATTR_DUCKED_TRACK_VOLUME));
+ final String waveformFilename = parser.getAttributeValue("", ATTR_AUDIO_WAVEFORM_FILENAME);
+ try {
+ final AudioTrack audioTrack = new AudioTrack(this, audioTrackId, filename, startTimeMs,
+ beginMs, endMs, loop, volume, muted, duckingEnabled, duckThreshold, duckedTrackVolume, waveformFilename);
+
+ return audioTrack;
+ } catch (IOException ex) {
+ return null;
+ }
+ }
+
+ public void cancelExport(String filename) {
+ }
+
+ public void export(String filename, int height, int bitrate, ExportProgressListener listener)
+ throws IOException {
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public void generatePreview() {
+ // Generate all the needed transitions
+ for (Transition transition : mTransitions) {
+ if (!transition.isGenerated()) {
+ transition.generate();
+ }
+ }
+
+ // This is necessary because the user may have called setDuration on
+ // MediaImageItems
+ computeTimelineDuration();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public void release() {
+ stopPreview();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public long getDuration() {
+ // Since MediaImageItem can change duration we need to compute the
+ // duration here
+ computeTimelineDuration();
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public void setAspectRatio(int aspectRatio) {
+ mAspectRatio = aspectRatio;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs) {
+ if (mPreviewThread != null) {
+ throw new IllegalStateException("Previewing is in progress");
+ }
+ return timeMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
+ boolean loop, int callbackAfterFrameCount, PreviewProgressListener listener) {
+ if (fromMs >= mDurationMs) {
+ return;
+ }
+ mPreviewThread = new PreviewThread(fromMs, toMs, loop, callbackAfterFrameCount, listener);
+ mPreviewThread.start();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized long stopPreview() {
+ final long stopTimeMs;
+ if (mPreviewThread != null) {
+ stopTimeMs = mPreviewThread.stopPreview();
+ mPreviewThread = null;
+ } else {
+ stopTimeMs = 0;
+ }
+ return stopTimeMs;
+ }
+
+ /**
+ * Compute the timeline duration
+ */
+ private void computeTimelineDuration() {
+ mDurationMs = 0;
+ final int mediaItemsCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemsCount; i++) {
+ final MediaItem mediaItem = mMediaItems.get(i);
+ mDurationMs += mediaItem.getTimelineDuration();
+ if (mediaItem.getEndTransition() != null) {
+ if (i < mediaItemsCount - 1) {
+ mDurationMs -= mediaItem.getEndTransition().getDuration();
+ }
+ }
+ }
+ }
+
+ /**
+ * Remove transitions associated with the specified media item
+ *
+ * @param mediaItem The media item
+ */
+ private void removeAdjacentTransitions(MediaItem mediaItem) {
+ final Transition beginTransition = mediaItem.getBeginTransition();
+ if (beginTransition != null) {
+ if (beginTransition.getAfterMediaItem() != null) {
+ beginTransition.getAfterMediaItem().setEndTransition(null);
+ }
+ beginTransition.invalidate();
+ mTransitions.remove(beginTransition);
+ }
+
+ final Transition endTransition = mediaItem.getEndTransition();
+ if (endTransition != null) {
+ if (endTransition.getBeforeMediaItem() != null) {
+ endTransition.getBeforeMediaItem().setBeginTransition(null);
+ }
+ endTransition.invalidate();
+ mTransitions.remove(endTransition);
+ }
+
+ mediaItem.setBeginTransition(null);
+ mediaItem.setEndTransition(null);
+ }
+
+ /**
+ * Remove the transition before this media item
+ *
+ * @param index The media item index
+ */
+ private void removeTransitionBefore(int index) {
+ final MediaItem mediaItem = mMediaItems.get(index);
+ final Iterator<Transition> it = mTransitions.iterator();
+ while (it.hasNext()) {
+ Transition t = it.next();
+ if (t.getBeforeMediaItem() == mediaItem) {
+ it.remove();
+ t.invalidate();
+ mediaItem.setBeginTransition(null);
+ if (index > 0) {
+ mMediaItems.get(index - 1).setEndTransition(null);
+ }
+ break;
+ }
+ }
+ }
+
+ /**
+ * Remove the transition after this media item
+ *
+ * @param index The media item index
+ */
+ private void removeTransitionAfter(int index) {
+ final MediaItem mediaItem = mMediaItems.get(index);
+ final Iterator<Transition> it = mTransitions.iterator();
+ while (it.hasNext()) {
+ Transition t = it.next();
+ if (t.getAfterMediaItem() == mediaItem) {
+ it.remove();
+ t.invalidate();
+ mediaItem.setEndTransition(null);
+ // Invalidate the reference in the next media item
+ if (index < mMediaItems.size() - 1) {
+ mMediaItems.get(index + 1).setBeginTransition(null);
+ }
+ break;
+ }
+ }
+ }
+}
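
The computeTimelineDuration() method above sums each media item's timeline duration and subtracts every end transition's duration except for the last item's, since a transition overlaps the two items it joins. A minimal sketch of that arithmetic, with purely illustrative values:

    public class TimelineDurationSketch {
        public static void main(String[] args) {
            // Two 5000 ms items joined by a 1000 ms crossfade: the crossfade overlaps
            // both items, so the timeline is 9000 ms rather than 11000 ms.
            long[] itemDurationsMs = { 5000, 5000 };
            long crossfadeMs = 1000;

            long timelineMs = 0;
            for (int i = 0; i < itemDurationsMs.length; i++) {
                timelineMs += itemDurationsMs[i];
                // Only the first item ends in a transition in this example.
                boolean hasEndTransition = (i == 0);
                if (hasEndTransition && i < itemDurationsMs.length - 1) {
                    timelineMs -= crossfadeMs;
                }
            }
            System.out.println("Timeline duration: " + timelineMs + " ms"); // prints 9000
        }
    }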
diff --git a/media/java/android/media/videoeditor/WaveformData.java b/media/java/android/media/videoeditor/WaveformData.java
new file mode 100644
index 0000000..b53bd7d
--- /dev/null
+++ b/media/java/android/media/videoeditor/WaveformData.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+/**
+ * Class which describes the waveform data of an audio track. The gain values
+ * represent the average gain for an audio frame. For audio codecs which do
+ * not operate on a per-frame basis (e.g. ALAW, ULAW), a reasonable audio frame
+ * duration will be assumed (e.g. 50 ms).
+ * {@hide}
+ */
+public class WaveformData {
+ // Instance variables
+ private final int mFrameDurationMs;
+ private final int mFramesCount;
+ private final short[] mGains;
+
+ /**
+ * This constructor shall not be used
+ */
+ @SuppressWarnings("unused")
+ private WaveformData() {
+ mFrameDurationMs = 0;
+ mFramesCount = 0;
+ mGains = null;
+ }
+
+ /**
+ * Constructor
+ *
+ * @param audioWaveformFilename The name of the audio waveform file
+ */
+ WaveformData(String audioWaveformFilename) {
+ // TODO: Read these values from the file
+ mFrameDurationMs = 20;
+ mFramesCount = 300000 / mFrameDurationMs;
+ mGains = new short[mFramesCount];
+ for (int i = 0; i < mFramesCount; i++) {
+ mGains[i] = (short)((i * 5) % 256);
+ }
+ }
+
+ /**
+ * @return The duration of a frame in milliseconds
+ */
+ public int getFrameDuration() {
+ return mFrameDurationMs;
+ }
+
+ /**
+ * @return The number of frames within the waveform data
+ */
+ public int getFramesCount() {
+ return mFramesCount;
+ }
+
+ /**
+ * @return The array of frame gains. The size of the array is the frames
+ * count. The values of the frame gains range from 0 to 256.
+ */
+ public short[] getFrameGains() {
+ return mGains;
+ }
+}
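
The WaveformData accessors above expose the per-frame gain envelope of an audio track. A short, purely illustrative sketch of consuming them; how the WaveformData instance is obtained is not shown here and is assumed to come from the video editor API:

    public class WaveformDataSketch {
        // Prints when the loudest audio frame occurs. The WaveformData parameter is
        // assumed to have been obtained elsewhere from the video editor API.
        static void printLoudestFrame(android.media.videoeditor.WaveformData waveform) {
            final short[] gains = waveform.getFrameGains();
            int loudest = 0;
            for (int i = 1; i < waveform.getFramesCount(); i++) {
                if (gains[i] > gains[loudest]) {
                    loudest = i;
                }
            }
            final int timeMs = loudest * waveform.getFrameDuration();
            System.out.println("Loudest frame at " + timeMs + " ms (gain " + gains[loudest] + ")");
        }
    }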
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 70ed608..25d243b 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -8,7 +8,11 @@ LOCAL_SRC_FILES:= \
android_media_MediaMetadataRetriever.cpp \
android_media_ResampleInputStream.cpp \
android_media_MediaProfiles.cpp \
- android_media_AmrInputStream.cpp
+ android_media_AmrInputStream.cpp \
+ android_media_MtpClient.cpp \
+ android_media_MtpCursor.cpp \
+ android_media_MtpDatabase.cpp \
+ android_media_MtpServer.cpp \
LOCAL_SHARED_LIBRARIES := \
libandroid_runtime \
@@ -21,9 +25,12 @@ LOCAL_SHARED_LIBRARIES := \
libcutils \
libsurfaceflinger_client \
libstagefright \
- libcamera_client
+ libcamera_client \
+ libsqlite
-LOCAL_STATIC_LIBRARIES :=
+ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_STATIC_LIBRARIES := libmtp libusbhost
+endif
LOCAL_C_INCLUDES += \
external/tremor/Tremor \
@@ -32,6 +39,7 @@ LOCAL_C_INCLUDES += \
frameworks/base/media/libstagefright/codecs/amrnb/enc/src \
frameworks/base/media/libstagefright/codecs/amrnb/common \
frameworks/base/media/libstagefright/codecs/amrnb/common/include \
+ frameworks/base/media/mtp \
$(PV_INCLUDES) \
$(JNI_H_INCLUDE) \
$(call include-path-for, corecg graphics)
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 49e5e89..997d017 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -680,19 +680,6 @@ android_media_MediaPlayer_native_finalize(JNIEnv *env, jobject thiz)
android_media_MediaPlayer_release(env, thiz);
}
-static jint
-android_media_MediaPlayer_native_suspend_resume(
- JNIEnv *env, jobject thiz, jboolean isSuspend) {
- LOGV("suspend_resume(%d)", isSuspend);
- sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
- if (mp == NULL ) {
- jniThrowException(env, "java/lang/IllegalStateException", NULL);
- return UNKNOWN_ERROR;
- }
-
- return isSuspend ? mp->suspend() : mp->resume();
-}
-
static void android_media_MediaPlayer_set_audio_session_id(JNIEnv *env, jobject thiz, jint sessionId) {
LOGV("set_session_id(): %d", sessionId);
sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
@@ -767,7 +754,6 @@ static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_MediaPlayer_native_init},
{"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaPlayer_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize},
- {"native_suspend_resume", "(Z)I", (void *)android_media_MediaPlayer_native_suspend_resume},
{"getAudioSessionId", "()I", (void *)android_media_MediaPlayer_get_audio_session_id},
{"setAudioSessionId", "(I)V", (void *)android_media_MediaPlayer_set_audio_session_id},
{"setAuxEffectSendLevel", "(F)V", (void *)android_media_MediaPlayer_setAuxEffectSendLevel},
@@ -788,6 +774,10 @@ extern int register_android_media_MediaRecorder(JNIEnv *env);
extern int register_android_media_MediaScanner(JNIEnv *env);
extern int register_android_media_ResampleInputStream(JNIEnv *env);
extern int register_android_media_MediaProfiles(JNIEnv *env);
+extern int register_android_media_MtpClient(JNIEnv *env);
+extern int register_android_media_MtpCursor(JNIEnv *env);
+extern int register_android_media_MtpDatabase(JNIEnv *env);
+extern int register_android_media_MtpServer(JNIEnv *env);
extern int register_android_media_AmrInputStream(JNIEnv *env);
jint JNI_OnLoad(JavaVM* vm, void* reserved)
@@ -836,6 +826,26 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
goto bail;
}
+ if (register_android_media_MtpClient(env) < 0) {
+ LOGE("ERROR: MtpClient native registration failed");
+ goto bail;
+ }
+
+ if (register_android_media_MtpCursor(env) < 0) {
+ LOGE("ERROR: MtpCursor native registration failed");
+ goto bail;
+ }
+
+ if (register_android_media_MtpDatabase(env) < 0) {
+ LOGE("ERROR: MtpDatabase native registration failed");
+ goto bail;
+ }
+
+ if (register_android_media_MtpServer(env) < 0) {
+ LOGE("ERROR: MtpServer native registration failed");
+ goto bail;
+ }
+
/* success -- return valid version number */
result = JNI_VERSION_1_4;
diff --git a/media/jni/android_media_MediaProfiles.cpp b/media/jni/android_media_MediaProfiles.cpp
index cce9fd0..08a6de1 100644
--- a/media/jni/android_media_MediaProfiles.cpp
+++ b/media/jni/android_media_MediaProfiles.cpp
@@ -165,7 +165,9 @@ static jobject
android_media_MediaProfiles_native_get_camcorder_profile(JNIEnv *env, jobject thiz, jint id, jint quality)
{
LOGV("native_get_camcorder_profile: %d %d", id, quality);
- if (quality != CAMCORDER_QUALITY_HIGH && quality != CAMCORDER_QUALITY_LOW) {
+ if (!((quality >= CAMCORDER_QUALITY_LOW && quality <= CAMCORDER_QUALITY_1080P) ||
+ (quality >= CAMCORDER_QUALITY_TIME_LAPSE_LOW &&
+ quality <= CAMCORDER_QUALITY_TIME_LAPSE_1080P))) {
jniThrowException(env, "java/lang/RuntimeException", "Unknown camcorder profile quality");
return NULL;
}
@@ -210,6 +212,20 @@ android_media_MediaProfiles_native_get_camcorder_profile(JNIEnv *env, jobject th
audioChannels);
}
+static jboolean
+android_media_MediaProfiles_native_has_camcorder_profile(JNIEnv *env, jobject thiz, jint id, jint quality)
+{
+ LOGV("native_has_camcorder_profile: %d %d", id, quality);
+ if (!((quality >= CAMCORDER_QUALITY_LOW && quality <= CAMCORDER_QUALITY_1080P) ||
+ (quality >= CAMCORDER_QUALITY_TIME_LAPSE_LOW &&
+ quality <= CAMCORDER_QUALITY_TIME_LAPSE_1080P))) {
+ return false;
+ }
+
+ camcorder_quality q = static_cast<camcorder_quality>(quality);
+ return sProfiles->hasCamcorderProfile(id, q);
+}
+
static jint
android_media_MediaProfiles_native_get_num_video_decoders(JNIEnv *env, jobject thiz)
{
@@ -289,6 +305,8 @@ static JNINativeMethod gMethodsForCamcorderProfileClass[] = {
{"native_init", "()V", (void *)android_media_MediaProfiles_native_init},
{"native_get_camcorder_profile", "(II)Landroid/media/CamcorderProfile;",
(void *)android_media_MediaProfiles_native_get_camcorder_profile},
+ {"native_has_camcorder_profile", "(II)Z",
+ (void *)android_media_MediaProfiles_native_has_camcorder_profile},
};
static JNINativeMethod gMethodsForDecoderCapabilitiesClass[] = {
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index efa0813..82b4ac1 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -260,6 +260,20 @@ android_media_MediaRecorder_setOutputFileFD(JNIEnv *env, jobject thiz, jobject f
}
static void
+android_media_MediaRecorder_setOutputFileAuxFD(JNIEnv *env, jobject thiz, jobject fileDescriptor)
+{
+ LOGV("setOutputFile");
+ if (fileDescriptor == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+ int fd = getParcelFileDescriptorFD(env, fileDescriptor);
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+ status_t opStatus = mr->setOutputFileAuxiliary(fd);
+ process_media_recorder_call(env, opStatus, "java/io/IOException", "setOutputFileAuxiliary failed.");
+}
+
+static void
android_media_MediaRecorder_setVideoSize(JNIEnv *env, jobject thiz, jint width, jint height)
{
LOGV("setVideoSize(%d, %d)", width, height);
@@ -466,6 +480,7 @@ static JNINativeMethod gMethods[] = {
{"setAudioEncoder", "(I)V", (void *)android_media_MediaRecorder_setAudioEncoder},
{"setParameter", "(Ljava/lang/String;)V", (void *)android_media_MediaRecorder_setParameter},
{"_setOutputFile", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaRecorder_setOutputFileFD},
+ {"_setOutputFileAux", "(Ljava/io/FileDescriptor;)V", (void *)android_media_MediaRecorder_setOutputFileAuxFD},
{"setVideoSize", "(II)V", (void *)android_media_MediaRecorder_setVideoSize},
{"setVideoFrameRate", "(I)V", (void *)android_media_MediaRecorder_setVideoFrameRate},
{"setMaxDuration", "(I)V", (void *)android_media_MediaRecorder_setMaxDuration},
diff --git a/media/jni/android_media_MediaScanner.cpp b/media/jni/android_media_MediaScanner.cpp
index 273f1af..fd0b233 100644
--- a/media/jni/android_media_MediaScanner.cpp
+++ b/media/jni/android_media_MediaScanner.cpp
@@ -146,7 +146,7 @@ static bool ExceptionCheck(void* env)
}
static void
-android_media_MediaScanner_processDirectory(JNIEnv *env, jobject thiz, jstring path, jstring extensions, jobject client)
+android_media_MediaScanner_processDirectory(JNIEnv *env, jobject thiz, jstring path, jobject client)
{
MediaScanner *mp = (MediaScanner *)env->GetIntField(thiz, fields.context);
@@ -154,27 +154,16 @@ android_media_MediaScanner_processDirectory(JNIEnv *env, jobject thiz, jstring p
jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
return;
}
- if (extensions == NULL) {
- jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
- return;
- }
-
+
const char *pathStr = env->GetStringUTFChars(path, NULL);
if (pathStr == NULL) { // Out of memory
jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
return;
}
- const char *extensionsStr = env->GetStringUTFChars(extensions, NULL);
- if (extensionsStr == NULL) { // Out of memory
- env->ReleaseStringUTFChars(path, pathStr);
- jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
- return;
- }
MyMediaScannerClient myClient(env, client);
- mp->processDirectory(pathStr, extensionsStr, myClient, ExceptionCheck, env);
+ mp->processDirectory(pathStr, myClient, ExceptionCheck, env);
env->ReleaseStringUTFChars(path, pathStr);
- env->ReleaseStringUTFChars(extensions, extensionsStr);
}
static void
@@ -309,9 +298,9 @@ android_media_MediaScanner_native_finalize(JNIEnv *env, jobject thiz)
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
- {"processDirectory", "(Ljava/lang/String;Ljava/lang/String;Landroid/media/MediaScannerClient;)V",
+ {"processDirectory", "(Ljava/lang/String;Landroid/media/MediaScannerClient;)V",
(void *)android_media_MediaScanner_processDirectory},
- {"processFile", "(Ljava/lang/String;Ljava/lang/String;Landroid/media/MediaScannerClient;)V",
+ {"processFile", "(Ljava/lang/String;Ljava/lang/String;Landroid/media/MediaScannerClient;)V",
(void *)android_media_MediaScanner_processFile},
{"setLocale", "(Ljava/lang/String;)V", (void *)android_media_MediaScanner_setLocale},
{"extractAlbumArt", "(Ljava/io/FileDescriptor;)[B", (void *)android_media_MediaScanner_extractAlbumArt},
diff --git a/media/jni/android_media_MtpClient.cpp b/media/jni/android_media_MtpClient.cpp
new file mode 100644
index 0000000..d23185b
--- /dev/null
+++ b/media/jni/android_media_MtpClient.cpp
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpClientJNI"
+#include "utils/Log.h"
+
+#include <stdio.h>
+#include <assert.h>
+#include <limits.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "android_runtime/AndroidRuntime.h"
+
+#include "MtpClient.h"
+#include "MtpDevice.h"
+#include "MtpObjectInfo.h"
+
+using namespace android;
+
+// ----------------------------------------------------------------------------
+
+static jmethodID method_deviceAdded;
+static jmethodID method_deviceRemoved;
+static jfieldID field_context;
+
+static struct file_descriptor_offsets_t
+{
+ jclass mClass;
+ jmethodID mConstructor;
+ jfieldID mDescriptor;
+} gFileDescriptorOffsets;
+
+static struct parcel_file_descriptor_offsets_t
+{
+ jclass mClass;
+ jmethodID mConstructor;
+} gParcelFileDescriptorOffsets;
+
+#ifdef HAVE_ANDROID_OS
+
+static void checkAndClearExceptionFromCallback(JNIEnv* env, const char* methodName) {
+ if (env->ExceptionCheck()) {
+ LOGE("An exception was thrown by callback '%s'.", methodName);
+ LOGE_EX(env);
+ env->ExceptionClear();
+ }
+}
+
+class MyClient : public MtpClient {
+private:
+ virtual void deviceAdded(MtpDevice *device);
+ virtual void deviceRemoved(MtpDevice *device);
+
+ jobject mClient;
+ MtpDevice* mEventDevice;
+
+public:
+ MyClient(JNIEnv *env, jobject client);
+ void cleanup(JNIEnv *env);
+};
+
+MtpClient* get_client_from_object(JNIEnv* env, jobject javaClient)
+{
+ return (MtpClient*)env->GetIntField(javaClient, field_context);
+}
+
+
+MyClient::MyClient(JNIEnv *env, jobject client)
+ : mClient(env->NewGlobalRef(client))
+{
+}
+
+void MyClient::cleanup(JNIEnv *env) {
+ env->DeleteGlobalRef(mClient);
+}
+
+void MyClient::deviceAdded(MtpDevice *device) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ const char* name = device->getDeviceName();
+ LOGD("MyClient::deviceAdded %s\n", name);
+
+ env->CallVoidMethod(mClient, method_deviceAdded, device->getID());
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+}
+
+void MyClient::deviceRemoved(MtpDevice *device) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ const char* name = device->getDeviceName();
+ LOGD("MyClient::deviceRemoved %s\n", name);
+
+ env->CallVoidMethod(mClient, method_deviceRemoved, device->getID());
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+}
+
+#endif // HAVE_ANDROID_OS
+
+// ----------------------------------------------------------------------------
+
+static void
+android_media_MtpClient_setup(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("setup\n");
+ MyClient* client = new MyClient(env, thiz);
+ client->start();
+ env->SetIntField(thiz, field_context, (int)client);
+#endif
+}
+
+static void
+android_media_MtpClient_finalize(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("finalize\n");
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ client->cleanup(env);
+ delete client;
+ env->SetIntField(thiz, field_context, 0);
+#endif
+}
+
+static jboolean
+android_media_MtpClient_start(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("start\n");
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ return client->start();
+#else
+ return false;
+#endif
+}
+
+static void
+android_media_MtpClient_stop(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("stop\n");
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ client->stop();
+#endif
+}
+
+static jboolean
+android_media_MtpClient_delete_object(JNIEnv *env, jobject thiz,
+ jint device_id, jlong object_id)
+{
+#ifdef HAVE_ANDROID_OS
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ MtpDevice* device = client->getDevice(device_id);
+ if (device)
+ return device->deleteObject(object_id);
+ else
+#endif
+ return false;
+}
+
+static jlong
+android_media_MtpClient_get_parent(JNIEnv *env, jobject thiz,
+ jint device_id, jlong object_id)
+{
+#ifdef HAVE_ANDROID_OS
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ MtpDevice* device = client->getDevice(device_id);
+ if (device)
+ return device->getParent(object_id);
+ else
+#endif
+ return -1;
+}
+
+static jlong
+android_media_MtpClient_get_storage_id(JNIEnv *env, jobject thiz,
+ jint device_id, jlong object_id)
+{
+#ifdef HAVE_ANDROID_OS
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ MtpDevice* device = client->getDevice(device_id);
+ if (device)
+ return device->getStorageID(object_id);
+ else
+#endif
+ return -1;
+}
+
+static jobject
+android_media_MtpClient_open_file(JNIEnv *env, jobject thiz,
+ jint device_id, jlong object_id)
+{
+#ifdef HAVE_ANDROID_OS
+ MyClient *client = (MyClient *)env->GetIntField(thiz, field_context);
+ MtpDevice* device = client->getDevice(device_id);
+ if (!device)
+ return NULL;
+
+ MtpObjectInfo* info = device->getObjectInfo(object_id);
+ if (!info)
+ return NULL;
+ int object_size = info->mCompressedSize;
+ delete info;
+ int fd = device->readObject(object_id, object_size);
+ if (fd < 0)
+ return NULL;
+
+ jobject fileDescriptor = env->NewObject(gFileDescriptorOffsets.mClass,
+ gFileDescriptorOffsets.mConstructor);
+ if (fileDescriptor != NULL) {
+ env->SetIntField(fileDescriptor, gFileDescriptorOffsets.mDescriptor, fd);
+ } else {
+ return NULL;
+ }
+ return env->NewObject(gParcelFileDescriptorOffsets.mClass,
+ gParcelFileDescriptorOffsets.mConstructor, fileDescriptor);
+#endif
+ return NULL;
+}
+
+// ----------------------------------------------------------------------------
+
+static JNINativeMethod gMethods[] = {
+ {"native_setup", "()V", (void *)android_media_MtpClient_setup},
+ {"native_finalize", "()V", (void *)android_media_MtpClient_finalize},
+ {"native_start", "()Z", (void *)android_media_MtpClient_start},
+ {"native_stop", "()V", (void *)android_media_MtpClient_stop},
+ {"native_delete_object", "(IJ)Z", (void *)android_media_MtpClient_delete_object},
+ {"native_get_parent", "(IJ)J", (void *)android_media_MtpClient_get_parent},
+ {"native_get_storage_id", "(IJ)J", (void *)android_media_MtpClient_get_storage_id},
+ {"native_open_file", "(IJ)Landroid/os/ParcelFileDescriptor;",
+ (void *)android_media_MtpClient_open_file},
+};
+
+static const char* const kClassPathName = "android/media/MtpClient";
+
+int register_android_media_MtpClient(JNIEnv *env)
+{
+ jclass clazz;
+
+ LOGD("register_android_media_MtpClient\n");
+
+ clazz = env->FindClass("android/media/MtpClient");
+ if (clazz == NULL) {
+ LOGE("Can't find android/media/MtpClient");
+ return -1;
+ }
+ method_deviceAdded = env->GetMethodID(clazz, "deviceAdded", "(I)V");
+ if (method_deviceAdded == NULL) {
+ LOGE("Can't find deviceAdded");
+ return -1;
+ }
+ method_deviceRemoved = env->GetMethodID(clazz, "deviceRemoved", "(I)V");
+ if (method_deviceRemoved == NULL) {
+ LOGE("Can't find deviceRemoved");
+ return -1;
+ }
+ field_context = env->GetFieldID(clazz, "mNativeContext", "I");
+ if (field_context == NULL) {
+ LOGE("Can't find MtpClient.mNativeContext");
+ return -1;
+ }
+
+ clazz = env->FindClass("java/io/FileDescriptor");
+ LOG_FATAL_IF(clazz == NULL, "Unable to find class java.io.FileDescriptor");
+ gFileDescriptorOffsets.mClass = (jclass) env->NewGlobalRef(clazz);
+ gFileDescriptorOffsets.mConstructor = env->GetMethodID(clazz, "<init>", "()V");
+ gFileDescriptorOffsets.mDescriptor = env->GetFieldID(clazz, "descriptor", "I");
+ LOG_FATAL_IF(gFileDescriptorOffsets.mDescriptor == NULL,
+ "Unable to find descriptor field in java.io.FileDescriptor");
+
+ clazz = env->FindClass("android/os/ParcelFileDescriptor");
+ LOG_FATAL_IF(clazz == NULL, "Unable to find class android.os.ParcelFileDescriptor");
+ gParcelFileDescriptorOffsets.mClass = (jclass) env->NewGlobalRef(clazz);
+ gParcelFileDescriptorOffsets.mConstructor = env->GetMethodID(clazz, "<init>", "(Ljava/io/FileDescriptor;)V");
+ LOG_FATAL_IF(gParcelFileDescriptorOffsets.mConstructor == NULL,
+ "Unable to find constructor for android.os.ParcelFileDescriptor");
+
+ return AndroidRuntime::registerNativeMethods(env,
+ "android/media/MtpClient", gMethods, NELEM(gMethods));
+}
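
register_android_media_MtpClient() above looks up android/media/MtpClient, its mNativeContext int field, and the deviceAdded(int) / deviceRemoved(int) callbacks that MyClient invokes from native code. The sketch below only illustrates the Java-side shape those lookups assume; it is hypothetical and is not the MtpClient.java added elsewhere in this change:

    // Illustrative sketch: field and method names mirror the GetFieldID,
    // GetMethodID and RegisterNatives calls above; everything else is assumed.
    package android.media;

    import android.os.ParcelFileDescriptor;

    public class MtpClient {
        private int mNativeContext;  // holds the native MyClient pointer set by native_setup()

        private native void native_setup();
        private native void native_finalize();
        private native boolean native_start();
        private native void native_stop();
        private native boolean native_delete_object(int deviceId, long objectId);
        private native long native_get_parent(int deviceId, long objectId);
        private native long native_get_storage_id(int deviceId, long objectId);
        private native ParcelFileDescriptor native_open_file(int deviceId, long objectId);

        // Invoked from the native MyClient::deviceAdded()/deviceRemoved() callbacks.
        private void deviceAdded(int deviceId) { /* notify listeners */ }
        private void deviceRemoved(int deviceId) { /* notify listeners */ }
    }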
diff --git a/media/jni/android_media_MtpCursor.cpp b/media/jni/android_media_MtpCursor.cpp
new file mode 100644
index 0000000..7a0ae8a
--- /dev/null
+++ b/media/jni/android_media_MtpCursor.cpp
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpCursorJNI"
+#include "utils/Log.h"
+
+#include <stdio.h>
+#include <assert.h>
+#include <limits.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "binder/CursorWindow.h"
+
+#include "MtpClient.h"
+#include "MtpCursor.h"
+
+using namespace android;
+
+// ----------------------------------------------------------------------------
+
+static jfieldID field_context;
+
+// From android_media_MtpClient.cpp
+MtpClient * get_client_from_object(JNIEnv * env, jobject javaClient);
+
+// ----------------------------------------------------------------------------
+
+static bool ExceptionCheck(void* env)
+{
+ return ((JNIEnv *)env)->ExceptionCheck();
+}
+
+static void
+android_media_MtpCursor_setup(JNIEnv *env, jobject thiz, jobject javaClient,
+ jint queryType, jint deviceID, jlong storageID, jlong objectID, jintArray javaColumns)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("android_media_MtpCursor_setup queryType: %d deviceID: %d storageID: %lld objectID: %lld\n",
+ queryType, deviceID, storageID, objectID);
+
+ int* columns = NULL;
+ int columnCount = 0;
+ if (javaColumns) {
+ columns = env->GetIntArrayElements(javaColumns, 0);
+ columnCount = env->GetArrayLength(javaColumns);
+ }
+
+ MtpClient* client = get_client_from_object(env, javaClient);
+ MtpCursor* cursor = new MtpCursor(client, queryType,
+ deviceID, storageID, objectID, columnCount, columns);
+
+ if (columns)
+ env->ReleaseIntArrayElements(javaColumns, columns, 0);
+ env->SetIntField(thiz, field_context, (int)cursor);
+#endif
+}
+
+static void
+android_media_MtpCursor_finalize(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("finalize\n");
+ MtpCursor *cursor = (MtpCursor *)env->GetIntField(thiz, field_context);
+ delete cursor;
+#endif
+}
+
+static jint
+android_media_MtpCursor_fill_window(JNIEnv *env, jobject thiz, jobject javaWindow, jint startPos)
+{
+#ifdef HAVE_ANDROID_OS
+ CursorWindow* window = get_window_from_object(env, javaWindow);
+ if (!window) {
+ LOGE("Invalid CursorWindow");
+ jniThrowException(env, "java/lang/IllegalArgumentException",
+ "Bad CursorWindow");
+ return 0;
+ }
+ MtpCursor *cursor = (MtpCursor *)env->GetIntField(thiz, field_context);
+
+ return cursor->fillWindow(window, startPos);
+#else
+ return 0;
+#endif
+}
+
+// ----------------------------------------------------------------------------
+
+static JNINativeMethod gMethods[] = {
+ {"native_setup", "(Landroid/media/MtpClient;IIJJ[I)V",
+ (void *)android_media_MtpCursor_setup},
+ {"native_finalize", "()V", (void *)android_media_MtpCursor_finalize},
+ {"native_fill_window", "(Landroid/database/CursorWindow;I)I",
+ (void *)android_media_MtpCursor_fill_window},
+
+};
+
+static const char* const kClassPathName = "android/media/MtpCursor";
+
+int register_android_media_MtpCursor(JNIEnv *env)
+{
+ jclass clazz;
+
+ LOGD("register_android_media_MtpCursor\n");
+
+ clazz = env->FindClass("android/media/MtpCursor");
+ if (clazz == NULL) {
+ LOGE("Can't find android/media/MtpCursor");
+ return -1;
+ }
+ field_context = env->GetFieldID(clazz, "mNativeContext", "I");
+ if (field_context == NULL) {
+ LOGE("Can't find MtpCursor.mNativeContext");
+ return -1;
+ }
+
+ return AndroidRuntime::registerNativeMethods(env,
+ "android/media/MtpCursor", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MtpDatabase.cpp b/media/jni/android_media_MtpDatabase.cpp
new file mode 100644
index 0000000..87cb82e
--- /dev/null
+++ b/media/jni/android_media_MtpDatabase.cpp
@@ -0,0 +1,1021 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDatabaseJNI"
+#include "utils/Log.h"
+
+#include <stdio.h>
+#include <assert.h>
+#include <limits.h>
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "android_runtime/AndroidRuntime.h"
+
+#include "MtpDatabase.h"
+#include "MtpDataPacket.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+#include "mtp.h"
+
+using namespace android;
+
+// ----------------------------------------------------------------------------
+
+static jmethodID method_beginSendObject;
+static jmethodID method_endSendObject;
+static jmethodID method_getObjectList;
+static jmethodID method_getNumObjects;
+static jmethodID method_getSupportedPlaybackFormats;
+static jmethodID method_getSupportedCaptureFormats;
+static jmethodID method_getSupportedObjectProperties;
+static jmethodID method_getSupportedDeviceProperties;
+static jmethodID method_getObjectProperty;
+static jmethodID method_setObjectProperty;
+static jmethodID method_getDeviceProperty;
+static jmethodID method_setDeviceProperty;
+static jmethodID method_getObjectInfo;
+static jmethodID method_getObjectFilePath;
+static jmethodID method_deleteFile;
+static jmethodID method_getObjectReferences;
+static jmethodID method_setObjectReferences;
+static jmethodID method_sessionStarted;
+static jmethodID method_sessionEnded;
+
+static jfieldID field_context;
+
+MtpDatabase* getMtpDatabase(JNIEnv *env, jobject database) {
+ return (MtpDatabase *)env->GetIntField(database, field_context);
+}
+
+#ifdef HAVE_ANDROID_OS
+// ----------------------------------------------------------------------------
+
+class MyMtpDatabase : public MtpDatabase {
+private:
+ jobject mDatabase;
+ jintArray mIntBuffer;
+ jlongArray mLongBuffer;
+ jcharArray mStringBuffer;
+
+public:
+ MyMtpDatabase(JNIEnv *env, jobject client);
+ virtual ~MyMtpDatabase();
+ void cleanup(JNIEnv *env);
+
+ virtual MtpObjectHandle beginSendObject(const char* path,
+ MtpObjectFormat format,
+ MtpObjectHandle parent,
+ MtpStorageID storage,
+ uint64_t size,
+ time_t modified);
+
+ virtual void endSendObject(const char* path,
+ MtpObjectHandle handle,
+ MtpObjectFormat format,
+ bool succeeded);
+
+ virtual MtpObjectHandleList* getObjectList(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent);
+
+ virtual int getNumObjects(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent);
+
+ // the caller should delete the returned lists
+ // results can be NULL
+ virtual MtpObjectFormatList* getSupportedPlaybackFormats();
+ virtual MtpObjectFormatList* getSupportedCaptureFormats();
+ virtual MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format);
+ virtual MtpDevicePropertyList* getSupportedDeviceProperties();
+
+ virtual MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet);
+
+ virtual MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet);
+
+ virtual MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet);
+
+ virtual MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet);
+
+ virtual MtpResponseCode resetDeviceProperty(MtpDeviceProperty property);
+
+ virtual MtpResponseCode getObjectInfo(MtpObjectHandle handle,
+ MtpDataPacket& packet);
+
+ virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle,
+ MtpString& filePath,
+ int64_t& fileLength);
+ virtual MtpResponseCode deleteFile(MtpObjectHandle handle);
+
+ bool getObjectPropertyInfo(MtpObjectProperty property, int& type);
+ bool getDevicePropertyInfo(MtpDeviceProperty property, int& type);
+
+ virtual MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle);
+
+ virtual MtpResponseCode setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references);
+
+ virtual MtpProperty* getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format);
+
+ virtual MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property);
+
+ virtual void sessionStarted();
+
+ virtual void sessionEnded();
+};
+
+// ----------------------------------------------------------------------------
+
+static void checkAndClearExceptionFromCallback(JNIEnv* env, const char* methodName) {
+ if (env->ExceptionCheck()) {
+ LOGE("An exception was thrown by callback '%s'.", methodName);
+ LOGE_EX(env);
+ env->ExceptionClear();
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+MyMtpDatabase::MyMtpDatabase(JNIEnv *env, jobject client)
+ : mDatabase(env->NewGlobalRef(client)),
+ mIntBuffer(NULL),
+ mLongBuffer(NULL),
+ mStringBuffer(NULL)
+{
+ jintArray intArray;
+ jlongArray longArray;
+ jcharArray charArray;
+
+ // create buffers for out arguments
+ // we don't need to be thread-safe so this is OK
+ intArray = env->NewIntArray(3);
+ if (!intArray)
+ goto out_of_memory;
+ mIntBuffer = (jintArray)env->NewGlobalRef(intArray);
+ longArray = env->NewLongArray(2);
+ if (!longArray)
+ goto out_of_memory;
+ mLongBuffer = (jlongArray)env->NewGlobalRef(longArray);
+ charArray = env->NewCharArray(256);
+ if (!charArray)
+ goto out_of_memory;
+ mStringBuffer = (jcharArray)env->NewGlobalRef(charArray);
+ return;
+
+out_of_memory:
+ env->ThrowNew(env->FindClass("java/lang/OutOfMemoryError"), NULL);
+}
+
+void MyMtpDatabase::cleanup(JNIEnv *env) {
+ env->DeleteGlobalRef(mDatabase);
+ env->DeleteGlobalRef(mIntBuffer);
+ env->DeleteGlobalRef(mLongBuffer);
+ env->DeleteGlobalRef(mStringBuffer);
+}
+
+MyMtpDatabase::~MyMtpDatabase() {
+}
+
+MtpObjectHandle MyMtpDatabase::beginSendObject(const char* path,
+ MtpObjectFormat format,
+ MtpObjectHandle parent,
+ MtpStorageID storage,
+ uint64_t size,
+ time_t modified) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jstring pathStr = env->NewStringUTF(path);
+ MtpObjectHandle result = env->CallIntMethod(mDatabase, method_beginSendObject,
+ pathStr, (jint)format, (jint)parent, (jint)storage,
+ (jlong)size, (jlong)modified);
+
+ if (pathStr)
+ env->DeleteLocalRef(pathStr);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+void MyMtpDatabase::endSendObject(const char* path, MtpObjectHandle handle,
+ MtpObjectFormat format, bool succeeded) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jstring pathStr = env->NewStringUTF(path);
+ env->CallVoidMethod(mDatabase, method_endSendObject, pathStr,
+ (jint)handle, (jint)format, (jboolean)succeeded);
+
+ if (pathStr)
+ env->DeleteLocalRef(pathStr);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+}
+
+MtpObjectHandleList* MyMtpDatabase::getObjectList(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase, method_getObjectList,
+ (jint)storageID, (jint)format, (jint)parent);
+ if (!array)
+ return NULL;
+ MtpObjectHandleList* list = new MtpObjectHandleList();
+ jint* handles = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(handles[i]);
+ env->ReleaseIntArrayElements(array, handles, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+int MyMtpDatabase::getNumObjects(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ int result = env->CallIntMethod(mDatabase, method_getNumObjects,
+ (jint)storageID, (jint)format, (jint)parent);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+MtpObjectFormatList* MyMtpDatabase::getSupportedPlaybackFormats() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase,
+ method_getSupportedPlaybackFormats);
+ if (!array)
+ return NULL;
+ MtpObjectFormatList* list = new MtpObjectFormatList();
+ jint* formats = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(formats[i]);
+ env->ReleaseIntArrayElements(array, formats, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+MtpObjectFormatList* MyMtpDatabase::getSupportedCaptureFormats() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase,
+ method_getSupportedCaptureFormats);
+ if (!array)
+ return NULL;
+ MtpObjectFormatList* list = new MtpObjectFormatList();
+ jint* formats = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(formats[i]);
+ env->ReleaseIntArrayElements(array, formats, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+MtpObjectPropertyList* MyMtpDatabase::getSupportedObjectProperties(MtpObjectFormat format) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase,
+ method_getSupportedObjectProperties, (jint)format);
+ if (!array)
+ return NULL;
+ MtpObjectPropertyList* list = new MtpObjectPropertyList();
+ jint* properties = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(properties[i]);
+ env->ReleaseIntArrayElements(array, properties, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+MtpDevicePropertyList* MyMtpDatabase::getSupportedDeviceProperties() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase,
+ method_getSupportedDeviceProperties);
+ if (!array)
+ return NULL;
+ MtpDevicePropertyList* list = new MtpDevicePropertyList();
+ jint* properties = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(properties[i]);
+ env->ReleaseIntArrayElements(array, properties, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+MtpResponseCode MyMtpDatabase::getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) {
+ int type;
+
+ if (!getObjectPropertyInfo(property, type))
+ return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint result = env->CallIntMethod(mDatabase, method_getObjectProperty,
+ (jint)handle, (jint)property, mLongBuffer, mStringBuffer);
+ if (result != MTP_RESPONSE_OK) {
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+ }
+
+ jlong* longValues = env->GetLongArrayElements(mLongBuffer, 0);
+ jlong longValue = longValues[0];
+ env->ReleaseLongArrayElements(mLongBuffer, longValues, 0);
+
+ // special case date properties, which are strings to MTP
+ // but stored internally as a uint64
+ if (property == MTP_PROPERTY_DATE_MODIFIED || property == MTP_PROPERTY_DATE_ADDED) {
+ char date[20];
+ formatDateTime(longValue, date, sizeof(date));
+ packet.putString(date);
+ return MTP_RESPONSE_OK;
+ }
+ // release date is stored internally as just the year
+ if (property == MTP_PROPERTY_ORIGINAL_RELEASE_DATE) {
+ char date[20];
+ snprintf(date, sizeof(date), "%04lld0101T000000", longValue);
+ packet.putString(date);
+ return MTP_RESPONSE_OK;
+ }
+
+ switch (type) {
+ case MTP_TYPE_INT8:
+ packet.putInt8(longValue);
+ break;
+ case MTP_TYPE_UINT8:
+ packet.putUInt8(longValue);
+ break;
+ case MTP_TYPE_INT16:
+ packet.putInt16(longValue);
+ break;
+ case MTP_TYPE_UINT16:
+ packet.putUInt16(longValue);
+ break;
+ case MTP_TYPE_INT32:
+ packet.putInt32(longValue);
+ break;
+ case MTP_TYPE_UINT32:
+ packet.putUInt32(longValue);
+ break;
+ case MTP_TYPE_INT64:
+ packet.putInt64(longValue);
+ break;
+ case MTP_TYPE_UINT64:
+ packet.putUInt64(longValue);
+ break;
+ case MTP_TYPE_INT128:
+ packet.putInt128(longValue);
+ break;
+ case MTP_TYPE_UINT128:
+ packet.putInt128(longValue);
+ break;
+ case MTP_TYPE_STR:
+ {
+ jchar* str = env->GetCharArrayElements(mStringBuffer, 0);
+ packet.putString(str);
+ env->ReleaseCharArrayElements(mStringBuffer, str, 0);
+ break;
+ }
+ default:
+ LOGE("unsupported type in getObjectPropertyValue\n");
+ return MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT;
+ }
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MyMtpDatabase::setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) {
+ int type;
+
+ if (!getObjectPropertyInfo(property, type))
+ return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jlong longValue = 0;
+ jstring stringValue = NULL;
+
+ switch (type) {
+ case MTP_TYPE_INT8:
+ longValue = packet.getInt8();
+ break;
+ case MTP_TYPE_UINT8:
+ longValue = packet.getUInt8();
+ break;
+ case MTP_TYPE_INT16:
+ longValue = packet.getInt16();
+ break;
+ case MTP_TYPE_UINT16:
+ longValue = packet.getUInt16();
+ break;
+ case MTP_TYPE_INT32:
+ longValue = packet.getInt32();
+ break;
+ case MTP_TYPE_UINT32:
+ longValue = packet.getUInt32();
+ break;
+ case MTP_TYPE_INT64:
+ longValue = packet.getInt64();
+ break;
+ case MTP_TYPE_UINT64:
+ longValue = packet.getUInt64();
+ break;
+ case MTP_TYPE_STR:
+ {
+ MtpStringBuffer buffer;
+ packet.getString(buffer);
+ stringValue = env->NewStringUTF((const char *)buffer);
+ break;
+ }
+ default:
+ LOGE("unsupported type in getObjectPropertyValue\n");
+ return MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT;
+ }
+
+ jint result = env->CallIntMethod(mDatabase, method_setObjectProperty,
+ (jint)handle, (jint)property, longValue, stringValue);
+ if (stringValue)
+ env->DeleteLocalRef(stringValue);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+MtpResponseCode MyMtpDatabase::getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) {
+ int type;
+
+ if (!getDevicePropertyInfo(property, type))
+ return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint result = env->CallIntMethod(mDatabase, method_getDeviceProperty,
+ (jint)property, mLongBuffer, mStringBuffer);
+ if (result != MTP_RESPONSE_OK) {
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+ }
+
+ jlong* longValues = env->GetLongArrayElements(mLongBuffer, 0);
+ jlong longValue = longValues[0];
+ env->ReleaseLongArrayElements(mLongBuffer, longValues, 0);
+
+ switch (type) {
+ case MTP_TYPE_INT8:
+ packet.putInt8(longValue);
+ break;
+ case MTP_TYPE_UINT8:
+ packet.putUInt8(longValue);
+ break;
+ case MTP_TYPE_INT16:
+ packet.putInt16(longValue);
+ break;
+ case MTP_TYPE_UINT16:
+ packet.putUInt16(longValue);
+ break;
+ case MTP_TYPE_INT32:
+ packet.putInt32(longValue);
+ break;
+ case MTP_TYPE_UINT32:
+ packet.putUInt32(longValue);
+ break;
+ case MTP_TYPE_INT64:
+ packet.putInt64(longValue);
+ break;
+ case MTP_TYPE_UINT64:
+ packet.putUInt64(longValue);
+ break;
+ case MTP_TYPE_INT128:
+ packet.putInt128(longValue);
+ break;
+ case MTP_TYPE_UINT128:
+ packet.putInt128(longValue);
+ break;
+ case MTP_TYPE_STR:
+ {
+ jchar* str = env->GetCharArrayElements(mStringBuffer, 0);
+ packet.putString(str);
+ env->ReleaseCharArrayElements(mStringBuffer, str, 0);
+ break;
+ }
+ default:
+ LOGE("unsupported type in getDevicePropertyValue\n");
+ return MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT;
+ }
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MyMtpDatabase::setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) {
+ int type;
+
+ if (!getDevicePropertyInfo(property, type))
+ return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jlong longValue = 0;
+ jstring stringValue = NULL;
+
+ switch (type) {
+ case MTP_TYPE_INT8:
+ longValue = packet.getInt8();
+ break;
+ case MTP_TYPE_UINT8:
+ longValue = packet.getUInt8();
+ break;
+ case MTP_TYPE_INT16:
+ longValue = packet.getInt16();
+ break;
+ case MTP_TYPE_UINT16:
+ longValue = packet.getUInt16();
+ break;
+ case MTP_TYPE_INT32:
+ longValue = packet.getInt32();
+ break;
+ case MTP_TYPE_UINT32:
+ longValue = packet.getUInt32();
+ break;
+ case MTP_TYPE_INT64:
+ longValue = packet.getInt64();
+ break;
+ case MTP_TYPE_UINT64:
+ longValue = packet.getUInt64();
+ break;
+ case MTP_TYPE_STR:
+ {
+ MtpStringBuffer buffer;
+ packet.getString(buffer);
+ stringValue = env->NewStringUTF((const char *)buffer);
+ break;
+ }
+ default:
+ LOGE("unsupported type in setDevicePropertyValue\n");
+ return MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT;
+ }
+
+ jint result = env->CallIntMethod(mDatabase, method_setDeviceProperty,
+ (jint)property, longValue, stringValue);
+ if (stringValue)
+ env->DeleteLocalRef(stringValue);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+MtpResponseCode MyMtpDatabase::resetDeviceProperty(MtpDeviceProperty property) {
+ return -1;
+}
+
+MtpResponseCode MyMtpDatabase::getObjectInfo(MtpObjectHandle handle,
+ MtpDataPacket& packet) {
+ char date[20];
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jboolean result = env->CallBooleanMethod(mDatabase, method_getObjectInfo,
+ (jint)handle, mIntBuffer, mStringBuffer, mLongBuffer);
+ if (!result)
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+
+ jint* intValues = env->GetIntArrayElements(mIntBuffer, 0);
+ MtpStorageID storageID = intValues[0];
+ MtpObjectFormat format = intValues[1];
+ MtpObjectHandle parent = intValues[2];
+ env->ReleaseIntArrayElements(mIntBuffer, intValues, 0);
+
+ jlong* longValues = env->GetLongArrayElements(mLongBuffer, 0);
+ uint64_t size = longValues[0];
+ uint64_t modified = longValues[1];
+ env->ReleaseLongArrayElements(mLongBuffer, longValues, 0);
+
+// int associationType = (format == MTP_FORMAT_ASSOCIATION ?
+// MTP_ASSOCIATION_TYPE_GENERIC_FOLDER :
+// MTP_ASSOCIATION_TYPE_UNDEFINED);
+ int associationType = MTP_ASSOCIATION_TYPE_UNDEFINED;
+
+ packet.putUInt32(storageID);
+ packet.putUInt16(format);
+ packet.putUInt16(0); // protection status
+ packet.putUInt32((size > 0xFFFFFFFFLL ? 0xFFFFFFFF : size));
+ packet.putUInt16(0); // thumb format
+ packet.putUInt32(0); // thumb compressed size
+ packet.putUInt32(0); // thumb pix width
+ packet.putUInt32(0); // thumb pix height
+ packet.putUInt32(0); // image pix width
+ packet.putUInt32(0); // image pix height
+ packet.putUInt32(0); // image bit depth
+ packet.putUInt32(parent);
+ packet.putUInt16(associationType);
+ packet.putUInt32(0); // association desc
+ packet.putUInt32(0); // sequence number
+
+ jchar* str = env->GetCharArrayElements(mStringBuffer, 0);
+ packet.putString(str); // file name
+ env->ReleaseCharArrayElements(mStringBuffer, str, 0);
+
+ packet.putEmptyString();
+ formatDateTime(modified, date, sizeof(date));
+ packet.putString(date); // date modified
+ packet.putEmptyString(); // keywords
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MyMtpDatabase::getObjectFilePath(MtpObjectHandle handle,
+ MtpString& filePath,
+ int64_t& fileLength) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint result = env->CallIntMethod(mDatabase, method_getObjectFilePath,
+ (jint)handle, mStringBuffer, mLongBuffer);
+ if (result != MTP_RESPONSE_OK) {
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+ }
+
+ jchar* str = env->GetCharArrayElements(mStringBuffer, 0);
+ filePath.setTo(str, strlen16(str));
+ env->ReleaseCharArrayElements(mStringBuffer, str, 0);
+
+ jlong* longValues = env->GetLongArrayElements(mLongBuffer, 0);
+ fileLength = longValues[0];
+ env->ReleaseLongArrayElements(mLongBuffer, longValues, 0);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+MtpResponseCode MyMtpDatabase::deleteFile(MtpObjectHandle handle) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ MtpResponseCode result = env->CallIntMethod(mDatabase, method_deleteFile, (jint)handle);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+struct PropertyTableEntry {
+ MtpObjectProperty property;
+ int type;
+};
+
+static const PropertyTableEntry kObjectPropertyTable[] = {
+ { MTP_PROPERTY_STORAGE_ID, MTP_TYPE_UINT32 },
+ { MTP_PROPERTY_OBJECT_FORMAT, MTP_TYPE_UINT16 },
+ { MTP_PROPERTY_PROTECTION_STATUS, MTP_TYPE_UINT16 },
+ { MTP_PROPERTY_OBJECT_SIZE, MTP_TYPE_UINT64 },
+ { MTP_PROPERTY_OBJECT_FILE_NAME, MTP_TYPE_STR },
+ { MTP_PROPERTY_DATE_MODIFIED, MTP_TYPE_STR },
+ { MTP_PROPERTY_PARENT_OBJECT, MTP_TYPE_UINT32 },
+ { MTP_PROPERTY_PERSISTENT_UID, MTP_TYPE_UINT128 },
+ { MTP_PROPERTY_NAME, MTP_TYPE_STR },
+ { MTP_PROPERTY_DISPLAY_NAME, MTP_TYPE_STR },
+ { MTP_PROPERTY_DATE_ADDED, MTP_TYPE_STR },
+ { MTP_PROPERTY_ARTIST, MTP_TYPE_STR },
+ { MTP_PROPERTY_ALBUM_NAME, MTP_TYPE_STR },
+ { MTP_PROPERTY_ALBUM_ARTIST, MTP_TYPE_STR },
+ { MTP_PROPERTY_TRACK, MTP_TYPE_UINT16 },
+ { MTP_PROPERTY_ORIGINAL_RELEASE_DATE, MTP_TYPE_STR },
+ { MTP_PROPERTY_GENRE, MTP_TYPE_STR },
+ { MTP_PROPERTY_COMPOSER, MTP_TYPE_STR },
+ { MTP_PROPERTY_DURATION, MTP_TYPE_UINT32 },
+ { MTP_PROPERTY_DESCRIPTION, MTP_TYPE_STR },
+};
+
+static const PropertyTableEntry kDevicePropertyTable[] = {
+ { MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER, MTP_TYPE_STR },
+ { MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME, MTP_TYPE_STR },
+};
+
+bool MyMtpDatabase::getObjectPropertyInfo(MtpObjectProperty property, int& type) {
+ int count = sizeof(kObjectPropertyTable) / sizeof(kObjectPropertyTable[0]);
+ const PropertyTableEntry* entry = kObjectPropertyTable;
+ for (int i = 0; i < count; i++, entry++) {
+ if (entry->property == property) {
+ type = entry->type;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool MyMtpDatabase::getDevicePropertyInfo(MtpDeviceProperty property, int& type) {
+ int count = sizeof(kDevicePropertyTable) / sizeof(kDevicePropertyTable[0]);
+ const PropertyTableEntry* entry = kDevicePropertyTable;
+ for (int i = 0; i < count; i++, entry++) {
+ if (entry->property == property) {
+ type = entry->type;
+ return true;
+ }
+ }
+ return false;
+}
+
+MtpObjectHandleList* MyMtpDatabase::getObjectReferences(MtpObjectHandle handle) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jintArray array = (jintArray)env->CallObjectMethod(mDatabase, method_getObjectReferences,
+ (jint)handle);
+ if (!array)
+ return NULL;
+ MtpObjectHandleList* list = new MtpObjectHandleList();
+ jint* handles = env->GetIntArrayElements(array, 0);
+ jsize length = env->GetArrayLength(array);
+ for (int i = 0; i < length; i++)
+ list->push(handles[i]);
+ env->ReleaseIntArrayElements(array, handles, 0);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return list;
+}
+
+MtpResponseCode MyMtpDatabase::setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references) {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ int count = references->size();
+ jintArray array = env->NewIntArray(count);
+ if (!array) {
+ LOGE("out of memory in setObjectReferences");
+        return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ jint* handles = env->GetIntArrayElements(array, 0);
+ for (int i = 0; i < count; i++)
+ handles[i] = (*references)[i];
+ env->ReleaseIntArrayElements(array, handles, 0);
+ MtpResponseCode result = env->CallIntMethod(mDatabase, method_setObjectReferences,
+ (jint)handle, array);
+ env->DeleteLocalRef(array);
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ return result;
+}
+
+MtpProperty* MyMtpDatabase::getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format) {
+ MtpProperty* result = NULL;
+ switch (property) {
+ case MTP_PROPERTY_OBJECT_FORMAT:
+ case MTP_PROPERTY_PROTECTION_STATUS:
+ case MTP_PROPERTY_TRACK:
+ result = new MtpProperty(property, MTP_TYPE_UINT16);
+ break;
+ case MTP_PROPERTY_STORAGE_ID:
+ case MTP_PROPERTY_PARENT_OBJECT:
+ case MTP_PROPERTY_DURATION:
+ result = new MtpProperty(property, MTP_TYPE_UINT32);
+ break;
+ case MTP_PROPERTY_OBJECT_SIZE:
+ result = new MtpProperty(property, MTP_TYPE_UINT64);
+ break;
+ case MTP_PROPERTY_PERSISTENT_UID:
+ result = new MtpProperty(property, MTP_TYPE_UINT128);
+ break;
+ case MTP_PROPERTY_NAME:
+ case MTP_PROPERTY_DATE_MODIFIED:
+ case MTP_PROPERTY_DISPLAY_NAME:
+ case MTP_PROPERTY_DATE_ADDED:
+ case MTP_PROPERTY_ARTIST:
+ case MTP_PROPERTY_ALBUM_NAME:
+ case MTP_PROPERTY_ALBUM_ARTIST:
+ case MTP_PROPERTY_ORIGINAL_RELEASE_DATE:
+ case MTP_PROPERTY_GENRE:
+ case MTP_PROPERTY_COMPOSER:
+ case MTP_PROPERTY_DESCRIPTION:
+ result = new MtpProperty(property, MTP_TYPE_STR);
+ break;
+ case MTP_PROPERTY_OBJECT_FILE_NAME:
+ // We allow renaming files and folders
+ result = new MtpProperty(property, MTP_TYPE_STR, true);
+ break;
+ }
+
+ return result;
+}
+
+MtpProperty* MyMtpDatabase::getDevicePropertyDesc(MtpDeviceProperty property) {
+ MtpProperty* result = NULL;
+ switch (property) {
+ case MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER:
+ case MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME:
+ {
+ // writeable string properties
+ result = new MtpProperty(property, MTP_TYPE_STR, true);
+
+ // set current value
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint ret = env->CallIntMethod(mDatabase, method_getDeviceProperty,
+ (jint)property, mLongBuffer, mStringBuffer);
+ if (ret == MTP_RESPONSE_OK) {
+ jchar* str = env->GetCharArrayElements(mStringBuffer, 0);
+ result->setCurrentValue(str);
+ env->ReleaseCharArrayElements(mStringBuffer, str, 0);
+ } else {
+ LOGE("unable to read device property, response: %04X", ret);
+ }
+
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+ break;
+ }
+ }
+
+ return result;
+}
+
+void MyMtpDatabase::sessionStarted() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(mDatabase, method_sessionStarted);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+}
+
+void MyMtpDatabase::sessionEnded() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(mDatabase, method_sessionEnded);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+}
+
+#endif // HAVE_ANDROID_OS
+
+// ----------------------------------------------------------------------------
+
+static void
+android_media_MtpDatabase_setup(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("setup\n");
+ MyMtpDatabase* database = new MyMtpDatabase(env, thiz);
+ env->SetIntField(thiz, field_context, (int)database);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+#endif
+}
+
+static void
+android_media_MtpDatabase_finalize(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("finalize\n");
+ MyMtpDatabase* database = (MyMtpDatabase *)env->GetIntField(thiz, field_context);
+ database->cleanup(env);
+ delete database;
+ env->SetIntField(thiz, field_context, 0);
+ checkAndClearExceptionFromCallback(env, __FUNCTION__);
+#endif
+}
+
+// ----------------------------------------------------------------------------
+
+static JNINativeMethod gMethods[] = {
+ {"native_setup", "()V", (void *)android_media_MtpDatabase_setup},
+ {"native_finalize", "()V", (void *)android_media_MtpDatabase_finalize},
+};
+
+static const char* const kClassPathName = "android/media/MtpDatabase";
+
+int register_android_media_MtpDatabase(JNIEnv *env)
+{
+ jclass clazz;
+
+ LOGD("register_android_media_MtpDatabase\n");
+
+ clazz = env->FindClass("android/media/MtpDatabase");
+ if (clazz == NULL) {
+ LOGE("Can't find android/media/MtpDatabase");
+ return -1;
+ }
+ method_beginSendObject = env->GetMethodID(clazz, "beginSendObject", "(Ljava/lang/String;IIIJJ)I");
+ if (method_beginSendObject == NULL) {
+ LOGE("Can't find beginSendObject");
+ return -1;
+ }
+ method_endSendObject = env->GetMethodID(clazz, "endSendObject", "(Ljava/lang/String;IIZ)V");
+ if (method_endSendObject == NULL) {
+ LOGE("Can't find endSendObject");
+ return -1;
+ }
+ method_getObjectList = env->GetMethodID(clazz, "getObjectList", "(III)[I");
+ if (method_getObjectList == NULL) {
+ LOGE("Can't find getObjectList");
+ return -1;
+ }
+ method_getNumObjects = env->GetMethodID(clazz, "getNumObjects", "(III)I");
+ if (method_getNumObjects == NULL) {
+ LOGE("Can't find getNumObjects");
+ return -1;
+ }
+ method_getSupportedPlaybackFormats = env->GetMethodID(clazz, "getSupportedPlaybackFormats", "()[I");
+ if (method_getSupportedPlaybackFormats == NULL) {
+ LOGE("Can't find getSupportedPlaybackFormats");
+ return -1;
+ }
+ method_getSupportedCaptureFormats = env->GetMethodID(clazz, "getSupportedCaptureFormats", "()[I");
+ if (method_getSupportedCaptureFormats == NULL) {
+ LOGE("Can't find getSupportedCaptureFormats");
+ return -1;
+ }
+ method_getSupportedObjectProperties = env->GetMethodID(clazz, "getSupportedObjectProperties", "(I)[I");
+ if (method_getSupportedObjectProperties == NULL) {
+ LOGE("Can't find getSupportedObjectProperties");
+ return -1;
+ }
+ method_getSupportedDeviceProperties = env->GetMethodID(clazz, "getSupportedDeviceProperties", "()[I");
+ if (method_getSupportedDeviceProperties == NULL) {
+ LOGE("Can't find getSupportedDeviceProperties");
+ return -1;
+ }
+ method_getObjectProperty = env->GetMethodID(clazz, "getObjectProperty", "(II[J[C)I");
+ if (method_getObjectProperty == NULL) {
+ LOGE("Can't find getObjectProperty");
+ return -1;
+ }
+ method_setObjectProperty = env->GetMethodID(clazz, "setObjectProperty", "(IIJLjava/lang/String;)I");
+ if (method_setObjectProperty == NULL) {
+ LOGE("Can't find setObjectProperty");
+ return -1;
+ }
+ method_getDeviceProperty = env->GetMethodID(clazz, "getDeviceProperty", "(I[J[C)I");
+ if (method_getDeviceProperty == NULL) {
+ LOGE("Can't find getDeviceProperty");
+ return -1;
+ }
+ method_setDeviceProperty = env->GetMethodID(clazz, "setDeviceProperty", "(IJLjava/lang/String;)I");
+ if (method_setDeviceProperty == NULL) {
+ LOGE("Can't find setDeviceProperty");
+ return -1;
+ }
+ method_getObjectInfo = env->GetMethodID(clazz, "getObjectInfo", "(I[I[C[J)Z");
+ if (method_getObjectInfo == NULL) {
+ LOGE("Can't find getObjectInfo");
+ return -1;
+ }
+ method_getObjectFilePath = env->GetMethodID(clazz, "getObjectFilePath", "(I[C[J)I");
+ if (method_getObjectFilePath == NULL) {
+ LOGE("Can't find getObjectFilePath");
+ return -1;
+ }
+ method_deleteFile = env->GetMethodID(clazz, "deleteFile", "(I)I");
+ if (method_deleteFile == NULL) {
+ LOGE("Can't find deleteFile");
+ return -1;
+ }
+ method_getObjectReferences = env->GetMethodID(clazz, "getObjectReferences", "(I)[I");
+ if (method_getObjectReferences == NULL) {
+ LOGE("Can't find getObjectReferences");
+ return -1;
+ }
+ method_setObjectReferences = env->GetMethodID(clazz, "setObjectReferences", "(I[I)I");
+ if (method_setObjectReferences == NULL) {
+ LOGE("Can't find setObjectReferences");
+ return -1;
+ }
+ method_sessionStarted = env->GetMethodID(clazz, "sessionStarted", "()V");
+ if (method_sessionStarted == NULL) {
+ LOGE("Can't find sessionStarted");
+ return -1;
+ }
+ method_sessionEnded = env->GetMethodID(clazz, "sessionEnded", "()V");
+ if (method_sessionEnded == NULL) {
+ LOGE("Can't find sessionEnded");
+ return -1;
+ }
+
+ field_context = env->GetFieldID(clazz, "mNativeContext", "I");
+ if (field_context == NULL) {
+ LOGE("Can't find MtpDatabase.mNativeContext");
+ return -1;
+ }
+
+ return AndroidRuntime::registerNativeMethods(env,
+ "android/media/MtpDatabase", gMethods, NELEM(gMethods));
+}
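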
diff --git a/media/jni/android_media_MtpServer.cpp b/media/jni/android_media_MtpServer.cpp
new file mode 100644
index 0000000..f16cdd9
--- /dev/null
+++ b/media/jni/android_media_MtpServer.cpp
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpServerJNI"
+#include "utils/Log.h"
+
+#include <stdio.h>
+#include <assert.h>
+#include <limits.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <utils/threads.h>
+
+#ifdef HAVE_ANDROID_OS
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "private/android_filesystem_config.h"
+
+#include "MtpServer.h"
+
+using namespace android;
+
+// ----------------------------------------------------------------------------
+
+static jfieldID field_context;
+static Mutex sMutex;
+
+// in android_media_MtpDatabase.cpp
+extern MtpDatabase* getMtpDatabase(JNIEnv *env, jobject database);
+
+// ----------------------------------------------------------------------------
+
+#ifdef HAVE_ANDROID_OS
+
+static bool ExceptionCheck(void* env)
+{
+ return ((JNIEnv *)env)->ExceptionCheck();
+}
+
+class MtpThread : public Thread {
+private:
+ MtpDatabase* mDatabase;
+ MtpServer* mServer;
+ String8 mStoragePath;
+ jobject mJavaServer;
+ int mFd;
+
+public:
+ MtpThread(MtpDatabase* database, const char* storagePath, jobject javaServer)
+ : mDatabase(database),
+ mServer(NULL),
+ mStoragePath(storagePath),
+ mJavaServer(javaServer),
+ mFd(-1)
+ {
+ }
+
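+    // Switch the USB gadget between MTP and PTP personalities. If the server
+    // thread already holds /dev/mtp_usb its descriptor is reused; otherwise the
+    // device is opened just long enough to issue the ioctl.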
+ void setPtpMode(bool usePtp) {
+ sMutex.lock();
+ if (mFd >= 0) {
+ ioctl(mFd, MTP_SET_INTERFACE_MODE,
+ (usePtp ? MTP_INTERFACE_MODE_PTP : MTP_INTERFACE_MODE_MTP));
+ } else {
+ int fd = open("/dev/mtp_usb", O_RDWR);
+ if (fd >= 0) {
+ ioctl(fd, MTP_SET_INTERFACE_MODE,
+ (usePtp ? MTP_INTERFACE_MODE_PTP : MTP_INTERFACE_MODE_MTP));
+ close(fd);
+ }
+ }
+ sMutex.unlock();
+ }
+
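+    // One-shot worker: open the MTP driver, service the session inside
+    // MtpServer::run(), then tear everything down. Returning false tells the
+    // Thread base class not to call threadLoop() again.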
+ virtual bool threadLoop() {
+ sMutex.lock();
+ mFd = open("/dev/mtp_usb", O_RDWR);
+        LOGD("open returned %d\n", mFd);
+ if (mFd < 0) {
+ LOGE("could not open MTP driver\n");
+ sMutex.unlock();
+ return false;
+ }
+
+ mServer = new MtpServer(mFd, mDatabase, AID_SDCARD_RW, 0664, 0775);
+ mServer->addStorage(mStoragePath);
+ sMutex.unlock();
+
+ LOGD("MtpThread mServer->run");
+ mServer->run();
+
+ sMutex.lock();
+ close(mFd);
+ mFd = -1;
+ delete mServer;
+ mServer = NULL;
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->SetIntField(mJavaServer, field_context, 0);
+ env->DeleteGlobalRef(mJavaServer);
+ sMutex.unlock();
+
+ LOGD("threadLoop returning");
+ return false;
+ }
+
+ void sendObjectAdded(MtpObjectHandle handle) {
+ sMutex.lock();
+ if (mServer)
+ mServer->sendObjectAdded(handle);
+ sMutex.unlock();
+ }
+
+ void sendObjectRemoved(MtpObjectHandle handle) {
+ sMutex.lock();
+ if (mServer)
+ mServer->sendObjectRemoved(handle);
+ sMutex.unlock();
+ }
+};
+
+#endif // HAVE_ANDROID_OS
+
+static void
+android_media_MtpServer_setup(JNIEnv *env, jobject thiz, jobject javaDatabase, jstring storagePath)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("setup\n");
+
+ MtpDatabase* database = getMtpDatabase(env, javaDatabase);
+ const char *storagePathStr = env->GetStringUTFChars(storagePath, NULL);
+
+ MtpThread* thread = new MtpThread(database, storagePathStr, env->NewGlobalRef(thiz));
+ env->SetIntField(thiz, field_context, (int)thread);
+
+ env->ReleaseStringUTFChars(storagePath, storagePathStr);
+#endif
+}
+
+static void
+android_media_MtpServer_finalize(JNIEnv *env, jobject thiz)
+{
+ LOGD("finalize\n");
+}
+
+
+static void
+android_media_MtpServer_start(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("start\n");
+ MtpThread *thread = (MtpThread *)env->GetIntField(thiz, field_context);
+ thread->run("MtpThread");
+#endif // HAVE_ANDROID_OS
+}
+
+static void
+android_media_MtpServer_stop(JNIEnv *env, jobject thiz)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("stop\n");
+#endif
+}
+
+static void
+android_media_MtpServer_send_object_added(JNIEnv *env, jobject thiz, jint handle)
+{
+#ifdef HAVE_ANDROID_OS
+ MtpThread *thread = (MtpThread *)env->GetIntField(thiz, field_context);
+ if (thread)
+ thread->sendObjectAdded(handle);
+#endif
+}
+
+static void
+android_media_MtpServer_send_object_removed(JNIEnv *env, jobject thiz, jint handle)
+{
+#ifdef HAVE_ANDROID_OS
+ MtpThread *thread = (MtpThread *)env->GetIntField(thiz, field_context);
+ if (thread)
+ thread->sendObjectRemoved(handle);
+#endif
+}
+
+static void
+android_media_MtpServer_set_ptp_mode(JNIEnv *env, jobject thiz, jboolean usePtp)
+{
+#ifdef HAVE_ANDROID_OS
+ LOGD("set_ptp_mode\n");
+ MtpThread *thread = (MtpThread *)env->GetIntField(thiz, field_context);
+ if (thread)
+ thread->setPtpMode(usePtp);
+#endif
+}
+
+// ----------------------------------------------------------------------------
+
+static JNINativeMethod gMethods[] = {
+ {"native_setup", "(Landroid/media/MtpDatabase;Ljava/lang/String;)V",
+ (void *)android_media_MtpServer_setup},
+ {"native_finalize", "()V", (void *)android_media_MtpServer_finalize},
+ {"native_start", "()V", (void *)android_media_MtpServer_start},
+ {"native_stop", "()V", (void *)android_media_MtpServer_stop},
+ {"native_send_object_added", "(I)V", (void *)android_media_MtpServer_send_object_added},
+ {"native_send_object_removed", "(I)V", (void *)android_media_MtpServer_send_object_removed},
+ {"native_set_ptp_mode", "(Z)V", (void *)android_media_MtpServer_set_ptp_mode},
+};
+
+static const char* const kClassPathName = "android/media/MtpServer";
+
+int register_android_media_MtpServer(JNIEnv *env)
+{
+ jclass clazz;
+
+ LOGD("register_android_media_MtpServer\n");
+
+ clazz = env->FindClass("android/media/MtpServer");
+ if (clazz == NULL) {
+ LOGE("Can't find android/media/MtpServer");
+ return -1;
+ }
+ field_context = env->GetFieldID(clazz, "mNativeContext", "I");
+ if (field_context == NULL) {
+ LOGE("Can't find MtpServer.mNativeContext");
+ return -1;
+ }
+
+ return AndroidRuntime::registerNativeMethods(env,
+ "android/media/MtpServer", gMethods, NELEM(gMethods));
+}
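
The native_set_ptp_mode path above reduces to a single ioctl against the f_mtp gadget driver. A minimal, self-contained sketch of that switch, assuming the same kernel header and device node the patch already uses (illustrative only, not part of the change):

    // Sketch: flip the USB gadget between MTP and PTP when no server thread
    // currently owns /dev/mtp_usb; mirrors the else branch of MtpThread::setPtpMode().
    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <linux/usb/f_mtp.h>

    static void setUsbPtpMode(bool usePtp) {
        int fd = open("/dev/mtp_usb", O_RDWR);
        if (fd < 0)
            return;  // gadget driver not present on this device
        ioctl(fd, MTP_SET_INTERFACE_MODE,
              usePtp ? MTP_INTERFACE_MODE_PTP : MTP_INTERFACE_MODE_MTP);
        close(fd);
    }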
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index 48b45ff..e6ff654 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -27,4 +27,4 @@ LOCAL_C_INCLUDES := \
LOCAL_PRELINK_MODULE := false
-include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 7e3b743..9c2a8ba 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -763,7 +763,8 @@ bool AudioSystem::isBluetoothScoDevice(audio_devices device)
if ((popCount(device) == 1 ) &&
(device & (AudioSystem::DEVICE_OUT_BLUETOOTH_SCO |
AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
- AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT))) {
+ AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+ AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET))) {
return true;
} else {
return false;
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 0f55b19d..1a46715 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -22,12 +22,14 @@
#include <media/IMediaPlayer.h>
#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
namespace android {
enum {
DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
SET_VIDEO_SURFACE,
+ SET_VIDEO_ISURFACE,
PREPARE_ASYNC,
START,
STOP,
@@ -43,8 +45,6 @@ enum {
INVOKE,
SET_METADATA_FILTER,
GET_METADATA,
- SUSPEND,
- RESUME,
SET_AUX_EFFECT_SEND_LEVEL,
ATTACH_AUX_EFFECT
};
@@ -65,11 +65,20 @@ public:
remote()->transact(DISCONNECT, data, &reply);
}
- status_t setVideoSurface(const sp<ISurface>& surface)
+ status_t setVideoISurface(const sp<ISurface>& surface)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
data.writeStrongBinder(surface->asBinder());
+ remote()->transact(SET_VIDEO_ISURFACE, data, &reply);
+ return reply.readInt32();
+ }
+
+ status_t setVideoSurface(const sp<Surface>& surface)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_VIDEO_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -204,26 +213,6 @@ public:
return reply->readInt32();
}
- status_t suspend() {
- Parcel request;
- request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
- Parcel reply;
- remote()->transact(SUSPEND, request, &reply);
-
- return reply.readInt32();
- }
-
- status_t resume() {
- Parcel request;
- request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
- Parcel reply;
- remote()->transact(RESUME, request, &reply);
-
- return reply.readInt32();
- }
-
status_t setAuxEffectSendLevel(float level)
{
Parcel data, reply;
@@ -256,9 +245,15 @@ status_t BnMediaPlayer::onTransact(
disconnect();
return NO_ERROR;
} break;
- case SET_VIDEO_SURFACE: {
+ case SET_VIDEO_ISURFACE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ reply->writeInt32(setVideoISurface(surface));
+ return NO_ERROR;
+ } break;
+ case SET_VIDEO_SURFACE: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setVideoSurface(surface));
return NO_ERROR;
} break;
@@ -341,16 +336,6 @@ status_t BnMediaPlayer::onTransact(
reply->writeInt32(setMetadataFilter(data));
return NO_ERROR;
} break;
- case SUSPEND: {
- CHECK_INTERFACE(IMediaPlayer, data, reply);
- reply->writeInt32(suspend());
- return NO_ERROR;
- } break;
- case RESUME: {
- CHECK_INTERFACE(IMediaPlayer, data, reply);
- reply->writeInt32(resume());
- return NO_ERROR;
- } break;
case GET_METADATA: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 947ff34..59cd1b7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,7 +19,7 @@
#define LOG_TAG "IMediaRecorder"
#include <utils/Log.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <camera/ICamera.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaRecorder.h>
@@ -43,6 +43,7 @@ enum {
SET_AUDIO_ENCODER,
SET_OUTPUT_FILE_PATH,
SET_OUTPUT_FILE_FD,
+ SET_OUTPUT_FILE_AUXILIARY_FD,
SET_VIDEO_SIZE,
SET_VIDEO_FRAMERATE,
SET_PARAMETERS,
@@ -69,12 +70,12 @@ public:
return reply.readInt32();
}
- status_t setPreviewSurface(const sp<ISurface>& surface)
+ status_t setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -159,6 +160,15 @@ public:
return reply.readInt32();
}
+ status_t setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ data.writeFileDescriptor(fd);
+ remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply);
+ return reply.readInt32();
+ }
+
status_t setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -377,6 +387,13 @@ status_t BnMediaRecorder::onTransact(
::close(fd);
return NO_ERROR;
} break;
+ case SET_OUTPUT_FILE_AUXILIARY_FD: {
+ LOGV("SET_OUTPUT_FILE_AUXILIARY_FD");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+ int fd = dup(data.readFileDescriptor());
+ reply->writeInt32(setOutputFileAuxiliary(fd));
+ return NO_ERROR;
+ } break;
case SET_VIDEO_SIZE: {
LOGV("SET_VIDEO_SIZE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -409,7 +426,7 @@ status_t BnMediaRecorder::onTransact(
case SET_PREVIEW_SURFACE: {
LOGV("SET_PREVIEW_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setPreviewSurface(surface));
return NO_ERROR;
} break;
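
The new SET_OUTPUT_FILE_AUXILIARY_FD transaction reuses the same file-descriptor round trip as SET_OUTPUT_FILE_FD: the proxy writes the fd into the Parcel, and the stub dup()s it before handing it to the recorder because the descriptor returned by readFileDescriptor() remains owned by the Parcel. A standalone illustration of that pattern (function names here are placeholders, not part of the patch):

    // Sketch of the fd round trip used by the auxiliary-output transaction above.
    #include <binder/Parcel.h>
    #include <unistd.h>

    // Proxy side: marshal the descriptor; binder transfers it to the remote process.
    static void writeAuxFd(android::Parcel& data, int fd) {
        data.writeFileDescriptor(fd);
    }

    // Stub side: duplicate the descriptor before the Parcel that owns it goes away.
    static int readAuxFd(const android::Parcel& data) {
        return dup(data.readFileDescriptor());
    }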
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index f3804b8..f975217 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -21,7 +21,10 @@ enum {
SET_PARAMETER,
GET_CONFIG,
SET_CONFIG,
+ ENABLE_GRAPHIC_BUFFERS,
USE_BUFFER,
+ USE_GRAPHIC_BUFFER,
+ STORE_META_DATA_IN_BUFFERS,
ALLOC_BUFFER,
ALLOC_BUFFER_WITH_BACKUP,
FREE_BUFFER,
@@ -216,6 +219,19 @@ public:
return reply.readInt32();
}
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -238,6 +254,42 @@ public:
return err;
}
+
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.write(*graphicBuffer);
+ remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
+
+ status_t err = reply.readInt32();
+ if (err != OK) {
+ *buffer = 0;
+
+ return err;
+ }
+
+ *buffer = (void*)reply.readIntPtr();
+
+ return err;
+ }
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -541,6 +593,20 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case ENABLE_GRAPHIC_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = enableGraphicBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case USE_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
@@ -561,6 +627,41 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case USE_GRAPHIC_BUFFER:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+ data.read(*graphicBuffer);
+
+ buffer_id buffer;
+ status_t err = useGraphicBuffer(
+ node, port_index, graphicBuffer, &buffer);
+ reply->writeInt32(err);
+
+ if (err == OK) {
+ reply->writeIntPtr((intptr_t)buffer);
+ }
+
+ return NO_ERROR;
+ }
+
+ case STORE_META_DATA_IN_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = storeMetaDataInBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case ALLOC_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
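
USE_GRAPHIC_BUFFER relies on GraphicBuffer being flattenable: the proxy writes the buffer into the Parcel with data.write(*graphicBuffer) and the stub rebuilds it into a freshly allocated GraphicBuffer with data.read(). A minimal sketch of that round trip, assuming only the Parcel and GraphicBuffer calls already used in the hunk above (illustrative, not part of the patch):

    // Sketch: flatten a GraphicBuffer into a Parcel and re-import it, as the
    // USE_GRAPHIC_BUFFER transaction does across the binder boundary.
    #include <binder/Parcel.h>
    #include <ui/GraphicBuffer.h>

    using namespace android;

    static sp<GraphicBuffer> roundTrip(const sp<GraphicBuffer>& src) {
        Parcel p;
        p.write(*src);                      // proxy: serialize the buffer handle
        p.setDataPosition(0);
        sp<GraphicBuffer> dst = new GraphicBuffer();
        p.read(*dst);                       // stub: rebuild the buffer from the parcel
        return dst;
    }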
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 3869389..9ad63f0 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -59,8 +59,21 @@ const MediaProfiles::NameToTagMap MediaProfiles::sAudioDecoderNameMap[] = {
};
const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
+ {"low", CAMCORDER_QUALITY_LOW},
{"high", CAMCORDER_QUALITY_HIGH},
- {"low", CAMCORDER_QUALITY_LOW}
+ {"qcif", CAMCORDER_QUALITY_QCIF},
+ {"cif", CAMCORDER_QUALITY_CIF},
+ {"480p", CAMCORDER_QUALITY_480P},
+ {"720p", CAMCORDER_QUALITY_720P},
+ {"1080p", CAMCORDER_QUALITY_1080P},
+
+ {"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW},
+ {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
+ {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF},
+ {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF},
+ {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
+ {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
+ {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P}
};
/*static*/ void
@@ -411,24 +424,57 @@ MediaProfiles::createDefaultVideoEncoders(MediaProfiles *profiles)
}
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderHighProfile()
+MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_HIGH;
+ profile->mQuality = quality;
profile->mDuration = 60;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
return profile;
}
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
+ MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
+ *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+ *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
+ MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
+ *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+ *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+}
+
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderLowProfile()
+MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
@@ -439,18 +485,72 @@ MediaProfiles::createDefaultCamcorderLowProfile()
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_LOW;
+ profile->mQuality = quality;
profile->mDuration = 30;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
return profile;
}
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderLowProfiles(
+ MediaProfiles::CamcorderProfile **lowProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificProfile) {
+ *lowProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW);
+ *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderHighProfiles(
+ MediaProfiles::CamcorderProfile **highProfile,
+ MediaProfiles::CamcorderProfile **highSpecificProfile) {
+ *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH);
+ *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF);
+}
+
/*static*/ void
MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
{
- profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile());
- profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile());
+ // low camcorder profiles.
+ MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile;
+ createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile);
+ profiles->mCamcorderProfiles.add(lowProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificProfile);
+
+ // high camcorder profiles.
+ MediaProfiles::CamcorderProfile* highProfile, *highSpecificProfile;
+ createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile);
+ profiles->mCamcorderProfiles.add(highProfile);
+ profiles->mCamcorderProfiles.add(highSpecificProfile);
+
+ // low camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile);
+
+ // high camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
}
/*static*/ void
@@ -668,13 +768,8 @@ Vector<audio_decoder> MediaProfiles::getAudioDecoders() const
return decoders; // copy out
}
-int MediaProfiles::getCamcorderProfileParamByName(const char *name,
- int cameraId,
- camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const
{
- LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
- name, cameraId, quality);
-
int index = -1;
for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
if (mCamcorderProfiles[i]->mCameraId == cameraId &&
@@ -683,6 +778,17 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name,
break;
}
}
+ return index;
+}
+
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+ int cameraId,
+ camcorder_quality quality) const
+{
+ LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+ name, cameraId, quality);
+
+ int index = getCamcorderProfileIndex(cameraId, quality);
if (index == -1) {
LOGE("The given camcorder profile camera %d quality %d is not found",
cameraId, quality);
@@ -705,6 +811,11 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name,
return -1;
}
+bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const
+{
+ return (getCamcorderProfileIndex(cameraId, quality) != -1);
+}
+
Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
{
Vector<int> result;
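
The enlarged sCamcorderQualityNameMap is what lets profiles declared in media_profiles.xml use the new quality names ("480p", "timelapse720p", and so on): each name is resolved to its camcorder_quality tag by a linear scan of the map. A standalone sketch of that lookup (the tag values below are placeholders, not the real enum constants):

    // Sketch of the name -> tag resolution implied by the quality name map above.
    #include <cstdio>
    #include <cstring>

    struct NameToTag { const char* name; int tag; };

    static const NameToTag kMap[] = {
        { "low",  100 }, { "high", 101 },          // placeholder tag values
        { "480p", 102 }, { "timelapse480p", 103 },
    };

    static int findTagForName(const char* name) {
        for (size_t i = 0; i < sizeof(kMap) / sizeof(kMap[0]); ++i) {
            if (strcmp(kMap[i].name, name) == 0)
                return kMap[i].tag;
        }
        return -1;  // unknown quality name
    }

    int main() {
        printf("%d\n", findTagForName("timelapse480p"));  // prints 103
        return 0;
    }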
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index c5112a5..c31b622 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -48,8 +48,7 @@ const char *MediaScanner::locale() const {
}
status_t MediaScanner::processDirectory(
- const char *path, const char *extensions,
- MediaScannerClient &client,
+ const char *path, MediaScannerClient &client,
ExceptionCheck exceptionCheck, void *exceptionEnv) {
int pathLength = strlen(path);
if (pathLength >= PATH_MAX) {
@@ -72,35 +71,16 @@ status_t MediaScanner::processDirectory(
status_t result =
doProcessDirectory(
- pathBuffer, pathRemaining, extensions, client,
- exceptionCheck, exceptionEnv);
+ pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv);
free(pathBuffer);
return result;
}
-static bool fileMatchesExtension(const char* path, const char* extensions) {
- const char* extension = strrchr(path, '.');
- if (!extension) return false;
- ++extension; // skip the dot
- if (extension[0] == 0) return false;
-
- while (extensions[0]) {
- const char* comma = strchr(extensions, ',');
- size_t length = (comma ? comma - extensions : strlen(extensions));
- if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true;
- extensions += length;
- if (extensions[0] == ',') ++extensions;
- }
-
- return false;
-}
-
status_t MediaScanner::doProcessDirectory(
- char *path, int pathRemaining, const char *extensions,
- MediaScannerClient &client, ExceptionCheck exceptionCheck,
- void *exceptionEnv) {
+ char *path, int pathRemaining, MediaScannerClient &client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv) {
// place to copy file or directory name
char* fileSpot = path + strlen(path);
struct dirent* entry;
@@ -133,6 +113,13 @@ status_t MediaScanner::doProcessDirectory(
continue;
}
+ int nameLength = strlen(name);
+ if (nameLength + 1 > pathRemaining) {
+ // path too long!
+ continue;
+ }
+ strcpy(fileSpot, name);
+
int type = entry->d_type;
if (type == DT_UNKNOWN) {
// If the type is unknown, stat() the file instead.
@@ -150,29 +137,20 @@ status_t MediaScanner::doProcessDirectory(
}
}
if (type == DT_REG || type == DT_DIR) {
- int nameLength = strlen(name);
- bool isDirectory = (type == DT_DIR);
-
- if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) {
- // path too long!
- continue;
- }
-
- strcpy(fileSpot, name);
- if (isDirectory) {
+ if (type == DT_DIR) {
// ignore directories with a name that starts with '.'
// for example, the Mac ".Trashes" directory
if (name[0] == '.') continue;
strcat(fileSpot, "/");
- int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv);
+ int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client, exceptionCheck, exceptionEnv);
if (err) {
// pass exceptions up - ignore other errors
if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
LOGE("Error processing '%s' - skipping\n", path);
continue;
}
- } else if (fileMatchesExtension(path, extensions)) {
+ } else {
struct stat statbuf;
stat(path, &statbuf);
if (statbuf.st_size > 0) {
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index cc41e66..34e41a1 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -172,16 +172,6 @@ status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
return INVALID_OPERATION;
}
-status_t MediaPlayer::suspend() {
- Mutex::Autolock _l(mLock);
- return mPlayer->suspend();
-}
-
-status_t MediaPlayer::resume() {
- Mutex::Autolock _l(mLock);
- return mPlayer->resume();
-}
-
status_t MediaPlayer::setMetadataFilter(const Parcel& filter)
{
LOGD("setMetadataFilter");
@@ -207,10 +197,15 @@ status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
LOGV("setVideoSurface");
Mutex::Autolock _l(mLock);
if (mPlayer == 0) return NO_INIT;
- if (surface != NULL)
- return mPlayer->setVideoSurface(surface->getISurface());
- else
- return mPlayer->setVideoSurface(NULL);
+
+ status_t err = mPlayer->setVideoISurface(
+ surface == NULL ? NULL : surface->getISurface());
+
+ if (err != OK) {
+ return err;
+ }
+
+ return mPlayer->setVideoSurface(surface);
}
// must call with lock held
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index e20e3ba..fd575fe 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -65,7 +65,7 @@ status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface)
return INVALID_OPERATION;
}
- status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
+ status_t ret = mMediaRecorder->setPreviewSurface(surface);
if (OK != ret) {
LOGV("setPreviewSurface failed: %d", ret);
mCurrentState = MEDIA_RECORDER_ERROR;
@@ -308,6 +308,32 @@ status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
return ret;
}
+status_t MediaRecorder::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+    if (mMediaRecorder == NULL) {
+ LOGE("media recorder is not initialized yet");
+ return INVALID_OPERATION;
+ }
+ if (mIsAuxiliaryOutputFileSet) {
+        LOGE("auxiliary output file has already been set");
+ return INVALID_OPERATION;
+ }
+ if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
+        LOGE("setOutputFileAuxiliary called in an invalid state(%d)", mCurrentState);
+ return INVALID_OPERATION;
+ }
+
+ status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd);
+ if (OK != ret) {
+ LOGV("setOutputFileAuxiliary failed: %d", ret);
+ mCurrentState = MEDIA_RECORDER_ERROR;
+ return ret;
+ }
+ mIsAuxiliaryOutputFileSet = true;
+ return ret;
+}
+
status_t MediaRecorder::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%d, %d)", width, height);
@@ -571,6 +597,7 @@ void MediaRecorder::doCleanUp()
mIsAudioEncoderSet = false;
mIsVideoEncoderSet = false;
mIsOutputFileSet = false;
+ mIsAuxiliaryOutputFileSet = false;
}
// Release should be OK in any state
@@ -643,4 +670,3 @@ void MediaRecorder::died()
}
}; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c43e9bb..bb86e05 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -891,7 +891,15 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64
return mStatus;
}
-status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface)
+status_t MediaPlayerService::Client::setVideoISurface(const sp<ISurface>& surface)
+{
+ LOGV("[%d] setVideoISurface(%p)", mConnId, surface.get());
+ sp<MediaPlayerBase> p = getPlayer();
+ if (p == 0) return UNKNOWN_ERROR;
+ return p->setVideoISurface(surface);
+}
+
+status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
{
LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
sp<MediaPlayerBase> p = getPlayer();
@@ -966,20 +974,6 @@ status_t MediaPlayerService::Client::getMetadata(
return OK;
}
-status_t MediaPlayerService::Client::suspend() {
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
-
- return p->suspend();
-}
-
-status_t MediaPlayerService::Client::resume() {
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
-
- return p->resume();
-}
-
status_t MediaPlayerService::Client::prepareAsync()
{
LOGV("[%d] prepareAsync", mConnId);
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 4492e20..e197cde 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -206,7 +206,8 @@ private:
// IMediaPlayer interface
virtual void disconnect();
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
+ virtual status_t setVideoISurface(const sp<ISurface>& surface);
+ virtual status_t setVideoSurface(const sp<Surface>& surface);
virtual status_t prepareAsync();
virtual status_t start();
virtual status_t stop();
@@ -224,8 +225,6 @@ private:
virtual status_t getMetadata(bool update_only,
bool apply_filter,
Parcel *reply);
- virtual status_t suspend();
- virtual status_t resume();
virtual status_t setAuxEffectSendLevel(float level);
virtual status_t attachAuxEffect(int effectId);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 19915f1..be6a8be 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -70,7 +70,7 @@ status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera)
return mRecorder->setCamera(camera);
}
-status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface)
+status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface");
Mutex::Autolock lock(mLock);
@@ -164,6 +164,17 @@ status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t leng
return mRecorder->setOutputFile(fd, offset, length);
}
+status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Mutex::Autolock lock(mLock);
+ if (mRecorder == NULL) {
+ LOGE("recorder is not initialized");
+ return NO_INIT;
+ }
+ return mRecorder->setOutputFileAuxiliary(fd);
+}
+
status_t MediaRecorderClient::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -337,4 +348,3 @@ status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) const {
}
}; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 1d1913d..fded98e 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,7 +29,7 @@ class MediaRecorderClient : public BnMediaRecorder
{
public:
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setVideoSource(int vs);
virtual status_t setAudioSource(int as);
virtual status_t setOutputFormat(int of);
@@ -37,6 +37,7 @@ public:
virtual status_t setAudioEncoder(int ae);
virtual status_t setOutputFile(const char* path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setParameters(const String8& params);
@@ -66,4 +67,3 @@ private:
}; // namespace android
#endif // ANDROID_MEDIARECORDERCLIENT_H
-
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 4a60ece..06e4b70 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,8 @@ public:
const char* path, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+ virtual status_t setVideoISurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+ virtual status_t setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 6bded09..e0957f6 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -44,13 +44,20 @@ status_t StagefrightPlayer::setDataSource(int fd, int64_t offset, int64_t length
return mPlayer->setDataSource(dup(fd), offset, length);
}
-status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
- LOGV("setVideoSurface");
+status_t StagefrightPlayer::setVideoISurface(const sp<ISurface> &surface) {
+ LOGV("setVideoISurface");
mPlayer->setISurface(surface);
return OK;
}
+status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
+ LOGV("setVideoSurface");
+
+ mPlayer->setSurface(surface);
+ return OK;
+}
+
status_t StagefrightPlayer::prepare() {
return mPlayer->prepare();
}
@@ -140,16 +147,6 @@ player_type StagefrightPlayer::playerType() {
return STAGEFRIGHT_PLAYER;
}
-status_t StagefrightPlayer::suspend() {
- LOGV("suspend");
- return mPlayer->suspend();
-}
-
-status_t StagefrightPlayer::resume() {
- LOGV("resume");
- return mPlayer->resume();
-}
-
status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) {
return INVALID_OPERATION;
}
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 781eb44..3899447 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -35,7 +35,8 @@ public:
const char *url, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface> &surface);
+ virtual status_t setVideoISurface(const sp<ISurface> &surface);
+ virtual status_t setVideoSurface(const sp<Surface> &surface);
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
@@ -50,8 +51,6 @@ public:
virtual player_type playerType();
virtual status_t invoke(const Parcel &request, Parcel *reply);
virtual void setAudioSink(const sp<AudioSink> &audioSink);
- virtual status_t suspend();
- virtual status_t resume();
virtual status_t getMetadata(
const media::Metadata::Filter& ids, Parcel *records);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index d37d83d..913d953 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -20,10 +20,12 @@
#include "StagefrightRecorder.h"
-#include <binder/IPCThreadState.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaSourceSplitter.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
@@ -33,21 +35,20 @@
#include <media/stagefright/OMXCodec.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
-#include <camera/Camera.h>
#include <camera/CameraParameters.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
-#include <unistd.h>
#include <ctype.h>
+#include <unistd.h>
#include "ARTPWriter.h"
namespace android {
StagefrightRecorder::StagefrightRecorder()
- : mWriter(NULL),
- mOutputFd(-1) {
+ : mWriter(NULL), mWriterAux(NULL),
+ mOutputFd(-1), mOutputFdAux(-1) {
LOGV("Constructor");
reset();
@@ -164,7 +165,8 @@ status_t StagefrightRecorder::setVideoSize(int width, int height) {
status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
LOGV("setVideoFrameRate: %d", frames_per_second);
- if (frames_per_second <= 0 || frames_per_second > 30) {
+ if ((frames_per_second <= 0 && frames_per_second != -1) ||
+ frames_per_second > 120) {
LOGE("Invalid video frame rate: %d", frames_per_second);
return BAD_VALUE;
}
@@ -182,26 +184,11 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera) {
return BAD_VALUE;
}
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera = Camera::create(camera);
- if (mCamera == 0) {
- LOGE("Unable to connect to camera");
- IPCThreadState::self()->restoreCallingIdentity(token);
- return -EBUSY;
- }
-
- LOGV("Connected to camera");
- if (mCamera->previewEnabled()) {
- LOGV("camera is hot");
- mFlags |= FLAGS_HOT_CAMERA;
- }
- IPCThreadState::self()->restoreCallingIdentity(token);
-
+ mCamera = camera;
return OK;
}
-status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
LOGV("setPreviewSurface: %p", surface.get());
mPreviewSurface = surface;
@@ -235,6 +222,24 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng
return OK;
}
+status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary: %d", fd);
+
+ if (fd < 0) {
+ LOGE("Invalid file descriptor: %d", fd);
+ return -EBADF;
+ }
+
+ mCaptureAuxVideo = true;
+
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ }
+ mOutputFdAux = dup(fd);
+
+ return OK;
+}
+
// Attempt to parse an int64 literal optionally surrounded by whitespace,
// returns true on success, false otherwise.
static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -474,6 +479,68 @@ status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {
return OK;
}
+status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+ LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+
+    if (timeLapseEnable == 0) {
+ mCaptureTimeLapse = false;
+ } else if (timeLapseEnable == 1) {
+ mCaptureTimeLapse = true;
+ } else {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+ LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+
+ // Not allowing time more than a day
+ if (timeUs <= 0 || timeUs > 86400*1E6) {
+        LOGE("Time between time lapse frame capture (%lld us) is out of range (0, 1 day]", timeUs);
+ return BAD_VALUE;
+ }
+
+ mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
+ LOGV("setParamAuxVideoWidth : %d", width);
+
+ if (width <= 0) {
+ LOGE("Width (%d) is not positive", width);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoWidth = width;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
+ LOGV("setParamAuxVideoHeight : %d", height);
+
+ if (height <= 0) {
+ LOGE("Height (%d) is not positive", height);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoHeight = height;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
+ LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate);
+
+ if (bitRate <= 0) {
+ LOGE("Invalid video encoding bit rate: %d", bitRate);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoBitRate = bitRate;
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -557,6 +624,32 @@ status_t StagefrightRecorder::setParameter(
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamVideoTimeScale(timeScale);
}
+ } else if (key == "time-lapse-enable") {
+ int32_t timeLapseEnable;
+ if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+ return setParamTimeLapseEnable(timeLapseEnable);
+ }
+ } else if (key == "time-between-time-lapse-frame-capture") {
+ int64_t timeBetweenTimeLapseFrameCaptureMs;
+ if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+ return setParamTimeBetweenTimeLapseFrameCapture(
+ 1000LL * timeBetweenTimeLapseFrameCaptureMs);
+ }
+ } else if (key == "video-aux-param-width") {
+ int32_t auxWidth;
+ if (safe_strtoi32(value.string(), &auxWidth)) {
+ return setParamAuxVideoWidth(auxWidth);
+ }
+ } else if (key == "video-aux-param-height") {
+ int32_t auxHeight;
+ if (safe_strtoi32(value.string(), &auxHeight)) {
+ return setParamAuxVideoHeight(auxHeight);
+ }
+ } else if (key == "video-aux-param-encoding-bitrate") {
+ int32_t auxVideoBitRate;
+ if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
+ return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
+ }
} else {
LOGE("setParameter: failed to find key %s", key.string());
}
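
The new keys are ordinary "key=value" parameters, so a client can enable time lapse capture and the auxiliary (downsampled) output entirely through the existing parameter path. A hedged usage sketch, assuming the keys are delivered via MediaRecorder::setParameters() as ';'-separated pairs (illustrative only; the fd, sizes, and bitrate are placeholders, and the recorder must already be in the data-source-configured state):

    // Sketch: configure time lapse capture plus an auxiliary low-resolution output
    // using the parameter keys handled above.
    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    using namespace android;

    static void configureTimeLapse(const sp<MediaRecorder>& recorder, int auxFd) {
        recorder->setOutputFileAuxiliary(auxFd);          // auxiliary output added by this change
        recorder->setParameters(String8(
            "time-lapse-enable=1;"
            "time-between-time-lapse-frame-capture=500;"  // milliseconds; scaled to us in setParameter()
            "video-aux-param-width=480;"
            "video-aux-param-height=360;"
            "video-aux-param-encoding-bitrate=500000"));
    }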
@@ -790,7 +883,14 @@ status_t StagefrightRecorder::startRTPRecording() {
if (mAudioSource != AUDIO_SOURCE_LIST_END) {
source = createAudioSource();
} else {
- status_t err = setupVideoEncoder(&source);
+
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
if (err != OK) {
return err;
}
@@ -826,8 +926,14 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
return ERROR_UNSUPPORTED;
}
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
sp<MediaSource> encoder;
- status_t err = setupVideoEncoder(&encoder);
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
if (err != OK) {
return err;
@@ -855,7 +961,7 @@ void StagefrightRecorder::clipVideoFrameRate() {
"enc.vid.fps.min", mVideoEncoder);
int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.fps.max", mVideoEncoder);
- if (mFrameRate < minFrameRate) {
+ if (mFrameRate < minFrameRate && mFrameRate != -1) {
LOGW("Intended video encoding frame rate (%d fps) is too small"
" and will be set to (%d fps)", mFrameRate, minFrameRate);
mFrameRate = minFrameRate;
@@ -900,57 +1006,15 @@ void StagefrightRecorder::clipVideoFrameWidth() {
}
}
-status_t StagefrightRecorder::setupCameraSource() {
- clipVideoBitRate();
- clipVideoFrameRate();
- clipVideoFrameWidth();
- clipVideoFrameHeight();
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCamera == 0) {
- mCamera = Camera::connect(mCameraId);
- if (mCamera == 0) {
- LOGE("Camera connection could not be established.");
- return -EBUSY;
- }
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera->lock();
- }
-
- // Set the actual video recording frame size
- CameraParameters params(mCamera->getParameters());
- params.setPreviewSize(mVideoWidth, mVideoHeight);
- params.setPreviewFrameRate(mFrameRate);
- String8 s = params.flatten();
- if (OK != mCamera->setParameters(s)) {
- LOGE("Could not change settings."
- " Someone else is using camera %d?", mCameraId);
- return -EBUSY;
- }
- CameraParameters newCameraParams(mCamera->getParameters());
-
- // Check on video frame size
- int frameWidth = 0, frameHeight = 0;
- newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
- if (frameWidth < 0 || frameWidth != mVideoWidth ||
- frameHeight < 0 || frameHeight != mVideoHeight) {
- LOGE("Failed to set the video frame size to %dx%d",
- mVideoWidth, mVideoHeight);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
- }
-
- // Check on video frame rate
- int frameRate = newCameraParams.getPreviewFrameRate();
- if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
- LOGE("Failed to set frame rate to %d fps. The actual "
- "frame rate is %d", mFrameRate, frameRate);
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+ if (!mCaptureTimeLapse) {
+ // Don't clip for time lapse capture, as the encoder will have enough
+ // time to encode because of the slow capture rate of time lapse.
+ clipVideoBitRate();
+ clipVideoFrameRate();
+ clipVideoFrameWidth();
+ clipVideoFrameHeight();
}
-
- // This CHECK is good, since we just passed the lock/unlock
- // check earlier by calling mCamera->setParameters().
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
- IPCThreadState::self()->restoreCallingIdentity(token);
return OK;
}
@@ -971,17 +1035,49 @@ void StagefrightRecorder::clipVideoFrameHeight() {
}
}
-status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
- source->clear();
+status_t StagefrightRecorder::setupCameraSource(sp<CameraSource> *cameraSource) {
+ status_t err = OK;
+ if ((err = checkVideoEncoderCapabilities()) != OK) {
+ return err;
+ }
+ Size videoSize;
+ videoSize.width = mVideoWidth;
+ videoSize.height = mVideoHeight;
+ if (mCaptureTimeLapse) {
+ mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+ mCamera, mCameraId,
+ videoSize, mFrameRate, mPreviewSurface,
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ *cameraSource = mCameraSourceTimeLapse;
+ } else {
+ *cameraSource = CameraSource::CreateFromCamera(
+ mCamera, mCameraId, videoSize, mFrameRate, mPreviewSurface);
+ }
+ CHECK(*cameraSource != NULL);
+
+ // When the frame rate is not set, it defaults to the frame rate
+ // the camera is currently using.
+ if (mFrameRate == -1) {
+ int32_t frameRate = 0;
+ CHECK ((*cameraSource)->getFormat()->findInt32(
+ kKeySampleRate, &frameRate));
+ LOGI("Frame rate is not explicitly set. Using the current frame "
+ "rate (%d fps)", frameRate);
+ mFrameRate = frameRate;
+ }
- status_t err = setupCameraSource();
- if (err != OK) return err;
+ CHECK(mFrameRate != -1);
+ return OK;
+}
- sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
- CHECK(cameraSource != NULL);
+status_t StagefrightRecorder::setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source) {
+ source->clear();
sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
+ enc_meta->setInt32(kKeyBitRate, videoBitRate);
enc_meta->setInt32(kKeySampleRate, mFrameRate);
switch (mVideoEncoder) {
@@ -1025,14 +1121,24 @@ status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
}
if (mVideoEncoderLevel != -1) {
enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ } else if (mCaptureTimeLapse) {
+ // Check if we are using high resolution and/or high bitrate and
+ // set an appropriate level for the software AVCEncoder.
+ if ((width * height >= 921600) // 720p
+ || (videoBitRate >= 20000000)) {
+ enc_meta->setInt32(kKeyVideoLevel, 50);
+ }
}
OMXClient client;
CHECK_EQ(client.connect(), OK);
+ // Use software codec for time lapse
+ uint32_t encoder_flags = (mCaptureTimeLapse) ? OMXCodec::kPreferSoftwareCodecs : 0;
sp<MediaSource> encoder = OMXCodec::Create(
client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
+ true /* createEncoder */, cameraSource,
+ NULL, encoder_flags);
if (encoder == NULL) {
return UNKNOWN_ERROR;
}
@@ -1063,51 +1169,147 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
return OK;
}
-status_t StagefrightRecorder::startMPEG4Recording() {
- int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter) {
+ mediaWriter->clear();
+ *totalBitRate = 0;
status_t err = OK;
- sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
+ sp<MediaWriter> writer = new MPEG4Writer(dup(outputFd));
// Add audio source first if it exists
- if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+ if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
- totalBitRate += mAudioBitRate;
+ *totalBitRate += mAudioBitRate;
}
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
+
+ sp<MediaSource> cameraMediaSource;
+ if (useSplitCameraSource) {
+ LOGV("Using Split camera source");
+ cameraMediaSource = mCameraSourceSplitter->createClient();
+ } else {
+ sp<CameraSource> cameraSource;
+ err = setupCameraSource(&cameraSource);
+ cameraMediaSource = cameraSource;
+ }
+ if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+ // Use downsampling from the original source.
+ cameraMediaSource =
+ new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+ }
+ if (err != OK) {
+ return err;
+ }
+
sp<MediaSource> encoder;
- err = setupVideoEncoder(&encoder);
- if (err != OK) return err;
+ err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+ if (err != OK) {
+ return err;
+ }
+
writer->addSource(encoder);
- totalBitRate += mVideoBitRate;
+ *totalBitRate += videoBitRate;
}
if (mInterleaveDurationUs > 0) {
reinterpret_cast<MPEG4Writer *>(writer.get())->
setInterleaveDuration(mInterleaveDurationUs);
}
-
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
writer->setMaxFileSize(mMaxFileSizeBytes);
}
- sp<MetaData> meta = new MetaData;
- meta->setInt64(kKeyTime, systemTime() / 1000);
- meta->setInt32(kKeyFileType, mOutputFormat);
- meta->setInt32(kKeyBitRate, totalBitRate);
- meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+
+ writer->setListener(mListener);
+ *mediaWriter = writer;
+ return OK;
+}
+
+void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta) {
+ (*meta)->setInt64(kKeyTime, startTimeUs);
+ (*meta)->setInt32(kKeyFileType, mOutputFormat);
+ (*meta)->setInt32(kKeyBitRate, totalBitRate);
+ (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
if (mMovieTimeScale > 0) {
- meta->setInt32(kKeyTimeScale, mMovieTimeScale);
+ (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
}
if (mTrackEveryTimeDurationUs > 0) {
- meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+ (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
- writer->setListener(mListener);
- mWriter = writer;
- return mWriter->start(meta.get());
+}
+
+status_t StagefrightRecorder::startMPEG4Recording() {
+ if (mCaptureAuxVideo) {
+ if (!mCaptureTimeLapse) {
+ LOGE("Auxiliary video can be captured only in time lapse mode");
+ return UNKNOWN_ERROR;
+ }
+ LOGV("Creating MediaSourceSplitter");
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+ mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
+ } else {
+ mCameraSourceSplitter = NULL;
+ }
+
+ int32_t totalBitRate;
+ status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFd, mVideoWidth, mVideoHeight,
+ mVideoBitRate, &totalBitRate, &mWriter);
+ if (err != OK) {
+ return err;
+ }
+
+ int64_t startTimeUs = systemTime() / 1000;
+ sp<MetaData> meta = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+ err = mWriter->start(meta.get());
+ if (err != OK) {
+ return err;
+ }
+
+ if (mCaptureAuxVideo) {
+ CHECK(mOutputFdAux >= 0);
+ if (mWriterAux != NULL) {
+ LOGE("Auxiliary file writer already exists");
+ return UNKNOWN_ERROR;
+ }
+ if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
+ ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) {
+ LOGE("Auxiliary video size (%d x %d) is the same as or larger than the main video size (%d x %d)",
+ mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t totalBitrateAux;
+ err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
+ mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
+ if (err != OK) {
+ return err;
+ }
+
+ sp<MetaData> metaAux = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
+
+ return mWriterAux->start(metaAux.get());
+ }
+
+ return OK;
}
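For reference, the auxiliary size check above behaves as follows with some hypothetical dimensions (illustration only, not part of the patch):

    // main 720x480, aux 176x144 -> accepted (smaller in both dimensions)
    // main 720x480, aux 720x480 -> rejected (same size as the main video)
    // main 720x480, aux 800x480 -> rejected (wider than the main video)
    // main 720x480, aux 720x144 -> accepted (equal in only one dimension)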
status_t StagefrightRecorder::pause() {
@@ -1116,28 +1318,36 @@ status_t StagefrightRecorder::pause() {
return UNKNOWN_ERROR;
}
mWriter->pause();
+
+ if (mCaptureAuxVideo) {
+ if (mWriterAux == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ mWriterAux->pause();
+ }
+
return OK;
}
status_t StagefrightRecorder::stop() {
LOGV("stop");
status_t err = OK;
- if (mWriter != NULL) {
- err = mWriter->stop();
- mWriter.clear();
+
+ if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+ mCameraSourceTimeLapse->startQuickReadReturns();
+ mCameraSourceTimeLapse = NULL;
}
- if (mCamera != 0) {
- LOGV("Disconnect camera");
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
- LOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
+ if (mCaptureAuxVideo) {
+ if (mWriterAux != NULL) {
+ mWriterAux->stop();
+ mWriterAux.clear();
}
- mCamera->unlock();
- mCamera.clear();
- IPCThreadState::self()->restoreCallingIdentity(token);
- mFlags = 0;
+ }
+
+ if (mWriter != NULL) {
+ err = mWriter->stop();
+ mWriter.clear();
}
if (mOutputFd >= 0) {
@@ -1145,6 +1355,13 @@ status_t StagefrightRecorder::stop() {
mOutputFd = -1;
}
+ if (mCaptureAuxVideo) {
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ mOutputFdAux = -1;
+ }
+ }
+
return err;
}
@@ -1169,8 +1386,11 @@ status_t StagefrightRecorder::reset() {
mVideoEncoder = VIDEO_ENCODER_H263;
mVideoWidth = 176;
mVideoHeight = 144;
- mFrameRate = 20;
+ mAuxVideoWidth = 176;
+ mAuxVideoHeight = 144;
+ mFrameRate = -1;
mVideoBitRate = 192000;
+ mAuxVideoBitRate = 192000;
mSampleRate = 8000;
mAudioChannels = 1;
mAudioBitRate = 12200;
@@ -1187,10 +1407,15 @@ status_t StagefrightRecorder::reset() {
mMaxFileDurationUs = 0;
mMaxFileSizeBytes = 0;
mTrackEveryTimeDurationUs = 0;
+ mCaptureTimeLapse = false;
+ mTimeBetweenTimeLapseFrameCaptureUs = -1;
+ mCaptureAuxVideo = false;
+ mCameraSourceSplitter = NULL;
+ mCameraSourceTimeLapse = NULL;
mEncoderProfiles = MediaProfiles::getInstance();
mOutputFd = -1;
- mFlags = 0;
+ mOutputFdAux = -1;
return OK;
}
@@ -1227,6 +1452,8 @@ status_t StagefrightRecorder::dump(
snprintf(buffer, SIZE, " Recorder: %p\n", this);
snprintf(buffer, SIZE, " Output file (fd %d):\n", mOutputFd);
result.append(buffer);
+ snprintf(buffer, SIZE, " Auxiliary output file (fd %d):\n", mOutputFdAux);
+ result.append(buffer);
snprintf(buffer, SIZE, " File format: %d\n", mOutputFormat);
result.append(buffer);
snprintf(buffer, SIZE, " Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1259,8 +1486,6 @@ status_t StagefrightRecorder::dump(
result.append(buffer);
snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId);
result.append(buffer);
- snprintf(buffer, SIZE, " Camera flags: %d\n", mFlags);
- result.append(buffer);
snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder);
result.append(buffer);
snprintf(buffer, SIZE, " Encoder profile: %d\n", mVideoEncoderProfile);
@@ -1271,10 +1496,14 @@ status_t StagefrightRecorder::dump(
result.append(buffer);
snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
+ result.append(buffer);
snprintf(buffer, SIZE, " Frame rate (fps): %d\n", mFrameRate);
result.append(buffer);
snprintf(buffer, SIZE, " Bit rate (bps): %d\n", mVideoBitRate);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
+ result.append(buffer);
::write(fd, result.string(), result.size());
return OK;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index ad0dfa0..7d2549f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -19,13 +19,18 @@
#define STAGEFRIGHT_RECORDER_H_
#include <media/MediaRecorderBase.h>
+#include <camera/CameraParameters.h>
#include <utils/String8.h>
namespace android {
class Camera;
+class CameraSource;
+class CameraSourceTimeLapse;
+class MediaSourceSplitter;
struct MediaSource;
struct MediaWriter;
+class MetaData;
struct AudioSource;
class MediaProfiles;
@@ -42,9 +47,10 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setParameters(const String8& params);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
virtual status_t prepare();
@@ -57,15 +63,10 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t dump(int fd, const Vector<String16>& args) const;
private:
- enum CameraFlags {
- FLAGS_SET_CAMERA = 1L << 0,
- FLAGS_HOT_CAMERA = 1L << 1,
- };
-
- sp<Camera> mCamera;
- sp<ISurface> mPreviewSurface;
+ sp<ICamera> mCamera;
+ sp<Surface> mPreviewSurface;
sp<IMediaRecorderClient> mListener;
- sp<MediaWriter> mWriter;
+ sp<MediaWriter> mWriter, mWriterAux;
sp<AudioSource> mAudioSourceNode;
audio_source mAudioSource;
@@ -75,8 +76,9 @@ private:
video_encoder mVideoEncoder;
bool mUse64BitFileOffset;
int32_t mVideoWidth, mVideoHeight;
+ int32_t mAuxVideoWidth, mAuxVideoHeight;
int32_t mFrameRate;
- int32_t mVideoBitRate;
+ int32_t mVideoBitRate, mAuxVideoBitRate;
int32_t mAudioBitRate;
int32_t mAudioChannels;
int32_t mSampleRate;
@@ -92,21 +94,39 @@ private:
int64_t mMaxFileDurationUs;
int64_t mTrackEveryTimeDurationUs;
+ bool mCaptureTimeLapse;
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+ bool mCaptureAuxVideo;
+ sp<MediaSourceSplitter> mCameraSourceSplitter;
+ sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
+
String8 mParams;
- int mOutputFd;
- int32_t mFlags;
+ int mOutputFd, mOutputFdAux;
MediaProfiles *mEncoderProfiles;
+ status_t setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter);
+ void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta);
status_t startMPEG4Recording();
status_t startAMRRecording();
status_t startAACRecording();
status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
- status_t setupCameraSource();
+ status_t checkVideoEncoderCapabilities();
+ status_t setupCameraSource(sp<CameraSource> *cameraSource);
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
- status_t setupVideoEncoder(sp<MediaSource> *source);
+ status_t setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source);
// Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
@@ -114,6 +134,11 @@ private:
status_t setParamAudioNumberOfChannels(int32_t channels);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
+ status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+ status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+ status_t setParamAuxVideoHeight(int32_t height);
+ status_t setParamAuxVideoWidth(int32_t width);
+ status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
@@ -138,4 +163,3 @@ private:
} // namespace android
#endif // STAGEFRIGHT_RECORDER_H_
-
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 6e6c3cd..5eaf592 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,7 +75,10 @@ class TestPlayerStub : public MediaPlayerInterface {
// All the methods below wrap the mPlayer instance.
- virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) {
+ virtual status_t setVideoISurface(const android::sp<android::ISurface>& s) {
+ return mPlayer->setVideoISurface(s);
+ }
+ virtual status_t setVideoSurface(const android::sp<android::Surface>& s) {
return mPlayer->setVideoSurface(s);
}
virtual status_t prepare() {return mPlayer->prepare();}
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..d1870ee 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,8 @@ LOCAL_SRC_FILES:= \
AudioSource.cpp \
AwesomePlayer.cpp \
CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
+ VideoSourceDownSampler.cpp \
DataSource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
@@ -25,6 +27,7 @@ LOCAL_SRC_FILES:= \
MediaDefs.cpp \
MediaExtractor.cpp \
MediaSource.cpp \
+ MediaSourceSplitter.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
NuHTTPDataSource.cpp \
@@ -60,6 +63,7 @@ LOCAL_SHARED_LIBRARIES := \
libsonivox \
libvorbisidec \
libsurfaceflinger_client \
+ libstagefright_yuv \
libcamera_client \
libdrmframework
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index e0bcdc3..ca90c0c 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -45,7 +45,7 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -101,13 +101,14 @@ struct AwesomeLocalRenderer : public AwesomeRenderer {
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight)
: mTarget(NULL),
mLibHandle(NULL) {
init(previewOnly, componentName,
- colorFormat, surface, displayWidth,
+ colorFormat, isurface, surface, displayWidth,
displayHeight, decodedWidth, decodedHeight);
}
@@ -139,7 +140,8 @@ private:
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight);
@@ -151,7 +153,8 @@ void AwesomeLocalRenderer::init(
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight) {
if (!previewOnly) {
@@ -177,7 +180,7 @@ void AwesomeLocalRenderer::init(
if (func) {
mTarget =
- (*func)(surface, componentName, colorFormat,
+ (*func)(isurface, componentName, colorFormat,
displayWidth, displayHeight,
decodedWidth, decodedHeight);
}
@@ -191,6 +194,35 @@ void AwesomeLocalRenderer::init(
}
}
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+ AwesomeNativeWindowRenderer(const sp<ANativeWindow> &nativeWindow)
+ : mNativeWindow(nativeWindow) {
+ }
+
+ virtual void render(MediaBuffer *buffer) {
+ status_t err = mNativeWindow->queueBuffer(
+ mNativeWindow.get(), buffer->graphicBuffer().get());
+ if (err != 0) {
+ LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+ -err);
+ return;
+ }
+
+ sp<MetaData> metaData = buffer->meta_data();
+ metaData->setInt32(kKeyRendered, 1);
+ }
+
+protected:
+ virtual ~AwesomeNativeWindowRenderer() {}
+
+private:
+ sp<ANativeWindow> mNativeWindow;
+
+ AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+ AwesomeNativeWindowRenderer &operator=(
+ const AwesomeNativeWindowRenderer &);
+};
+
AwesomePlayer::AwesomePlayer()
: mQueueStarted(false),
mTimeSource(NULL),
@@ -394,6 +426,12 @@ void AwesomePlayer::reset_l() {
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
}
+
+ if (mFlags & PREPARING_CONNECTED) {
+ // We are basically done preparing; we're just buffering
+ // enough data to start playback, so we can safely interrupt that.
+ finishAsyncPrepare_l();
+ }
}
while (mFlags & PREPARING) {
@@ -794,8 +832,18 @@ status_t AwesomePlayer::play_l() {
return OK;
}
+void AwesomePlayer::notifyVideoSize_l() {
+ sp<MetaData> meta = mVideoSource->getFormat();
+
+ int32_t decodedWidth, decodedHeight;
+ CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+ CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
+
+ notifyListener_l(MEDIA_SET_VIDEO_SIZE, decodedWidth, decodedHeight);
+}
+
void AwesomePlayer::initRenderer_l() {
- if (mISurface != NULL) {
+ if (mSurface != NULL || mISurface != NULL) {
sp<MetaData> meta = mVideoSource->getFormat();
int32_t format;
@@ -812,7 +860,27 @@ void AwesomePlayer::initRenderer_l() {
// before creating a new one.
IPCThreadState::self()->flushCommands();
- if (!strncmp("OMX.", component, 4)) {
+ if (mSurface != NULL) {
+ if (strncmp(component, "OMX.", 4) == 0) {
+ // Hardware decoders avoid the CPU color conversion by decoding
+ // directly to ANativeBuffers, so we must use a renderer that
+ // just pushes those buffers to the ANativeWindow.
+ mVideoRenderer = new AwesomeNativeWindowRenderer(mSurface);
+ } else {
+ // Other decoders are instantiated locally and as a consequence
+ // allocate their buffers in local address space. This renderer
+ // then performs a color conversion and copy to get the data
+ // into the ANativeBuffer.
+ mVideoRenderer = new AwesomeLocalRenderer(
+ false, // previewOnly
+ component,
+ (OMX_COLOR_FORMATTYPE)format,
+ mISurface,
+ mSurface,
+ mVideoWidth, mVideoHeight,
+ decodedWidth, decodedHeight);
+ }
+ } else {
// Our OMX codecs allocate buffers on the media_server side
// therefore they require a remote IOMXRenderer that knows how
// to display them.
@@ -822,16 +890,6 @@ void AwesomePlayer::initRenderer_l() {
(OMX_COLOR_FORMATTYPE)format,
decodedWidth, decodedHeight,
mVideoWidth, mVideoHeight));
- } else {
- // Other decoders are instantiated locally and as a consequence
- // allocate their buffers in local address space.
- mVideoRenderer = new AwesomeLocalRenderer(
- false, // previewOnly
- component,
- (OMX_COLOR_FORMATTYPE)format,
- mISurface,
- mVideoWidth, mVideoHeight,
- decodedWidth, decodedHeight);
}
}
}
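In summary, the renderer selection implemented by this hunk is (a sketch of the resulting control flow, not additional code):

    // mSurface set, OMX.* component    -> AwesomeNativeWindowRenderer
    //                                      (decoded ANativeBuffers queued straight
    //                                       to the ANativeWindow)
    // mSurface set, other component    -> AwesomeLocalRenderer
    //                                      (color conversion + copy into the
    //                                       ANativeBuffer)
    // only mISurface set (legacy path) -> the pre-existing remote renderer,
    //                                      unchanged by this patch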
@@ -882,6 +940,12 @@ void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
mISurface = isurface;
}
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
+ Mutex::Autolock autoLock(mLock);
+
+ mSurface = surface;
+}
+
void AwesomePlayer::setAudioSink(
const sp<MediaPlayerBase::AudioSink> &audioSink) {
Mutex::Autolock autoLock(mLock);
@@ -1067,7 +1131,7 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
- NULL, flags);
+ NULL, flags, mSurface);
if (mVideoSource != NULL) {
int64_t durationUs;
@@ -1098,7 +1162,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
}
if (mAudioPlayer != NULL) {
- LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+ LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
// If we don't have a video time, seek audio to the originally
// requested seek time instead.
@@ -1116,6 +1180,13 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
mFlags |= FIRST_FRAME;
mSeeking = false;
mSeekNotificationSent = false;
+
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::PAUSE, 0);
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::START, videoTimeUs / 1000);
+ }
}
void AwesomePlayer::onVideoEvent() {
@@ -1172,6 +1243,8 @@ void AwesomePlayer::onVideoEvent() {
if (err == INFO_FORMAT_CHANGED) {
LOGV("VideoSource signalled format change.");
+ notifyVideoSize_l();
+
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
initRenderer_l();
@@ -1213,17 +1286,11 @@ void AwesomePlayer::onVideoEvent() {
mVideoTimeUs = timeUs;
}
+ bool wasSeeking = mSeeking;
finishSeekIfNecessary(timeUs);
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
- if (mDecryptHandle != NULL) {
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::PAUSE, 0);
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::START, timeUs / 1000);
- }
-
if (mFlags & FIRST_FRAME) {
mFlags &= ~FIRST_FRAME;
@@ -1240,6 +1307,11 @@ void AwesomePlayer::onVideoEvent() {
int64_t latenessUs = nowUs - timeUs;
+ if (wasSeeking) {
+ // Let's display the first frame after seeking right away.
+ latenessUs = 0;
+ }
+
if (mRTPSession != NULL) {
// We'll completely ignore timestamps for gtalk videochat
// and we'll play incoming video as fast as we get it.
@@ -1611,7 +1683,7 @@ void AwesomePlayer::abortPrepare(status_t err) {
}
mPrepareResult = err;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
}
@@ -1659,6 +1731,8 @@ void AwesomePlayer::onPrepareAsyncEvent() {
}
}
+ mFlags |= PREPARING_CONNECTED;
+
if (mCachedSource != NULL || mRTSPController != NULL) {
postBufferingEvent_l();
} else {
@@ -1668,17 +1742,17 @@ void AwesomePlayer::onPrepareAsyncEvent() {
void AwesomePlayer::finishAsyncPrepare_l() {
if (mIsAsyncPrepare) {
- if (mVideoWidth < 0 || mVideoHeight < 0) {
+ if (mVideoSource == NULL) {
notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
} else {
- notifyListener_l(MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
+ notifyVideoSize_l();
}
notifyListener_l(MEDIA_PREPARED);
}
mPrepareResult = OK;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mFlags |= PREPARED;
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
@@ -1793,13 +1867,14 @@ status_t AwesomePlayer::resume() {
mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
- if (state->mLastVideoFrame && mISurface != NULL) {
+ if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
mVideoRenderer =
new AwesomeLocalRenderer(
true, // previewOnly
"",
(OMX_COLOR_FORMATTYPE)state->mColorFormat,
mISurface,
+ mSurface,
state->mVideoWidth,
state->mVideoHeight,
state->mDecodedWidth,
@@ -1834,4 +1909,3 @@ void AwesomePlayer::postAudioSeekComplete() {
}
} // namespace android
-
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..159d937 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -65,6 +66,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
}
void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@ void CameraSourceListener::postDataTimestamp(
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+ return OMX_COLOR_FormatYUV420Planar;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
return OMX_COLOR_FormatYUV422SemiPlanar;
}
@@ -99,72 +109,430 @@ static int32_t getColorFormat(const char* colorFormat) {
CHECK_EQ(0, "Unknown color format");
}
-// static
CameraSource *CameraSource::Create() {
- sp<Camera> camera = Camera::connect(0);
-
- if (camera.get() == NULL) {
- return NULL;
- }
+ Size size;
+ size.width = -1;
+ size.height = -1;
- return new CameraSource(camera);
+ sp<ICamera> camera;
+ return new CameraSource(camera, 0, size, -1, NULL, false);
}
// static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
- if (camera.get() == NULL) {
- return NULL;
+CameraSource *CameraSource::CreateFromCamera(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers) {
+
+ CameraSource *source = new CameraSource(camera, cameraId,
+ videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
}
-
- return new CameraSource(camera);
+ return source;
}
-CameraSource::CameraSource(const sp<Camera> &camera)
- : mCamera(camera),
- mFirstFrameTimeUs(0),
- mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers)
+ : mCameraFlags(0),
+ mVideoFrameRate(-1),
+ mCamera(0),
+ mSurface(surface),
mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false),
- mStarted(false) {
+ mCollectStats(false) {
+
+ mVideoSize.width = -1;
+ mVideoSize.height = -1;
+
+ mInitCheck = init(camera, cameraId,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+ return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+ const sp<ICamera>& camera, int32_t cameraId) {
+
+ if (camera == 0) {
+ mCamera = Camera::connect(cameraId);
+ mCameraFlags &= ~FLAGS_HOT_CAMERA;
+ } else {
+ mCamera = Camera::create(camera);
+ mCameraFlags |= FLAGS_HOT_CAMERA;
+ }
+
+ // Is camera available?
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+ mCamera->lock();
+ }
+ return OK;
+}
+
+
+/*
+ * Check whether the requested video width and height match one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+ int32_t width, int32_t height,
+ const Vector<Size>& supportedSizes) {
+
+ LOGV("isVideoSizeSupported");
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
+ if (width == supportedSizes[i].width &&
+ height == supportedSizes[i].height) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * If the preview and video output are separate, we only set
+ * the video size, and applications should set the preview size
+ * to some proper value, and the recording framework will not
+ * change the preview size; otherwise, if the video and preview
+ * output is the same, we need to set the preview to be the same
+ * as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether the method
+ * CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ * supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+ const CameraParameters& params,
+ bool *isSetVideoSizeSupported,
+ Vector<Size>& sizes) {
+
+ *isSetVideoSizeSupported = true;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ LOGD("Camera does not support setVideoSize()");
+ params.getSupportedPreviewSizes(sizes);
+ *isSetVideoSizeSupported = false;
+ }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+ const CameraParameters& params) {
+ mColorFormat = getColorFormat(params.get(
+ CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ if (mColorFormat == -1) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * If frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+ CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate) {
+
+ Vector<Size> sizes;
+ bool isSetVideoSizeSupportedByCamera = true;
+ getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+ bool isCameraParamChanged = false;
+ if (width != -1 && height != -1) {
+ if (!isVideoSizeSupported(width, height, sizes)) {
+ LOGE("Video dimension (%dx%d) is unsupported", width, height);
+ return BAD_VALUE;
+ }
+ if (isSetVideoSizeSupportedByCamera) {
+ params->setVideoSize(width, height);
+ } else {
+ params->setPreviewSize(width, height);
+ }
+ isCameraParamChanged = true;
+ } else if ((width == -1 && height != -1) ||
+ (width != -1 && height == -1)) {
+ // If one and only one of the width and height is -1
+ // we reject such a request.
+ LOGE("Requested video size (%dx%d) is not supported", width, height);
+ return BAD_VALUE;
+ } else { // width == -1 && height == -1
+ // Do not configure the camera.
+ // Use the current width and height value setting from the camera.
+ }
+
+ if (frameRate != -1) {
+ CHECK(frameRate > 0 && frameRate <= 120);
+ const char* supportedFrameRates =
+ params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ CHECK(supportedFrameRates != NULL);
+ LOGV("Supported frame rates: %s", supportedFrameRates);
+ char buf[4];
+ snprintf(buf, 4, "%d", frameRate);
+ if (strstr(supportedFrameRates, buf) == NULL) {
+ LOGE("Requested frame rate (%d) is not supported: %s",
+ frameRate, supportedFrameRates);
+ return BAD_VALUE;
+ }
+
+ // The frame rate is supported, set the camera to the requested value.
+ params->setPreviewFrameRate(frameRate);
+ isCameraParamChanged = true;
+ } else { // frameRate == -1
+ // Do not configure the camera.
+ // Use the current frame rate value setting from the camera
+ }
+
+ if (isCameraParamChanged) {
+ // Either frame rate or frame size needs to be changed.
+ String8 s = params->flatten();
+ if (OK != mCamera->setParameters(s)) {
+ LOGE("Could not change settings."
+ " Someone else is using camera %p?", mCamera.get());
+ return -EBUSY;
+ }
+ }
+ return OK;
+}
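One caveat worth noting: the frame rate check above uses strstr(), so it matches substrings of the comma-separated KEY_SUPPORTED_PREVIEW_FRAME_RATES value; a request for 5 fps would, for example, match a list such as "15,30". A minimal sketch of an exact-token check (editorial illustration only, not part of the patch; assumes <stdlib.h> and <string.h>):

    static bool isFrameRateInList(const char *supported, int32_t frameRate) {
        // Walk the comma-separated list and require an exact numeric match.
        const char *p = supported;
        while (p != NULL && *p != '\0') {
            char *end;
            long value = strtol(p, &end, 10);
            if (end != p && value == frameRate
                    && (*end == '\0' || *end == ',')) {
                return true;
            }
            p = strchr(end, ',');
            if (p != NULL) {
                ++p;  // skip the comma and continue with the next token
            }
        }
        return false;
    }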
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, check on the current width and height value setting
+ * is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param the target video frame width in pixels to check against
+ * @param the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+ const CameraParameters& params,
+ int32_t width, int32_t height) {
+
+ // The actual video size is the same as the preview size
+ // if the camera hal does not support separate video and
+ // preview output. In this case, we retrieve the video
+ // size from preview.
+ int32_t frameWidthActual = -1;
+ int32_t frameHeightActual = -1;
+ Vector<Size> sizes;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ // video size is the same as preview size
+ params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+ } else {
+ // video size may not be the same as preview
+ params.getVideoSize(&frameWidthActual, &frameHeightActual);
+ }
+ if (frameWidthActual < 0 || frameHeightActual < 0) {
+ LOGE("Failed to retrieve video frame size (%dx%d)",
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ // Check the actual video frame size against the target/requested
+ // video frame size.
+ if (width != -1 && height != -1) {
+ if (frameWidthActual != width || frameHeightActual != height) {
+ LOGE("Failed to set video frame size to %dx%d. "
+ "The actual video size is %dx%d ", width, height,
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Good now.
+ mVideoSize.width = frameWidthActual;
+ mVideoSize.height = frameHeightActual;
+ return OK;
+}
+
+/*
+ * Check whether the requested frame rate has been successfully configured or not.
+ * If the target frameRate is -1, a check against the current frame rate
+ * setting is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+ const CameraParameters& params,
+ int32_t frameRate) {
+
+ int32_t frameRateActual = params.getPreviewFrameRate();
+ if (frameRateActual < 0) {
+ LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame rate against the target/requested
+ // video frame rate.
+ if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+ LOGE("Failed to set preview frame rate to %d fps. The actual "
+ "frame rate is %d", frameRate, frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Good now.
+ mVideoFrameRate = frameRateActual;
+ return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready to provide the requested video input streams.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ * as the video source
+ * @param videoSize the target video frame size. If both
+ * width and height in videoSize are -1, use the current
+ * width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ * If it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ * data or real YUV data in video buffers. Request to
+ * store meta data in video buffers may not be honored
+ * if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+
+ status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
- IPCThreadState::self()->restoreCallingIdentity(token);
- printf("params: \"%s\"\n", s.string());
+ if ((err = isCameraAvailable(camera, cameraId)) != OK) {
+ return err;
+ }
+ CameraParameters params(mCamera->getParameters());
+ if ((err = isCameraColorFormatSupported(params)) != OK) {
+ return err;
+ }
+
+ // Set the camera to use the requested video frame size
+ // and/or frame rate.
+ if ((err = configureCamera(&params,
+ videoSize.width, videoSize.height,
+ frameRate))) {
+ return err;
+ }
+
+ // Check on video frame size and frame rate.
+ CameraParameters newCameraParams(mCamera->getParameters());
+ if ((err = checkVideoSize(newCameraParams,
+ videoSize.width, videoSize.height)) != OK) {
+ return err;
+ }
+ if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+ return err;
+ }
- int32_t width, height, stride, sliceHeight;
- CameraParameters params(s);
- params.getPreviewSize(&width, &height);
+ // This CHECK is good, since we just passed the lock/unlock
+ // check earlier by calling mCamera->setParameters().
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
- // Calculate glitch duraton threshold based on frame rate
- int32_t frameRate = params.getPreviewFrameRate();
- int64_t glitchDurationUs = (1000000LL / frameRate);
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (storeMetaDataInVideoBuffers &&
+ OK == mCamera->storeMetaDataInBuffers(true)) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
+
+ /*
+ * mCamera->startRecording() signals camera hal to make
+ * available the video buffers (for instance, allocation
+ * of the video buffers may be triggered when camera hal's
+ * startRecording() method is called). Making available these
+ * video buffers earlier (before calling start()) is critical
+ * if one wants to configure OMX video encoders to use these
+ * buffers for passing video frame data during video recording
+ * without the need to memcpy the video frame data stored
+ * in these buffers. Eliminating this memcpy is crucial for the
+ * performance of HD quality video recording applications.
+ *
+ * Based on the OMX IL spec, configuring the OMX video encoders
+ * must occur in the loaded state. When start() is called, the OMX
+ * video encoders are already in the idle state, which is too
+ * late. Thus, we must call mCamera->startRecording() earlier.
+ */
+ startCameraRecording();
+
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
if (glitchDurationUs > mGlitchDurationThresholdUs) {
mGlitchDurationThresholdUs = glitchDurationUs;
}
- const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
- CHECK(colorFormatStr != NULL);
- int32_t colorFormat = getColorFormat(colorFormatStr);
-
// XXX: query camera for the stride and slice height
// when the capability becomes available.
- stride = width;
- sliceHeight = height;
-
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
- mMeta->setInt32(kKeyColorFormat, colorFormat);
- mMeta->setInt32(kKeyWidth, width);
- mMeta->setInt32(kKeyHeight, height);
- mMeta->setInt32(kKeyStride, stride);
- mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mMeta->setInt32(kKeyColorFormat, mColorFormat);
+ mMeta->setInt32(kKeyWidth, mVideoSize.width);
+ mMeta->setInt32(kKeyHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyStride, mVideoSize.width);
+ mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+ mMeta->setInt32(kKeySampleRate, mVideoFrameRate);
+ return OK;
}
CameraSource::~CameraSource() {
@@ -173,8 +541,17 @@ CameraSource::~CameraSource() {
}
}
+void CameraSource::startCameraRecording() {
+ CHECK_EQ(OK, mCamera->startRecording());
+ CHECK(mCamera->recordingEnabled());
+}
+
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
+ if (mInitCheck != OK) {
+ LOGE("CameraSource is not initialized yet");
+ return mInitCheck;
+ }
char value[PROPERTY_VALUE_MAX];
if (property_get("media.stagefright.record-stats", value, NULL)
@@ -190,13 +567,17 @@ status_t CameraSource::start(MetaData *meta) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
- CHECK_EQ(OK, mCamera->startRecording());
IPCThreadState::self()->restoreCallingIdentity(token);
mStarted = true;
return OK;
}
+void CameraSource::stopCameraRecording() {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+}
+
status_t CameraSource::stop() {
LOGV("stop");
Mutex::Autolock autoLock(mLock);
@@ -204,15 +585,23 @@ status_t CameraSource::stop() {
mFrameAvailableCondition.signal();
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->setListener(NULL);
- mCamera->stopRecording();
+ stopCameraRecording();
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
LOGI("Waiting for outstanding frames being encoded: %d",
mFramesBeingEncoded.size());
mFrameCompleteCondition.wait(mLock);
}
- mCamera = NULL;
+
+ LOGV("Disconnect camera");
+ if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ LOGV("Camera was cold when we started, stopping preview");
+ mCamera->stopPreview();
+ }
+ mCamera->unlock();
+ mCamera.clear();
+ mCamera = 0;
+ mCameraFlags = 0;
IPCThreadState::self()->restoreCallingIdentity(token);
if (mCollectStats) {
@@ -225,11 +614,15 @@ status_t CameraSource::stop() {
return OK;
}
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ mCamera->releaseRecordingFrame(frame);
+}
+
void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
- mCamera->releaseRecordingFrame(*it);
+ releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
}
@@ -241,7 +634,7 @@ sp<MetaData> CameraSource::getFormat() {
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
+ releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -251,7 +644,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
-
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -343,6 +735,13 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
++mNumGlitches;
}
+ // May need to skip the frame or modify its timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse (see the sketch after this function).
+ if (skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
@@ -367,4 +766,31 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFrameAvailableCondition.signal();
}
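The skipCurrentFrame() hook referenced above is what lets the subclass CameraSourceTimeLapse drop frames that arrive faster than the configured capture interval when the video camera (rather than the still camera) path is used. A simplified sketch of that kind of decision, with assumed names and state rather than the actual subclass code:

    // Returns true if the frame at timestampUs should be dropped because the
    // time lapse interval since the last kept frame has not yet elapsed.
    static bool shouldSkipTimeLapseFrame(
            int64_t timestampUs,
            int64_t *lastKeptTimestampUs,        // in/out: time of the last kept frame
            int64_t timeBetweenCapturesUs) {     // e.g. mTimeBetweenTimeLapseFrameCaptureUs
        if (*lastKeptTimestampUs != 0 &&
                timestampUs < *lastKeptTimestampUs + timeBetweenCapturesUs) {
            return true;   // too soon: drop this frame
        }
        *lastKeptTimestampUs = timestampUs;      // keep this frame
        return false;
    }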
+size_t CameraSource::getNumberOfVideoBuffers() const {
+ LOGV("getNumberOfVideoBuffers");
+ size_t nBuffers = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ nBuffers = mCamera->getNumberOfVideoBuffers();
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+ LOGV("getVideoBuffer: %d", index);
+ sp<IMemory> buffer = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ buffer = mCamera->getVideoBuffer(index);
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+ LOGV("isMetaDataStoredInVideoBuffers");
+ return mIsMetaDataStoredInVideoBuffers;
+}
+
} // namespace android
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..6fd1825
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,518 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+ CameraSourceTimeLapse *source = new
+ CameraSourceTimeLapse(camera, cameraId,
+ videoSize, videoFrameRate, surface,
+ timeBetweenTimeLapseFrameCaptureUs);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
+ }
+ return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs)
+ : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
+ mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+ mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+ mLastTimeLapseFrameRealTimestampUs(0),
+ mSkipCurrentFrame(false) {
+
+ LOGV("starting time lapse mode");
+ mVideoWidth = videoSize.width;
+ mVideoHeight = videoSize.height;
+
+ if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
+ mUseStillCameraForTimeLapse = false;
+ } else {
+ // TODO: Add a check that mTimeBetweenTimeLapseFrameCaptureUs is greater
+ // than the minimum time between pictures that the still camera supports.
+ mUseStillCameraForTimeLapse = true;
+ CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+ mNeedCropping = computeCropRectangleOffset();
+ mMeta->setInt32(kKeyWidth, videoSize.width);
+ mMeta->setInt32(kKeyHeight, videoSize.height);
+ }
+
+ // Initialize quick stop variables.
+ mQuickStop = false;
+ mForceRead = false;
+ mLastReadBufferCopy = NULL;
+ mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ LOGV("Enabling quick read returns");
+
+ // Enable quick stop mode.
+ mQuickStop = true;
+
+ if (mUseStillCameraForTimeLapse) {
+ // wake up the thread right away.
+ mTakePictureCondition.signal();
+ } else {
+ // Force dataCallbackTimestamp() coming from the video camera to not skip the
+ // next frame, as we want read() to get a frame right away.
+ mForceRead = true;
+ }
+}
+
+bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPreviewSizes(supportedSizes);
+
+ bool previewSizeSupported = false;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth == width) && (pictureHeight == height)) {
+ previewSizeSupported = true;
+ }
+ }
+
+ if (previewSizeSupported) {
+ LOGV("Video size (%d, %d) is a supported preview size", width, height);
+ params.setPreviewSize(width, height);
+ CHECK(mCamera->setParameters(params.flatten()));
+ return true;
+ }
+
+ return false;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPictureSizes(supportedSizes);
+
+ int32_t minPictureSize = INT_MAX;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth >= width) && (pictureHeight >= height)) {
+ int32_t pictureSize = pictureWidth*pictureHeight;
+ if (pictureSize < minPictureSize) {
+ minPictureSize = pictureSize;
+ mPictureWidth = pictureWidth;
+ mPictureHeight = pictureHeight;
+ }
+ }
+ }
+ LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+ return (minPictureSize != INT_MAX);
+}
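Worked example for the picture size selection above, with hypothetical supported sizes:

    // requested video:    720 x 480
    // supported pictures: 640x480, 1280x720, 2048x1536 (hypothetical)
    //   640x480   -> rejected: 640 < 720, does not cover the requested width
    //   1280x720  -> candidate, area  921600
    //   2048x1536 -> candidate, area 3145728
    // The smallest covering area wins: mPictureWidth = 1280, mPictureHeight = 720.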
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
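Continuing the hypothetical numbers from the previous sketch, the crop rectangle is simply centered inside the larger picture:

    // picture 1280x720, video 720x480:
    //   mCropRectStartX = (1280 - 720) / 2 = 280
    //   mCropRectStartY = ( 720 - 480) / 2 = 120
    // The 720x480 video frame is then cut out of the 1280x720 picture,
    // starting at offset (280, 120).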
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+ buffer->setObserver(NULL);
+ buffer->release();
+ } else {
+ return CameraSource::signalBufferReturned(buffer);
+ }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+ size_t sourceSize = sourceBuffer.size();
+ void* sourcePointer = sourceBuffer.data();
+
+ (*newBuffer) = new MediaBuffer(sourceSize);
+ memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+ (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+ int64_t frameTime;
+ CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+ mLastReadBufferCopy->add_ref();
+ mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ if (mLastReadBufferCopy == NULL) {
+ mLastReadStatus = CameraSource::read(buffer, options);
+
+ // mQuickStop may have become true while read was blocked. Make a copy of
+ // the buffer in that case.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && *buffer) {
+ fillLastReadBufferCopy(**buffer);
+ }
+ return mLastReadStatus;
+ } else {
+ (*buffer) = mLastReadBufferCopy;
+ (*buffer)->add_ref();
+ return mLastReadStatus;
+ }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadTimeLapseEntry();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+ while (mStarted) {
+ {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCameraIdle = false;
+ }
+
+ // Even if mQuickStop == true, we need to take one more picture
+ // as a read() may be blocked, waiting for a frame to become available.
+ // After this takePicture, if mQuickStop == true, we can safely exit
+ // this thread as read() will make a copy of this last frame and keep
+ // returning it in the quick stop mode.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ CHECK_EQ(OK, mCamera->takePicture());
+ if (mQuickStop) {
+ LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+ return;
+ }
+ mTakePictureCondition.waitRelative(mQuickStopLock,
+ mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+ }
+ LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ LOGV("start time lapse recording using still camera");
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
+ mCamera->setParameters(params.flatten());
+ mCameraIdle = true;
+ mStopWaitingForIdleCamera = false;
+
+ // disable shutter sound and play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+
+ // create a thread which takes pictures in a loop
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+ pthread_attr_destroy(&attr);
+ } else {
+ LOGV("start time lapse recording using video camera");
+ CHECK_EQ(OK, mCamera->startRecording());
+ }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ void *dummy;
+ pthread_join(mThreadTimeLapse, &dummy);
+
+        // The last takePicture may still be underway. Wait for the camera to
+        // become idle.
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mStopWaitingForIdleCamera = true;
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCamera->setListener(NULL);
+
+ // play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+ } else {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+ }
+ if (mLastReadBufferCopy) {
+ mLastReadBufferCopy->release();
+ mLastReadBufferCopy = NULL;
+ }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+ if (!mUseStillCameraForTimeLapse) {
+ mCamera->releaseRecordingFrame(frame);
+ }
+}
+
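+// Returns a deep copy of source_data backed by a freshly allocated MemoryHeapBase.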
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ size_t source_size = source_data->size();
+ void* source_pointer = source_data->pointer();
+
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+ memcpy(newMemory->pointer(), source_pointer, source_size);
+ return newMemory;
+}
+
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadStartPreview();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+ CHECK_EQ(OK, mCamera->startPreview());
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mCameraIdle = true;
+ mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+ // Start this in a different thread, so that the dataCallback can return
+ LOGV("restartPreview");
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+ pthread_t threadPreview;
+ pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+ pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else {
+ CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+    // allocate memory for the cropped image and set up a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ // takePicture will complete after this callback, so restart preview.
+ restartPreview();
+ return;
+ }
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
+ return;
+ }
+
+ LOGV("dataCallback for timelapse still frame");
+ CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
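+    // Synthesize evenly spaced timestamps for the time-lapse stream: the first
+    // frame gets the recording start time, each later frame is one playback
+    // frame interval after the previous one.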
+ int64_t timestampUs;
+ if (mNumFramesReceived == 0) {
+ timestampUs = mStartTimeUs;
+ } else {
+ timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ if (mSkipCurrentFrame) {
+ mSkipCurrentFrame = false;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ return false;
+ }
+
+ {
+ Mutex::Autolock autoLock(mQuickStopLock);
+
+ // mForceRead may be set to true by startQuickReadReturns(). In that
+ // case don't skip this frame.
+ if (mForceRead) {
+ LOGV("dataCallbackTimestamp timelapse: forced read");
+ mForceRead = false;
+ *timestampUs = mLastFrameTimestampUs;
+ return false;
+ }
+ }
+
+ if (*timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ return false;
+ }
+ }
+ return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data) {
+ if (!mUseStillCameraForTimeLapse) {
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ } else {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+        // If we are using the still camera and stop() has been called, it may
+        // be waiting for the camera to become idle. In that case return
+        // immediately. Calling CameraSource::dataCallbackTimestamp() would lead
+        // to a deadlock since it tries to acquire CameraSource::mLock, which in
+        // this case is held by CameraSource::stop() while it waits for the
+        // camera to become idle. And the camera will not become idle until this
+        // call returns.
+ if (mStopWaitingForIdleCamera) {
+ return;
+ }
+ }
+ CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+} // namespace android
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..e4f9a47 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -21,7 +21,7 @@ namespace android {
FileSource::FileSource(const char *filename)
: mFile(fopen(filename, "rb")),
- mFd(fileno(mFile)),
+ mFd(mFile == NULL ? -1 : fileno(mFile)),
mOffset(0),
mLength(-1),
mDecryptHandle(NULL),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a15c274..b3ed845 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -506,7 +506,7 @@ status_t MPEG4Writer::pause() {
}
void MPEG4Writer::stopWriterThread() {
- LOGV("stopWriterThread");
+ LOGD("Stopping writer thread");
{
Mutex::Autolock autolock(mLock);
@@ -517,6 +517,7 @@ void MPEG4Writer::stopWriterThread() {
void *dummy;
pthread_join(mThread, &dummy);
+ LOGD("Writer thread stopped");
}
status_t MPEG4Writer::stop() {
@@ -1228,6 +1229,7 @@ status_t MPEG4Writer::Track::pause() {
}
status_t MPEG4Writer::Track::stop() {
+ LOGD("Stopping %s track", mIsAudio? "Audio": "Video");
if (mDone) {
return OK;
}
@@ -1239,6 +1241,7 @@ status_t MPEG4Writer::Track::stop() {
status_t err = (status_t) dummy;
+ LOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
{
status_t status = mSource->stop();
if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
@@ -1246,6 +1249,7 @@ status_t MPEG4Writer::Track::stop() {
}
}
+ LOGD("%s track stopped", mIsAudio? "Audio": "Video");
return err;
}
@@ -2217,6 +2221,9 @@ void MPEG4Writer::Track::writeTrackHeader(
CHECK(mCodecSpecificData);
CHECK(mCodecSpecificDataSize > 0);
+ // Make sure all sizes encode to a single byte.
+ CHECK(mCodecSpecificDataSize + 23 < 128);
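+    // (Descriptor lengths in the ESDS use a 7-bits-per-byte expandable
+    // encoding, so values below 128 fit in a single byte.)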
+
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt8(0x03); // ES_DescrTag
mOwner->writeInt8(23 + mCodecSpecificDataSize);
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..cbccd31 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,6 +25,8 @@
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <ui/GraphicBuffer.h>
+
namespace android {
// XXX make this truly atomic.
@@ -61,6 +63,20 @@ MediaBuffer::MediaBuffer(size_t size)
mOriginal(NULL) {
}
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+ : mObserver(NULL),
+ mNextBuffer(NULL),
+ mRefCount(0),
+ mData(NULL),
+ mSize(1),
+ mRangeOffset(0),
+ mRangeLength(mSize),
+ mGraphicBuffer(graphicBuffer),
+ mOwnsData(false),
+ mMetaData(new MetaData),
+ mOriginal(NULL) {
+}
+
void MediaBuffer::release() {
if (mObserver == NULL) {
CHECK_EQ(mRefCount, 0);
@@ -92,10 +108,12 @@ void MediaBuffer::add_ref() {
}
void *MediaBuffer::data() const {
+ CHECK(mGraphicBuffer == NULL);
return mData;
}
size_t MediaBuffer::size() const {
+ CHECK(mGraphicBuffer == NULL);
return mSize;
}
@@ -108,15 +126,19 @@ size_t MediaBuffer::range_length() const {
}
void MediaBuffer::set_range(size_t offset, size_t length) {
- if (offset + length > mSize) {
+ if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
}
- CHECK(offset + length <= mSize);
+ CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
mRangeOffset = offset;
mRangeLength = length;
}
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+ return mGraphicBuffer;
+}
+
sp<MetaData> MediaBuffer::meta_data() {
return mMetaData;
}
@@ -158,6 +180,8 @@ int MediaBuffer::refcount() const {
}
MediaBuffer *MediaBuffer::clone() {
+ CHECK_EQ(mGraphicBuffer, NULL);
+
MediaBuffer *buffer = new MediaBuffer(mData, mSize);
buffer->set_range(mRangeOffset, mRangeLength);
buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +193,3 @@ MediaBuffer *MediaBuffer::clone() {
}
} // namespace android
-
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index ee03c52..965c370 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -65,7 +65,7 @@ sp<MediaExtractor> MediaExtractor::Create(
}
if (!strncmp(mime, "drm", 3)) {
- char *originalMime = strrchr(mime, '+') + 1;
+ const char *originalMime = strrchr(mime, '+') + 1;
if (!strncmp(mime, "drm+es_based", 12)) {
return new DRMExtractor(source, originalMime);
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+ mNumberOfClients = 0;
+ mSource = mediaSource;
+ mSourceStarted = false;
+
+ mNumberOfClientsStarted = 0;
+ mNumberOfCurrentReads = 0;
+ mCurrentReadBit = 0;
+ mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+ Mutex::Autolock autoLock(mLock);
+
+ sp<MediaSource> client = new Client(this, mNumberOfClients++);
+ mClientsStarted.push(false);
+ mClientsDesiredReadBit.push(0);
+ return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("start client (%d)", clientId);
+ if (mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ mNumberOfClientsStarted++;
+
+ if (!mSourceStarted) {
+ LOGV("Starting real source from client (%d)", clientId);
+ status_t err = mSource->start(params);
+
+ if (err == OK) {
+ mSourceStarted = true;
+ mClientsStarted.editItemAt(clientId) = true;
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ }
+
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = true;
+ if (mLastReadCompleted) {
+            // The last read has completed, so join the threads doing the next read.
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ } else {
+            // The last read is still ongoing, so join the threads doing the current read.
+ mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit;
+ }
+ return OK;
+ }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("stop client (%d)", clientId);
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+ CHECK(mClientsStarted[clientId]);
+
+ if (--mNumberOfClientsStarted == 0) {
+ LOGV("Stopping real source from client (%d)", clientId);
+ status_t err = mSource->stop();
+ mSourceStarted = false;
+ mClientsStarted.editItemAt(clientId) = false;
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = false;
+ if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+            // !mLastReadCompleted implies that a buffer has been read from the
+            // source, but not all clients have read it yet.
+ // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+ // client would have wanted to read from this buffer. (i.e. it has not yet
+ // called read() for the current read buffer.)
+ // Since other threads may be waiting for all the clients' reads to complete,
+ // signal that this read has been aborted.
+ signalReadComplete_lock(true);
+ }
+ return OK;
+ }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("getFormat client (%d)", clientId);
+ return mSource->getFormat();
+}
+
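+// Each client keeps a desired read bit that is flipped after every read it
+// completes, while mCurrentReadBit is flipped whenever a new buffer is pulled
+// from the real source. Comparing the two tells read() whether the client
+// needs a fresh buffer or can reuse the cached mLastReadMediaBuffer.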
+status_t MediaSourceSplitter::read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+ LOGV("read client (%d)", clientId);
+ *buffer = NULL;
+
+ if (!mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+ // Desired buffer has not been read from source yet.
+
+        // If the current client is the special client with clientId = 0,
+        // read from the source; otherwise wait until client 0 has finished
+        // reading from the source.
+ if (clientId == 0) {
+            // Wait for all clients' last read to complete first so as not to
+            // corrupt the buffer at mLastReadMediaBuffer.
+ waitForAllClientsLastRead_lock(clientId);
+
+ readFromSource_lock(options);
+ *buffer = mLastReadMediaBuffer;
+ } else {
+ waitForReadFromSource_lock(clientId);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+ CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+ } else {
+ // Desired buffer has already been read from source. Use the cached data.
+ CHECK(clientId != 0);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+
+ mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+ signalReadComplete_lock(false);
+
+ return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
+    mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+ mCurrentReadBit = !mCurrentReadBit;
+ mLastReadCompleted = false;
+ mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+ mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+ if (mLastReadCompleted) {
+ return;
+ }
+ mAllReadsCompleteCondition.wait(mLock);
+ CHECK(mLastReadCompleted);
+}
+
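+// Called with mLock held after a client consumes (or aborts) the current read;
+// once every started client has done so, the read is marked complete and any
+// threads blocked in waitForAllClientsLastRead_lock() are woken up.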
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+ if (!readAborted) {
+ mNumberOfCurrentReads++;
+ }
+
+ if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+ mLastReadCompleted = true;
+ mNumberOfCurrentReads = 0;
+ mAllReadsCompleteCondition.broadcast();
+ }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+ return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+ sp<MediaSourceSplitter> splitter,
+ int32_t clientId) {
+ mSplitter = splitter;
+ mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+ return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+ return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+ return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+ return mSplitter->pause(mClientId);
+}
+
+} // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 9a49a9b..9e1b436 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -39,6 +39,7 @@
#include <binder/MemoryDealer.h>
#include <binder/ProcessState.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
@@ -152,37 +153,37 @@ static sp<MediaSource> InstantiateSoftwareCodec(
static const CodecInfo kDecoderInfo[] = {
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
{ MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};
@@ -194,25 +195,24 @@ static const CodecInfo kEncoderInfo[] = {
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
};
#undef OPTIONAL
@@ -307,16 +307,15 @@ static void InitOMXParams(T *params) {
}
static bool IsSoftwareCodec(const char *componentName) {
- if (!strncmp("OMX.PV.", componentName, 7)) {
- return true;
+ if (!strncmp("OMX.", componentName, 4)) {
+ return false;
}
- return false;
+ return true;
}
// A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, and followed by all the others.
static int CompareSoftwareCodecsFirst(
const String8 *elem1, const String8 *elem2) {
bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -350,9 +349,13 @@ uint32_t OMXCodec::getComponentQuirks(
const char *componentName, bool isEncoder) {
uint32_t quirks = 0;
- if (!strcmp(componentName, "OMX.PV.avcdec")) {
- quirks |= kWantsNALFragments;
+ if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+ quirks |= kDecoderLiesAboutNumberOfChannels;
}
+
if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
quirks |= kNeedsFlushBeforeDisable;
quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -447,7 +450,16 @@ void OMXCodec::findMatchingCodecs(
continue;
}
- matchingCodecs->push(String8(componentName));
+        // When requesting software-only codecs, only push software codecs.
+        // When requesting hardware-only codecs, only push hardware codecs.
+        // When neither software-only nor hardware-only codecs are requested,
+        // push all codecs.
+ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
+ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
+ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+ matchingCodecs->push(String8(componentName));
+ }
}
if (flags & kPreferSoftwareCodecs) {
@@ -461,7 +473,8 @@ sp<MediaSource> OMXCodec::Create(
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName,
- uint32_t flags) {
+ uint32_t flags,
+ const sp<ANativeWindow> &nativeWindow) {
const char *mime;
bool success = meta->findCString(kKeyMIMEType, &mime);
CHECK(success);
@@ -517,7 +530,7 @@ sp<MediaSource> OMXCodec::Create(
sp<OMXCodec> codec = new OMXCodec(
omx, node, quirks,
createEncoder, mime, componentName,
- source);
+ source, nativeWindow);
observer->setCodec(codec);
@@ -725,6 +738,16 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
mQuirks &= ~kOutputBuffersAreUnreadable;
}
+ if (mNativeWindow != NULL
+ && !mIsEncoder
+ && !strncasecmp(mMIME, "video/", 6)
+ && !strncmp(mComponentName, "OMX.", 4)) {
+ status_t err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
+ }
+
return OK;
}
@@ -1283,6 +1306,10 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
h264type.bMBAFF = OMX_FALSE;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+ if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+ h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+ }
+
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
CHECK_EQ(err, OK);
@@ -1408,7 +1435,8 @@ OMXCodec::OMXCodec(
bool isEncoder,
const char *mime,
const char *componentName,
- const sp<MediaSource> &source)
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow)
: mOMX(omx),
mOMXLivesLocally(omx->livesLocally(getpid())),
mNode(node),
@@ -1428,7 +1456,8 @@ OMXCodec::OMXCodec(
mTargetTimeUs(-1),
mSkipTimeUs(-1),
mLeftOverBuffer(NULL),
- mPaused(false) {
+ mPaused(false),
+ mNativeWindow(nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1572,6 +1601,10 @@ status_t OMXCodec::allocateBuffers() {
}
status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+ return allocateOutputBuffersFromNativeWindow();
+ }
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
@@ -1638,6 +1671,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
info.mBuffer = buffer;
info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
info.mMem = mem;
info.mMediaBuffer = NULL;
@@ -1664,6 +1698,173 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
return OK;
}
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+ // Get the number of buffers needed.
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ err = native_window_set_buffers_geometry(
+ mNativeWindow.get(),
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ def.format.video.eColorFormat);
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+
+ // Increase the buffer count by one to allow for the ANativeWindow to hold
+ // on to one of the buffers.
+ def.nBufferCountActual++;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ // Set up the native window.
+ // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+ err = native_window_set_usage(
+ mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_buffer_count(
+ mNativeWindow.get(), def.nBufferCountActual);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+ // the same set of buffers.
+
+ CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+ "output port", def.nBufferCountActual, def.nBufferSize);
+
+ // Dequeue buffers and send them to OMX
+ OMX_U32 i;
+ for (i = 0; i < def.nBufferCountActual; i++) {
+ android_native_buffer_t* buf;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ IOMX::buffer_id bufferId;
+ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+ &bufferId);
+ if (err != 0) {
+ break;
+ }
+
+ CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+ bufferId, graphicBuffer.get());
+
+ BufferInfo info;
+ info.mData = NULL;
+ info.mSize = def.nBufferSize;
+ info.mBuffer = bufferId;
+ info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
+ info.mMem = NULL;
+ info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+ info.mMediaBuffer->setObserver(this);
+
+ mPortBuffers[kPortIndexOutput].push(info);
+ }
+
+ OMX_U32 cancelStart;
+ OMX_U32 cancelEnd;
+
+ if (err != 0) {
+ // If an error occurred while dequeuing we need to cancel any buffers
+ // that were dequeued.
+ cancelStart = 0;
+ cancelEnd = i;
+ } else {
+ // Return the last two buffers to the native window.
+ // XXX TODO: The number of buffers the native window owns should probably be
+ // queried from it when we put the native window in fixed buffer pool mode
+ // (which needs to be implemented). Currently it's hard-coded to 2.
+ cancelStart = def.nBufferCountActual - 2;
+ cancelEnd = def.nBufferCountActual;
+ }
+
+ for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+ cancelBufferToNativeWindow(info);
+ }
+
+ return err;
+}
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+ int err = mNativeWindow->cancelBuffer(
+ mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+ if (err != 0) {
+ CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return err;
+ }
+ info->mOwnedByNativeWindow = true;
+ return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+ // Dequeue the next buffer from the native window.
+ android_native_buffer_t* buf;
+ int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // Determine which buffer we just dequeued.
+ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+ BufferInfo *bufInfo = 0;
+ for (size_t i = 0; i < buffers->size(); i++) {
+ sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+ mMediaBuffer->graphicBuffer();
+ if (graphicBuffer->handle == buf->handle) {
+ bufInfo = &buffers->editItemAt(i);
+ break;
+ }
+ }
+
+ if (bufInfo == 0) {
+ CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // The native window no longer owns the buffer.
+ CHECK(bufInfo->mOwnedByNativeWindow);
+ bufInfo->mOwnedByNativeWindow = false;
+
+ return bufInfo;
+}
+
void OMXCodec::on_message(const omx_message &msg) {
Mutex::Autolock autoLock(mLock);
@@ -1748,6 +1949,15 @@ void OMXCodec::on_message(const omx_message &msg) {
mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
CHECK_EQ(err, OK);
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ if (info->mMediaBuffer != NULL) {
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ cancelBufferToNativeWindow(info);
+ // Ignore any errors
+ }
+ }
+
buffers->removeAt(i);
#if 0
} else if (mPortStatus[kPortIndexOutput] == ENABLED
@@ -1776,8 +1986,10 @@ void OMXCodec::on_message(const omx_message &msg) {
}
MediaBuffer *buffer = info->mMediaBuffer;
+ bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
- if (msg.u.extended_buffer_data.range_offset
+ if (!isGraphicBuffer
+ && msg.u.extended_buffer_data.range_offset
+ msg.u.extended_buffer_data.range_length
> buffer->size()) {
CODEC_LOGE(
@@ -1801,7 +2013,7 @@ void OMXCodec::on_message(const omx_message &msg) {
buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
- if (mQuirks & kOutputBuffersAreUnreadable) {
+ if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
@@ -1871,7 +2083,43 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
case OMX_EventPortSettingsChanged:
{
- onPortSettingsChanged(data1);
+ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+ data1, data2);
+
+ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+ onPortSettingsChanged(data1);
+ } else if (data1 == kPortIndexOutput
+ && data2 == OMX_IndexConfigCommonOutputCrop) {
+
+ OMX_CONFIG_RECTTYPE rect;
+ rect.nPortIndex = kPortIndexOutput;
+ InitOMXParams(&rect);
+
+ status_t err =
+ mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect));
+
+ if (err == OK) {
+ CODEC_LOGV(
+ "output crop (%ld, %ld, %ld, %ld)",
+ rect.nLeft, rect.nTop, rect.nWidth, rect.nHeight);
+
+ if (mNativeWindow != NULL) {
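+                        // OMX reports the crop as left/top plus width/height;
+                        // the native window crop uses inclusive right/bottom
+                        // edges, hence the -1 below.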
+ android_native_rect_t crop;
+ crop.left = rect.nLeft;
+ crop.top = rect.nTop;
+ crop.right = crop.left + rect.nWidth - 1;
+ crop.bottom = crop.top + rect.nHeight - 1;
+
+ CHECK_EQ(0, native_window_set_crop(
+ mNativeWindow.get(), &crop));
+ }
+ } else {
+ CODEC_LOGE("getConfig(OMX_IndexConfigCommonOutputCrop) "
+ "returned error 0x%08x", err);
+ }
+ }
break;
}
@@ -2201,6 +2449,15 @@ status_t OMXCodec::freeBuffersOnPort(
// Make sure nobody but us owns this buffer at this point.
CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err != OK) {
+ stickyErr = err;
+ }
+ }
+
info->mMediaBuffer->release();
}
@@ -2261,6 +2518,7 @@ void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], ENABLED);
mPortStatus[portIndex] = DISABLING;
+ CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
CHECK_EQ(err, OK);
@@ -2274,6 +2532,7 @@ void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], DISABLED);
mPortStatus[portIndex] = ENABLING;
+ CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
CHECK_EQ(err, OK);
@@ -2299,7 +2558,10 @@ void OMXCodec::fillOutputBuffers() {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
for (size_t i = 0; i < buffers->size(); ++i) {
- fillOutputBuffer(&buffers->editItemAt(i));
+ BufferInfo *info = &buffers->editItemAt(i);
+ if (!info->mOwnedByNativeWindow) {
+ fillOutputBuffer(&buffers->editItemAt(i));
+ }
}
}
@@ -2516,7 +2778,23 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) {
return;
}
- CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (graphicBuffer != 0) {
+ // When using a native buffer we need to lock the buffer before giving
+ // it to OMX.
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+ int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+ graphicBuffer.get());
+ if (err != 0) {
+ CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return;
+ }
+ }
+
+ CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
if (err != OK) {
@@ -3099,7 +3377,32 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
if (info->mMediaBuffer == buffer) {
CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
- fillOutputBuffer(info);
+ if (buffer->graphicBuffer() == 0) {
+ fillOutputBuffer(info);
+ } else {
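+                // A graphic buffer returned by the renderer either was queued
+                // to the native window (rendered) or not. If it was not
+                // rendered, cancel it back to the window; either way the
+                // window now owns it, so dequeue a fresh buffer for the codec.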
+ sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+ int32_t rendered = 0;
+ if (!metaData->findInt32(kKeyRendered, &rendered)) {
+ rendered = 0;
+ }
+ if (!rendered) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err < 0) {
+ return;
+ }
+ } else {
+ info->mOwnedByNativeWindow = true;
+ }
+
+ // Dequeue the next buffer from the native window.
+ BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+ if (nextBufInfo == 0) {
+ return;
+ }
+
+ // Give the buffer to the OMX node to fill.
+ fillOutputBuffer(nextBufInfo);
+ }
return;
}
}
@@ -3432,6 +3735,18 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("}\n");
}
+status_t OMXCodec::initNativeWindow() {
+ // Enable use of a GraphicBuffer as the output for this node. This must
+ // happen before getting the IndexParamPortDefinition parameter because it
+ // will affect the pixel format that the node reports.
+ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != 0) {
+ return err;
+ }
+
+ return OK;
+}
+
void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat = new MetaData;
mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3566,17 +3881,8 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
CHECK(!"Unknown compression format.");
}
- if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
- // This component appears to be lying to me.
- mOutputFormat->setInt32(
- kKeyWidth, (video_def->nFrameWidth + 15) & -16);
- mOutputFormat->setInt32(
- kKeyHeight, (video_def->nFrameHeight + 15) & -16);
- } else {
- mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
- mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
- }
-
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
break;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..092c33e 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -281,7 +281,7 @@ status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
mNumSyncSamples = U32_AT(&header[4]);
if (mNumSyncSamples < 2) {
- LOGW("Table of sync samples is empty or has only a single entry!");
+ LOGV("Table of sync samples is empty or has only a single entry!");
}
mSyncSamples = new uint32_t[mNumSyncSamples];
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 1629e9f..6c05e03 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -39,7 +39,7 @@ static bool FileHasAcceptableExtension(const char *extension) {
".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
- ".mkv", ".mka", ".webm", ".ts"
+ ".mkv", ".mka", ".webm", ".ts", ".fl"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height) {
+ LOGV("Construct VideoSourceDownSampler");
+ CHECK(width > 0);
+ CHECK(height > 0);
+
+ mRealVideoSource = videoSource;
+ mWidth = width;
+ mHeight = height;
+
+ mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+ CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+ CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+ if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+ // Change meta data for width and height.
+ CHECK(mWidth <= mRealSourceWidth);
+ CHECK(mHeight <= mRealSourceHeight);
+
+ mNeedDownSampling = true;
+ computeDownSamplingParameters();
+ mMeta->setInt32(kKeyWidth, mWidth);
+ mMeta->setInt32(kKeyHeight, mHeight);
+ } else {
+ mNeedDownSampling = false;
+ }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
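+// Keep every mDownSampleSkipX-th pixel horizontally and every
+// mDownSampleSkipY-th pixel vertically; the offsets absorb the leftover pixels
+// when the source dimensions are not exact multiples of the target ones.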
+void VideoSourceDownSampler::computeDownSamplingParameters() {
+ mDownSampleSkipX = mRealSourceWidth / mWidth;
+ mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+ mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+ mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
+
+void VideoSourceDownSampler::downSampleYUVImage(
+ const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else {
+        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+    // allocate a MediaBuffer for the down-sampled image and set up a canvas.
+ *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+ YUVImage yuvDownSampledImage(yuvFormat,
+ mWidth, mHeight,
+ (uint8_t *)(*buffer)->data());
+ YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mRealSourceWidth, mRealSourceHeight,
+ (uint8_t *)sourceBuffer.data());
+ yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+ mDownSampleSkipX, mDownSampleSkipY,
+ yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+ LOGV("start");
+ return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+ LOGV("stop");
+ return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+ LOGV("getFormat");
+ return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ LOGV("read");
+ MediaBuffer *realBuffer;
+ status_t err = mRealVideoSource->read(&realBuffer, options);
+
+ if (mNeedDownSampling) {
+ downSampleYUVImage(*realBuffer, buffer);
+
+ int64_t frameTime;
+ realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+ (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+        // We just want this buffer to be deleted when the encoder releases it,
+        // so don't add a reference to it, and set the observer to NULL.
+ (*buffer)->setObserver(NULL);
+
+ // The original buffer is no longer required. Release it.
+ realBuffer->release();
+ } else {
+ *buffer = realBuffer;
+ }
+
+ return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+ LOGV("pause");
+ return mRealVideoSource->pause();
+}
+
+} // namespace android
diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
index 52a391f..a6b179e 100644
--- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
@@ -33,6 +33,80 @@
namespace android {
+static status_t ConvertOmxAvcProfileToAvcSpecProfile(
+ int32_t omxProfile, AVCProfile* pvProfile) {
+ LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
+ switch (omxProfile) {
+ case OMX_VIDEO_AVCProfileBaseline:
+ *pvProfile = AVC_BASELINE;
+ return OK;
+ default:
+ LOGE("Unsupported omx profile: %d", omxProfile);
+ }
+ return BAD_VALUE;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+ int32_t omxLevel, AVCLevel *pvLevel) {
+ LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
+ AVCLevel level = AVC_LEVEL5_1;
+ switch (omxLevel) {
+ case OMX_VIDEO_AVCLevel1:
+ level = AVC_LEVEL1_B;
+ break;
+ case OMX_VIDEO_AVCLevel1b:
+ level = AVC_LEVEL1;
+ break;
+ case OMX_VIDEO_AVCLevel11:
+ level = AVC_LEVEL1_1;
+ break;
+ case OMX_VIDEO_AVCLevel12:
+ level = AVC_LEVEL1_2;
+ break;
+ case OMX_VIDEO_AVCLevel13:
+ level = AVC_LEVEL1_3;
+ break;
+ case OMX_VIDEO_AVCLevel2:
+ level = AVC_LEVEL2;
+ break;
+ case OMX_VIDEO_AVCLevel21:
+ level = AVC_LEVEL2_1;
+ break;
+ case OMX_VIDEO_AVCLevel22:
+ level = AVC_LEVEL2_2;
+ break;
+ case OMX_VIDEO_AVCLevel3:
+ level = AVC_LEVEL3;
+ break;
+ case OMX_VIDEO_AVCLevel31:
+ level = AVC_LEVEL3_1;
+ break;
+ case OMX_VIDEO_AVCLevel32:
+ level = AVC_LEVEL3_2;
+ break;
+ case OMX_VIDEO_AVCLevel4:
+ level = AVC_LEVEL4;
+ break;
+ case OMX_VIDEO_AVCLevel41:
+ level = AVC_LEVEL4_1;
+ break;
+ case OMX_VIDEO_AVCLevel42:
+ level = AVC_LEVEL4_2;
+ break;
+ case OMX_VIDEO_AVCLevel5:
+ level = AVC_LEVEL5;
+ break;
+ case OMX_VIDEO_AVCLevel51:
+ level = AVC_LEVEL5_1;
+ break;
+ default:
+ LOGE("Unknown omx level: %d", omxLevel);
+ return BAD_VALUE;
+ }
+ *pvLevel = level;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -231,10 +305,16 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
mEncParams->level = AVC_LEVEL3_2;
int32_t profile, level;
if (meta->findInt32(kKeyVideoProfile, &profile)) {
- mEncParams->profile = (AVCProfile) profile;
+ if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
+ profile, &mEncParams->profile)) {
+ return BAD_VALUE;
+ }
}
if (meta->findInt32(kKeyVideoLevel, &level)) {
- mEncParams->level = (AVCLevel) level;
+ if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
+ level, &mEncParams->level)) {
+ return BAD_VALUE;
+ }
}
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index a011137..d5a5313 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -32,6 +32,104 @@
namespace android {
+static status_t ConvertOmxProfileLevel(
+ MP4EncodingMode mode,
+ int32_t omxProfile,
+ int32_t omxLevel,
+ ProfileLevelType* pvProfileLevel) {
+ LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel);
+ ProfileLevelType profileLevel;
+ if (mode == H263_MODE) {
+ switch (omxProfile) {
+ case OMX_VIDEO_H263ProfileBaseline:
+ if (omxLevel > OMX_VIDEO_H263Level45) {
+ LOGE("Unsupported level (%d) for H263", omxLevel);
+ return BAD_VALUE;
+ } else {
+ LOGW("PV does not support level configuration for H263");
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ }
+ default:
+ LOGE("Unsupported profile (%d) for H263", omxProfile);
+ return BAD_VALUE;
+ }
+ } else { // MPEG4
+ switch (omxProfile) {
+ case OMX_VIDEO_MPEG4ProfileSimple:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = SIMPLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple profile",
+ omxLevel);
+ return BAD_VALUE;
+                }
+                break;
+ case OMX_VIDEO_MPEG4ProfileSimpleScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+                }
+                break;
+ case OMX_VIDEO_MPEG4ProfileCore:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "profile", omxLevel);
+ return BAD_VALUE;
+                }
+                break;
+ case OMX_VIDEO_MPEG4ProfileCoreScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+                }
+                break;
+ default:
+ LOGE("Unsupported MPEG4 profile (%d)", omxProfile);
+ return BAD_VALUE;
+ }
+ }
+
+ *pvProfileLevel = profileLevel;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -150,9 +248,14 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) {
// If profile and level setting is not correct, failure
// is reported when the encoder is initialized.
mEncParams->profile_level = CORE_PROFILE_LEVEL2;
- int32_t profileLevel;
- if (meta->findInt32(kKeyVideoLevel, &profileLevel)) {
- mEncParams->profile_level = (ProfileLevelType)profileLevel;
+ int32_t profile, level;
+ if (meta->findInt32(kKeyVideoProfile, &profile) &&
+ meta->findInt32(kKeyVideoLevel, &level)) {
+ if (OK != ConvertOmxProfileLevel(
+ mEncParams->encMode, profile, level,
+ &mEncParams->profile_level)) {
+ return BAD_VALUE;
+ }
}
mEncParams->packetSize = 32;
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..ef2dba0 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \
SoftwareRenderer.cpp
LOCAL_C_INCLUDES := \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/hardware/msm7k
LOCAL_SHARED_LIBRARIES := \
libbinder \
@@ -17,6 +18,11 @@ LOCAL_SHARED_LIBRARIES := \
libsurfaceflinger_client\
libcamera_client
+# ifeq ($(TARGET_BOARD_PLATFORM),msm7k)
+ifeq ($(TARGET_PRODUCT),passion)
+ LOCAL_CFLAGS += -DHAS_YCBCR420_SP_ADRENO
+endif
+
LOCAL_MODULE:= libstagefright_color_conversion
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index a6dbf69..662a84a 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -22,65 +22,182 @@
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapPmem.h>
#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+// XXX: Temporary hack to allow referencing the _ADRENO pixel format here.
+#include <libgralloc-qsd8k/gralloc_priv.h>
namespace android {
SoftwareRenderer::SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight)
: mColorFormat(colorFormat),
- mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
- mISurface(surface),
+ mConverter(NULL),
+ mYUVMode(None),
+ mSurface(surface),
mDisplayWidth(displayWidth),
mDisplayHeight(displayHeight),
mDecodedWidth(decodedWidth),
- mDecodedHeight(decodedHeight),
- mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565
- mIndex(0) {
- mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
- if (mMemoryHeap->heapID() < 0) {
- LOGI("Creating physical memory heap failed, reverting to regular heap.");
- mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
- } else {
- sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
- pmemHeap->slap();
- mMemoryHeap = pmemHeap;
+ mDecodedHeight(decodedHeight) {
+ LOGI("input format = %d", mColorFormat);
+ LOGI("display = %d x %d, decoded = %d x %d",
+ mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);
+
+ mDecodedWidth = mDisplayWidth;
+ mDecodedHeight = mDisplayHeight;
+
+ int halFormat;
+ switch (mColorFormat) {
+#if HAS_YCBCR420_SP_ADRENO
+ case OMX_COLOR_FormatYUV420Planar:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420ToYUV420sp;
+ break;
+ }
+
+ case 0x7fa30c00:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420spToYUV420sp;
+ break;
+ }
+#endif
+
+ default:
+ halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
+ break;
}
- CHECK(mISurface.get() != NULL);
+ CHECK(mSurface.get() != NULL);
CHECK(mDecodedWidth > 0);
CHECK(mDecodedHeight > 0);
- CHECK(mMemoryHeap->heapID() >= 0);
- CHECK(mConverter.isValid());
+ CHECK(mConverter == NULL || mConverter->isValid());
- ISurface::BufferHeap bufferHeap(
- mDisplayWidth, mDisplayHeight,
- mDecodedWidth, mDecodedHeight,
- PIXEL_FORMAT_RGB_565,
- mMemoryHeap);
+ CHECK_EQ(0,
+ native_window_set_usage(
+ mSurface.get(),
+ GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+ | GRALLOC_USAGE_HW_TEXTURE));
- status_t err = mISurface->registerBuffers(bufferHeap);
- CHECK_EQ(err, OK);
+ CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+    // Width must be a multiple of 32???
+ CHECK_EQ(0, native_window_set_buffers_geometry(
+ mSurface.get(), mDecodedWidth, mDecodedHeight,
+ halFormat));
}
SoftwareRenderer::~SoftwareRenderer() {
- mISurface->unregisterBuffers();
+ delete mConverter;
+ mConverter = NULL;
+}
+
+static inline size_t ALIGN(size_t x, size_t alignment) {
+ return (x + alignment - 1) & ~(alignment - 1);
}
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
- size_t offset = mIndex * mFrameSize;
- void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+ android_native_buffer_t *buf;
+ int err;
+ if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+ LOGW("Surface::dequeueBuffer returned error %d", err);
+ return;
+ }
+
+ CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
+
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+ Rect bounds(mDecodedWidth, mDecodedHeight);
- mConverter.convert(
- mDecodedWidth, mDecodedHeight,
- data, 0, dst, 2 * mDecodedWidth);
+ void *dst;
+ CHECK_EQ(0, mapper.lock(
+ buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
- mISurface->postBuffer(offset);
- mIndex = 1 - mIndex;
+ if (mConverter) {
+ mConverter->convert(
+ mDecodedWidth, mDecodedHeight,
+ data, 0, dst, buf->stride * 2);
+ } else if (mYUVMode == YUV420spToYUV420sp) {
+ // Input and output are both YUV420sp, but the alignment requirements
+ // are different.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) & ~1;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcUV = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
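+        // The chroma plane starts after the luma plane: luma height rounded up
+        // to 32 rows times the buffer stride, with the result aligned to 4096
+        // bytes, as the Adreno YUV420sp layout expects.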
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ memcpy(dstUV, srcUV, (mDecodedWidth + 1) & ~1);
+ srcUV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else if (mYUVMode == YUV420ToYUV420sp) {
+        // Input is YUV420 planar, output is YUV420sp; adhere to the proper
+        // alignment requirements.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) / 2;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcU = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
+ const uint8_t *srcV =
+ srcU + ((mDecodedWidth + 1) / 2) * ((mDecodedHeight + 1) / 2);
+
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
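+        // Interleave the planar U and V sources as V,U byte pairs (Cr first)
+        // to produce the semi-planar YCrCb output.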
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ for (size_t j = 0; j < (mDecodedWidth + 1) / 2; ++j) {
+ dstUV[2 * j + 1] = srcU[j];
+ dstUV[2 * j] = srcV[j];
+ }
+ srcU += srcUVStride;
+ srcV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else {
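+        // No converter and no YUV re-packing mode; copy the frame verbatim.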
+ memcpy(dst, data, size);
+ }
+
+ CHECK_EQ(0, mapper.unlock(buf->handle));
+
+ if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+ LOGW("Surface::queueBuffer returned error %d", err);
+ }
+ buf = NULL;
}
} // namespace android
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 4526bf1..a0a7436 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -80,6 +80,7 @@ struct AwesomePlayer {
bool isPlaying() const;
void setISurface(const sp<ISurface> &isurface);
+ void setSurface(const sp<Surface> &surface);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
@@ -114,6 +115,11 @@ private:
AUDIO_AT_EOS = 256,
VIDEO_AT_EOS = 512,
AUTO_LOOPING = 1024,
+
+ // We are basically done preparing but are currently buffering
+ // sufficient data to begin playback and finish the preparation phase
+ // for good.
+ PREPARING_CONNECTED = 2048,
};
mutable Mutex mLock;
@@ -125,6 +131,7 @@ private:
wp<MediaPlayerBase> mListener;
sp<ISurface> mISurface;
+ sp<Surface> mSurface;
sp<MediaPlayerBase::AudioSink> mAudioSink;
SystemTimeSource mSystemTimeSource;
@@ -233,6 +240,7 @@ private:
status_t seekTo_l(int64_t timeUs);
status_t pause_l(bool at_eos = false);
void initRenderer_l();
+ void notifyVideoSize_l();
void seekAudioIfNecessary_l();
void cancelPlayerEvents(bool keepBufferingGoing = false);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index c99da59..5a6c96f9 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,20 @@ public:
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size);
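+    // Per-port toggles: enableGraphicBuffers lets a port use GraphicBuffers
+    // directly; storeMetaDataInBuffers passes buffer metadata instead of
+    // raw frame data.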
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..86c102c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,17 @@ struct OMXNodeInstance {
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
+ status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
+ status_t useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);
@@ -125,4 +132,3 @@ private:
} // namespace android
#endif // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 9eed089..8d58056 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -24,14 +24,14 @@
namespace android {
-class ISurface;
+class Surface;
class MemoryHeapBase;
class SoftwareRenderer : public VideoRenderer {
public:
SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight);
@@ -41,14 +41,18 @@ public:
const void *data, size_t size, void *platformPrivate);
private:
+ enum YUVMode {
+ None,
+ YUV420ToYUV420sp,
+ YUV420spToYUV420sp,
+ };
+
OMX_COLOR_FORMATTYPE mColorFormat;
- ColorConverter mConverter;
- sp<ISurface> mISurface;
+ ColorConverter *mConverter;
+ YUVMode mYUVMode;
+ sp<Surface> mSurface;
size_t mDisplayWidth, mDisplayHeight;
size_t mDecodedWidth, mDecodedHeight;
- size_t mFrameSize;
- sp<MemoryHeapBase> mMemoryHeap;
- int mIndex;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 7c7d69e..d16476d 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -252,7 +252,7 @@ void BlockIterator::reset() {
}
void BlockIterator::seek(int64_t seekTimeUs) {
- mCluster = mSegment->GetCluster(seekTimeUs * 1000ll);
+ mCluster = mSegment->FindCluster(seekTimeUs * 1000ll);
mBlockEntry = mCluster != NULL ? mCluster->GetFirst() : NULL;
while (!eos() && block()->GetTrackNumber() != mTrackNum) {
@@ -476,7 +476,7 @@ void MatroskaExtractor::addTracks() {
size_t codecPrivateSize;
const unsigned char *codecPrivate =
- track->GetCodecPrivate(&codecPrivateSize);
+ track->GetCodecPrivate(codecPrivateSize);
enum { VIDEO_TRACK = 1, AUDIO_TRACK = 2 };
diff --git a/media/libstagefright/matroska/mkvparser.cpp b/media/libstagefright/matroska/mkvparser.cpp
index 4e51004..455b1d6 100644
--- a/media/libstagefright/matroska/mkvparser.cpp
+++ b/media/libstagefright/matroska/mkvparser.cpp
@@ -1,3103 +1,4511 @@
-#include "mkvparser.hpp"
-#include <cassert>
-#include <cstring>
-
-mkvparser::IMkvReader::~IMkvReader()
-{
-}
-
-long long mkvparser::ReadUInt(IMkvReader* pReader, long long pos, long& len)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(pos < available);
- assert((available - pos) >= 1); //assume here max u-int len is 8
-
- unsigned char b;
-
- hr = pReader->Read(pos, 1, &b);
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- if (b & 0x80) //1000 0000
- {
- len = 1;
- b &= 0x7F; //0111 1111
- }
- else if (b & 0x40) //0100 0000
- {
- len = 2;
- b &= 0x3F; //0011 1111
- }
- else if (b & 0x20) //0010 0000
- {
- len = 3;
- b &= 0x1F; //0001 1111
- }
- else if (b & 0x10) //0001 0000
- {
- len = 4;
- b &= 0x0F; //0000 1111
- }
- else if (b & 0x08) //0000 1000
- {
- len = 5;
- b &= 0x07; //0000 0111
- }
- else if (b & 0x04) //0000 0100
- {
- len = 6;
- b &= 0x03; //0000 0011
- }
- else if (b & 0x02) //0000 0010
- {
- len = 7;
- b &= 0x01; //0000 0001
- }
- else
- {
- assert(b & 0x01); //0000 0001
- len = 8;
- b = 0; //0000 0000
- }
-
- assert((available - pos) >= len);
-
- long long result = b;
- ++pos;
- for (long i = 1; i < len; ++i)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-long long mkvparser::GetUIntLength(
- IMkvReader* pReader,
- long long pos,
- long& len)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- if (pos >= available)
- return pos; //too few bytes available
-
- unsigned char b;
-
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- if (b == 0) //we can't handle u-int values larger than 8 bytes
- return E_FILE_FORMAT_INVALID;
-
- unsigned char m = 0x80;
- len = 1;
-
- while (!(b & m))
- {
- m >>= 1;
- ++len;
- }
-
- return 0; //success
-}
-
-
-long long mkvparser::SyncReadUInt(
- IMkvReader* pReader,
- long long pos,
- long long stop,
- long& len)
-{
- assert(pReader);
-
- if (pos >= stop)
- return E_FILE_FORMAT_INVALID;
-
- unsigned char b;
-
- long hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
-
- if (b == 0) //we can't handle u-int values larger than 8 bytes
- return E_FILE_FORMAT_INVALID;
-
- unsigned char m = 0x80;
- len = 1;
-
- while (!(b & m))
- {
- m >>= 1;
- ++len;
- }
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- long long result = b & (~m);
- ++pos;
-
- for (int i = 1; i < len; ++i)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
-
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-long long mkvparser::UnserializeUInt(
- IMkvReader* pReader,
- long long pos,
- long long size)
-{
- assert(pReader);
- assert(pos >= 0);
- assert(size > 0);
- assert(size <= 8);
-
- long long result = 0;
-
- for (long long i = 0; i < size; ++i)
- {
- unsigned char b;
-
- const long hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-float mkvparser::Unserialize4Float(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- assert((pos + 4) <= available);
-
- float result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 4;
-
- for (;;)
- {
- hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-
-double mkvparser::Unserialize8Double(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- double result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 8;
-
- for (;;)
- {
- const long hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-signed char mkvparser::Unserialize1SInt(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr == 0);
- assert(available <= total);
- assert(pos < available);
-
- signed char result;
-
- hr = pReader->Read(pos, 1, (unsigned char*)&result);
- assert(hr == 0);
-
- return result;
-}
-
-short mkvparser::Unserialize2SInt(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- assert((pos + 2) <= available);
-
- short result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 2;
-
- for (;;)
- {
- hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- long long& val)
-
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0);
- assert(size <= 8);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
-
- val = UnserializeUInt(pReader, pos, size);
- assert(val >= 0);
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- char*& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size_) <= available);
-
- const size_t size = static_cast<size_t>(size_);
- val = new char[size+1];
-
- for (size_t i = 0; i < size; ++i)
- {
- char c;
-
- hr = pReader->Read(pos + i, 1, (unsigned char*)&c);
- assert(hr == 0L);
-
- val[i] = c;
-
- if (c == '\0')
- break;
-
- }
-
- val[size] = '\0';
- pos += size_; //consume size of payload
-
- return true;
-}
-
-#if 0
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id,
- wchar_t*& val)
-{
- char* str;
-
- if (!Match(pReader, pos, id, str))
- return false;
-
- const size_t size = mbstowcs(NULL, str, 0);
-
- if (size == 0)
- val = NULL;
- else
- {
- val = new wchar_t[size+1];
- mbstowcs(val, str, size);
- val[size] = L'\0';
- }
-
- delete[] str;
- return true;
-}
-#endif
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- unsigned char*& val,
- size_t *optionalSize)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size_) <= available);
-
- const size_t size = static_cast<size_t>(size_);
- val = new unsigned char[size];
-
- if (optionalSize) {
- *optionalSize = size;
- }
-
- for (size_t i = 0; i < size; ++i)
- {
- unsigned char b;
-
- hr = pReader->Read(pos + i, 1, &b);
- assert(hr == 0L);
-
- val[i] = b;
- }
-
- pos += size_; //consume size of payload
- return true;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- double& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- long idlen;
- const long long id = ReadUInt(pReader, pos, idlen);
- assert(id >= 0); //TODO
-
- if ((unsigned long)id != id_)
- return false;
-
- long sizelen;
- const long long size = ReadUInt(pReader, pos + idlen, sizelen);
-
- switch (size)
- {
- case 4:
- case 8:
- break;
- default:
- return false;
- }
-
- pos += idlen + sizelen; //consume id and size fields
- assert((pos + size) <= available);
-
- if (size == 4)
- val = Unserialize4Float(pReader, pos);
- else
- {
- assert(size == 8);
- val = Unserialize8Double(pReader, pos);
- }
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- short& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size <= 2);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size) <= available);
-
- //TODO:
- // Generalize this to work for any size signed int
- if (size == 1)
- val = Unserialize1SInt(pReader, pos);
- else
- val = Unserialize2SInt(pReader, pos);
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-
-namespace mkvparser
-{
-
-EBMLHeader::EBMLHeader():
- m_docType(NULL)
-{
-}
-
-EBMLHeader::~EBMLHeader()
-{
- delete[] m_docType;
-}
-
-long long EBMLHeader::Parse(
- IMkvReader* pReader,
- long long& pos)
-{
- assert(pReader);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
-
- if (hr < 0)
- return hr;
-
- pos = 0;
- long long end = (1024 < available)? 1024: available;
-
- for (;;)
- {
- unsigned char b = 0;
-
- while (pos < end)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (b == 0x1A)
- break;
-
- ++pos;
- }
-
- if (b != 0x1A)
- {
- if ((pos >= 1024) ||
- (available >= total) ||
- ((total - available) < 5))
- return -1;
-
- return available + 5; //5 = 4-byte ID + 1st byte of size
- }
-
- if ((total - pos) < 5)
- return E_FILE_FORMAT_INVALID;
-
- if ((available - pos) < 5)
- return pos + 5; //try again later
-
- long len;
-
- const long long result = ReadUInt(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- if (result == 0x0A45DFA3) //ReadId masks-off length indicator bits
- {
- assert(len == 4);
- pos += len;
- break;
- }
-
- ++pos; //throw away just the 0x1A byte, and try again
- }
-
- long len;
- long long result = GetUIntLength(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- if (result > 0) //need more data
- return result;
-
- assert(len > 0);
- assert(len <= 8);
-
- if ((total - pos) < len)
- return E_FILE_FORMAT_INVALID;
- if ((available - pos) < len)
- return pos + len; //try again later
-
- result = ReadUInt(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- pos += len; //consume u-int
-
- if ((total - pos) < result)
- return E_FILE_FORMAT_INVALID;
-
- if ((available - pos) < result)
- return pos + result;
-
- end = pos + result;
-
- m_version = 1;
- m_readVersion = 1;
- m_maxIdLength = 4;
- m_maxSizeLength = 8;
- m_docTypeVersion = 1;
- m_docTypeReadVersion = 1;
-
- while (pos < end)
- {
- if (Match(pReader, pos, 0x0286, m_version))
- ;
- else if (Match(pReader, pos, 0x02F7, m_readVersion))
- ;
- else if (Match(pReader, pos, 0x02F2, m_maxIdLength))
- ;
- else if (Match(pReader, pos, 0x02F3, m_maxSizeLength))
- ;
- else if (Match(pReader, pos, 0x0282, m_docType))
- ;
- else if (Match(pReader, pos, 0x0287, m_docTypeVersion))
- ;
- else if (Match(pReader, pos, 0x0285, m_docTypeReadVersion))
- ;
- else
- {
- result = ReadUInt(pReader, pos, len);
- assert(result > 0);
- assert(len > 0);
- assert(len <= 8);
-
- pos += len;
- assert(pos < end);
-
- result = ReadUInt(pReader, pos, len);
- assert(result >= 0);
- assert(len > 0);
- assert(len <= 8);
-
- pos += len + result;
- assert(pos <= end);
- }
- }
-
- assert(pos == end);
-
- return 0;
-}
-
-
-Segment::Segment(
- IMkvReader* pReader,
- long long start,
- long long size) :
- m_pReader(pReader),
- m_start(start),
- m_size(size),
- m_pos(start),
- m_pInfo(NULL),
- m_pTracks(NULL),
- m_clusterCount(0)
- //m_clusterNumber(0)
-{
-}
-
-
-Segment::~Segment()
-{
- Cluster** i = m_clusters;
- Cluster** j = m_clusters + m_clusterCount;
-
- while (i != j)
- {
- Cluster* p = *i++;
- assert(p);
- delete p;
- }
-
- delete[] m_clusters;
-
- delete m_pTracks;
- delete m_pInfo;
-}
-
-
-long long Segment::CreateInstance(
- IMkvReader* pReader,
- long long pos,
- Segment*& pSegment)
-{
- assert(pReader);
- assert(pos >= 0);
-
- pSegment = NULL;
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- //I would assume that in practice this loop would execute
- //exactly once, but we allow for other elements (e.g. Void)
- //to immediately follow the EBML header. This is fine for
- //the source filter case (since the entire file is available),
- //but in the splitter case over a network we should probably
- //just give up early. We could for example decide only to
- //execute this loop a maximum of, say, 10 times.
-
- while (pos < total)
- {
- //Read ID
-
- long len;
- long long result = GetUIntLength(pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > total)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- //TODO: if we liberalize the behavior of ReadUInt, we can
- //probably eliminate having to use GetUIntLength here.
- const long long id = ReadUInt(pReader, pos, len);
-
- if (id < 0) //error
- return id;
-
- pos += len; //consume ID
-
- //Read Size
-
- result = GetUIntLength(pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > total)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- //TODO: if we liberalize the behavior of ReadUInt, we can
- //probably eliminate having to use GetUIntLength here.
- const long long size = ReadUInt(pReader, pos, len);
-
- if (size < 0)
- return size;
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > total)
- return E_FILE_FORMAT_INVALID;
-
- if (id == 0x08538067) //Segment ID
- {
- pSegment = new Segment(pReader, pos, size);
- assert(pSegment); //TODO
-
- return 0; //success
- }
-
- pos += size; //consume payload
- }
-
- assert(pos == total);
-
- pSegment = new Segment(pReader, pos, 0);
- assert(pSegment); //TODO
-
- return 0; //success (sort of)
-}
-
-
-long long Segment::ParseHeaders()
-{
- //Outermost (level 0) segment object has been constructed,
- //and pos designates start of payload. We need to find the
- //inner (level 1) elements.
- long long total, available;
-
- long hr = m_pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- const long long stop = m_start + m_size;
- assert(stop <= total);
- assert(m_pos <= stop);
-
- bool bQuit = false;
- while ((m_pos < stop) && !bQuit)
- {
- long long pos = m_pos;
-
- long len;
- long long result = GetUIntLength(m_pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- const long long idpos = pos;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return id;
-
- pos += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- const long long size = ReadUInt(m_pReader, pos, len);
-
- if (size < 0)
- return size;
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > stop)
- return E_FILE_FORMAT_INVALID;
-
- //We read EBML elements either in total or nothing at all.
-
- if ((pos + size) > available)
- return pos + size;
-
- if (id == 0x0549A966) //Segment Info ID
- {
- assert(m_pInfo == NULL);
- m_pInfo = new SegmentInfo(this, pos, size);
- assert(m_pInfo); //TODO
-
- if (m_pTracks)
- bQuit = true;
- }
- else if (id == 0x0654AE6B) //Tracks ID
- {
- assert(m_pTracks == NULL);
- m_pTracks = new Tracks(this, pos, size);
- assert(m_pTracks); //TODO
-
- if (m_pInfo)
- bQuit = true;
- }
- else if (id == 0x0F43B675) //Cluster ID
- {
-#if 0
- if (m_pInfo == NULL) //TODO: liberalize
- ;
- else if (m_pTracks == NULL)
- ;
- else
- //ParseCluster(idpos, pos, size);
- Cluster::Parse(this, m_clusters, pos, size);
-#endif
- bQuit = true;
- }
-
- m_pos = pos + size; //consume payload
- }
-
- assert(m_pos <= stop);
-
- return 0; //success
-}
-
-
-long Segment::ParseCluster(Cluster*& pCluster, long long& pos_) const
-{
- pCluster = NULL;
- pos_ = -1;
-
- const long long stop = m_start + m_size;
- assert(m_pos <= stop);
-
- long long pos = m_pos;
- long long off = -1;
-
-
- while (pos < stop)
- {
- long len;
- const long long idpos = pos;
-
- const long long id = SyncReadUInt(m_pReader, pos, stop, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- if (id == 0)
- return E_FILE_FORMAT_INVALID;
-
- pos += len; //consume id
- assert(pos < stop);
-
- const long long size = SyncReadUInt(m_pReader, pos, stop, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- pos += len; //consume size
- assert(pos <= stop);
-
- if (size == 0) //weird
- continue;
-
- //pos now points to start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
-
- if (off >= 0)
- {
- pos_ = idpos;
- break;
- }
-
- if (id == 0x0F43B675) //Cluster ID
- off = idpos - m_start;
- }
-
- Segment* const this_ = const_cast<Segment*>(this);
- const size_t idx = m_clusterCount;
-
- if (pos >= stop)
- {
- pos_ = stop;
-
-#if 0
- if (off < 0)
- {
- pCluster = Cluster::CreateEndOfStream(this_, idx);
- return 1L;
- }
-#else
- if (off < 0)
- return 1L;
-#endif
-
- //Reading 0 bytes at pos might work too -- it would depend
- //on how the reader is implemented.
-
- unsigned char b;
-
- const long hr = m_pReader->Read(pos - 1, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
- }
-
- assert(off >= 0);
- assert(pos_ >= m_start);
- assert(pos_ <= stop);
-
- pCluster = Cluster::Parse(this_, idx, off);
- return 0L;
-}
-
-
-bool Segment::AddCluster(Cluster* pCluster, long long pos)
-{
- assert(pos >= m_start);
-
- const long long stop = m_start + m_size;
- assert(pos <= stop);
-
- if (pCluster)
- m_clusters[pos] = pCluster;
-
-    m_pos = pos; //m_pos >= stop is how we know we have all clusters
-
- return (pos >= stop);
-}
-
-
-long Segment::Load()
-{
- //Outermost (level 0) segment object has been constructed,
- //and pos designates start of payload. We need to find the
- //inner (level 1) elements.
- const long long stop = m_start + m_size;
-#ifdef _DEBUG
- {
- long long total, available;
-
- long hr = m_pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available >= total);
- assert(stop <= total);
- }
-#endif
- long long index = m_pos;
-
- m_clusterCount = 0;
-
- while (index < stop)
- {
- long len = 0;
-
- long long result = GetUIntLength(m_pReader, index, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((index + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long idpos = index;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- index += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, index, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((index + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long size = ReadUInt(m_pReader, index, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- index += len; //consume length of size of element
-
- if (id == 0x0F43B675) // Cluster ID
- break;
-
- if (id == 0x014D9B74) // SeekHead ID
- {
- ParseSeekHead(index, size, NULL);
- break;
- }
- index += size;
- }
-
- if (m_clusterCount == 0)
- return -1L;
-
- while (m_pos < stop)
- {
- long long pos = m_pos;
-
- long len;
-
- long long result = GetUIntLength(m_pReader, pos, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long idpos = pos;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- pos += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, pos, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long size = ReadUInt(m_pReader, pos, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if (id == 0x0F43B675) //Cluster ID
- break;
-
- if (id == 0x014D9B74) //SeekHead ID
- {
- m_clusters = new Cluster*[m_clusterCount];
- size_t index = 0;
-
- ParseSeekHead(pos, size, &index);
- assert(index == m_clusterCount);
- }
- else if (id == 0x0549A966) //Segment Info ID
- {
- assert(m_pInfo == NULL);
- m_pInfo = new SegmentInfo(this, pos, size);
- assert(m_pInfo); //TODO
- }
- else if (id == 0x0654AE6B) //Tracks ID
- {
- assert(m_pTracks == NULL);
- m_pTracks = new Tracks(this, pos, size);
- assert(m_pTracks); //TODO
- }
-
- m_pos = pos + size; //consume payload
- }
-
- assert(m_clusters);
-
- //TODO: see notes above. This check is here (temporarily) to ensure
- //that the first seekhead has entries for the clusters (because that's
- //when they're loaded). In case we are given a file that lists the
- //clusters in a second seekhead, the worst thing that happens is that
-    //we treat this as an invalid file (which is better than simply
- //asserting somewhere). But that's only a work-around. What we need
- //to do is be able to handle having multiple seekheads, and having
- //clusters listed somewhere besides the first seekhead.
- //
- //if (m_clusters == NULL)
- // return E_FILE_FORMAT_INVALID;
-
- //NOTE: we stop parsing when we reach the first cluster, under the
- //assumption all clusters are named in some SeekHead. Clusters
- //will have been (pre)loaded, so we indicate that we have all clusters
- //by adjusting the parse position:
- m_pos = stop; //means "we have all clusters"
-
- return 0L;
-}
-
-
-void Segment::ParseSeekHead(long long start, long long size_, size_t* pIndex)
-{
- long long pos = start;
- const long long stop = start + size_;
- while (pos < stop)
- {
- long len;
-
- const long long id = ReadUInt(m_pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume ID
-
- const long long size = ReadUInt(m_pReader, pos, len);
- assert(size >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume Size field
- assert((pos + size) <= stop);
-
- if (id == 0x0DBB) //SeekEntry ID
- ParseSeekEntry(pos, size, pIndex);
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
-
- assert(pos == stop);
-}
-
-
-void Segment::ParseSecondarySeekHead(long long off, size_t* pIndex)
-{
- assert(off >= 0);
- assert(off < m_size);
-
- long long pos = m_start + off;
- const long long stop = m_start + m_size;
-
- long len;
-
- long long result = GetUIntLength(m_pReader, pos, len);
- assert(result == 0);
- assert((pos + len) <= stop);
-
- const long long idpos = pos;
-
- const long long id = ReadUInt(m_pReader, idpos, len);
- assert(id == 0x014D9B74); //SeekHead ID
-
- pos += len; //consume ID
- assert(pos < stop);
-
- //Read Size
-
- result = GetUIntLength(m_pReader, pos, len);
- assert(result == 0);
- assert((pos + len) <= stop);
-
- const long long size = ReadUInt(m_pReader, pos, len);
- assert(size >= 0);
-
- pos += len; //consume length of size of element
- assert((pos + size) <= stop);
-
- //Pos now points to start of payload
-
- ParseSeekHead(pos, size, pIndex);
-}
-
-
-void Segment::ParseSeekEntry(long long start, long long size_, size_t* pIndex)
-{
- long long pos = start;
-
- const long long stop = start + size_;
-
- long len;
-
- const long long seekIdId = ReadUInt(m_pReader, pos, len);
- //seekIdId;
- assert(seekIdId == 0x13AB); //SeekID ID
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long seekIdSize = ReadUInt(m_pReader, pos, len);
- assert(seekIdSize >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- const long long seekId = ReadUInt(m_pReader, pos, len); //payload
- assert(seekId >= 0);
- assert(len == seekIdSize);
- assert((pos + len) <= stop);
-
- pos += seekIdSize; //consume payload
-
- const long long seekPosId = ReadUInt(m_pReader, pos, len);
- //seekPosId;
- assert(seekPosId == 0x13AC); //SeekPos ID
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long seekPosSize = ReadUInt(m_pReader, pos, len);
- assert(seekPosSize >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume size
- assert((pos + seekPosSize) <= stop);
-
- const long long seekOff = UnserializeUInt(m_pReader, pos, seekPosSize);
- assert(seekOff >= 0);
- assert(seekOff < m_size);
-
- pos += seekPosSize; //consume payload
- assert(pos == stop);
-
- const long long seekPos = m_start + seekOff;
- assert(seekPos < (m_start + m_size));
-
- if (seekId == 0x0F43B675) //cluster id
- {
- if (pIndex == NULL)
- ++m_clusterCount;
- else
- {
- assert(m_clusters);
- assert(m_clusterCount > 0);
-
- size_t& index = *pIndex;
- assert(index < m_clusterCount);
-
- Cluster*& pCluster = m_clusters[index];
-
- pCluster = Cluster::Parse(this, index, seekOff);
- assert(pCluster); //TODO
-
- ++index;
- }
- }
- else if (seekId == 0x014D9B74) //SeekHead ID
- {
- ParseSecondarySeekHead(seekOff, pIndex);
- }
-}
-
-
-long long Segment::Unparsed() const
-{
- const long long stop = m_start + m_size;
-
- const long long result = stop - m_pos;
- assert(result >= 0);
-
- return result;
-}
-
-
-#if 0 //NOTE: too inefficient
-long long Segment::Load(long long time_ns)
-{
- if (Unparsed() <= 0)
- return 0;
-
- while (m_clusters.empty())
- {
- const long long result = Parse();
-
- if (result) //error, or not enough bytes available
- return result;
-
- if (Unparsed() <= 0)
- return 0;
- }
-
- while (m_clusters.back()->GetTime() < time_ns)
- {
- const long long result = Parse();
-
- if (result) //error, or not enough bytes available
- return result;
-
- if (Unparsed() <= 0)
- return 0;
- }
-
- return 0;
-}
-#endif
-
-
-Cluster* Segment::GetFirst()
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- Cluster* const pCluster = m_clusters[0];
- assert(pCluster);
-
- return pCluster;
-}
-
-
-Cluster* Segment::GetLast()
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- const size_t idx = m_clusterCount - 1;
- Cluster* const pCluster = m_clusters[idx];
- assert(pCluster);
-
- return pCluster;
-}
-
-
-unsigned long Segment::GetCount() const
-{
- //TODO: m_clusterCount should not be long long.
- return static_cast<unsigned long>(m_clusterCount);
-}
-
-
-Cluster* Segment::GetNext(const Cluster* pCurr)
-{
- assert(pCurr);
- assert(pCurr != &m_eos);
- assert(m_clusters);
- assert(m_clusterCount > 0);
-
- size_t idx = pCurr->m_index;
- assert(idx < m_clusterCount);
- assert(pCurr == m_clusters[idx]);
-
- idx++;
-
- if (idx >= m_clusterCount)
- return &m_eos;
-
- Cluster* const pNext = m_clusters[idx];
- assert(pNext);
-
- return pNext;
-}
-
-
-Cluster* Segment::GetCluster(long long time_ns)
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- {
- Cluster* const pCluster = m_clusters[0];
- assert(pCluster);
- assert(pCluster->m_index == 0);
-
- if (time_ns <= pCluster->GetTime())
- return pCluster;
- }
-
- //Binary search of cluster array
-
- size_t i = 0;
- size_t j = m_clusterCount;
-
- while (i < j)
- {
- //INVARIANT:
- //[0, i) <= time_ns
- //[i, j) ?
- //[j, m_clusterCount) > time_ns
-
- const size_t k = i + (j - i) / 2;
- assert(k < m_clusterCount);
-
- Cluster* const pCluster = m_clusters[k];
- assert(pCluster);
- assert(pCluster->m_index == k);
-
- const long long t = pCluster->GetTime();
-
- if (t <= time_ns)
- i = k + 1;
- else
- j = k;
-
- assert(i <= j);
- }
-
- assert(i == j);
- assert(i > 0);
- assert(i <= m_clusterCount);
-
- const size_t k = i - 1;
-
- Cluster* const pCluster = m_clusters[k];
- assert(pCluster);
- assert(pCluster->m_index == k);
- assert(pCluster->GetTime() <= time_ns);
-
- return pCluster;
-}
-
-
-Tracks* Segment::GetTracks() const
-{
- return m_pTracks;
-}
-
-
-const SegmentInfo* const Segment::GetInfo() const
-{
- return m_pInfo;
-}
-
-
-long long Segment::GetDuration() const
-{
- assert(m_pInfo);
- return m_pInfo->GetDuration();
-}
-
-
-SegmentInfo::SegmentInfo(Segment* pSegment, long long start, long long size_) :
- m_pSegment(pSegment),
- m_start(start),
- m_size(size_),
- m_pMuxingAppAsUTF8(NULL),
- m_pWritingAppAsUTF8(NULL),
- m_pTitleAsUTF8(NULL)
-{
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size_;
-
- m_timecodeScale = 1000000;
- m_duration = 0;
-
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x0AD7B1, m_timecodeScale))
- assert(m_timecodeScale > 0);
-
- else if (Match(pReader, pos, 0x0489, m_duration))
- assert(m_duration >= 0);
-
- else if (Match(pReader, pos, 0x0D80, m_pMuxingAppAsUTF8)) //[4D][80]
- assert(m_pMuxingAppAsUTF8);
-
- else if (Match(pReader, pos, 0x1741, m_pWritingAppAsUTF8)) //[57][41]
- assert(m_pWritingAppAsUTF8);
-
- else if (Match(pReader, pos, 0x3BA9, m_pTitleAsUTF8)) //[7B][A9]
- assert(m_pTitleAsUTF8);
-
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- //id;
- assert(id >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume id
- assert((stop - pos) > 0);
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0);
- assert((pos + len) <= stop);
-
- pos += len + size; //consume size and payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
-}
-
-SegmentInfo::~SegmentInfo()
-{
- if (m_pMuxingAppAsUTF8)
- {
- delete[] m_pMuxingAppAsUTF8;
- m_pMuxingAppAsUTF8 = NULL;
- }
-
- if (m_pWritingAppAsUTF8)
- {
- delete[] m_pWritingAppAsUTF8;
- m_pWritingAppAsUTF8 = NULL;
- }
-
- if (m_pTitleAsUTF8)
- {
- delete[] m_pTitleAsUTF8;
- m_pTitleAsUTF8 = NULL;
- }
-}
-
-long long SegmentInfo::GetTimeCodeScale() const
-{
- return m_timecodeScale;
-}
-
-
-long long SegmentInfo::GetDuration() const
-{
- assert(m_duration >= 0);
- assert(m_timecodeScale >= 1);
-
- const double dd = double(m_duration) * double(m_timecodeScale);
- const long long d = static_cast<long long>(dd);
-
- return d;
-}
-
-const char* SegmentInfo::GetMuxingAppAsUTF8() const
-{
- return m_pMuxingAppAsUTF8;
-}
-
-const char* SegmentInfo::GetWritingAppAsUTF8() const
-{
- return m_pWritingAppAsUTF8;
-}
-
-const char* SegmentInfo::GetTitleAsUTF8() const
-{
- return m_pTitleAsUTF8;
-}
-
-Track::Track(Segment* pSegment, const Info& i) :
- m_pSegment(pSegment),
- m_info(i)
-{
-}
-
-Track::~Track()
-{
- Info& info = const_cast<Info&>(m_info);
- info.Clear();
-}
-
-Track::Info::Info():
- type(-1),
- number(-1),
- uid(-1),
- nameAsUTF8(NULL),
- codecId(NULL),
- codecPrivate(NULL),
- codecPrivateSize(0),
- codecNameAsUTF8(NULL)
-{
-}
-
-void Track::Info::Clear()
-{
- delete[] nameAsUTF8;
- nameAsUTF8 = NULL;
-
- delete[] codecId;
- codecId = NULL;
-
- delete[] codecPrivate;
- codecPrivate = NULL;
-
- delete[] codecNameAsUTF8;
- codecNameAsUTF8 = NULL;
-}
-
-const BlockEntry* Track::GetEOS() const
-{
- return &m_eos;
-}
-
-long long Track::GetType() const
-{
- const unsigned long result = static_cast<unsigned long>(m_info.type);
- return result;
-}
-
-unsigned long Track::GetNumber() const
-{
- assert(m_info.number >= 0);
- const unsigned long result = static_cast<unsigned long>(m_info.number);
- return result;
-}
-
-const char* Track::GetNameAsUTF8() const
-{
- return m_info.nameAsUTF8;
-}
-
-const char* Track::GetCodecNameAsUTF8() const
-{
- return m_info.codecNameAsUTF8;
-}
-
-
-const char* Track::GetCodecId() const
-{
- return m_info.codecId;
-}
-
-
-const unsigned char* Track::GetCodecPrivate(size_t *optionalSize) const
-{
- if (optionalSize) {
- *optionalSize = m_info.codecPrivateSize;
- }
- return m_info.codecPrivate;
-}
-
-
-long Track::GetFirst(const BlockEntry*& pBlockEntry) const
-{
- Cluster* const pCluster = m_pSegment->GetFirst();
-
- //If Segment::GetFirst returns NULL, then this must be a network
- //download, and we haven't loaded any clusters yet. In this case,
- //returning NULL from Track::GetFirst means the same thing.
-
- if ((pCluster == NULL) || pCluster->EOS())
- {
- pBlockEntry = NULL;
- return E_BUFFER_NOT_FULL; //return 1L instead?
- }
-
- pBlockEntry = pCluster->GetFirst();
-
- while (pBlockEntry)
- {
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
-
- if (pBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pBlockEntry = pCluster->GetNext(pBlockEntry);
- }
-
- //NOTE: if we get here, it means that we didn't find a block with
- //a matching track number. We interpret that as an error (which
- //might be too conservative).
-
- pBlockEntry = GetEOS(); //so we can return a non-NULL value
- return 1L;
-}
-
-
-long Track::GetNext(const BlockEntry* pCurrEntry, const BlockEntry*& pNextEntry) const
-{
- assert(pCurrEntry);
- assert(!pCurrEntry->EOS()); //?
- assert(pCurrEntry->GetBlock()->GetTrackNumber() == (unsigned long)m_info.number);
-
- const Cluster* const pCurrCluster = pCurrEntry->GetCluster();
- assert(pCurrCluster);
- assert(!pCurrCluster->EOS());
-
- pNextEntry = pCurrCluster->GetNext(pCurrEntry);
-
- while (pNextEntry)
- {
- const Block* const pNextBlock = pNextEntry->GetBlock();
- assert(pNextBlock);
-
- if (pNextBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pNextEntry = pCurrCluster->GetNext(pNextEntry);
- }
-
- Segment* pSegment = pCurrCluster->m_pSegment;
- Cluster* const pNextCluster = pSegment->GetNext(pCurrCluster);
-
- if ((pNextCluster == NULL) || pNextCluster->EOS())
- {
- if (pSegment->Unparsed() <= 0) //all clusters have been loaded
- {
- pNextEntry = GetEOS();
- return 1L;
- }
-
- pNextEntry = NULL;
- return E_BUFFER_NOT_FULL;
- }
-
- pNextEntry = pNextCluster->GetFirst();
-
- while (pNextEntry)
- {
- const Block* const pNextBlock = pNextEntry->GetBlock();
- assert(pNextBlock);
-
- if (pNextBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pNextEntry = pNextCluster->GetNext(pNextEntry);
- }
-
- //TODO: what has happened here is that we did not find a block
- //with a matching track number on the next cluster. It might
- //be the case that some cluster beyond the next cluster
- //contains a block having a matching track number, but for
- //now we terminate the search immediately. We do this so that
- //we don't end up searching the entire file looking for the
- //next block. Another possibility is to try searching for the next
-    //block in a small, fixed number of clusters (instead of searching
-    //just the next one), or to terminate the search when
- //there is a large gap in time, or large gap in file position. It
- //might very well be the case that the approach we use here is
- //unnecessarily conservative.
-
- //TODO: again, here's a case where we need to return the special
- //EOS block. Or something. It's OK if pNext is NULL, because
- //we only need it to set the stop time of the media sample.
- //(The start time is determined from pCurr, which is non-NULL
- //and non-EOS.) The problem is when we set pCurr=pNext; when
- //pCurr has the value NULL we interpret that to mean that we
- //haven't fully initialized pCurr and we attempt to set it to
- //point to the first block for this track. But that's not what
- //we want at all; we want the next call to PopulateSample to
- //return end-of-stream, not (re)start from the beginning.
- //
- //One work-around is to send EOS immediately. We would send
- //the EOS the next pass anyway, so maybe it's no great loss. The
-    //only problem is if the stream really does end one
- //cluster early (relative to other tracks), or the last frame
- //happens to be a keyframe ("CanSeekToEnd").
- //
-    //The problem is that we need a way to mark a stream as
- //"at end of stream" without actually being at end of stream.
- //We need to give pCurr some value that means "you've reached EOS".
- //We can't synthesize the special EOS Cluster immediately
-    //(when we first open the file, say), because we use the existence
- //of that special cluster value to mean that we've read all of
-    //the clusters (this is a network download, so we can't know a priori
- //how many we have).
- //
- //Or, we could return E_FAIL, and set another bit in the stream
- //object itself, to indicate that it should send EOS earlier
- //than when (pCurr=pStop).
- //
- //Or, probably the best solution, when we actually load the
- //blocks into a cluster: if we notice that there's no block
- //for a track, we synthesize a nonce EOS block for that track.
- //That way we always have something to return. But that will
- //only work for sequential scan???
-
- //pNext = NULL;
- //return E_FAIL;
- pNextEntry = GetEOS();
- return 1L;
-}
-
-
-Track::EOSBlock::EOSBlock()
-{
-}
-
-
-bool Track::EOSBlock::EOS() const
-{
- return true;
-}
-
-
-Cluster* Track::EOSBlock::GetCluster() const
-{
- return NULL;
-}
-
-
-size_t Track::EOSBlock::GetIndex() const
-{
- return 0;
-}
-
-
-const Block* Track::EOSBlock::GetBlock() const
-{
- return NULL;
-}
-
-
-bool Track::EOSBlock::IsBFrame() const
-{
- return false;
-}
-
-
-VideoTrack::VideoTrack(Segment* pSegment, const Info& i) :
- Track(pSegment, i),
- m_width(-1),
- m_height(-1),
- m_rate(-1)
-{
- assert(i.type == 1);
- assert(i.number > 0);
-
- IMkvReader* const pReader = pSegment->m_pReader;
-
- const Settings& s = i.settings;
- assert(s.start >= 0);
- assert(s.size >= 0);
-
- long long pos = s.start;
- assert(pos >= 0);
-
- const long long stop = pos + s.size;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-#endif
- if (Match(pReader, pos, 0x30, m_width))
- ;
- else if (Match(pReader, pos, 0x3A, m_height))
- ;
- else if (Match(pReader, pos, 0x0383E3, m_rate))
- ;
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- assert((pos + size) <= stop);
-
- //pos now designates start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- return;
-}
-
-
-bool VideoTrack::VetEntry(const BlockEntry* pBlockEntry) const
-{
- assert(pBlockEntry);
-
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
- assert(pBlock->GetTrackNumber() == (unsigned long)m_info.number);
-
- return pBlock->IsKey();
-}
-
-
-
-long long VideoTrack::GetWidth() const
-{
- return m_width;
-}
-
-
-long long VideoTrack::GetHeight() const
-{
- return m_height;
-}
-
-
-double VideoTrack::GetFrameRate() const
-{
- return m_rate;
-}
-
-
-AudioTrack::AudioTrack(Segment* pSegment, const Info& i) :
- Track(pSegment, i)
-{
- assert(i.type == 2);
- assert(i.number > 0);
-
- IMkvReader* const pReader = pSegment->m_pReader;
-
- const Settings& s = i.settings;
- assert(s.start >= 0);
- assert(s.size >= 0);
-
- long long pos = s.start;
- assert(pos >= 0);
-
- const long long stop = pos + s.size;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-#endif
- if (Match(pReader, pos, 0x35, m_rate))
- ;
- else if (Match(pReader, pos, 0x1F, m_channels))
- ;
- else if (Match(pReader, pos, 0x2264, m_bitDepth))
- ;
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- assert((pos + size) <= stop);
-
- //pos now designates start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- return;
-}
-
-bool AudioTrack::VetEntry(const BlockEntry* pBlockEntry) const
-{
- assert(pBlockEntry);
-
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
- assert(pBlock->GetTrackNumber() == (unsigned long)m_info.number);
-
- return true;
-}
-
-
-double AudioTrack::GetSamplingRate() const
-{
- return m_rate;
-}
-
-
-long long AudioTrack::GetChannels() const
-{
- return m_channels;
-}
-
-long long AudioTrack::GetBitDepth() const
-{
- return m_bitDepth;
-}
-
-Tracks::Tracks(Segment* pSegment, long long start, long long size_) :
- m_pSegment(pSegment),
- m_start(start),
- m_size(size_),
- m_trackEntries(NULL),
- m_trackEntriesEnd(NULL)
-{
- long long stop = m_start + m_size;
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos1 = m_start;
- int count = 0;
-
- while (pos1 < stop)
- {
- long len;
- const long long id = ReadUInt(pReader, pos1, len);
- assert(id >= 0);
- assert((pos1 + len) <= stop);
-
- pos1 += len; //consume id
-
- const long long size = ReadUInt(pReader, pos1, len);
- assert(size >= 0);
- assert((pos1 + len) <= stop);
-
- pos1 += len; //consume length of size
-
-        //pos now designates start of element
- if (id == 0x2E) //TrackEntry ID
- ++count;
-
- pos1 += size; //consume payload
- assert(pos1 <= stop);
- }
-
- if (count <= 0)
- return;
-
- m_trackEntries = new Track*[count];
- m_trackEntriesEnd = m_trackEntries;
-
- long long pos = m_start;
-
- while (pos < stop)
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size1 = ReadUInt(pReader, pos, len);
- assert(size1 >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
-
-        //pos now designates start of element
-
- if (id == 0x2E) //TrackEntry ID
- ParseTrackEntry(pos, size1, *m_trackEntriesEnd++);
-
- pos += size1; //consume payload
- assert(pos <= stop);
- }
-}
-
-unsigned long Tracks::GetTracksCount() const
-{
- const ptrdiff_t result = m_trackEntriesEnd - m_trackEntries;
- assert(result >= 0);
-
- return static_cast<unsigned long>(result);
-}
-
-
-void Tracks::ParseTrackEntry(
- long long start,
- long long size,
- Track*& pTrack)
-{
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size;
-
- Track::Info i;
-
- Track::Settings videoSettings;
- videoSettings.start = -1;
-
- Track::Settings audioSettings;
- audioSettings.start = -1;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- len;
- id;
-#endif
- if (Match(pReader, pos, 0x57, i.number))
- assert(i.number > 0);
-
- else if (Match(pReader, pos, 0x33C5, i.uid))
- ;
-
- else if (Match(pReader, pos, 0x03, i.type))
- ;
-
- else if (Match(pReader, pos, 0x136E, i.nameAsUTF8))
- assert(i.nameAsUTF8);
-
- else if (Match(pReader, pos, 0x06, i.codecId))
- ;
-
- else if (Match(pReader, pos, 0x23A2, i.codecPrivate, &i.codecPrivateSize))
- ;
-
- else if (Match(pReader, pos, 0x058688, i.codecNameAsUTF8))
- assert(i.codecNameAsUTF8);
-
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- const long long start = pos;
-
- pos += size; //consume payload
- assert(pos <= stop);
-
- if (id == 0x60)
- {
- videoSettings.start = start;
- videoSettings.size = size;
- }
- else if (id == 0x61)
- {
- audioSettings.start = start;
- audioSettings.size = size;
- }
- }
- }
-
- assert(pos == stop);
-    //TODO: properly vet info.number, to ensure both its existence,
- //and that it is unique among all tracks.
- assert(i.number > 0);
-
- //TODO: vet settings, to ensure that video settings (0x60)
- //were specified when type = 1, and that audio settings (0x61)
- //were specified when type = 2.
- if (i.type == 1) //video
- {
- assert(audioSettings.start < 0);
- assert(videoSettings.start >= 0);
-
- i.settings = videoSettings;
-
- VideoTrack* const t = new VideoTrack(m_pSegment, i);
- assert(t); //TODO
- pTrack = t;
- }
- else if (i.type == 2) //audio
- {
- assert(videoSettings.start < 0);
- assert(audioSettings.start >= 0);
-
- i.settings = audioSettings;
-
- AudioTrack* const t = new AudioTrack(m_pSegment, i);
- assert(t); //TODO
- pTrack = t;
- }
- else
- {
- // for now we do not support other track types yet.
- // TODO: support other track types
- i.Clear();
-
- pTrack = NULL;
- }
-
- return;
-}
-
-
-Tracks::~Tracks()
-{
- Track** i = m_trackEntries;
- Track** const j = m_trackEntriesEnd;
-
- while (i != j)
- {
- Track* pTrack = *i++;
- delete pTrack;
- pTrack = NULL;
- }
-
- delete[] m_trackEntries;
-}
-
-
-Track* Tracks::GetTrackByNumber(unsigned long tn) const
-{
- Track** i = m_trackEntries;
- Track** const j = m_trackEntriesEnd;
-
- while (i != j)
- {
- Track* const pTrack = *i++;
-
- if (pTrack == NULL)
- continue;
-
- if (tn == pTrack->GetNumber())
- return pTrack;
- }
-
- return NULL; //not found
-}
-
-
-Track* Tracks::GetTrackByIndex(unsigned long idx) const
-{
- const ptrdiff_t count = m_trackEntriesEnd - m_trackEntries;
-
- if (idx >= static_cast<unsigned long>(count))
- return NULL;
-
- return m_trackEntries[idx];
-}
-
-
-void Cluster::Load()
-{
- assert(m_pSegment);
-
- if (m_start > 0)
- {
- assert(m_size > 0);
- assert(m_timecode >= 0);
- return;
- }
-
- assert(m_size == 0);
- assert(m_timecode < 0);
-
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- const long long off = -m_start; //relative to segment
- long long pos = m_pSegment->m_start + off; //absolute
-
- long len;
-
- const long long id_ = ReadUInt(pReader, pos, len);
- assert(id_ >= 0);
- assert(id_ == 0x0F43B675); //Cluster ID
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
-
- pos += len; //consume size
-
- m_start = pos;
- m_size = size_;
-
- const long long stop = m_start + size_;
-
- long long timecode = -1;
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x67, timecode))
- break;
- else
- {
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- break;
-
- if (id == 0x23) //SimpleBlock ID
- break;
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos <= stop);
- assert(timecode >= 0);
-
- m_timecode = timecode;
-}
-
-
-Cluster* Cluster::Parse(
- Segment* pSegment,
- size_t idx,
- long long off)
-{
- assert(pSegment);
- assert(off >= 0);
- assert(off < pSegment->m_size);
- Cluster* const pCluster = new Cluster(pSegment, idx, -off);
- assert(pCluster);
-
- return pCluster;
-}
-
-
-Cluster::Cluster() :
- m_pSegment(NULL),
- m_index(0),
- m_start(0),
- m_size(0),
- m_timecode(0),
- m_pEntries(NULL),
- m_entriesCount(0)
-{
-}
-
-Cluster::Cluster(
- Segment* pSegment,
- size_t idx,
- long long off) :
- m_pSegment(pSegment),
- m_index(idx),
- m_start(off),
- m_size(0),
- m_timecode(-1),
- m_pEntries(NULL),
- m_entriesCount(0)
-{
-}
-
-
-Cluster::~Cluster()
-{
-#if 0
- while (!m_pEntries.empty())
- {
- BlockEntry* pBlockEntry = m_pEntries.front();
- assert(pBlockEntry);
-
- m_pEntries.pop_front();
- delete pBlockEntry;
- }
-#else
- BlockEntry** i = m_pEntries;
- BlockEntry** const j = m_pEntries + m_entriesCount;
- while (i != j)
- {
- BlockEntry* p = *i++;
-
- assert(p);
- delete p;
- }
-
- delete[] m_pEntries;
-#endif
-
-}
-
-bool Cluster::EOS() const
-{
- return (m_pSegment == 0);
-}
-
-
-void Cluster::LoadBlockEntries()
-{
- if (m_pEntries)
- return;
-
- Load();
- assert(m_timecode >= 0);
- assert(m_start > 0);
- assert(m_size > 0);
-
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = m_start;
- const long long stop = m_start + m_size;
- long long timecode = -1;
-
- long long idx = pos;
-
- m_entriesCount = 0;
-
- while (idx < stop)
- {
- if (Match(pReader, idx, 0x67, timecode))
- assert(timecode == m_timecode);
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, idx, len);
- assert(id >= 0); //TODO
- assert((idx + len) <= stop);
-
- idx += len; //consume id
-
- const long long size = ReadUInt(pReader, idx, len);
- assert(size >= 0); //TODO
- assert((idx + len) <= stop);
-
- idx += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- ++m_entriesCount;
- else if (id == 0x23) //SimpleBlock ID
- ++m_entriesCount;
-
- idx += size; //consume payload
-
- assert(idx <= stop);
- }
- }
-
- if (m_entriesCount == 0)
- return;
-
- m_pEntries = new BlockEntry*[m_entriesCount];
- size_t index = 0;
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x67, timecode))
- assert(timecode == m_timecode);
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- ParseBlockGroup(pos, size, index++);
- else if (id == 0x23) //SimpleBlock ID
- ParseSimpleBlock(pos, size, index++);
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
- assert(timecode >= 0);
- assert(index == m_entriesCount);
-}
-
-
-
-long long Cluster::GetTimeCode()
-{
- Load();
- return m_timecode;
-}
-
-
-long long Cluster::GetTime()
-{
- const long long tc = GetTimeCode();
- assert(tc >= 0);
-
- const SegmentInfo* const pInfo = m_pSegment->GetInfo();
- assert(pInfo);
-
- const long long scale = pInfo->GetTimeCodeScale();
- assert(scale >= 1);
-
- const long long t = m_timecode * scale;
-
- return t;
-}
-
-
-void Cluster::ParseBlockGroup(long long start, long long size, size_t index)
-{
- assert(m_pEntries);
- assert(m_entriesCount);
- assert(index < m_entriesCount);
-
- BlockGroup* const pGroup = new BlockGroup(this, index, start, size);
- assert(pGroup); //TODO
-
- m_pEntries[index] = pGroup;
-}
-
-
-
-void Cluster::ParseSimpleBlock(long long start, long long size, size_t index)
-{
- assert(m_pEntries);
- assert(m_entriesCount);
- assert(index < m_entriesCount);
-
- SimpleBlock* const pSimpleBlock = new SimpleBlock(this, index, start, size);
- assert(pSimpleBlock); //TODO
-
- m_pEntries[index] = pSimpleBlock;
-}
-
-
-const BlockEntry* Cluster::GetFirst()
-{
- LoadBlockEntries();
-
- return m_pEntries[0];
-}
-
-
-const BlockEntry* Cluster::GetLast()
-{
- if (m_entriesCount == 0)
- return m_pEntries[0];
-
- return m_pEntries[m_entriesCount-1];
-}
-
-
-const BlockEntry* Cluster::GetNext(const BlockEntry* pEntry) const
-{
- assert(pEntry);
-
- size_t idx = pEntry->GetIndex();
-
- ++idx;
-
- if (idx == m_entriesCount)
- return NULL;
-
- return m_pEntries[idx];
-
-}
-
-
-const BlockEntry* Cluster::GetEntry(const Track* pTrack)
-{
-
- assert(pTrack);
-
- if (m_pSegment == NULL) //EOS
- return pTrack->GetEOS();
-
- LoadBlockEntries();
-
- BlockEntry* i = *m_pEntries;
- BlockEntry* j = *m_pEntries + m_entriesCount;
- while (i != j)
- {
- BlockEntry* pEntry = i;
- i++;
- assert(pEntry);
- assert(!pEntry->EOS());
-
- const Block* const pBlock = pEntry->GetBlock();
- assert(pBlock);
-
- if (pBlock->GetTrackNumber() != pTrack->GetNumber())
- continue;
-
- if (pTrack->VetEntry(pEntry))
- return pEntry;
- }
-
- return pTrack->GetEOS(); //no satisfactory block found
-}
-
-
-BlockEntry::BlockEntry()
-{
-}
-
-
-BlockEntry::~BlockEntry()
-{
-}
-
-
-
-SimpleBlock::SimpleBlock(
- Cluster* pCluster,
- size_t idx,
- long long start,
- long long size) :
- m_pCluster(pCluster),
- m_index(idx),
- m_block(start, size, pCluster->m_pSegment->m_pReader)
-{
-}
-
-
-bool SimpleBlock::EOS() const
-{
- return false;
-}
-
-
-Cluster* SimpleBlock::GetCluster() const
-{
- return m_pCluster;
-}
-
-
-size_t SimpleBlock::GetIndex() const
-{
- return m_index;
-}
-
-
-const Block* SimpleBlock::GetBlock() const
-{
- return &m_block;
-}
-
-
-bool SimpleBlock::IsBFrame() const
-{
- return false;
-}
-
-
-BlockGroup::BlockGroup(
- Cluster* pCluster,
- size_t idx,
- long long start,
- long long size_) :
- m_pCluster(pCluster),
- m_index(idx),
- m_prevTimeCode(0),
- m_nextTimeCode(0),
- m_pBlock(NULL) //TODO: accept multiple blocks within a block group
-{
- IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size_;
-
- bool bSimpleBlock = false;
-
- while (pos < stop)
- {
- short t;
-
- if (Match(pReader, pos, 0x7B, t))
- {
- if (t < 0)
- m_prevTimeCode = t;
- else if (t > 0)
- m_nextTimeCode = t;
- else
- assert(false);
- }
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume ID
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- switch (id)
- {
- case 0x23: //SimpleBlock ID
- bSimpleBlock = true;
- //YES, FALL THROUGH TO NEXT CASE
-
- case 0x21: //Block ID
- ParseBlock(pos, size);
- break;
-
- default:
- break;
- }
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
- assert(m_pBlock);
-
- if (!bSimpleBlock)
- m_pBlock->SetKey(m_prevTimeCode >= 0);
-}
-
-
-BlockGroup::~BlockGroup()
-{
- delete m_pBlock;
-}
-
-
-void BlockGroup::ParseBlock(long long start, long long size)
-{
- IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
-
- Block* const pBlock = new Block(start, size, pReader);
- assert(pBlock); //TODO
-
- //TODO: the Matroska spec says you have multiple blocks within the
- //same block group, with blocks ranked by priority (the flag bits).
- //I haven't ever seen such a file (mkvmux certainly doesn't make
- //one), so until then I'll just assume block groups contain a single
- //block.
-#if 0
- m_blocks.push_back(pBlock);
-#else
- assert(m_pBlock == NULL);
- m_pBlock = pBlock;
-#endif
-
-#if 0
- Track* const pTrack = pBlock->GetTrack();
- assert(pTrack);
-
- pTrack->Insert(pBlock);
-#endif
-}
-
-
-bool BlockGroup::EOS() const
-{
- return false;
-}
-
-
-Cluster* BlockGroup::GetCluster() const
-{
- return m_pCluster;
-}
-
-
-size_t BlockGroup::GetIndex() const
-{
- return m_index;
-}
-
-
-const Block* BlockGroup::GetBlock() const
-{
- return m_pBlock;
-}
-
-
-short BlockGroup::GetPrevTimeCode() const
-{
- return m_prevTimeCode;
-}
-
-
-short BlockGroup::GetNextTimeCode() const
-{
- return m_nextTimeCode;
-}
-
-
-bool BlockGroup::IsBFrame() const
-{
- return (m_nextTimeCode > 0);
-}
-
-
-
-Block::Block(long long start, long long size_, IMkvReader* pReader) :
- m_start(start),
- m_size(size_)
-{
- long long pos = start;
- const long long stop = start + size_;
-
- long len;
-
- m_track = ReadUInt(pReader, pos, len);
- assert(m_track > 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume track number
- assert((stop - pos) >= 2);
-
- m_timecode = Unserialize2SInt(pReader, pos);
-
- pos += 2;
- assert((stop - pos) >= 1);
-
- const long hr = pReader->Read(pos, 1, &m_flags);
- assert(hr == 0L);
-
- ++pos;
- assert(pos <= stop);
-
- m_frameOff = pos;
-
- const long long frame_size = stop - pos;
-
- assert(frame_size <= 2147483647L);
-
- m_frameSize = static_cast<long>(frame_size);
-}
-
-
-long long Block::GetTimeCode(Cluster* pCluster) const
-{
- assert(pCluster);
-
- const long long tc0 = pCluster->GetTimeCode();
- assert(tc0 >= 0);
-
- const long long tc = tc0 + static_cast<long long>(m_timecode);
- assert(tc >= 0);
-
- return tc; //unscaled timecode units
-}
-
-
-long long Block::GetTime(Cluster* pCluster) const
-{
- assert(pCluster);
-
- const long long tc = GetTimeCode(pCluster);
-
- const Segment* const pSegment = pCluster->m_pSegment;
- const SegmentInfo* const pInfo = pSegment->GetInfo();
- assert(pInfo);
-
- const long long scale = pInfo->GetTimeCodeScale();
- assert(scale >= 1);
-
- const long long ns = tc * scale;
-
- return ns;
-}
-
-
-unsigned long Block::GetTrackNumber() const
-{
- assert(m_track > 0);
-
- return static_cast<unsigned long>(m_track);
-}
-
-
-bool Block::IsKey() const
-{
- return ((m_flags & static_cast<unsigned char>(1 << 7)) != 0);
-}
-
-
-void Block::SetKey(bool bKey)
-{
- if (bKey)
- m_flags |= static_cast<unsigned char>(1 << 7);
- else
- m_flags &= 0x7F;
-}
-
-
-long Block::GetSize() const
-{
- return m_frameSize;
-}
-
-
-long Block::Read(IMkvReader* pReader, unsigned char* buf) const
-{
-
- assert(pReader);
- assert(buf);
-
- const long hr = pReader->Read(m_frameOff, m_frameSize, buf);
-
- return hr;
-}
-
-
-} //end namespace mkvparser
+// Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+#include "mkvparser.hpp"
+#include <cassert>
+#include <cstring>
+#include <new>
+//#include <windows.h>
+//#include "odbgstream.hpp"
+//using std::endl;
+
+mkvparser::IMkvReader::~IMkvReader()
+{
+}
+
+
+void mkvparser::GetVersion(int& major, int& minor, int& build, int& revision)
+{
+ major = 1;
+ minor = 0;
+ build = 0;
+ revision = 4;
+}
+
+
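+//ReadUInt parses an EBML variable-length unsigned integer: the position of
+//the first set bit in the leading byte gives the total length (1..8 bytes),
+//that marker bit is masked off, and the remaining bits plus any following
+//bytes form the value, most-significant byte first.  A negative return value
+//is an error propagated from the reader.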
+long long mkvparser::ReadUInt(IMkvReader* pReader, long long pos, long& len)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(pos < available);
+ assert((available - pos) >= 1); //assume here max u-int len is 8
+
+ unsigned char b;
+
+ hr = pReader->Read(pos, 1, &b);
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ if (b & 0x80) //1000 0000
+ {
+ len = 1;
+ b &= 0x7F; //0111 1111
+ }
+ else if (b & 0x40) //0100 0000
+ {
+ len = 2;
+ b &= 0x3F; //0011 1111
+ }
+ else if (b & 0x20) //0010 0000
+ {
+ len = 3;
+ b &= 0x1F; //0001 1111
+ }
+ else if (b & 0x10) //0001 0000
+ {
+ len = 4;
+ b &= 0x0F; //0000 1111
+ }
+ else if (b & 0x08) //0000 1000
+ {
+ len = 5;
+ b &= 0x07; //0000 0111
+ }
+ else if (b & 0x04) //0000 0100
+ {
+ len = 6;
+ b &= 0x03; //0000 0011
+ }
+ else if (b & 0x02) //0000 0010
+ {
+ len = 7;
+ b &= 0x01; //0000 0001
+ }
+ else
+ {
+ assert(b & 0x01); //0000 0001
+ len = 8;
+ b = 0; //0000 0000
+ }
+
+ assert((available - pos) >= len);
+
+ long long result = b;
+ ++pos;
+ for (long i = 1; i < len; ++i)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+long long mkvparser::GetUIntLength(
+ IMkvReader* pReader,
+ long long pos,
+ long& len)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ if (pos >= available)
+ return pos; //too few bytes available
+
+ unsigned char b;
+
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ if (b == 0) //we can't handle u-int values larger than 8 bytes
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char m = 0x80;
+ len = 1;
+
+ while (!(b & m))
+ {
+ m >>= 1;
+ ++len;
+ }
+
+ return 0; //success
+}
+
+
+long long mkvparser::SyncReadUInt(
+ IMkvReader* pReader,
+ long long pos,
+ long long stop,
+ long& len)
+{
+ assert(pReader);
+
+ if (pos >= stop)
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char b;
+
+ long hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (hr != 0L)
+ return E_BUFFER_NOT_FULL;
+
+ if (b == 0) //we can't handle u-int values larger than 8 bytes
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char m = 0x80;
+ len = 1;
+
+ while (!(b & m))
+ {
+ m >>= 1;
+ ++len;
+ }
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ long long result = b & (~m);
+ ++pos;
+
+ for (int i = 1; i < len; ++i)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (hr != 0L)
+ return E_BUFFER_NOT_FULL;
+
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+long long mkvparser::UnserializeUInt(
+ IMkvReader* pReader,
+ long long pos,
+ long long size)
+{
+ assert(pReader);
+ assert(pos >= 0);
+ assert(size > 0);
+ assert(size <= 8);
+
+ long long result = 0;
+
+ for (long long i = 0; i < size; ++i)
+ {
+ unsigned char b;
+
+ const long hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
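+//Unserialize4Float and Unserialize8Double below fill the result starting from
+//its last byte, converting the big-endian float stored in the file into the
+//host representation; as written this appears to assume a little-endian host.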
+float mkvparser::Unserialize4Float(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ assert((pos + 4) <= available);
+
+ float result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 4;
+
+ for (;;)
+ {
+ hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+double mkvparser::Unserialize8Double(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ double result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 8;
+
+ for (;;)
+ {
+ const long hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+signed char mkvparser::Unserialize1SInt(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr == 0);
+ assert(available <= total);
+ assert(pos < available);
+
+ signed char result;
+
+ hr = pReader->Read(pos, 1, (unsigned char*)&result);
+ assert(hr == 0);
+
+ return result;
+}
+
+short mkvparser::Unserialize2SInt(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ assert((pos + 2) <= available);
+
+ short result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 2;
+
+ for (;;)
+ {
+ hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
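+//The Match overloads below test whether the element at pos has the given ID;
+//on a match they consume the ID, size and payload (advancing pos past the
+//element), unserialize the payload into the output argument, and return true.
+//On a mismatch, pos is left unchanged and false is returned.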
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ long long& val)
+
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert(size <= 8);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+
+ val = UnserializeUInt(pReader, pos, size);
+ assert(val >= 0);
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ char*& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size_) <= available);
+
+ const size_t size = static_cast<size_t>(size_);
+ val = new char[size+1];
+
+ for (size_t i = 0; i < size; ++i)
+ {
+ char c;
+
+ hr = pReader->Read(pos + i, 1, (unsigned char*)&c);
+ assert(hr == 0L);
+
+ val[i] = c;
+
+ if (c == '\0')
+ break;
+
+ }
+
+ val[size] = '\0';
+ pos += size_; //consume size of payload
+
+ return true;
+}
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ unsigned char*& buf,
+ size_t& buflen)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size_) <= available);
+
+ const long buflen_ = static_cast<long>(size_);
+
+ buf = new (std::nothrow) unsigned char[buflen_];
+ assert(buf); //TODO
+
+ hr = pReader->Read(pos, buflen_, buf);
+ assert(hr == 0L);
+
+ buflen = buflen_;
+
+ pos += size_; //consume size of payload
+ return true;
+}
+
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ double& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ long idlen;
+ const long long id = ReadUInt(pReader, pos, idlen);
+ assert(id >= 0); //TODO
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ long sizelen;
+ const long long size = ReadUInt(pReader, pos + idlen, sizelen);
+
+ switch (size)
+ {
+ case 4:
+ case 8:
+ break;
+ default:
+ return false;
+ }
+
+ pos += idlen + sizelen; //consume id and size fields
+ assert((pos + size) <= available);
+
+ if (size == 4)
+ val = Unserialize4Float(pReader, pos);
+ else
+ {
+ assert(size == 8);
+ val = Unserialize8Double(pReader, pos);
+ }
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ short& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size <= 2);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size) <= available);
+
+ //TODO:
+ // Generalize this to work for any size signed int
+ if (size == 1)
+ val = Unserialize1SInt(pReader, pos);
+ else
+ val = Unserialize2SInt(pReader, pos);
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+
+namespace mkvparser
+{
+
+EBMLHeader::EBMLHeader():
+ m_docType(NULL)
+{
+}
+
+EBMLHeader::~EBMLHeader()
+{
+ delete[] m_docType;
+}
+
+long long EBMLHeader::Parse(
+ IMkvReader* pReader,
+ long long& pos)
+{
+ assert(pReader);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+
+ if (hr < 0)
+ return hr;
+
+ pos = 0;
+ long long end = (1024 < available)? 1024: available;
+
+ for (;;)
+ {
+ unsigned char b = 0;
+
+ while (pos < end)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (b == 0x1A)
+ break;
+
+ ++pos;
+ }
+
+ if (b != 0x1A)
+ {
+ if ((pos >= 1024) ||
+ (available >= total) ||
+ ((total - available) < 5))
+ return -1;
+
+ return available + 5; //5 = 4-byte ID + 1st byte of size
+ }
+
+ if ((total - pos) < 5)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((available - pos) < 5)
+ return pos + 5; //try again later
+
+ long len;
+
+ const long long result = ReadUInt(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ if (result == 0x0A45DFA3) //ReadId masks-off length indicator bits
+ {
+ assert(len == 4);
+ pos += len;
+ break;
+ }
+
+ ++pos; //throw away just the 0x1A byte, and try again
+ }
+
+ long len;
+ long long result = GetUIntLength(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ if (result > 0) //need more data
+ return result;
+
+ assert(len > 0);
+ assert(len <= 8);
+
+ if ((total - pos) < len)
+ return E_FILE_FORMAT_INVALID;
+ if ((available - pos) < len)
+ return pos + len; //try again later
+
+ result = ReadUInt(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ pos += len; //consume u-int
+
+ if ((total - pos) < result)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((available - pos) < result)
+ return pos + result;
+
+ end = pos + result;
+
+ m_version = 1;
+ m_readVersion = 1;
+ m_maxIdLength = 4;
+ m_maxSizeLength = 8;
+ m_docTypeVersion = 1;
+ m_docTypeReadVersion = 1;
+
+ while (pos < end)
+ {
+ if (Match(pReader, pos, 0x0286, m_version))
+ ;
+ else if (Match(pReader, pos, 0x02F7, m_readVersion))
+ ;
+ else if (Match(pReader, pos, 0x02F2, m_maxIdLength))
+ ;
+ else if (Match(pReader, pos, 0x02F3, m_maxSizeLength))
+ ;
+ else if (Match(pReader, pos, 0x0282, m_docType))
+ ;
+ else if (Match(pReader, pos, 0x0287, m_docTypeVersion))
+ ;
+ else if (Match(pReader, pos, 0x0285, m_docTypeReadVersion))
+ ;
+ else
+ {
+ result = ReadUInt(pReader, pos, len);
+ assert(result > 0);
+ assert(len > 0);
+ assert(len <= 8);
+
+ pos += len;
+ assert(pos < end);
+
+ result = ReadUInt(pReader, pos, len);
+ assert(result >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+
+ pos += len + result;
+ assert(pos <= end);
+ }
+ }
+
+ assert(pos == end);
+
+ return 0;
+}
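+
+//A minimal usage sketch of this parser (illustrative only; "FileReader" is a
+//hypothetical concrete implementation of IMkvReader, not part of this file):
+//
+//  FileReader reader;        //reads bytes from a .webm file
+//  long long pos = 0;
+//
+//  mkvparser::EBMLHeader ebml;
+//  long long status = ebml.Parse(&reader, pos);   //0 on success
+//  assert(status == 0);
+//
+//  mkvparser::Segment* pSegment;
+//  status = mkvparser::Segment::CreateInstance(&reader, pos, pSegment);
+//  assert(status == 0);
+//
+//  status = pSegment->Load();   //parse segment info, tracks, cues, clusters
+//  assert(status == 0);
+//
+//  mkvparser::Cluster* pCluster = pSegment->GetFirst();
+//  //... iterate clusters/blocks ...
+//  delete pSegment;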
+
+
+Segment::Segment(
+ IMkvReader* pReader,
+ long long start,
+ long long size) :
+ m_pReader(pReader),
+ m_start(start),
+ m_size(size),
+ m_pos(start),
+ m_pInfo(NULL),
+ m_pTracks(NULL),
+ m_pCues(NULL),
+ m_clusters(NULL),
+ m_clusterCount(0),
+ m_clusterPreloadCount(0),
+ m_clusterSize(0)
+{
+}
+
+
+Segment::~Segment()
+{
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ Cluster** i = m_clusters;
+ Cluster** j = m_clusters + count;
+
+ while (i != j)
+ {
+ Cluster* const p = *i++;
+ assert(p);
+
+ delete p;
+ }
+
+ delete[] m_clusters;
+
+ delete m_pTracks;
+ delete m_pInfo;
+ delete m_pCues;
+}
+
+
+long long Segment::CreateInstance(
+ IMkvReader* pReader,
+ long long pos,
+ Segment*& pSegment)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ pSegment = NULL;
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ //I would assume that in practice this loop would execute
+ //exactly once, but we allow for other elements (e.g. Void)
+ //to immediately follow the EBML header. This is fine for
+ //the source filter case (since the entire file is available),
+ //but in the splitter case over a network we should probably
+ //just give up early. We could for example decide only to
+ //execute this loop a maximum of, say, 10 times.
+
+ while (pos < total)
+ {
+ //Read ID
+
+ long len;
+ long long result = GetUIntLength(pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ //TODO: if we liberalize the behavior of ReadUInt, we can
+ //probably eliminate having to use GetUIntLength here.
+ const long long id = ReadUInt(pReader, pos, len);
+
+ if (id < 0) //error
+ return id;
+
+ pos += len; //consume ID
+
+ //Read Size
+
+ result = GetUIntLength(pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ //TODO: if we liberalize the behavior of ReadUInt, we can
+ //probably eliminate having to use GetUIntLength here.
+ const long long size = ReadUInt(pReader, pos, len);
+
+ if (size < 0)
+ return size;
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x08538067) //Segment ID
+ {
+ pSegment = new Segment(pReader, pos, size);
+ assert(pSegment); //TODO
+
+ return 0; //success
+ }
+
+ pos += size; //consume payload
+ }
+
+ assert(pos == total);
+
+ pSegment = new Segment(pReader, pos, 0);
+ assert(pSegment); //TODO
+
+ return 0; //success (sort of)
+}
+
+
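+//ParseHeaders (like the other incremental parsing routines in this file)
+//returns 0 on success, a negative error code (e.g. E_FILE_FORMAT_INVALID) on
+//failure, or a positive value, which is an absolute file position: the caller
+//should retry once the file is available at least up to that position.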
+long long Segment::ParseHeaders()
+{
+ //Outermost (level 0) segment object has been constructed,
+ //and pos designates start of payload. We need to find the
+ //inner (level 1) elements.
+ long long total, available;
+
+ long hr = m_pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ const long long stop = m_start + m_size;
+ assert(stop <= total);
+ assert(m_pos <= stop);
+
+ bool bQuit = false;
+
+ while ((m_pos < stop) && !bQuit)
+ {
+ long long pos = m_pos;
+
+ long len;
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return id;
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0)
+ return size;
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ //We read EBML elements either in total or nothing at all.
+
+ if ((pos + size) > available)
+ return pos + size;
+
+ if (id == 0x0549A966) //Segment Info ID
+ {
+ assert(m_pInfo == NULL);
+
+ m_pInfo = new SegmentInfo(this, pos, size);
+ assert(m_pInfo); //TODO
+ }
+ else if (id == 0x0654AE6B) //Tracks ID
+ {
+ assert(m_pTracks == NULL);
+
+ m_pTracks = new Tracks(this, pos, size);
+ assert(m_pTracks); //TODO
+ }
+ else if (id == 0x0C53BB6B) //Cues ID
+ {
+ if (m_pCues == NULL)
+ {
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+ }
+ else if (id == 0x014D9B74) //SeekHead ID
+ {
+ ParseSeekHead(pos, size);
+ }
+ else if (id == 0x0F43B675) //Cluster ID
+ {
+ bQuit = true;
+ }
+
+ if (!bQuit)
+ m_pos = pos + size; //consume payload
+ }
+
+ assert(m_pos <= stop);
+
+ if (m_pInfo == NULL) //TODO: liberalize this behavior
+ return E_FILE_FORMAT_INVALID;
+
+ if (m_pTracks == NULL)
+ return E_FILE_FORMAT_INVALID;
+
+ return 0; //success
+}
+
+
+#if 0
+long Segment::ParseCluster(Cluster*& pCluster, long long& pos_) const
+{
+ pCluster = NULL;
+ pos_ = -1;
+
+ const long long stop = m_start + m_size;
+ assert(m_pos <= stop);
+
+ long long pos = m_pos;
+ long long off = -1;
+
+ while (pos < stop)
+ {
+ long len;
+ const long long idpos = pos;
+
+ const long long id = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ if (id == 0)
+ return E_FILE_FORMAT_INVALID;
+
+ pos += len; //consume id
+ assert(pos < stop);
+
+ const long long size = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume size
+ assert(pos <= stop);
+
+ if (size == 0) //weird
+ continue;
+
+ //pos now points to start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ off = idpos - m_start; // >= 0 means we found a cluster
+ break;
+ }
+ }
+
+ assert(pos <= stop);
+
+    //Indicate to the caller how much of the file has been consumed. This is
+ //used later in AddCluster to adjust the current parse position
+ //(the value cached in the segment object itself) to the
+ //file position value just past the cluster we parsed.
+
+    if (off < 0) //we did not find any more clusters
+ {
+ pos_ = stop; //pos_ >= 0 here means EOF (cluster is NULL)
+ return 0; //TODO: confirm this return value
+ }
+
+ //We found a cluster. Now read something, to ensure that it is
+ //fully loaded in the network cache.
+
+ if (pos >= stop) //we parsed the entire segment
+ {
+        //We did find a cluster, but it was the very last element in the segment.
+ //Our preference is that the loop above runs 1 1/2 times:
+ //the first pass finds the cluster, and the second pass
+        //finds the element that follows the cluster. In this case, however,
+ //we reached the end of the file without finding another element,
+ //so we didn't actually read anything yet associated with "end of the
+ //cluster". And we must perform an actual read, in order
+ //to guarantee that all of the data that belongs to this
+ //cluster has been loaded into the network cache. So instead
+ //of reading the next element that follows the cluster, we
+ //read the last byte of the cluster (which is also the last
+ //byte in the file).
+
+ //Read the last byte of the file. (Reading 0 bytes at pos
+ //might work too -- it would depend on how the reader is
+ //implemented. Here we take the more conservative approach,
+ //since this makes fewer assumptions about the network
+ //reader abstraction.)
+
+ unsigned char b;
+
+ const int result = m_pReader->Read(pos - 1, 1, &b);
+ assert(result == 0);
+
+ pos_ = stop;
+ }
+ else
+ {
+ long len;
+ const long long idpos = pos;
+
+ const long long id = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ if (id == 0)
+ return E_BUFFER_NOT_FULL;
+
+ pos += len; //consume id
+ assert(pos < stop);
+
+ const long long size = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos_ = idpos;
+ }
+
+ //We found a cluster, and it has been completely loaded into the
+ //network cache. (We can guarantee this because we actually read
+ //the EBML tag that follows the cluster, or, if we reached EOF,
+ //because we actually read the last byte of the cluster).
+
+ Segment* const this_ = const_cast<Segment*>(this);
+
+ pCluster = Cluster::Parse(this_, m_clusterCount, off);
+ assert(pCluster);
+ assert(pCluster->m_index == m_clusterCount);
+
+ return 0;
+}
+
+
+bool Segment::AddCluster(Cluster* pCluster, long long pos)
+{
+ assert(pos >= m_start);
+
+ const long long stop = m_start + m_size;
+ assert(pos <= stop);
+
+ if (pCluster)
+ {
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(m_clusterSize > pCluster->m_index);
+ assert(m_clusters[pCluster->m_index] == pCluster);
+ }
+
+    m_pos = pos; //m_pos >= stop is how we know we have all clusters
+
+ return (pos >= stop);
+}
+#endif
+
+
+long Segment::LoadCluster()
+{
+ const long long stop = m_start + m_size;
+
+ while (m_pos < stop)
+ {
+ long long pos = m_pos;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume length of size of element
+
+ if (size == 0) //weird
+ {
+ m_pos = pos;
+ continue;
+ }
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x0C53BB6B) //Cues ID
+ {
+ if (m_pCues == NULL)
+ {
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+
+ m_pos = pos + size; //consume payload
+ continue;
+ }
+
+ if (id != 0x0F43B675) //Cluster ID
+ {
+ m_pos = pos + size; //consume payload
+ continue;
+ }
+
+ const long idx = m_clusterCount;
+ const long long idoff = idpos - m_start;
+
+ if (m_clusterPreloadCount > 0)
+ {
+ assert(idx < m_clusterSize);
+
+ Cluster* const pCluster = m_clusters[idx];
+ assert(pCluster);
+ assert(pCluster->m_index < 0);
+
+ const long long off_ = pCluster->m_pos;
+ assert(off_);
+
+ const long long off = off_ * ((off_ >= 0) ? 1 : -1);
+ assert(idoff <= off);
+
+ if (idoff == off) //cluster has been preloaded already
+ {
+ pCluster->m_index = idx;
+ ++m_clusterCount;
+ --m_clusterPreloadCount;
+
+ m_pos = pos + size; //consume payload
+ break;
+ }
+ }
+
+ Cluster* const pCluster = Cluster::Parse(this, idx, idoff);
+ assert(pCluster);
+ assert(pCluster->m_index == idx);
+
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(idx < m_clusterSize);
+ assert(m_clusters[idx] == pCluster);
+
+ m_pos = pos + size; //consume payload
+ break;
+ }
+
+ assert(m_pos <= stop);
+ return 0;
+}
+
+
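+//AppendCluster and PreloadCluster maintain the m_clusters array: entries
+//[0, m_clusterCount) are clusters reached by sequential parsing (m_index >= 0),
+//and entries [m_clusterCount, m_clusterCount + m_clusterPreloadCount) are
+//clusters discovered out of order (e.g. while seeking); those have a negative
+//m_index and are kept sorted by file position until sequential parsing
+//reaches them.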
+void Segment::AppendCluster(Cluster* pCluster)
+{
+ assert(pCluster);
+ assert(pCluster->m_index >= 0);
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ long& size = m_clusterSize;
+ assert(size >= count);
+
+ const long idx = pCluster->m_index;
+ assert(idx == m_clusterCount);
+
+ if (count >= size)
+ {
+ long n;
+
+ if (size > 0)
+ n = 2 * size;
+ else if (m_pInfo == 0)
+ n = 2048;
+ else
+ {
+ const long long ns = m_pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<long>(sec);
+ }
+ }
+
+ Cluster** const qq = new Cluster*[n];
+ Cluster** q = qq;
+
+ Cluster** p = m_clusters;
+ Cluster** const pp = p + count;
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_clusters;
+
+ m_clusters = qq;
+ size = n;
+ }
+
+ if (m_clusterPreloadCount > 0)
+ {
+ assert(m_clusters);
+
+ Cluster** const p = m_clusters + m_clusterCount;
+ assert(*p);
+ assert((*p)->m_index < 0);
+
+ Cluster** q = p + m_clusterPreloadCount;
+ assert(q < (m_clusters + size));
+
+ for (;;)
+ {
+ Cluster** const qq = q - 1;
+ assert((*qq)->m_index < 0);
+
+ *q = *qq;
+ q = qq;
+
+ if (q == p)
+ break;
+ }
+ }
+
+ m_clusters[idx] = pCluster;
+ ++m_clusterCount;
+}
+
+
+void Segment::PreloadCluster(Cluster* pCluster, ptrdiff_t idx)
+{
+ assert(pCluster);
+ assert(pCluster->m_index < 0);
+ assert(idx >= m_clusterCount);
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ long& size = m_clusterSize;
+ assert(size >= count);
+
+ if (count >= size)
+ {
+ long n;
+
+ if (size > 0)
+ n = 2 * size;
+ else if (m_pInfo == 0)
+ n = 2048;
+ else
+ {
+ const long long ns = m_pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<long>(sec);
+ }
+ }
+
+ Cluster** const qq = new Cluster*[n];
+ Cluster** q = qq;
+
+ Cluster** p = m_clusters;
+ Cluster** const pp = p + count;
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_clusters;
+
+ m_clusters = qq;
+ size = n;
+ }
+
+ assert(m_clusters);
+
+ Cluster** const p = m_clusters + idx;
+
+ Cluster** q = m_clusters + count;
+ assert(q >= p);
+ assert(q < (m_clusters + size));
+
+ while (q > p)
+ {
+ Cluster** const qq = q - 1;
+ assert((*qq)->m_index < 0);
+
+ *q = *qq;
+ q = qq;
+ }
+
+ m_clusters[idx] = pCluster;
+ ++m_clusterPreloadCount;
+}
+
+
+long Segment::Load()
+{
+ assert(m_clusters == NULL);
+ assert(m_clusterSize == 0);
+ assert(m_clusterCount == 0);
+
+ //Outermost (level 0) segment object has been constructed,
+ //and pos designates start of payload. We need to find the
+ //inner (level 1) elements.
+ const long long stop = m_start + m_size;
+
+#ifdef _DEBUG //TODO: this is really Microsoft-specific
+ {
+ long long total, available;
+
+ long hr = m_pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available >= total);
+ assert(stop <= total);
+ }
+#endif
+
+ while (m_pos < stop)
+ {
+ long long pos = m_pos;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ const long idx = m_clusterCount;
+ const long long off = idpos - m_start;
+
+ Cluster* const pCluster = Cluster::Parse(this, idx, off);
+ assert(pCluster);
+ assert(pCluster->m_index == idx);
+
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(m_clusterSize > idx);
+ assert(m_clusters[idx] == pCluster);
+ }
+ else if (id == 0x0C53BB6B) //Cues ID
+ {
+ assert(m_pCues == NULL);
+
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+ else if (id == 0x0549A966) //SegmentInfo ID
+ {
+ assert(m_pInfo == NULL);
+
+ m_pInfo = new SegmentInfo(this, pos, size);
+ assert(m_pInfo);
+ }
+ else if (id == 0x0654AE6B) //Tracks ID
+ {
+ assert(m_pTracks == NULL);
+
+ m_pTracks = new Tracks(this, pos, size);
+ assert(m_pTracks); //TODO
+ }
+
+ m_pos = pos + size; //consume payload
+ }
+
+ assert(m_pos >= stop);
+
+ if (m_pInfo == NULL)
+ return E_FILE_FORMAT_INVALID; //TODO: ignore this case?
+
+ if (m_pTracks == NULL)
+ return E_FILE_FORMAT_INVALID;
+
+ if (m_clusters == NULL) //TODO: ignore this case?
+ return E_FILE_FORMAT_INVALID;
+
+ //TODO: decide whether we require Cues element
+ //if (m_pCues == NULL)
+ // return E_FILE_FORMAT_INVALID;
+
+ return 0;
+}
+
+
+void Segment::ParseSeekHead(long long start, long long size_)
+{
+ long long pos = start;
+ const long long stop = start + size_;
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(m_pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x0DBB) //SeekEntry ID
+ ParseSeekEntry(pos, size);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(pos == stop);
+}
+
+
+void Segment::ParseCues(long long off)
+{
+ if (m_pCues)
+ return;
+
+ //odbgstream os;
+ //os << "Segment::ParseCues (begin)" << endl;
+
+ long long pos = m_start + off;
+ const long long stop = m_start + m_size;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0);
+ assert((pos + len) <= stop);
+
+ const long long idpos = pos;
+
+ const long long id = ReadUInt(m_pReader, idpos, len);
+ assert(id == 0x0C53BB6B); //Cues ID
+
+ pos += len; //consume ID
+ assert(pos < stop);
+
+ //Read Size
+
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0);
+ assert((pos + len) <= stop);
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0);
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop);
+
+ //Pos now points to start of payload
+
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+
+ //os << "Segment::ParseCues (end)" << endl;
+}
+
+
+void Segment::ParseSeekEntry(
+ long long start,
+ long long size_)
+{
+ long long pos = start;
+
+ const long long stop = start + size_;
+
+ long len;
+
+ const long long seekIdId = ReadUInt(m_pReader, pos, len);
+ //seekIdId;
+ assert(seekIdId == 0x13AB); //SeekID ID
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long seekIdSize = ReadUInt(m_pReader, pos, len);
+ assert(seekIdSize >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ const long long seekId = ReadUInt(m_pReader, pos, len); //payload
+ assert(seekId >= 0);
+ assert(len == seekIdSize);
+ assert((pos + len) <= stop);
+
+ pos += seekIdSize; //consume payload
+
+ const long long seekPosId = ReadUInt(m_pReader, pos, len);
+ //seekPosId;
+ assert(seekPosId == 0x13AC); //SeekPos ID
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long seekPosSize = ReadUInt(m_pReader, pos, len);
+ assert(seekPosSize >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+ assert((pos + seekPosSize) <= stop);
+
+ const long long seekOff = UnserializeUInt(m_pReader, pos, seekPosSize);
+ assert(seekOff >= 0);
+ assert(seekOff < m_size);
+
+ pos += seekPosSize; //consume payload
+ assert(pos == stop);
+
+ const long long seekPos = m_start + seekOff;
+ assert(seekPos < (m_start + m_size));
+
+ if (seekId == 0x0C53BB6B) //Cues ID
+ ParseCues(seekOff);
+}
+
+
+Cues::Cues(Segment* pSegment, long long start_, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start_),
+ m_size(size_),
+ m_cue_points(NULL),
+ m_count(0),
+ m_preload_count(0),
+ m_pos(start_)
+{
+}
+
+
+Cues::~Cues()
+{
+ const size_t n = m_count + m_preload_count;
+
+ CuePoint** p = m_cue_points;
+ CuePoint** const q = p + n;
+
+ while (p != q)
+ {
+ CuePoint* const pCP = *p++;
+ assert(pCP);
+
+ delete pCP;
+ }
+
+ delete[] m_cue_points;
+}
+
+
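+//Cue points are loaded lazily, in two phases: Init scans the Cues element and
+//records the file position of each CuePoint (via PreloadCuePoint), and
+//LoadCuePoint then fully parses one pending cue point per call.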
+void Cues::Init() const
+{
+ if (m_cue_points)
+ return;
+
+ assert(m_count == 0);
+ assert(m_preload_count == 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ const long long stop = m_start + m_size;
+ long long pos = m_start;
+
+ size_t cue_points_size = 0;
+
+ while (pos < stop)
+ {
+ const long long idpos = pos;
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x3B) //CuePoint ID
+ PreloadCuePoint(cue_points_size, idpos);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+}
+
+
+void Cues::PreloadCuePoint(
+ size_t& cue_points_size,
+ long long pos) const
+{
+ assert(m_count == 0);
+
+ if (m_preload_count >= cue_points_size)
+ {
+ size_t n;
+
+ if (cue_points_size > 0)
+ n = static_cast<size_t>(2 * cue_points_size);
+ else
+ {
+ const SegmentInfo* const pInfo = m_pSegment->GetInfo();
+
+ if (pInfo == NULL)
+ n = 2048;
+ else
+ {
+ const long long ns = pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<size_t>(sec);
+ }
+ }
+ }
+
+ CuePoint** const qq = new CuePoint*[n];
+ CuePoint** q = qq; //beginning of target
+
+ CuePoint** p = m_cue_points; //beginning of source
+ CuePoint** const pp = p + m_preload_count; //end of source
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_cue_points;
+
+ m_cue_points = qq;
+ cue_points_size = n;
+ }
+
+ CuePoint* const pCP = new CuePoint(m_preload_count, pos);
+ m_cue_points[m_preload_count++] = pCP;
+}
+
+
+bool Cues::LoadCuePoint() const
+{
+ //odbgstream os;
+ //os << "Cues::LoadCuePoint" << endl;
+
+ const long long stop = m_start + m_size;
+
+ if (m_pos >= stop)
+ return false; //nothing else to do
+
+ Init();
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ while (m_pos < stop)
+ {
+ const long long idpos = m_pos;
+
+ long len;
+
+ const long long id = ReadUInt(pReader, m_pos, len);
+ assert(id >= 0); //TODO
+ assert((m_pos + len) <= stop);
+
+ m_pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, m_pos, len);
+ assert(size >= 0);
+ assert((m_pos + len) <= stop);
+
+ m_pos += len; //consume Size field
+ assert((m_pos + size) <= stop);
+
+ if (id != 0x3B) //CuePoint ID
+ {
+ m_pos += size; //consume payload
+ assert(m_pos <= stop);
+
+ continue;
+ }
+
+ assert(m_preload_count > 0);
+
+ CuePoint* const pCP = m_cue_points[m_count];
+ assert(pCP);
+ assert((pCP->GetTimeCode() >= 0) || (-pCP->GetTimeCode() == idpos));
+
+ pCP->Load(pReader);
+ ++m_count;
+ --m_preload_count;
+
+ m_pos += size; //consume payload
+ assert(m_pos <= stop);
+
+ break;
+ }
+
+ return (m_pos < stop);
+}
+
+
+bool Cues::Find(
+ long long time_ns,
+ const Track* pTrack,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP) const
+{
+ assert(time_ns >= 0);
+ assert(pTrack);
+
+ LoadCuePoint();
+
+ assert(m_cue_points);
+ assert(m_count > 0);
+
+ CuePoint** const ii = m_cue_points;
+ CuePoint** i = ii;
+
+ CuePoint** const jj = ii + m_count + m_preload_count;
+ CuePoint** j = jj;
+
+ pCP = *i;
+ assert(pCP);
+
+ if (time_ns <= pCP->GetTime(m_pSegment))
+ {
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+ }
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[ii, i) <= time_ns
+ //[i, j) ?
+ //[j, jj) > time_ns
+
+ CuePoint** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ CuePoint* const pCP = *k;
+ assert(pCP);
+
+ pCP->Load(pReader);
+
+ const long long t = pCP->GetTime(m_pSegment);
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i <= jj);
+ assert(i > ii);
+
+ pCP = *--i;
+ assert(pCP);
+ assert(pCP->GetTime(m_pSegment) <= time_ns);
+
+ //TODO: here and elsewhere, it's probably not correct to search
+ //for the cue point with this time, and then search for a matching
+ //track. In principle, the matching track could be on some earlier
+ //cue point, and with our current algorithm, we'd miss it. To make
+ //this bullet-proof, we'd need to create a secondary structure,
+ //with a list of cue points that apply to a track, and then search
+ //that track-based structure for a matching cue point.
+
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+}
+
+
+#if 0
+bool Cues::FindNext(
+ long long time_ns,
+ const Track* pTrack,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP) const
+{
+ pCP = 0;
+ pTP = 0;
+
+ if (m_count == 0)
+ return false;
+
+ assert(m_cue_points);
+
+ const CuePoint* const* const ii = m_cue_points;
+ const CuePoint* const* i = ii;
+
+ const CuePoint* const* const jj = ii + m_count;
+ const CuePoint* const* j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[ii, i) <= time_ns
+ //[i, j) ?
+ //[j, jj) > time_ns
+
+ const CuePoint* const* const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ pCP = *k;
+ assert(pCP);
+
+ const long long t = pCP->GetTime(m_pSegment);
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i <= jj);
+
+ if (i >= jj) //time_ns is greater than max cue point
+ return false;
+
+ pCP = *i;
+ assert(pCP);
+ assert(pCP->GetTime(m_pSegment) > time_ns);
+
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+}
+#endif
+
+
+const CuePoint* Cues::GetFirst() const
+{
+ LoadCuePoint(); //init cues
+
+ const size_t count = m_count + m_preload_count;
+
+ if (count == 0) //weird
+ return NULL;
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+
+ CuePoint* const pCP = pp[0];
+ assert(pCP);
+ assert(pCP->GetTimeCode() >= 0);
+
+ return pCP;
+}
+
+
+const CuePoint* Cues::GetLast() const
+{
+ LoadCuePoint(); //init cues
+
+ const size_t count = m_count + m_preload_count;
+
+ if (count == 0) //weird
+ return NULL;
+
+ const size_t index = count - 1;
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+
+ CuePoint* const pCP = pp[index];
+ assert(pCP);
+
+ pCP->Load(m_pSegment->m_pReader);
+ assert(pCP->GetTimeCode() >= 0);
+
+ return pCP;
+}
+
+
+const CuePoint* Cues::GetNext(const CuePoint* pCurr) const
+{
+ if (pCurr == NULL)
+ return NULL;
+
+ assert(pCurr->GetTimeCode() >= 0);
+ assert(m_cue_points);
+ assert(m_count >= 1);
+
+ const size_t count = m_count + m_preload_count;
+
+ size_t index = pCurr->m_index;
+ assert(index < count);
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+ assert(pp[index] == pCurr);
+
+ ++index;
+
+ if (index >= count)
+ return NULL;
+
+ CuePoint* const pNext = pp[index];
+ assert(pNext);
+
+ pNext->Load(m_pSegment->m_pReader);
+
+ return pNext;
+}
+
+
+const BlockEntry* Cues::GetBlock(
+ const CuePoint* pCP,
+ const CuePoint::TrackPosition* pTP) const
+{
+ if (pCP == NULL)
+ return NULL;
+
+ if (pTP == NULL)
+ return NULL;
+
+ return m_pSegment->GetBlock(*pCP, *pTP);
+}
+
+
+const BlockEntry* Segment::GetBlock(
+ const CuePoint& cp,
+ const CuePoint::TrackPosition& tp)
+{
+ Cluster** const ii = m_clusters;
+ Cluster** i = ii;
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ Cluster** const jj = ii + count;
+ Cluster** j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[ii, i) < pTP->m_pos
+ //[i, j) ?
+ //[j, jj) > pTP->m_pos
+
+ Cluster** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ Cluster* const pCluster = *k;
+ assert(pCluster);
+
+ const long long pos_ = pCluster->m_pos;
+ assert(pos_);
+
+ const long long pos = pos_ * ((pos_ < 0) ? -1 : 1);
+
+ if (pos < tp.m_pos)
+ i = k + 1;
+ else if (pos > tp.m_pos)
+ j = k;
+ else
+ return pCluster->GetEntry(cp, tp);
+ }
+
+ assert(i == j);
+
+ Cluster* const pCluster = Cluster::Parse(this, -1, tp.m_pos);
+ const ptrdiff_t idx = i - m_clusters;
+
+ PreloadCluster(pCluster, idx);
+ assert(m_clusters);
+ assert(m_clusterPreloadCount > 0);
+ assert(m_clusters[idx] == pCluster);
+
+ return pCluster->GetEntry(cp, tp);
+}
+
+
+
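+//Until Load is called, m_timecode holds the negated file position of the
+//CuePoint element (so a negative value means "not yet parsed"); Load replaces
+//it with the actual, non-negative cue timecode.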
+CuePoint::CuePoint(size_t idx, long long pos) :
+ m_index(idx),
+ m_timecode(-1 * pos),
+ m_track_positions(NULL),
+ m_track_positions_count(0)
+{
+ assert(pos > 0);
+}
+
+
+CuePoint::~CuePoint()
+{
+ delete[] m_track_positions;
+}
+
+
+void CuePoint::Load(IMkvReader* pReader)
+{
+ //odbgstream os;
+ //os << "CuePoint::Load(begin): timecode=" << m_timecode << endl;
+
+ if (m_timecode >= 0) //already loaded
+ return;
+
+ assert(m_track_positions == NULL);
+ assert(m_track_positions_count == 0);
+
+ long long pos_ = -m_timecode;
+
+ long long stop;
+
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos_, len);
+ assert(id == 0x3B); //CuePoint ID
+ //assert((pos + len) <= stop);
+
+ pos_ += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos_, len);
+ assert(size >= 0);
+ //assert((pos + len) <= stop);
+
+ pos_ += len; //consume Size field
+ //assert((pos + size) <= stop);
+
+ //pos_ now points to start of payload
+
+ stop = pos_ + size;
+ }
+
+ long long pos = pos_;
+
+ //First count number of track positions
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x33) //CueTime ID
+ m_timecode = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x37) //CueTrackPosition(s) ID
+ ++m_track_positions_count;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(m_timecode >= 0);
+ assert(m_track_positions_count > 0);
+
+ //os << "CuePoint::Load(cont'd): idpos=" << idpos
+ // << " timecode=" << m_timecode
+ // << endl;
+
+ m_track_positions = new TrackPosition[m_track_positions_count];
+
+ //Now parse track positions
+
+ TrackPosition* p = m_track_positions;
+ pos = pos_;
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x37) //CueTrackPosition(s) ID
+ {
+ TrackPosition& tp = *p++;
+ tp.Parse(pReader, pos, size);
+ }
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(size_t(p - m_track_positions) == m_track_positions_count);
+}
+
+
+
+void CuePoint::TrackPosition::Parse(
+ IMkvReader* pReader,
+ long long start_,
+ long long size_)
+{
+ const long long stop = start_ + size_;
+ long long pos = start_;
+
+ m_track = -1;
+ m_pos = -1;
+ m_block = 1; //default
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x77) //CueTrack ID
+ m_track = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x71) //CueClusterPos ID
+ m_pos = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x1378) //CueBlockNumber
+ m_block = UnserializeUInt(pReader, pos, size);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(m_pos >= 0);
+ //assert(m_track > 0);
+ //assert(m_block > 0);
+}
+
+
+const CuePoint::TrackPosition* CuePoint::Find(const Track* pTrack) const
+{
+ assert(pTrack);
+
+ const long long n = pTrack->GetNumber();
+
+ const TrackPosition* i = m_track_positions;
+ const TrackPosition* const j = i + m_track_positions_count;
+
+ while (i != j)
+ {
+ const TrackPosition& p = *i++;
+
+ if (p.m_track == n)
+ return &p;
+ }
+
+ return NULL; //no matching track number found
+}
+
+
+long long CuePoint::GetTimeCode() const
+{
+ return m_timecode;
+}
+
+long long CuePoint::GetTime(Segment* pSegment) const
+{
+ assert(pSegment);
+ assert(m_timecode >= 0);
+
+ const SegmentInfo* const pInfo = pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long time = scale * m_timecode;
+
+ return time;
+}
+
+
+long long Segment::Unparsed() const
+{
+ const long long stop = m_start + m_size;
+
+ const long long result = stop - m_pos;
+ assert(result >= 0);
+
+ return result;
+}
+
+
+Cluster* Segment::GetFirst()
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ Cluster* const pCluster = m_clusters[0];
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+Cluster* Segment::GetLast()
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ const long idx = m_clusterCount - 1;
+
+ Cluster* const pCluster = m_clusters[idx];
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+unsigned long Segment::GetCount() const
+{
+ return m_clusterCount;
+}
+
+
+Cluster* Segment::GetNext(const Cluster* pCurr)
+{
+ assert(pCurr);
+ assert(pCurr != &m_eos);
+ assert(m_clusters);
+
+ long idx = pCurr->m_index;
+
+ if (idx >= 0)
+ {
+ assert(m_clusterCount > 0);
+ assert(idx < m_clusterCount);
+ assert(pCurr == m_clusters[idx]);
+
+ ++idx;
+
+ if (idx >= m_clusterCount)
+ return &m_eos; //caller will LoadCluster as desired
+
+ Cluster* const pNext = m_clusters[idx];
+ assert(pNext);
+ assert(pNext->m_index >= 0);
+ assert(pNext->m_index == idx);
+
+ return pNext;
+ }
+
+ assert(m_clusterPreloadCount > 0);
+
+ const long long off_ = pCurr->m_pos;
+ const long long off = off_ * ((off_ < 0) ? -1 : 1);
+
+ long long pos = m_start + off;
+ const long long stop = m_start + m_size; //end of segment
+
+ {
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long id = ReadUInt(m_pReader, pos, len);
+ assert(id == 0x0F43B675); //Cluster ID //TODO
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size > 0); //TODO
+ assert((pCurr->m_size <= 0) || (pCurr->m_size == size));
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop); //TODO
+
+ //Pos now points to start of payload
+
+ pos += size; //consume payload
+ }
+
+ long long off_next = 0;
+
+ while (pos < stop)
+ {
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long idpos = pos; //pos of next (potential) cluster
+
+ const long long id = ReadUInt(m_pReader, idpos, len);
+ assert(id > 0); //TODO
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0); //TODO
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop); //TODO
+
+ //Pos now points to start of payload
+
+ if (size == 0) //weird
+ continue;
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ off_next = idpos - m_start;
+ break;
+ }
+
+ pos += size; //consume payload
+ }
+
+ if (off_next <= 0)
+ return 0;
+
+ Cluster** const ii = m_clusters + m_clusterCount;
+ Cluster** i = ii;
+
+ Cluster** const jj = ii + m_clusterPreloadCount;
+ Cluster** j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[0, i) < pos_next
+ //[i, j) ?
+ //[j, jj) > pos_next
+
+ Cluster** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ Cluster* const pNext = *k;
+ assert(pNext);
+ assert(pNext->m_index < 0);
+
+ const long long pos_ = pNext->m_pos;
+ assert(pos_);
+
+ pos = pos_ * ((pos_ < 0) ? -1 : 1);
+
+ if (pos < off_next)
+ i = k + 1;
+ else if (pos > off_next)
+ j = k;
+ else
+ return pNext;
+ }
+
+ assert(i == j);
+
+ Cluster* const pNext = Cluster::Parse(this, -1, off_next);
+ const ptrdiff_t idx_next = i - m_clusters; //insertion position
+
+ PreloadCluster(pNext, idx_next);
+ assert(m_clusters);
+ assert(idx_next < m_clusterSize);
+ assert(m_clusters[idx_next] == pNext);
+
+ return pNext;
+}
+
+
+Cluster* Segment::FindCluster(long long time_ns)
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ {
+ Cluster* const pCluster = m_clusters[0];
+ assert(pCluster);
+ assert(pCluster->m_index == 0);
+
+ if (time_ns <= pCluster->GetTime())
+ return pCluster;
+ }
+
+ //Binary search of cluster array
+
+ long i = 0;
+ long j = m_clusterCount;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[0, i) <= time_ns
+ //[i, j) ?
+ //[j, m_clusterCount) > time_ns
+
+ const long k = i + (j - i) / 2;
+ assert(k < m_clusterCount);
+
+ Cluster* const pCluster = m_clusters[k];
+ assert(pCluster);
+ assert(pCluster->m_index == k);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i > 0);
+ assert(i <= m_clusterCount);
+
+ const long k = i - 1;
+
+ Cluster* const pCluster = m_clusters[k];
+ assert(pCluster);
+ assert(pCluster->m_index == k);
+ assert(pCluster->GetTime() <= time_ns);
+
+ return pCluster;
+}
+
+
+const BlockEntry* Segment::Seek(
+ long long time_ns,
+ const Track* pTrack)
+{
+ assert(pTrack);
+
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return pTrack->GetEOS();
+
+ Cluster** const i = m_clusters;
+ assert(i);
+
+ {
+ Cluster* const pCluster = *i;
+ assert(pCluster);
+ assert(pCluster->m_index == 0); //m_clusterCount > 0
+ assert(pCluster->m_pSegment == this);
+
+ if (time_ns <= pCluster->GetTime())
+ return pCluster->GetEntry(pTrack);
+ }
+
+ Cluster** const j = i + m_clusterCount;
+
+ if (pTrack->GetType() == 2) //audio
+ {
+ //TODO: we could decide to use cues for this, as we do for video.
+ //But we only use it for video because looking around for a keyframe
+ //can get expensive. Audio doesn't require anything special so a
+ //straight cluster search is good enough (we assume).
+
+ Cluster** lo = i;
+ Cluster** hi = j;
+
+ while (lo < hi)
+ {
+ //INVARIANT:
+ //[i, lo) <= time_ns
+ //[lo, hi) ?
+ //[hi, j) > time_ns
+
+ Cluster** const mid = lo + (hi - lo) / 2;
+ assert(mid < hi);
+
+ Cluster* const pCluster = *mid;
+ assert(pCluster);
+ assert(pCluster->m_index == long(mid - m_clusters));
+ assert(pCluster->m_pSegment == this);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ lo = mid + 1;
+ else
+ hi = mid;
+
+ assert(lo <= hi);
+ }
+
+ assert(lo == hi);
+ assert(lo > i);
+ assert(lo <= j);
+
+ Cluster* const pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ return pCluster->GetEntry(pTrack);
+ }
+
+ assert(pTrack->GetType() == 1); //video
+
+ Cluster** lo = i;
+ Cluster** hi = j;
+
+ while (lo < hi)
+ {
+ //INVARIANT:
+ //[i, lo) <= time_ns
+ //[lo, hi) ?
+ //[hi, j) > time_ns
+
+ Cluster** const mid = lo + (hi - lo) / 2;
+ assert(mid < hi);
+
+ Cluster* const pCluster = *mid;
+ assert(pCluster);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ lo = mid + 1;
+ else
+ hi = mid;
+
+ assert(lo <= hi);
+ }
+
+ assert(lo == hi);
+ assert(lo > i);
+ assert(lo <= j);
+
+ Cluster* pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ {
+ const BlockEntry* const pBlockEntry = pCluster->GetEntry(pTrack);
+ assert(pBlockEntry);
+
+ if (!pBlockEntry->EOS()) //found a keyframe
+ {
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+
+ //TODO: this isn't necessarily the keyframe we want,
+            //since there might be another keyframe on this same
+            //cluster with a greater timecode, one that is still
+            //less than the requested time.  For now we simply
+            //return the first keyframe we find.
+
+ if (pBlock->GetTime(pCluster) <= time_ns)
+ return pBlockEntry;
+ }
+ }
+
+ const VideoTrack* const pVideo = static_cast<const VideoTrack*>(pTrack);
+
+ while (lo != i)
+ {
+ pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ const BlockEntry* const pBlockEntry = pCluster->GetMaxKey(pVideo);
+ assert(pBlockEntry);
+
+ if (!pBlockEntry->EOS())
+ return pBlockEntry;
+ }
+
+ //weird: we're on the first cluster, but no keyframe found
+ //should never happen but we must return something anyway
+
+ return pTrack->GetEOS();
+}
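+
+
+//Typical (illustrative) use of Seek, assuming the caller owns a loaded
+//Segment* named pSegment and a video Track* named pVideoTrack:
+//
+//  const long long time_ns = 5000000000LL;  //target time: 5 seconds
+//
+//  const BlockEntry* const pEntry = pSegment->Seek(time_ns, pVideoTrack);
+//
+//  if (!pEntry->EOS())
+//  {
+//      //pEntry designates a keyframe at or before time_ns; decode from
+//      //here and discard output until the target time is reached
+//  }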
+
+
+#if 0
+bool Segment::SearchCues(
+ long long time_ns,
+ Track* pTrack,
+ Cluster*& pCluster,
+ const BlockEntry*& pBlockEntry,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP)
+{
+ if (pTrack->GetType() != 1) //not video
+ return false; //TODO: for now, just handle video stream
+
+ if (m_pCues == NULL)
+ return false;
+
+ if (!m_pCues->Find(time_ns, pTrack, pCP, pTP))
+ return false; //weird
+
+ assert(pCP);
+ assert(pTP);
+ assert(pTP->m_track == pTrack->GetNumber());
+
+ //We have the cue point and track position we want,
+ //so we now need to search for the cluster having
+ //the indicated position.
+
+ return GetCluster(pCP, pTP, pCluster, pBlockEntry);
+}
+#endif
+
+
+Tracks* Segment::GetTracks() const
+{
+ return m_pTracks;
+}
+
+
+const SegmentInfo* Segment::GetInfo() const
+{
+ return m_pInfo;
+}
+
+
+const Cues* Segment::GetCues() const
+{
+ return m_pCues;
+}
+
+
+long long Segment::GetDuration() const
+{
+ assert(m_pInfo);
+ return m_pInfo->GetDuration();
+}
+
+
+SegmentInfo::SegmentInfo(Segment* pSegment, long long start, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start),
+ m_size(size_),
+ m_pMuxingAppAsUTF8(NULL),
+ m_pWritingAppAsUTF8(NULL),
+ m_pTitleAsUTF8(NULL)
+{
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size_;
+
+ m_timecodeScale = 1000000;
+ m_duration = -1;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x0AD7B1, m_timecodeScale))
+ assert(m_timecodeScale > 0);
+
+ else if (Match(pReader, pos, 0x0489, m_duration))
+ assert(m_duration >= 0);
+
+ else if (Match(pReader, pos, 0x0D80, m_pMuxingAppAsUTF8)) //[4D][80]
+ assert(m_pMuxingAppAsUTF8);
+
+ else if (Match(pReader, pos, 0x1741, m_pWritingAppAsUTF8)) //[57][41]
+ assert(m_pWritingAppAsUTF8);
+
+ else if (Match(pReader, pos, 0x3BA9, m_pTitleAsUTF8)) //[7B][A9]
+ assert(m_pTitleAsUTF8);
+
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+ assert((stop - pos) > 0);
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len + size; //consume size and payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+}
+
+SegmentInfo::~SegmentInfo()
+{
+ if (m_pMuxingAppAsUTF8)
+ {
+ delete[] m_pMuxingAppAsUTF8;
+ m_pMuxingAppAsUTF8 = NULL;
+ }
+
+ if (m_pWritingAppAsUTF8)
+ {
+ delete[] m_pWritingAppAsUTF8;
+ m_pWritingAppAsUTF8 = NULL;
+ }
+
+ if (m_pTitleAsUTF8)
+ {
+ delete[] m_pTitleAsUTF8;
+ m_pTitleAsUTF8 = NULL;
+ }
+}
+
+long long SegmentInfo::GetTimeCodeScale() const
+{
+ return m_timecodeScale;
+}
+
+
+long long SegmentInfo::GetDuration() const
+{
+ if (m_duration < 0)
+ return -1;
+
+ assert(m_timecodeScale >= 1);
+
+ const double dd = double(m_duration) * double(m_timecodeScale);
+ const long long d = static_cast<long long>(dd);
+
+ return d;
+}
+
+const char* SegmentInfo::GetMuxingAppAsUTF8() const
+{
+ return m_pMuxingAppAsUTF8;
+}
+
+
+const char* SegmentInfo::GetWritingAppAsUTF8() const
+{
+ return m_pWritingAppAsUTF8;
+}
+
+const char* SegmentInfo::GetTitleAsUTF8() const
+{
+ return m_pTitleAsUTF8;
+}
+
+Track::Track(Segment* pSegment, const Info& i) :
+ m_pSegment(pSegment),
+ m_info(i)
+{
+}
+
+Track::~Track()
+{
+ Info& info = const_cast<Info&>(m_info);
+ info.Clear();
+}
+
+Track::Info::Info():
+ type(-1),
+ number(-1),
+ uid(-1),
+ nameAsUTF8(NULL),
+ codecId(NULL),
+ codecPrivate(NULL),
+ codecPrivateSize(0),
+ codecNameAsUTF8(NULL)
+{
+}
+
+
+void Track::Info::Clear()
+{
+ delete[] nameAsUTF8;
+ nameAsUTF8 = NULL;
+
+ delete[] codecId;
+ codecId = NULL;
+
+ delete[] codecPrivate;
+ codecPrivate = NULL;
+
+ codecPrivateSize = 0;
+
+ delete[] codecNameAsUTF8;
+ codecNameAsUTF8 = NULL;
+}
+
+const BlockEntry* Track::GetEOS() const
+{
+ return &m_eos;
+}
+
+long long Track::GetType() const
+{
+ return m_info.type;
+}
+
+long long Track::GetNumber() const
+{
+ return m_info.number;
+}
+
+const char* Track::GetNameAsUTF8() const
+{
+ return m_info.nameAsUTF8;
+}
+
+const char* Track::GetCodecNameAsUTF8() const
+{
+ return m_info.codecNameAsUTF8;
+}
+
+
+const char* Track::GetCodecId() const
+{
+ return m_info.codecId;
+}
+
+const unsigned char* Track::GetCodecPrivate(size_t& size) const
+{
+ size = m_info.codecPrivateSize;
+ return m_info.codecPrivate;
+}
+
+
+long Track::GetFirst(const BlockEntry*& pBlockEntry) const
+{
+ Cluster* pCluster = m_pSegment->GetFirst();
+
+    //If Segment::GetFirst returns NULL, then this must be a network
+    //download, and we haven't loaded any clusters yet.  In this case,
+    //returning the track's EOS entry from Track::GetFirst means the
+    //same thing.
+
+ for (int i = 0; i < 100; ++i) //arbitrary upper bound
+ {
+ if (pCluster == NULL)
+ {
+ pBlockEntry = GetEOS();
+ return 1;
+ }
+
+ if (pCluster->EOS())
+ {
+ if (m_pSegment->Unparsed() <= 0) //all clusters have been loaded
+ {
+ pBlockEntry = GetEOS();
+ return 1;
+ }
+
+ pBlockEntry = 0;
+ return E_BUFFER_NOT_FULL;
+ }
+
+ pBlockEntry = pCluster->GetFirst();
+
+ while (pBlockEntry)
+ {
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() == m_info.number)
+ return 0;
+
+ pBlockEntry = pCluster->GetNext(pBlockEntry);
+ }
+
+ pCluster = m_pSegment->GetNext(pCluster);
+ }
+
+ //NOTE: if we get here, it means that we didn't find a block with
+ //a matching track number. We interpret that as an error (which
+ //might be too conservative).
+
+ pBlockEntry = GetEOS(); //so we can return a non-NULL value
+ return 1;
+}
+
+
+long Track::GetNext(
+ const BlockEntry* pCurrEntry,
+ const BlockEntry*& pNextEntry) const
+{
+ assert(pCurrEntry);
+ assert(!pCurrEntry->EOS()); //?
+
+ const Block* const pCurrBlock = pCurrEntry->GetBlock();
+ assert(pCurrBlock->GetTrackNumber() == m_info.number);
+
+ Cluster* pCluster = pCurrEntry->GetCluster();
+ assert(pCluster);
+ assert(!pCluster->EOS());
+
+ pNextEntry = pCluster->GetNext(pCurrEntry);
+
+ for (int i = 0; i < 100; ++i) //arbitrary upper bound to search
+ {
+ while (pNextEntry)
+ {
+ const Block* const pNextBlock = pNextEntry->GetBlock();
+ assert(pNextBlock);
+
+ if (pNextBlock->GetTrackNumber() == m_info.number)
+ return 0;
+
+ pNextEntry = pCluster->GetNext(pNextEntry);
+ }
+
+ pCluster = m_pSegment->GetNext(pCluster);
+
+ if (pCluster == NULL)
+ {
+ pNextEntry = GetEOS();
+ return 1;
+ }
+
+ if (pCluster->EOS())
+ {
+ if (m_pSegment->Unparsed() <= 0) //all clusters have been loaded
+ {
+ pNextEntry = GetEOS();
+ return 1;
+ }
+
+ //TODO: there is a potential O(n^2) problem here: we tell the
+ //caller to (pre)load another cluster, which he does, but then he
+ //calls GetNext again, which repeats the same search. This is
+ //a pathological case, since the only way it can happen is if
+ //there exists a long sequence of clusters none of which contain a
+ // block from this track. One way around this problem is for the
+ //caller to be smarter when he loads another cluster: don't call
+ //us back until you have a cluster that contains a block from this
+ //track. (Of course, that's not cheap either, since our caller
+            //would have to scan each cluster as it's loaded, so that
+ //would just push back the problem.)
+
+ pNextEntry = NULL;
+ return E_BUFFER_NOT_FULL;
+ }
+
+ pNextEntry = pCluster->GetFirst();
+ }
+
+ //NOTE: if we get here, it means that we didn't find a block with
+ //a matching track number after lots of searching, so we give
+ //up trying.
+
+ pNextEntry = GetEOS(); //so we can return a non-NULL value
+ return 1;
+}
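+
+
+#if 0
+//Illustrative sketch only (not part of this change): how a streaming
+//caller might handle the E_BUFFER_NOT_FULL status described above.
+//Each time the search comes up short, the caller asks the segment to
+//parse one more cluster and then retries the query.
+static long GetFirstBlocking(
+    Segment* pSegment,
+    const Track* pTrack,
+    const BlockEntry*& pEntry)
+{
+    long status = pTrack->GetFirst(pEntry);
+
+    while (status == E_BUFFER_NOT_FULL)
+    {
+        if (pSegment->Unparsed() <= 0)  //nothing left to parse
+        {
+            pEntry = pTrack->GetEOS();
+            return 1;
+        }
+
+        pSegment->LoadCluster();  //parse one more cluster, then retry
+        status = pTrack->GetFirst(pEntry);
+    }
+
+    return status;  //0 = block found, 1 = end of stream
+}
+#endif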
+
+
+Track::EOSBlock::EOSBlock()
+{
+}
+
+
+bool Track::EOSBlock::EOS() const
+{
+ return true;
+}
+
+
+Cluster* Track::EOSBlock::GetCluster() const
+{
+ return NULL;
+}
+
+
+size_t Track::EOSBlock::GetIndex() const
+{
+ return 0;
+}
+
+
+const Block* Track::EOSBlock::GetBlock() const
+{
+ return NULL;
+}
+
+
+bool Track::EOSBlock::IsBFrame() const
+{
+ return false;
+}
+
+
+VideoTrack::VideoTrack(Segment* pSegment, const Info& i) :
+ Track(pSegment, i),
+ m_width(-1),
+ m_height(-1),
+ m_rate(-1)
+{
+ assert(i.type == 1);
+ assert(i.number > 0);
+
+ IMkvReader* const pReader = pSegment->m_pReader;
+
+ const Settings& s = i.settings;
+ assert(s.start >= 0);
+ assert(s.size >= 0);
+
+ long long pos = s.start;
+ assert(pos >= 0);
+
+ const long long stop = pos + s.size;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+#endif
+ if (Match(pReader, pos, 0x30, m_width))
+ ;
+ else if (Match(pReader, pos, 0x3A, m_height))
+ ;
+ else if (Match(pReader, pos, 0x0383E3, m_rate))
+ ;
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ assert((pos + size) <= stop);
+
+ //pos now designates start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ return;
+}
+
+
+bool VideoTrack::VetEntry(const BlockEntry* pBlockEntry) const
+{
+ assert(pBlockEntry);
+
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+ assert(pBlock->GetTrackNumber() == m_info.number);
+
+ return pBlock->IsKey();
+}
+
+
+long long VideoTrack::GetWidth() const
+{
+ return m_width;
+}
+
+
+long long VideoTrack::GetHeight() const
+{
+ return m_height;
+}
+
+
+double VideoTrack::GetFrameRate() const
+{
+ return m_rate;
+}
+
+
+AudioTrack::AudioTrack(Segment* pSegment, const Info& i) :
+ Track(pSegment, i),
+ m_rate(0.0),
+ m_channels(0),
+ m_bitDepth(-1)
+{
+ assert(i.type == 2);
+ assert(i.number > 0);
+
+ IMkvReader* const pReader = pSegment->m_pReader;
+
+ const Settings& s = i.settings;
+ assert(s.start >= 0);
+ assert(s.size >= 0);
+
+ long long pos = s.start;
+ assert(pos >= 0);
+
+ const long long stop = pos + s.size;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+#endif
+ if (Match(pReader, pos, 0x35, m_rate))
+ ;
+ else if (Match(pReader, pos, 0x1F, m_channels))
+ ;
+ else if (Match(pReader, pos, 0x2264, m_bitDepth))
+ ;
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ assert((pos + size) <= stop);
+
+ //pos now designates start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ return;
+}
+
+
+bool AudioTrack::VetEntry(const BlockEntry* pBlockEntry) const
+{
+ assert(pBlockEntry);
+
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+ assert(pBlock->GetTrackNumber() == m_info.number);
+
+ return true;
+}
+
+
+double AudioTrack::GetSamplingRate() const
+{
+ return m_rate;
+}
+
+
+long long AudioTrack::GetChannels() const
+{
+ return m_channels;
+}
+
+long long AudioTrack::GetBitDepth() const
+{
+ return m_bitDepth;
+}
+
+Tracks::Tracks(Segment* pSegment, long long start, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start),
+ m_size(size_),
+ m_trackEntries(NULL),
+ m_trackEntriesEnd(NULL)
+{
+ long long stop = m_start + m_size;
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos1 = m_start;
+ int count = 0;
+
+ while (pos1 < stop)
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos1, len);
+ assert(id >= 0);
+ assert((pos1 + len) <= stop);
+
+ pos1 += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos1, len);
+ assert(size >= 0);
+ assert((pos1 + len) <= stop);
+
+ pos1 += len; //consume length of size
+
+        //pos1 now designates the start of the element's payload
+ if (id == 0x2E) //TrackEntry ID
+ ++count;
+
+ pos1 += size; //consume payload
+ assert(pos1 <= stop);
+ }
+
+ if (count <= 0)
+ return;
+
+ m_trackEntries = new Track*[count];
+ m_trackEntriesEnd = m_trackEntries;
+
+ long long pos = m_start;
+
+ while (pos < stop)
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size1 = ReadUInt(pReader, pos, len);
+ assert(size1 >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+
+        //pos now designates the start of the element's payload
+
+ if (id == 0x2E) //TrackEntry ID
+ ParseTrackEntry(pos, size1, *m_trackEntriesEnd++);
+
+ pos += size1; //consume payload
+ assert(pos <= stop);
+ }
+}
+
+
+unsigned long Tracks::GetTracksCount() const
+{
+ const ptrdiff_t result = m_trackEntriesEnd - m_trackEntries;
+ assert(result >= 0);
+
+ return static_cast<unsigned long>(result);
+}
+
+
+void Tracks::ParseTrackEntry(
+ long long start,
+ long long size,
+ Track*& pTrack)
+{
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size;
+
+ Track::Info i;
+
+ Track::Settings videoSettings;
+ videoSettings.start = -1;
+
+ Track::Settings audioSettings;
+ audioSettings.start = -1;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+        len; //reference the debug-only locals so they are not flagged as unused
+        id;
+#endif
+ if (Match(pReader, pos, 0x57, i.number))
+ assert(i.number > 0);
+ else if (Match(pReader, pos, 0x33C5, i.uid))
+ ;
+ else if (Match(pReader, pos, 0x03, i.type))
+ ;
+ else if (Match(pReader, pos, 0x136E, i.nameAsUTF8))
+ assert(i.nameAsUTF8);
+ else if (Match(pReader, pos, 0x06, i.codecId))
+ ;
+ else if (Match(pReader,
+ pos,
+ 0x23A2,
+ i.codecPrivate,
+ i.codecPrivateSize))
+ ;
+ else if (Match(pReader, pos, 0x058688, i.codecNameAsUTF8))
+ assert(i.codecNameAsUTF8);
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ const long long start = pos;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+
+ if (id == 0x60)
+ {
+ videoSettings.start = start;
+ videoSettings.size = size;
+ }
+ else if (id == 0x61)
+ {
+ audioSettings.start = start;
+ audioSettings.size = size;
+ }
+ }
+ }
+
+ assert(pos == stop);
+    //TODO: properly vet info.number, to ensure both its existence,
+ //and that it is unique among all tracks.
+ assert(i.number > 0);
+
+ //TODO: vet settings, to ensure that video settings (0x60)
+ //were specified when type = 1, and that audio settings (0x61)
+ //were specified when type = 2.
+ if (i.type == 1) //video
+ {
+ assert(audioSettings.start < 0);
+ assert(videoSettings.start >= 0);
+
+ i.settings = videoSettings;
+
+ VideoTrack* const t = new VideoTrack(m_pSegment, i);
+ assert(t); //TODO
+ pTrack = t;
+ }
+ else if (i.type == 2) //audio
+ {
+ assert(videoSettings.start < 0);
+ assert(audioSettings.start >= 0);
+
+ i.settings = audioSettings;
+
+ AudioTrack* const t = new AudioTrack(m_pSegment, i);
+ assert(t); //TODO
+ pTrack = t;
+ }
+ else
+ {
+        //we do not support other track types yet
+        //TODO: support other track types
+ i.Clear();
+
+ pTrack = NULL;
+ }
+
+ return;
+}
+
+
+Tracks::~Tracks()
+{
+ Track** i = m_trackEntries;
+ Track** const j = m_trackEntriesEnd;
+
+ while (i != j)
+ {
+ Track* const pTrack = *i++;
+ delete pTrack;
+ }
+
+ delete[] m_trackEntries;
+}
+
+
+Track* Tracks::GetTrackByNumber(unsigned long tn_) const
+{
+ const long long tn = tn_;
+
+ Track** i = m_trackEntries;
+ Track** const j = m_trackEntriesEnd;
+
+ while (i != j)
+ {
+ Track* const pTrack = *i++;
+
+ if (pTrack == NULL)
+ continue;
+
+ if (tn == pTrack->GetNumber())
+ return pTrack;
+ }
+
+ return NULL; //not found
+}
+
+
+Track* Tracks::GetTrackByIndex(unsigned long idx) const
+{
+ const ptrdiff_t count = m_trackEntriesEnd - m_trackEntries;
+
+ if (idx >= static_cast<unsigned long>(count))
+ return NULL;
+
+ return m_trackEntries[idx];
+}
+
+
+void Cluster::Load()
+{
+ assert(m_pSegment);
+ assert(m_pos);
+ assert(m_size);
+
+ if (m_pos > 0) //loaded
+ {
+ assert(m_size > 0);
+ assert(m_timecode >= 0);
+ return;
+ }
+
+ assert(m_pos < 0); //not loaded yet
+ assert(m_size < 0);
+ assert(m_timecode < 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ m_pos *= -1; //relative to segment
+ long long pos = m_pSegment->m_start + m_pos; //absolute
+
+ long len;
+
+ const long long id_ = ReadUInt(pReader, pos, len);
+ assert(id_ >= 0);
+ assert(id_ == 0x0F43B675); //Cluster ID
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+
+ pos += len; //consume size
+
+ m_size = size_;
+ const long long stop = pos + size_;
+
+ long long timecode = -1;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x67, timecode))
+ break;
+ else
+ {
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ break;
+
+ if (id == 0x23) //SimpleBlock ID
+ break;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos <= stop);
+ assert(timecode >= 0);
+
+ m_timecode = timecode;
+}
+
+
+Cluster* Cluster::Parse(
+ Segment* pSegment,
+ long idx,
+ long long off)
+{
+ assert(pSegment);
+ assert(off >= 0);
+ assert(off < pSegment->m_size);
+
+ Cluster* const pCluster = new Cluster(pSegment, idx, -off);
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+Cluster::Cluster() :
+ m_pSegment(NULL),
+ m_index(0),
+ m_pos(0),
+ m_size(0),
+ m_timecode(0),
+ m_entries(NULL),
+ m_entriesCount(0)
+{
+}
+
+
+Cluster::Cluster(
+ Segment* pSegment,
+ long idx,
+ long long off) :
+ m_pSegment(pSegment),
+ m_index(idx),
+ m_pos(off),
+ m_size(-1),
+ m_timecode(-1),
+ m_entries(NULL),
+ m_entriesCount(0)
+{
+}
+
+
+Cluster::~Cluster()
+{
+ BlockEntry** i = m_entries;
+ BlockEntry** const j = m_entries + m_entriesCount;
+
+ while (i != j)
+ {
+ BlockEntry* p = *i++;
+ assert(p);
+
+ delete p;
+ }
+
+ delete[] m_entries;
+}
+
+
+bool Cluster::EOS() const
+{
+ return (m_pSegment == NULL);
+}
+
+
+void Cluster::LoadBlockEntries()
+{
+ if (m_entries)
+ return;
+
+ assert(m_pSegment);
+ assert(m_pos);
+ assert(m_size);
+ assert(m_entriesCount == 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ if (m_pos < 0)
+ m_pos *= -1; //relative to segment
+
+ long long pos = m_pSegment->m_start + m_pos; //absolute
+
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+        id; //keep 'id' referenced when asserts are compiled out
+ assert(id >= 0);
+ assert(id == 0x0F43B675); //Cluster ID
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size > 0);
+
+ pos += len; //consume size
+
+ //pos now points to start of payload
+
+ if (m_size >= 0)
+ assert(size == m_size);
+ else
+ m_size = size;
+ }
+
+ const long long stop = pos + m_size;
+ long long timecode = -1; //of cluster itself
+
+ //First count the number of entries
+
+ long long idx = pos; //points to start of payload
+ m_entriesCount = 0;
+
+ while (idx < stop)
+ {
+ if (Match(pReader, idx, 0x67, timecode))
+ {
+ if (m_timecode >= 0)
+ assert(timecode == m_timecode);
+ else
+ m_timecode = timecode;
+ }
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, idx, len);
+ assert(id >= 0); //TODO
+ assert((idx + len) <= stop);
+
+ idx += len; //consume id
+
+ const long long size = ReadUInt(pReader, idx, len);
+ assert(size >= 0); //TODO
+ assert((idx + len) <= stop);
+
+ idx += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ ++m_entriesCount;
+ else if (id == 0x23) //SimpleBlock ID
+ ++m_entriesCount;
+
+ idx += size; //consume payload
+ assert(idx <= stop);
+ }
+ }
+
+ assert(idx == stop);
+ assert(m_timecode >= 0);
+
+ if (m_entriesCount == 0) //TODO: handle empty clusters
+ return;
+
+ m_entries = new BlockEntry*[m_entriesCount];
+ size_t index = 0;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x67, timecode))
+ assert(timecode == m_timecode);
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ ParseBlockGroup(pos, size, index++);
+ else if (id == 0x23) //SimpleBlock ID
+ ParseSimpleBlock(pos, size, index++);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+ assert(timecode >= 0);
+ assert(index == m_entriesCount);
+}
+
+
+
+long long Cluster::GetTimeCode()
+{
+ Load();
+ return m_timecode;
+}
+
+
+long long Cluster::GetTime()
+{
+ const long long tc = GetTimeCode();
+ assert(tc >= 0);
+
+ const SegmentInfo* const pInfo = m_pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long t = m_timecode * scale;
+
+ return t;
+}
+
+
+long long Cluster::GetFirstTime()
+{
+ const BlockEntry* const pEntry = GetFirst();
+
+ if (pEntry == NULL) //empty cluster
+ return GetTime();
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ return pBlock->GetTime(this);
+}
+
+
+long long Cluster::GetLastTime()
+{
+ const BlockEntry* const pEntry = GetLast();
+
+ if (pEntry == NULL) //empty cluster
+ return GetTime();
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ return pBlock->GetTime(this);
+}
+
+
+void Cluster::ParseBlockGroup(long long start, long long size, size_t index)
+{
+ assert(m_entries);
+ assert(m_entriesCount);
+ assert(index < m_entriesCount);
+
+ BlockGroup* const pGroup =
+ new (std::nothrow) BlockGroup(this, index, start, size);
+ assert(pGroup); //TODO
+
+ m_entries[index] = pGroup;
+}
+
+
+
+void Cluster::ParseSimpleBlock(long long start, long long size, size_t index)
+{
+ assert(m_entries);
+ assert(m_entriesCount);
+ assert(index < m_entriesCount);
+
+ SimpleBlock* const pSimpleBlock =
+ new (std::nothrow) SimpleBlock(this, index, start, size);
+ assert(pSimpleBlock); //TODO
+
+ m_entries[index] = pSimpleBlock;
+}
+
+
+const BlockEntry* Cluster::GetFirst()
+{
+ LoadBlockEntries();
+ //assert(m_entries);
+ //assert(m_entriesCount >= 1);
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ const BlockEntry* const pFirst = m_entries[0];
+ assert(pFirst);
+
+ return pFirst;
+}
+
+
+const BlockEntry* Cluster::GetLast()
+{
+ LoadBlockEntries();
+ //assert(m_entries);
+ //assert(m_entriesCount >= 1);
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ const size_t idx = m_entriesCount - 1;
+
+ const BlockEntry* const pLast = m_entries[idx];
+ assert(pLast);
+
+ return pLast;
+}
+
+
+const BlockEntry* Cluster::GetNext(const BlockEntry* pEntry) const
+{
+ assert(pEntry);
+ assert(m_entries);
+ assert(m_entriesCount);
+
+ size_t idx = pEntry->GetIndex();
+ assert(idx < m_entriesCount);
+ assert(m_entries[idx] == pEntry);
+
+ ++idx;
+
+ if (idx >= m_entriesCount)
+ return NULL;
+
+ return m_entries[idx];
+}
+
+
+const BlockEntry* Cluster::GetEntry(const Track* pTrack)
+{
+ assert(pTrack);
+
+ if (m_pSegment == NULL) //EOS
+ return pTrack->GetEOS();
+
+ LoadBlockEntries();
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ BlockEntry** i = m_entries;
+ assert(i);
+
+ BlockEntry** const j = i + m_entriesCount;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *i++;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != pTrack->GetNumber())
+ continue;
+
+ if (pTrack->VetEntry(pEntry))
+ return pEntry;
+ }
+
+ return pTrack->GetEOS(); //no satisfactory block found
+}
+
+
+const BlockEntry*
+Cluster::GetEntry(
+ const CuePoint& cp,
+ const CuePoint::TrackPosition& tp)
+{
+ assert(m_pSegment);
+
+ LoadBlockEntries();
+
+ if (m_entries == NULL)
+ return NULL;
+
+ const long long count = m_entriesCount;
+
+ if (count <= 0)
+ return NULL;
+
+ const long long tc = cp.GetTimeCode();
+
+ if ((tp.m_block > 0) && (tp.m_block <= count))
+ {
+ const size_t block = static_cast<size_t>(tp.m_block);
+ const size_t index = block - 1;
+
+ const BlockEntry* const pEntry = m_entries[index];
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if ((pBlock->GetTrackNumber() == tp.m_track) &&
+ (pBlock->GetTimeCode(this) == tc))
+ {
+ return pEntry;
+ }
+ }
+
+ const BlockEntry* const* i = m_entries;
+ const BlockEntry* const* const j = i + count;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *i++;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != tp.m_track)
+ continue;
+
+ const long long tc_ = pBlock->GetTimeCode(this);
+
+ if (tc_ < tc)
+ continue;
+
+ if (tc_ > tc)
+ return NULL;
+
+ const Tracks* const pTracks = m_pSegment->GetTracks();
+ assert(pTracks);
+
+ const long tn = static_cast<long>(tp.m_track);
+ const Track* const pTrack = pTracks->GetTrackByNumber(tn);
+
+ if (pTrack == NULL)
+ return NULL;
+
+ const long long type = pTrack->GetType();
+
+ if (type == 2) //audio
+ return pEntry;
+
+ if (type != 1) //not video
+ return NULL;
+
+ if (!pBlock->IsKey())
+ return NULL;
+
+ return pEntry;
+ }
+
+ return NULL;
+}
+
+
+const BlockEntry* Cluster::GetMaxKey(const VideoTrack* pTrack)
+{
+ assert(pTrack);
+
+ if (m_pSegment == NULL) //EOS
+ return pTrack->GetEOS();
+
+ LoadBlockEntries();
+ //assert(m_entries);
+
+ BlockEntry** i = m_entries + m_entriesCount;
+ BlockEntry** const j = m_entries;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *--i;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != pTrack->GetNumber())
+ continue;
+
+ if (pBlock->IsKey())
+ return pEntry;
+ }
+
+ return pTrack->GetEOS(); //no satisfactory block found
+}
+
+
+
+BlockEntry::BlockEntry()
+{
+}
+
+
+BlockEntry::~BlockEntry()
+{
+}
+
+
+SimpleBlock::SimpleBlock(
+ Cluster* pCluster,
+ size_t idx,
+ long long start,
+ long long size) :
+ m_pCluster(pCluster),
+ m_index(idx),
+ m_block(start, size, pCluster->m_pSegment->m_pReader)
+{
+}
+
+
+bool SimpleBlock::EOS() const
+{
+ return false;
+}
+
+
+Cluster* SimpleBlock::GetCluster() const
+{
+ return m_pCluster;
+}
+
+
+size_t SimpleBlock::GetIndex() const
+{
+ return m_index;
+}
+
+
+const Block* SimpleBlock::GetBlock() const
+{
+ return &m_block;
+}
+
+
+bool SimpleBlock::IsBFrame() const
+{
+ return false;
+}
+
+
+BlockGroup::BlockGroup(
+ Cluster* pCluster,
+ size_t idx,
+ long long start,
+ long long size_) :
+ m_pCluster(pCluster),
+ m_index(idx),
+ m_prevTimeCode(0),
+ m_nextTimeCode(0),
+ m_pBlock(NULL) //TODO: accept multiple blocks within a block group
+{
+ IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size_;
+
+ bool bSimpleBlock = false;
+ bool bReferenceBlock = false;
+
+ while (pos < stop)
+ {
+ short t;
+
+ if (Match(pReader, pos, 0x7B, t))
+ {
+ if (t < 0)
+ m_prevTimeCode = t;
+ else if (t > 0)
+ m_nextTimeCode = t;
+ else
+ assert(false);
+
+ bReferenceBlock = true;
+ }
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ switch (id)
+ {
+ case 0x23: //SimpleBlock ID
+ bSimpleBlock = true;
+ //YES, FALL THROUGH TO NEXT CASE
+
+ case 0x21: //Block ID
+ ParseBlock(pos, size);
+ break;
+
+ default:
+ break;
+ }
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+ assert(m_pBlock);
+
+ if (!bSimpleBlock)
+ m_pBlock->SetKey(!bReferenceBlock);
+}
+
+
+BlockGroup::~BlockGroup()
+{
+ delete m_pBlock;
+}
+
+
+void BlockGroup::ParseBlock(long long start, long long size)
+{
+ IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
+
+ Block* const pBlock = new Block(start, size, pReader);
+ assert(pBlock); //TODO
+
+ //TODO: the Matroska spec says you have multiple blocks within the
+ //same block group, with blocks ranked by priority (the flag bits).
+
+ assert(m_pBlock == NULL);
+ m_pBlock = pBlock;
+}
+
+
+bool BlockGroup::EOS() const
+{
+ return false;
+}
+
+
+Cluster* BlockGroup::GetCluster() const
+{
+ return m_pCluster;
+}
+
+
+size_t BlockGroup::GetIndex() const
+{
+ return m_index;
+}
+
+
+const Block* BlockGroup::GetBlock() const
+{
+ return m_pBlock;
+}
+
+
+short BlockGroup::GetPrevTimeCode() const
+{
+ return m_prevTimeCode;
+}
+
+
+short BlockGroup::GetNextTimeCode() const
+{
+ return m_nextTimeCode;
+}
+
+
+bool BlockGroup::IsBFrame() const
+{
+ return (m_nextTimeCode > 0);
+}
+
+
+
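+//Layout of a (Simple)Block payload, as parsed below: the track number as
+//an EBML varint, a 2-byte signed timecode relative to the enclosing
+//cluster, one flags byte (bit 7 is the SimpleBlock keyframe flag), and
+//then the frame data.  The constructor records the offset and size of the
+//frame data; it does not copy it.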
+Block::Block(long long start, long long size_, IMkvReader* pReader) :
+ m_start(start),
+ m_size(size_)
+{
+ long long pos = start;
+ const long long stop = start + size_;
+
+ long len;
+
+ m_track = ReadUInt(pReader, pos, len);
+ assert(m_track > 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume track number
+ assert((stop - pos) >= 2);
+
+ m_timecode = Unserialize2SInt(pReader, pos);
+
+ pos += 2;
+ assert((stop - pos) >= 1);
+
+ const long hr = pReader->Read(pos, 1, &m_flags);
+ assert(hr == 0L);
+
+ ++pos;
+ assert(pos <= stop);
+
+ m_frameOff = pos;
+
+ const long long frame_size = stop - pos;
+
+ assert(frame_size <= 2147483647L);
+
+ m_frameSize = static_cast<long>(frame_size);
+}
+
+
+long long Block::GetTimeCode(Cluster* pCluster) const
+{
+ assert(pCluster);
+
+ const long long tc0 = pCluster->GetTimeCode();
+ assert(tc0 >= 0);
+
+ const long long tc = tc0 + static_cast<long long>(m_timecode);
+ assert(tc >= 0);
+
+ return tc; //unscaled timecode units
+}
+
+
+long long Block::GetTime(Cluster* pCluster) const
+{
+ assert(pCluster);
+
+ const long long tc = GetTimeCode(pCluster);
+
+ const Segment* const pSegment = pCluster->m_pSegment;
+ const SegmentInfo* const pInfo = pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long ns = tc * scale;
+
+ return ns;
+}
+
+
+long long Block::GetTrackNumber() const
+{
+ return m_track;
+}
+
+
+bool Block::IsKey() const
+{
+ return ((m_flags & static_cast<unsigned char>(1 << 7)) != 0);
+}
+
+
+void Block::SetKey(bool bKey)
+{
+ if (bKey)
+ m_flags |= static_cast<unsigned char>(1 << 7);
+ else
+ m_flags &= 0x7F;
+}
+
+
+long long Block::GetOffset() const
+{
+ return m_frameOff;
+}
+
+
+long Block::GetSize() const
+{
+ return m_frameSize;
+}
+
+
+long Block::Read(IMkvReader* pReader, unsigned char* buf) const
+{
+ assert(pReader);
+ assert(buf);
+
+ const long hr = pReader->Read(m_frameOff, m_frameSize, buf);
+
+ return hr;
+}
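+
+
+#if 0
+//Illustrative sketch only (not part of this change): one way a client
+//might drive this parser, given some IMkvReader implementation.  Buffer
+//handling and error checks are placeholders.
+static void DemoParse(IMkvReader* pReader)
+{
+    long long pos = 0;
+
+    EBMLHeader ebmlHeader;
+    ebmlHeader.Parse(pReader, pos);  //pos now designates the segment
+
+    Segment* pSegment;
+    Segment::CreateInstance(pReader, pos, pSegment);  //check result in real code
+
+    pSegment->Load();  //"big bang" load of all headers and clusters
+
+    const Tracks* const pTracks = pSegment->GetTracks();
+    const unsigned long count = pTracks->GetTracksCount();
+
+    for (unsigned long i = 0; i < count; ++i)
+    {
+        const Track* const pTrack = pTracks->GetTrackByIndex(i);
+
+        if (pTrack == NULL)  //unsupported track type
+            continue;
+
+        const BlockEntry* pEntry;
+        long status = pTrack->GetFirst(pEntry);
+
+        while ((status == 0) && !pEntry->EOS())
+        {
+            const Block* const pBlock = pEntry->GetBlock();
+            //pBlock->Read(pReader, buf) copies pBlock->GetSize() bytes of
+            //frame data into a caller-supplied buffer
+
+            status = pTrack->GetNext(pEntry, pEntry);
+        }
+    }
+
+    delete pSegment;
+}
+#endif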
+
+
+} //end namespace mkvparser
diff --git a/media/libstagefright/matroska/mkvparser.hpp b/media/libstagefright/matroska/mkvparser.hpp
index 4d311b4..c46d349 100644
--- a/media/libstagefright/matroska/mkvparser.hpp
+++ b/media/libstagefright/matroska/mkvparser.hpp
@@ -1,428 +1,554 @@
-#ifndef MKVPARSER_HPP
-#define MKVPARSER_HPP
-
-#include <cstdlib>
-#include <cstdio>
-
-namespace mkvparser
-{
-
-const int E_FILE_FORMAT_INVALID = -2;
-const int E_BUFFER_NOT_FULL = -3;
-
-class IMkvReader
-{
-public:
- virtual int Read(long long position, long length, unsigned char* buffer) = 0;
- virtual int Length(long long* total, long long* available) = 0;
-protected:
- virtual ~IMkvReader();
-};
-
-long long GetUIntLength(IMkvReader*, long long, long&);
-long long ReadUInt(IMkvReader*, long long, long&);
-long long SyncReadUInt(IMkvReader*, long long pos, long long stop, long&);
-long long UnserializeUInt(IMkvReader*, long long pos, long long size);
-float Unserialize4Float(IMkvReader*, long long);
-double Unserialize8Double(IMkvReader*, long long);
-short Unserialize2SInt(IMkvReader*, long long);
-signed char Unserialize1SInt(IMkvReader*, long long);
-bool Match(IMkvReader*, long long&, unsigned long, long long&);
-bool Match(IMkvReader*, long long&, unsigned long, char*&);
-bool Match(IMkvReader*, long long&, unsigned long,unsigned char*&,
- size_t *optionalSize = NULL);
-bool Match(IMkvReader*, long long&, unsigned long, double&);
-bool Match(IMkvReader*, long long&, unsigned long, short&);
-
-
-struct EBMLHeader
-{
- EBMLHeader();
- ~EBMLHeader();
- long long m_version;
- long long m_readVersion;
- long long m_maxIdLength;
- long long m_maxSizeLength;
- char* m_docType;
- long long m_docTypeVersion;
- long long m_docTypeReadVersion;
-
- long long Parse(IMkvReader*, long long&);
-};
-
-
-class Segment;
-class Track;
-class Cluster;
-
-class Block
-{
- Block(const Block&);
- Block& operator=(const Block&);
-
-public:
- const long long m_start;
- const long long m_size;
-
- Block(long long start, long long size, IMkvReader*);
-
- unsigned long GetTrackNumber() const;
-
- long long GetTimeCode(Cluster*) const; //absolute, but not scaled
- long long GetTime(Cluster*) const; //absolute, and scaled (nanosecond units)
- bool IsKey() const;
- void SetKey(bool);
-
- long GetSize() const;
- long Read(IMkvReader*, unsigned char*) const;
-
-private:
- long long m_track; //Track::Number()
- short m_timecode; //relative to cluster
- unsigned char m_flags;
- long long m_frameOff;
- long m_frameSize;
-
-};
-
-
-class BlockEntry
-{
- BlockEntry(const BlockEntry&);
- BlockEntry& operator=(const BlockEntry&);
-
-public:
- virtual ~BlockEntry();
- virtual bool EOS() const = 0;
- virtual Cluster* GetCluster() const = 0;
- virtual size_t GetIndex() const = 0;
- virtual const Block* GetBlock() const = 0;
- virtual bool IsBFrame() const = 0;
-
-protected:
- BlockEntry();
-
-};
-
-
-class SimpleBlock : public BlockEntry
-{
- SimpleBlock(const SimpleBlock&);
- SimpleBlock& operator=(const SimpleBlock&);
-
-public:
- SimpleBlock(Cluster*, size_t, long long start, long long size);
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
-
-protected:
- Cluster* const m_pCluster;
- const size_t m_index;
- Block m_block;
-
-};
-
-
-class BlockGroup : public BlockEntry
-{
- BlockGroup(const BlockGroup&);
- BlockGroup& operator=(const BlockGroup&);
-
-public:
- BlockGroup(Cluster*, size_t, long long, long long);
- ~BlockGroup();
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
-
- short GetPrevTimeCode() const; //relative to block's time
- short GetNextTimeCode() const; //as above
-
-protected:
- Cluster* const m_pCluster;
- const size_t m_index;
-
-private:
- BlockGroup(Cluster*, size_t, unsigned long);
- void ParseBlock(long long start, long long size);
-
- short m_prevTimeCode;
- short m_nextTimeCode;
-
- //TODO: the Matroska spec says you can have multiple blocks within the
- //same block group, with blocks ranked by priority (the flag bits).
- //For now we just cache a single block.
-#if 0
- typedef std::deque<Block*> blocks_t;
- blocks_t m_blocks; //In practice should contain only a single element.
-#else
- Block* m_pBlock;
-#endif
-
-};
-
-
-class Track
-{
- Track(const Track&);
- Track& operator=(const Track&);
-
-public:
- Segment* const m_pSegment;
- virtual ~Track();
-
- long long GetType() const;
- unsigned long GetNumber() const;
- const char* GetNameAsUTF8() const;
- const char* GetCodecNameAsUTF8() const;
- const char* GetCodecId() const;
- const unsigned char* GetCodecPrivate(
- size_t *optionalSize = NULL) const;
-
- const BlockEntry* GetEOS() const;
-
- struct Settings
- {
- long long start;
- long long size;
- };
-
- struct Info
- {
- long long type;
- long long number;
- long long uid;
- char* nameAsUTF8;
- char* codecId;
- unsigned char* codecPrivate;
- size_t codecPrivateSize;
- char* codecNameAsUTF8;
- Settings settings;
- Info();
- void Clear();
- };
-
- long GetFirst(const BlockEntry*&) const;
- long GetNext(const BlockEntry* pCurr, const BlockEntry*& pNext) const;
- virtual bool VetEntry(const BlockEntry*) const = 0;
-
-protected:
- Track(Segment*, const Info&);
- const Info m_info;
-
- class EOSBlock : public BlockEntry
- {
- public:
- EOSBlock();
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
- };
-
- EOSBlock m_eos;
-
-};
-
-
-class VideoTrack : public Track
-{
- VideoTrack(const VideoTrack&);
- VideoTrack& operator=(const VideoTrack&);
-
-public:
- VideoTrack(Segment*, const Info&);
- long long GetWidth() const;
- long long GetHeight() const;
- double GetFrameRate() const;
-
- bool VetEntry(const BlockEntry*) const;
-
-private:
- long long m_width;
- long long m_height;
- double m_rate;
-
-};
-
-
-class AudioTrack : public Track
-{
- AudioTrack(const AudioTrack&);
- AudioTrack& operator=(const AudioTrack&);
-
-public:
- AudioTrack(Segment*, const Info&);
- double GetSamplingRate() const;
- long long GetChannels() const;
- long long GetBitDepth() const;
- bool VetEntry(const BlockEntry*) const;
-
-private:
- double m_rate;
- long long m_channels;
- long long m_bitDepth;
-};
-
-
-class Tracks
-{
- Tracks(const Tracks&);
- Tracks& operator=(const Tracks&);
-
-public:
- Segment* const m_pSegment;
- const long long m_start;
- const long long m_size;
-
- Tracks(Segment*, long long start, long long size);
- virtual ~Tracks();
-
- Track* GetTrackByNumber(unsigned long tn) const;
- Track* GetTrackByIndex(unsigned long idx) const;
-
-private:
- Track** m_trackEntries;
- Track** m_trackEntriesEnd;
-
- void ParseTrackEntry(long long, long long, Track*&);
-
-public:
- unsigned long GetTracksCount() const;
-};
-
-
-class SegmentInfo
-{
- SegmentInfo(const SegmentInfo&);
- SegmentInfo& operator=(const SegmentInfo&);
-
-public:
- Segment* const m_pSegment;
- const long long m_start;
- const long long m_size;
-
- SegmentInfo(Segment*, long long start, long long size);
- ~SegmentInfo();
- long long GetTimeCodeScale() const;
- long long GetDuration() const; //scaled
- const char* GetMuxingAppAsUTF8() const;
- const char* GetWritingAppAsUTF8() const;
- const char* GetTitleAsUTF8() const;
-
-private:
- long long m_timecodeScale;
- double m_duration;
- char* m_pMuxingAppAsUTF8;
- char* m_pWritingAppAsUTF8;
- char* m_pTitleAsUTF8;
-};
-
-
-class Cluster
-{
- Cluster(const Cluster&);
- Cluster& operator=(const Cluster&);
-
-public:
- Segment* const m_pSegment;
- const size_t m_index;
-
-public:
- static Cluster* Parse(Segment*, size_t, long long off);
-
- Cluster(); //EndOfStream
- ~Cluster();
-
- bool EOS() const;
-
- long long GetTimeCode(); //absolute, but not scaled
- long long GetTime(); //absolute, and scaled (nanosecond units)
-
- const BlockEntry* GetFirst();
- const BlockEntry* GetLast();
- const BlockEntry* GetNext(const BlockEntry*) const;
- const BlockEntry* GetEntry(const Track*);
-protected:
- Cluster(Segment*, size_t, long long off);
-
-private:
- long long m_start;
- long long m_size;
- long long m_timecode;
- BlockEntry** m_pEntries;
- size_t m_entriesCount;
-
- void Load();
- void LoadBlockEntries();
- void ParseBlockGroup(long long, long long, size_t);
- void ParseSimpleBlock(long long, long long, size_t);
-
-};
-
-
-class Segment
-{
- Segment(const Segment&);
- Segment& operator=(const Segment&);
-
-private:
- Segment(IMkvReader*, long long pos, long long size);
-
-public:
- IMkvReader* const m_pReader;
- const long long m_start; //posn of segment payload
- const long long m_size; //size of segment payload
- Cluster m_eos; //TODO: make private?
-
- static long long CreateInstance(IMkvReader*, long long, Segment*&);
- ~Segment();
-
- //for big-bang loading (source filter)
- long Load();
-
- //for incremental loading (splitter)
- long long Unparsed() const;
- long long ParseHeaders();
- long ParseCluster(Cluster*&, long long& newpos) const;
- bool AddCluster(Cluster*, long long);
-
- Tracks* GetTracks() const;
- const SegmentInfo* const GetInfo() const;
- long long GetDuration() const;
-
- //NOTE: this turned out to be too inefficient.
- //long long Load(long long time_nanoseconds);
-
- Cluster* GetFirst();
- Cluster* GetLast();
- unsigned long GetCount() const;
-
- Cluster* GetNext(const Cluster*);
- Cluster* GetCluster(long long time_nanoseconds);
-
-private:
- long long m_pos; //absolute file posn; what has been consumed so far
- SegmentInfo* m_pInfo;
- Tracks* m_pTracks;
- Cluster** m_clusters;
- size_t m_clusterCount;
-
- void ParseSeekHead(long long pos, long long size, size_t*);
- void ParseSeekEntry(long long pos, long long size, size_t*);
- void ParseSecondarySeekHead(long long off, size_t*);
-};
-
-
-} //end namespace mkvparser
-
-#endif //MKVPARSER_HPP
+// Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+#ifndef MKVPARSER_HPP
+#define MKVPARSER_HPP
+
+#include <cstdlib>
+#include <cstdio>
+
+namespace mkvparser
+{
+
+const int E_FILE_FORMAT_INVALID = -2;
+const int E_BUFFER_NOT_FULL = -3;
+
+class IMkvReader
+{
+public:
+ virtual int Read(long long pos, long len, unsigned char* buf) = 0;
+ virtual int Length(long long* total, long long* available) = 0;
+protected:
+ virtual ~IMkvReader();
+};
+
+long long GetUIntLength(IMkvReader*, long long, long&);
+long long ReadUInt(IMkvReader*, long long, long&);
+long long SyncReadUInt(IMkvReader*, long long pos, long long stop, long&);
+long long UnserializeUInt(IMkvReader*, long long pos, long long size);
+float Unserialize4Float(IMkvReader*, long long);
+double Unserialize8Double(IMkvReader*, long long);
+short Unserialize2SInt(IMkvReader*, long long);
+signed char Unserialize1SInt(IMkvReader*, long long);
+bool Match(IMkvReader*, long long&, unsigned long, long long&);
+bool Match(IMkvReader*, long long&, unsigned long, char*&);
+bool Match(IMkvReader*, long long&, unsigned long, unsigned char*&, size_t&);
+bool Match(IMkvReader*, long long&, unsigned long, double&);
+bool Match(IMkvReader*, long long&, unsigned long, short&);
+
+void GetVersion(int& major, int& minor, int& build, int& revision);
+
+struct EBMLHeader
+{
+ EBMLHeader();
+ ~EBMLHeader();
+ long long m_version;
+ long long m_readVersion;
+ long long m_maxIdLength;
+ long long m_maxSizeLength;
+ char* m_docType;
+ long long m_docTypeVersion;
+ long long m_docTypeReadVersion;
+
+ long long Parse(IMkvReader*, long long&);
+};
+
+
+class Segment;
+class Track;
+class Cluster;
+
+class Block
+{
+ Block(const Block&);
+ Block& operator=(const Block&);
+
+public:
+ const long long m_start;
+ const long long m_size;
+
+ Block(long long start, long long size, IMkvReader*);
+
+ long long GetTrackNumber() const;
+ long long GetTimeCode(Cluster*) const; //absolute, but not scaled
+ long long GetTime(Cluster*) const; //absolute, and scaled (ns units)
+ bool IsKey() const;
+ void SetKey(bool);
+
+ long long GetOffset() const;
+ long GetSize() const;
+ long Read(IMkvReader*, unsigned char*) const;
+
+private:
+ long long m_track; //Track::Number()
+ short m_timecode; //relative to cluster
+ unsigned char m_flags;
+ long long m_frameOff;
+ long m_frameSize;
+
+};
+
+
+class BlockEntry
+{
+ BlockEntry(const BlockEntry&);
+ BlockEntry& operator=(const BlockEntry&);
+
+public:
+ virtual ~BlockEntry();
+ virtual bool EOS() const = 0;
+ virtual Cluster* GetCluster() const = 0;
+ virtual size_t GetIndex() const = 0;
+ virtual const Block* GetBlock() const = 0;
+ virtual bool IsBFrame() const = 0;
+
+protected:
+ BlockEntry();
+
+};
+
+
+class SimpleBlock : public BlockEntry
+{
+ SimpleBlock(const SimpleBlock&);
+ SimpleBlock& operator=(const SimpleBlock&);
+
+public:
+ SimpleBlock(Cluster*, size_t, long long start, long long size);
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+
+protected:
+ Cluster* const m_pCluster;
+ const size_t m_index;
+ Block m_block;
+
+};
+
+
+class BlockGroup : public BlockEntry
+{
+ BlockGroup(const BlockGroup&);
+ BlockGroup& operator=(const BlockGroup&);
+
+public:
+ BlockGroup(Cluster*, size_t, long long, long long);
+ ~BlockGroup();
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+
+ short GetPrevTimeCode() const; //relative to block's time
+ short GetNextTimeCode() const; //as above
+
+protected:
+ Cluster* const m_pCluster;
+ const size_t m_index;
+
+private:
+ BlockGroup(Cluster*, size_t, unsigned long);
+ void ParseBlock(long long start, long long size);
+
+ short m_prevTimeCode;
+ short m_nextTimeCode;
+
+ //TODO: the Matroska spec says you can have multiple blocks within the
+ //same block group, with blocks ranked by priority (the flag bits).
+ //For now we just cache a single block.
+#if 0
+ typedef std::deque<Block*> blocks_t;
+ blocks_t m_blocks; //In practice should contain only a single element.
+#else
+ Block* m_pBlock;
+#endif
+
+};
+
+
+class Track
+{
+ Track(const Track&);
+ Track& operator=(const Track&);
+
+public:
+ Segment* const m_pSegment;
+ virtual ~Track();
+
+ long long GetType() const;
+ long long GetNumber() const;
+ const char* GetNameAsUTF8() const;
+ const char* GetCodecNameAsUTF8() const;
+ const char* GetCodecId() const;
+ const unsigned char* GetCodecPrivate(size_t&) const;
+
+ const BlockEntry* GetEOS() const;
+
+ struct Settings
+ {
+ long long start;
+ long long size;
+ };
+
+ struct Info
+ {
+ long long type;
+ long long number;
+ long long uid;
+ char* nameAsUTF8;
+ char* codecId;
+ unsigned char* codecPrivate;
+ size_t codecPrivateSize;
+ char* codecNameAsUTF8;
+ Settings settings;
+ Info();
+ void Clear();
+ };
+
+ long GetFirst(const BlockEntry*&) const;
+ long GetNext(const BlockEntry* pCurr, const BlockEntry*& pNext) const;
+ virtual bool VetEntry(const BlockEntry*) const = 0;
+
+protected:
+ Track(Segment*, const Info&);
+ const Info m_info;
+
+ class EOSBlock : public BlockEntry
+ {
+ public:
+ EOSBlock();
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+ };
+
+ EOSBlock m_eos;
+
+};
+
+
+class VideoTrack : public Track
+{
+ VideoTrack(const VideoTrack&);
+ VideoTrack& operator=(const VideoTrack&);
+
+public:
+ VideoTrack(Segment*, const Info&);
+ long long GetWidth() const;
+ long long GetHeight() const;
+ double GetFrameRate() const;
+
+ bool VetEntry(const BlockEntry*) const;
+
+private:
+ long long m_width;
+ long long m_height;
+ double m_rate;
+
+};
+
+
+class AudioTrack : public Track
+{
+ AudioTrack(const AudioTrack&);
+ AudioTrack& operator=(const AudioTrack&);
+
+public:
+ AudioTrack(Segment*, const Info&);
+ double GetSamplingRate() const;
+ long long GetChannels() const;
+ long long GetBitDepth() const;
+ bool VetEntry(const BlockEntry*) const;
+
+private:
+ double m_rate;
+ long long m_channels;
+ long long m_bitDepth;
+};
+
+
+class Tracks
+{
+ Tracks(const Tracks&);
+ Tracks& operator=(const Tracks&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ Tracks(Segment*, long long start, long long size);
+ virtual ~Tracks();
+
+ Track* GetTrackByNumber(unsigned long tn) const;
+ Track* GetTrackByIndex(unsigned long idx) const;
+
+private:
+ Track** m_trackEntries;
+ Track** m_trackEntriesEnd;
+
+ void ParseTrackEntry(long long, long long, Track*&);
+
+public:
+ unsigned long GetTracksCount() const;
+};
+
+
+class SegmentInfo
+{
+ SegmentInfo(const SegmentInfo&);
+ SegmentInfo& operator=(const SegmentInfo&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ SegmentInfo(Segment*, long long start, long long size);
+ ~SegmentInfo();
+ long long GetTimeCodeScale() const;
+ long long GetDuration() const; //scaled
+ const char* GetMuxingAppAsUTF8() const;
+ const char* GetWritingAppAsUTF8() const;
+ const char* GetTitleAsUTF8() const;
+
+private:
+ long long m_timecodeScale;
+ double m_duration;
+ char* m_pMuxingAppAsUTF8;
+ char* m_pWritingAppAsUTF8;
+ char* m_pTitleAsUTF8;
+};
+
+class Cues;
+class CuePoint
+{
+ friend class Cues;
+
+ CuePoint(size_t, long long);
+ ~CuePoint();
+
+ CuePoint(const CuePoint&);
+ CuePoint& operator=(const CuePoint&);
+
+public:
+ void Load(IMkvReader*);
+
+ long long GetTimeCode() const; //absolute but unscaled
+ long long GetTime(Segment*) const; //absolute and scaled (ns units)
+
+ struct TrackPosition
+ {
+ long long m_track;
+ long long m_pos; //of cluster
+ long long m_block;
+ //codec_state //defaults to 0
+ //reference = clusters containing req'd referenced blocks
+ // reftime = timecode of the referenced block
+
+ void Parse(IMkvReader*, long long, long long);
+ };
+
+ const TrackPosition* Find(const Track*) const;
+
+private:
+ const size_t m_index;
+ long long m_timecode;
+ TrackPosition* m_track_positions;
+ size_t m_track_positions_count;
+
+};
+
+
+class Cues
+{
+ friend class Segment;
+
+ Cues(Segment*, long long start, long long size);
+ ~Cues();
+
+ Cues(const Cues&);
+ Cues& operator=(const Cues&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ bool Find( //lower bound of time_ns
+ long long time_ns,
+ const Track*,
+ const CuePoint*&,
+ const CuePoint::TrackPosition*&) const;
+
+#if 0
+ bool FindNext( //upper_bound of time_ns
+ long long time_ns,
+ const Track*,
+ const CuePoint*&,
+ const CuePoint::TrackPosition*&) const;
+#endif
+
+ const CuePoint* GetFirst() const;
+ const CuePoint* GetLast() const;
+
+ const CuePoint* GetNext(const CuePoint*) const;
+
+ const BlockEntry* GetBlock(
+ const CuePoint*,
+ const CuePoint::TrackPosition*) const;
+
+private:
+ void Init() const;
+ bool LoadCuePoint() const;
+ void PreloadCuePoint(size_t&, long long) const;
+
+ mutable CuePoint** m_cue_points;
+ mutable size_t m_count;
+ mutable size_t m_preload_count;
+ mutable long long m_pos;
+
+};
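+
+//Typical (illustrative) use of the cueing data, assuming the caller holds
+//the Cues* returned by Segment::GetCues and a video Track*:
+//
+//  const CuePoint* pCP;
+//  const CuePoint::TrackPosition* pTP;
+//
+//  if (pCues->Find(time_ns, pTrack, pCP, pTP))
+//  {
+//      const BlockEntry* const pEntry = pCues->GetBlock(pCP, pTP);
+//      //check pEntry for NULL/EOS before using it
+//  }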
+
+
+class Cluster
+{
+ Cluster(const Cluster&);
+ Cluster& operator=(const Cluster&);
+
+public:
+ Segment* const m_pSegment;
+
+public:
+ static Cluster* Parse(Segment*, long, long long off);
+
+ Cluster(); //EndOfStream
+ ~Cluster();
+
+ bool EOS() const;
+
+ long long GetTimeCode(); //absolute, but not scaled
+ long long GetTime(); //absolute, and scaled (nanosecond units)
+ long long GetFirstTime(); //time (ns) of first (earliest) block
+ long long GetLastTime(); //time (ns) of last (latest) block
+
+ const BlockEntry* GetFirst();
+ const BlockEntry* GetLast();
+ const BlockEntry* GetNext(const BlockEntry*) const;
+ const BlockEntry* GetEntry(const Track*);
+ const BlockEntry* GetEntry(
+ const CuePoint&,
+ const CuePoint::TrackPosition&);
+ const BlockEntry* GetMaxKey(const VideoTrack*);
+
+protected:
+ Cluster(Segment*, long, long long off);
+
+public:
+ //TODO: these should all be private, with public selector functions
+ long m_index;
+ long long m_pos;
+ long long m_size;
+
+private:
+ long long m_timecode;
+ BlockEntry** m_entries;
+ size_t m_entriesCount;
+
+ void Load();
+ void LoadBlockEntries();
+ void ParseBlockGroup(long long, long long, size_t);
+ void ParseSimpleBlock(long long, long long, size_t);
+
+};
+
+
+class Segment
+{
+ friend class Cues;
+
+ Segment(const Segment&);
+ Segment& operator=(const Segment&);
+
+private:
+ Segment(IMkvReader*, long long pos, long long size);
+
+public:
+ IMkvReader* const m_pReader;
+ const long long m_start; //posn of segment payload
+ const long long m_size; //size of segment payload
+ Cluster m_eos; //TODO: make private?
+
+ static long long CreateInstance(IMkvReader*, long long, Segment*&);
+ ~Segment();
+
+ long Load(); //loads headers and all clusters
+
+ //for incremental loading (splitter)
+ long long Unparsed() const;
+ long long ParseHeaders(); //stops when first cluster is found
+ long LoadCluster(); //loads one cluster
+
+#if 0
+ //This pair parses one cluster, but only changes the state of the
+ //segment object when the cluster is actually added to the index.
+ long ParseCluster(Cluster*&, long long& newpos) const;
+ bool AddCluster(Cluster*, long long);
+#endif
+
+ Tracks* GetTracks() const;
+ const SegmentInfo* GetInfo() const;
+ const Cues* GetCues() const;
+
+ long long GetDuration() const;
+
+ unsigned long GetCount() const;
+ Cluster* GetFirst();
+ Cluster* GetLast();
+ Cluster* GetNext(const Cluster*);
+
+ Cluster* FindCluster(long long time_nanoseconds);
+ const BlockEntry* Seek(long long time_nanoseconds, const Track*);
+
+private:
+
+ long long m_pos; //absolute file posn; what has been consumed so far
+ SegmentInfo* m_pInfo;
+ Tracks* m_pTracks;
+ Cues* m_pCues;
+ Cluster** m_clusters;
+ long m_clusterCount; //number of entries for which m_index >= 0
+ long m_clusterPreloadCount; //number of entries for which m_index < 0
+ long m_clusterSize; //array size
+
+ void AppendCluster(Cluster*);
+ void PreloadCluster(Cluster*, ptrdiff_t);
+
+ void ParseSeekHead(long long pos, long long size);
+ void ParseSeekEntry(long long pos, long long size);
+ void ParseCues(long long);
+
+ const BlockEntry* GetBlock(
+ const CuePoint&,
+ const CuePoint::TrackPosition&);
+
+};
+
+
+} //end namespace mkvparser
+
+#endif //MKVPARSER_HPP
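A minimal consumer sketch of the parser API declared above (not part of the patch). It assumes an existing IMkvReader implementation, that the EBML header has already been parsed so that pos points at the Segment element, and that CreateInstance() returns 0 on success.

#include "mkvparser.hpp"

void DumpClusterTimes(mkvparser::IMkvReader* reader, long long pos) {
    mkvparser::Segment* segment;
    if (mkvparser::Segment::CreateInstance(reader, pos, segment) != 0)
        return;  // could not parse the Segment element header

    if (segment->Load() < 0) {  // loads headers and all clusters
        delete segment;
        return;
    }

    const mkvparser::SegmentInfo* info = segment->GetInfo();
    const long long durationNs = info ? info->GetDuration() : -1;

    for (mkvparser::Cluster* cluster = segment->GetFirst();
            cluster != NULL && !cluster->EOS();
            cluster = segment->GetNext(cluster)) {
        const long long timeNs = cluster->GetTime();  // absolute, in ns
        (void)timeNs;
        (void)durationNs;  // e.g. log or index these here
    }

    delete segment;
}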
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index c927da1..f9f638f 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -289,6 +289,16 @@ status_t OMX::setConfig(
index, params, size);
}
+status_t OMX::enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -296,6 +306,13 @@ status_t OMX::useBuffer(
port_index, params, buffer);
}
+status_t OMX::useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ return findInstance(node)->useGraphicBuffer(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -497,12 +514,17 @@ sp<IOMXRenderer> OMX::createRenderer(
}
if (!impl) {
+#if 0
LOGW("Using software renderer.");
impl = new SoftwareRenderer(
colorFormat,
surface,
displayWidth, displayHeight,
encodedWidth, encodedHeight);
+#else
+ CHECK(!"Should not be here.");
+ return NULL;
+#endif
}
return new OMXRenderer(impl);
@@ -527,4 +549,3 @@ void OMXRenderer::render(IOMX::buffer_id buffer) {
}
} // namespace android
-
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..9b6d441 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -24,6 +24,7 @@
#include <OMX_Component.h>
#include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -40,6 +41,11 @@ struct BufferMeta {
mIsBackup(false) {
}
+ BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+ : mGraphicBuffer(graphicBuffer),
+ mIsBackup(false) {
+ }
+
void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
if (!mIsBackup) {
return;
@@ -61,6 +67,7 @@ struct BufferMeta {
}
private:
+ sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
size_t mSize;
bool mIsBackup;
@@ -240,6 +247,74 @@ status_t OMXNodeInstance::setConfig(
return StatusFromOMXError(err);
}
+status_t OMXNodeInstance::enableGraphicBuffers(
+ OMX_U32 portIndex, OMX_BOOL enable) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ EnableAndroidNativeBuffersParams params = {
+ sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+ err, err);
+
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+ OMX_U32 portIndex,
+ OMX_BOOL enable) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeMetaDataInBuffers");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex %s failed", name);
+ return StatusFromOMXError(err);
+ }
+
+ StoreMetaDataInBuffersParams params;
+ memset(&params, 0, sizeof(params));
+ params.nSize = sizeof(params);
+
+ // Version: 1.0.0.0
+ params.nVersion.s.nVersionMajor = 1;
+
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
+ if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+ LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+ return UNKNOWN_ERROR;
+ }
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
@@ -273,6 +348,60 @@ status_t OMXNodeInstance::useBuffer(
return OK;
}
+status_t OMXNodeInstance::useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ UseAndroidNativeBufferParams params = {
+ sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+ &header, graphicBuffer,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+ err);
+
+ delete bufferMeta;
+ bufferMeta = NULL;
+
+ *buffer = 0;
+
+ return UNKNOWN_ERROR;
+ }
+
+ CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+ *buffer = header;
+
+ addActiveBuffer(portIndex, *buffer);
+
+ return OK;
+}
+
status_t OMXNodeInstance::allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data) {
@@ -498,4 +627,3 @@ void OMXNodeInstance::freeActiveBuffers() {
}
} // namespace android
-
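A hypothetical caller-side sketch (not part of the patch) of the intended call order for the new graphic-buffer path, assuming the matching IOMX methods added elsewhere in this change: enable the native-buffer extension on the output port, then register each GraphicBuffer with the component. The names registerOutputBuffers and kOutputPortIndex are illustrative only.

#include <media/IOMX.h>
#include <ui/GraphicBuffer.h>
#include <utils/Vector.h>

using namespace android;

static status_t registerOutputBuffers(
        const sp<IOMX> &omx, IOMX::node_id node,
        const Vector<sp<GraphicBuffer> > &buffers) {
    const OMX_U32 kOutputPortIndex = 1;  // typical video decoder output port

    status_t err = omx->enableGraphicBuffers(node, kOutputPortIndex, OMX_TRUE);
    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < buffers.size(); ++i) {
        IOMX::buffer_id id;
        err = omx->useGraphicBuffer(node, kOutputPortIndex, buffers[i], &id);
        if (err != OK) {
            return err;
        }
        // 'id' is subsequently used with fillBuffer()/freeBuffer() as usual.
    }

    return OK;
}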
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index b0d2c64..bbde516 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -18,18 +18,381 @@
#include "ARTPSource.h"
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
namespace android {
-AMPEG4AudioAssembler::AMPEG4AudioAssembler(const sp<AMessage> &notify)
+static bool GetAttribute(const char *s, const char *key, AString *value) {
+ value->clear();
+
+ size_t keyLen = strlen(key);
+
+ for (;;) {
+ while (isspace(*s)) {
+ ++s;
+ }
+
+ const char *colonPos = strchr(s, ';');
+
+ size_t len =
+ (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+ if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+ value->setTo(&s[keyLen + 1], len - keyLen - 1);
+ return true;
+ }
+
+ if (colonPos == NULL) {
+ return false;
+ }
+
+ s = colonPos + 1;
+ }
+}
+
+static sp<ABuffer> decodeHex(const AString &s) {
+ if ((s.size() % 2) != 0) {
+ return NULL;
+ }
+
+ size_t outLen = s.size() / 2;
+ sp<ABuffer> buffer = new ABuffer(outLen);
+ uint8_t *out = buffer->data();
+
+ uint8_t accum = 0;
+ for (size_t i = 0; i < s.size(); ++i) {
+ char c = s.c_str()[i];
+ unsigned value;
+ if (c >= '0' && c <= '9') {
+ value = c - '0';
+ } else if (c >= 'a' && c <= 'f') {
+ value = c - 'a' + 10;
+ } else if (c >= 'A' && c <= 'F') {
+ value = c - 'A' + 10;
+ } else {
+ return NULL;
+ }
+
+ accum = (accum << 4) | value;
+
+ if (i & 1) {
+ *out++ = accum;
+
+ accum = 0;
+ }
+ }
+
+ return buffer;
+}
+
+static status_t parseAudioObjectType(
+ ABitReader *bits, unsigned *audioObjectType) {
+ *audioObjectType = bits->getBits(5);
+ if ((*audioObjectType) == 31) {
+ *audioObjectType = 32 + bits->getBits(6);
+ }
+
+ return OK;
+}
+
+static status_t parseGASpecificConfig(
+ ABitReader *bits,
+ unsigned audioObjectType, unsigned channelConfiguration) {
+ unsigned frameLengthFlag = bits->getBits(1);
+ unsigned dependsOnCoreCoder = bits->getBits(1);
+ if (dependsOnCoreCoder) {
+ /* unsigned coreCoderDelay = */bits->getBits(14); // 14 bits per ISO/IEC 14496-3
+ }
+ unsigned extensionFlag = bits->getBits(1);
+
+ if (!channelConfiguration) {
+ // program_config_element
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+ }
+
+ if (audioObjectType == 6 || audioObjectType == 20) {
+ /* unsigned layerNr = */bits->getBits(3);
+ }
+
+ if (extensionFlag) {
+ if (audioObjectType == 22) {
+ /* unsigned numOfSubFrame = */bits->getBits(5);
+ /* unsigned layerLength = */bits->getBits(11);
+ } else if (audioObjectType == 17 || audioObjectType == 19
+ || audioObjectType == 20 || audioObjectType == 23) {
+ /* unsigned aacSectionDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacScalefactorDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacSpectralDataResilienceFlag = */bits->getBits(1);
+ }
+
+ unsigned extensionFlag3 = bits->getBits(1);
+ CHECK_EQ(extensionFlag3, 0u); // TBD in version 3
+ }
+
+ return OK;
+}
+
+static status_t parseAudioSpecificConfig(ABitReader *bits) {
+ unsigned audioObjectType;
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+
+ unsigned samplingFreqIndex = bits->getBits(4);
+ if (samplingFreqIndex == 0x0f) {
+ /* unsigned samplingFrequency = */bits->getBits(24);
+ }
+
+ unsigned channelConfiguration = bits->getBits(4);
+
+ unsigned extensionAudioObjectType = 0;
+ unsigned sbrPresent = 0;
+
+ if (audioObjectType == 5) {
+ extensionAudioObjectType = audioObjectType;
+ sbrPresent = 1;
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+ }
+
+ CHECK((audioObjectType >= 1 && audioObjectType <= 4)
+ || (audioObjectType >= 6 && audioObjectType <= 7)
+ || audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 23));
+
+ CHECK_EQ(parseGASpecificConfig(
+ bits, audioObjectType, channelConfiguration), (status_t)OK);
+
+ if (audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 27)) {
+ unsigned epConfig = bits->getBits(2);
+ if (epConfig == 2 || epConfig == 3) {
+ // ErrorProtectionSpecificConfig
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+
+ if (epConfig == 3) {
+ unsigned directMapping = bits->getBits(1);
+ CHECK_EQ(directMapping, 1u);
+ }
+ }
+ }
+
+#if 0
+ // This is not supported here as the upper layers did not explicitly
+ // signal the length of AudioSpecificConfig.
+
+ if (extensionAudioObjectType != 5 && bits->numBitsLeft() >= 16) {
+ unsigned syncExtensionType = bits->getBits(11);
+ if (syncExtensionType == 0x2b7) {
+ CHECK_EQ(parseAudioObjectType(bits, &extensionAudioObjectType),
+ (status_t)OK);
+
+ sbrPresent = bits->getBits(1);
+
+ if (sbrPresent == 1) {
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ }
+ }
+ }
+#endif
+
+ return OK;
+}
+
+static status_t parseStreamMuxConfig(
+ ABitReader *bits,
+ unsigned *numSubFrames,
+ unsigned *frameLengthType,
+ bool *otherDataPresent,
+ unsigned *otherDataLenBits) {
+ unsigned audioMuxVersion = bits->getBits(1);
+
+ unsigned audioMuxVersionA = 0;
+ if (audioMuxVersion == 1) {
+ audioMuxVersionA = bits->getBits(1);
+ }
+
+ CHECK_EQ(audioMuxVersionA, 0u); // otherwise future spec
+
+ if (audioMuxVersion != 0) {
+ return ERROR_UNSUPPORTED; // XXX to be implemented;
+ }
+ CHECK_EQ(audioMuxVersion, 0u); // XXX to be implemented
+
+ unsigned allStreamsSameTimeFraming = bits->getBits(1);
+ CHECK_EQ(allStreamsSameTimeFraming, 1u); // There's only one stream.
+
+ *numSubFrames = bits->getBits(6);
+ unsigned numProgram = bits->getBits(4);
+ CHECK_EQ(numProgram, 0u); // disabled in RTP LATM
+
+ unsigned numLayer = bits->getBits(3);
+ CHECK_EQ(numLayer, 0u); // disabled in RTP LATM
+
+ if (audioMuxVersion == 0) {
+ // AudioSpecificConfig
+ CHECK_EQ(parseAudioSpecificConfig(bits), (status_t)OK);
+ } else {
+ TRESPASS(); // XXX to be implemented
+ }
+
+ *frameLengthType = bits->getBits(3);
+ switch (*frameLengthType) {
+ case 0:
+ {
+ /* unsigned bufferFullness = */bits->getBits(8);
+
+ // The "coreFrameOffset" does not apply since there's only
+ // a single layer.
+ break;
+ }
+
+ case 1:
+ {
+ /* unsigned frameLength = */bits->getBits(9);
+ break;
+ }
+
+ case 3:
+ case 4:
+ case 5:
+ {
+ /* unsigned CELPframeLengthTableIndex = */bits->getBits(6);
+ break;
+ }
+
+ case 6:
+ case 7:
+ {
+ /* unsigned HVXCframeLengthTableIndex = */bits->getBits(1);
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ *otherDataPresent = bits->getBits(1);
+ *otherDataLenBits = 0;
+ if (*otherDataPresent) {
+ if (audioMuxVersion == 1) {
+ TRESPASS(); // XXX to be implemented
+ } else {
+ *otherDataLenBits = 0;
+
+ unsigned otherDataLenEsc;
+ do {
+ (*otherDataLenBits) <<= 8;
+ otherDataLenEsc = bits->getBits(1);
+ unsigned otherDataLenTmp = bits->getBits(8);
+ (*otherDataLenBits) += otherDataLenTmp;
+ } while (otherDataLenEsc);
+ }
+ }
+
+ unsigned crcCheckPresent = bits->getBits(1);
+ if (crcCheckPresent) {
+ /* unsigned crcCheckSum = */bits->getBits(8);
+ }
+
+ return OK;
+}
+
+sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
+ CHECK(!mMuxConfigPresent); // XXX to be implemented
+
+ sp<ABuffer> out = new ABuffer(buffer->size());
+ out->setRange(0, 0);
+
+ size_t offset = 0;
+ uint8_t *ptr = buffer->data();
+
+ for (size_t i = 0; i <= mNumSubFrames; ++i) {
+ // parse PayloadLengthInfo
+
+ unsigned payloadLength = 0;
+
+ switch (mFrameLengthType) {
+ case 0:
+ {
+ unsigned muxSlotLengthBytes = 0;
+ unsigned tmp;
+ do {
+ CHECK_LT(offset, buffer->size());
+ tmp = ptr[offset++];
+ muxSlotLengthBytes += tmp;
+ } while (tmp == 0xff);
+
+ payloadLength = muxSlotLengthBytes;
+ break;
+ }
+
+ default:
+ TRESPASS(); // XXX to be implemented
+ break;
+ }
+
+ CHECK_LE(offset + payloadLength, buffer->size());
+
+ memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
+ out->setRange(0, out->size() + payloadLength);
+
+ offset += payloadLength;
+
+ if (mOtherDataPresent) {
+ // We want to stay byte-aligned.
+
+ CHECK((mOtherDataLenBits % 8) == 0);
+ CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
+ offset += mOtherDataLenBits / 8;
+ }
+ }
+
+ CHECK_EQ(offset, buffer->size());
+
+ return out;
+}
+
+AMPEG4AudioAssembler::AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params)
: mNotifyMsg(notify),
+ mMuxConfigPresent(false),
mAccessUnitRTPTime(0),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false) {
+ AString val;
+ if (!GetAttribute(params.c_str(), "cpresent", &val)) {
+ mMuxConfigPresent = true;
+ } else if (val == "0") {
+ mMuxConfigPresent = false;
+ } else {
+ CHECK(val == "1");
+ mMuxConfigPresent = true;
+ }
+
+ CHECK(GetAttribute(params.c_str(), "config", &val));
+
+ sp<ABuffer> config = decodeHex(val);
+ CHECK(config != NULL);
+
+ ABitReader bits(config->data(), config->size());
+ status_t err = parseStreamMuxConfig(
+ &bits, &mNumSubFrames, &mFrameLengthType,
+ &mOtherDataPresent, &mOtherDataLenBits);
+
+ CHECK_EQ(err, (status_t)NO_ERROR);
}
AMPEG4AudioAssembler::~AMPEG4AudioAssembler() {
@@ -108,13 +471,7 @@ void AMPEG4AudioAssembler::submitAccessUnit() {
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
- totalSize += unit->size() - n;
+ totalSize += unit->size();
++it;
}
@@ -124,20 +481,13 @@ void AMPEG4AudioAssembler::submitAccessUnit() {
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
memcpy((uint8_t *)accessUnit->data() + offset,
- unit->data() + n, unit->size() - n);
-
- offset += unit->size() - n;
+ unit->data(), unit->size());
++it;
}
+ accessUnit = removeLATMFraming(accessUnit);
CopyTimes(accessUnit, *mPackets.begin());
#if 0
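A small sketch (not part of the patch) of the PayloadLengthInfo rule that removeLATMFraming() applies above for frameLengthType == 0: the payload length is the sum of the leading bytes up to and including the first byte that is not 0xff, so the prefix 0xff 0xff 0x20 encodes 255 + 255 + 32 = 542 payload bytes.

#include <stddef.h>
#include <stdint.h>

// Returns the decoded payload length and reports how many prefix bytes were
// consumed; mirrors the loop in removeLATMFraming() for frameLengthType == 0.
static size_t decodePayloadLength(
        const uint8_t *data, size_t size, size_t *consumed) {
    size_t length = 0;
    size_t i = 0;
    while (i < size) {
        const uint8_t b = data[i++];
        length += b;
        if (b != 0xff) {
            break;
        }
    }
    *consumed = i;
    return length;
}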
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
index bf9f204..9cef94c 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
@@ -27,9 +27,11 @@
namespace android {
struct AMessage;
+struct AString;
struct AMPEG4AudioAssembler : public ARTPAssembler {
- AMPEG4AudioAssembler(const sp<AMessage> &notify);
+ AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params);
protected:
virtual ~AMPEG4AudioAssembler();
@@ -40,6 +42,13 @@ protected:
private:
sp<AMessage> mNotifyMsg;
+
+ bool mMuxConfigPresent;
+ unsigned mNumSubFrames;
+ unsigned mFrameLengthType;
+ bool mOtherDataPresent;
+ unsigned mOtherDataLenBits;
+
uint32_t mAccessUnitRTPTime;
bool mNextExpectedSeqNoValid;
uint32_t mNextExpectedSeqNo;
@@ -49,6 +58,8 @@ private:
AssemblyStatus addPacket(const sp<ARTPSource> &source);
void submitAccessUnit();
+ sp<ABuffer> removeLATMFraming(const sp<ABuffer> &buffer);
+
DISALLOW_EVIL_CONSTRUCTORS(AMPEG4AudioAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 2518264..5aae4e7 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -57,7 +57,7 @@ ARTPSource::ARTPSource(
mAssembler = new AAVCAssembler(notify);
mIssueFIRRequests = true;
} else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
- mAssembler = new AMPEG4AudioAssembler(notify);
+ mAssembler = new AMPEG4AudioAssembler(notify, params);
} else if (!strncmp(desc.c_str(), "H263-1998/", 10)
|| !strncmp(desc.c_str(), "H263-2000/", 10)) {
mAssembler = new AH263Assembler(notify);
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index f928c06..e936923 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -23,11 +23,13 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/MediaErrors.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <netdb.h>
+#include <openssl/md5.h>
#include <sys/socket.h>
namespace android {
@@ -37,6 +39,7 @@ const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll;
ARTSPConnection::ARTSPConnection()
: mState(DISCONNECTED),
+ mAuthType(NONE),
mSocket(-1),
mConnectionID(0),
mNextCSeq(0),
@@ -114,10 +117,13 @@ void ARTSPConnection::onMessageReceived(const sp<AMessage> &msg) {
// static
bool ARTSPConnection::ParseURL(
- const char *url, AString *host, unsigned *port, AString *path) {
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass) {
host->clear();
*port = 0;
path->clear();
+ user->clear();
+ pass->clear();
if (strncasecmp("rtsp://", url, 7)) {
return false;
@@ -133,6 +139,24 @@ bool ARTSPConnection::ParseURL(
path->setTo(slashPos);
}
+ ssize_t atPos = host->find("@");
+
+ if (atPos >= 0) {
+ // Split off user:pass@ from hostname.
+
+ AString userPass(*host, 0, atPos);
+ host->erase(0, atPos + 1);
+
+ ssize_t colonPos = userPass.find(":");
+
+ if (colonPos < 0) {
+ *user = userPass;
+ } else {
+ user->setTo(userPass, 0, colonPos);
+ pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
+ }
+ }
+
const char *colonPos = strchr(host->c_str(), ':');
if (colonPos != NULL) {
@@ -187,7 +211,12 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
AString host, path;
unsigned port;
- if (!ParseURL(url.c_str(), &host, &port, &path)) {
+ if (!ParseURL(url.c_str(), &host, &port, &path, &mUser, &mPass)
+ || (mUser.size() > 0 && mPass.size() == 0)) {
+ // If we have a user name but no password we have to give up
+ // right here, since we currently have no way of asking the user
+ // for this information.
+
LOGE("Malformed rtsp url %s", url.c_str());
reply->setInt32("result", ERROR_MALFORMED);
@@ -197,6 +226,10 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
return;
}
+ if (mUser.size() > 0) {
+ LOGV("user = '%s', pass = '%s'", mUser.c_str(), mPass.c_str());
+ }
+
struct hostent *ent = gethostbyname(host.c_str());
if (ent == NULL) {
LOGE("Unknown host %s", host.c_str());
@@ -262,6 +295,11 @@ void ARTSPConnection::onDisconnect(const sp<AMessage> &msg) {
reply->setInt32("result", OK);
mState = DISCONNECTED;
+ mUser.clear();
+ mPass.clear();
+ mAuthType = NONE;
+ mNonce.clear();
+
reply->post();
}
@@ -335,6 +373,12 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
AString request;
CHECK(msg->findString("request", &request));
+ // Just in case we need to re-issue the request with proper authentication
+ // later, stash it away.
+ reply->setString("original-request", request.c_str(), request.size());
+
+ addAuthentication(&request);
+
// Find the boundary between headers and the body.
ssize_t i = request.find("\r\n\r\n");
CHECK_GE(i, 0);
@@ -347,7 +391,7 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
request.insert(cseqHeader, i + 2);
- LOGV("%s", request.c_str());
+ LOGV("request: '%s'", request.c_str());
size_t numBytesSent = 0;
while (numBytesSent < request.size()) {
@@ -612,6 +656,30 @@ bool ARTSPConnection::receiveRTSPReponse() {
}
}
+ if (response->mStatusCode == 401) {
+ if (mAuthType == NONE && mUser.size() > 0
+ && parseAuthMethod(response)) {
+ ssize_t i;
+ CHECK_EQ((status_t)OK, findPendingRequest(response, &i));
+ CHECK_GE(i, 0);
+
+ sp<AMessage> reply = mPendingRequests.valueAt(i);
+ mPendingRequests.removeItemsAt(i);
+
+ AString request;
+ CHECK(reply->findString("original-request", &request));
+
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ msg->setMessage("reply", reply);
+ msg->setString("request", request.c_str(), request.size());
+
+ LOGI("re-sending request with authentication headers...");
+ onSendRequest(msg);
+
+ return true;
+ }
+ }
+
return notifyResponseListener(response);
}
@@ -628,26 +696,47 @@ bool ARTSPConnection::ParseSingleUnsignedLong(
return true;
}
-bool ARTSPConnection::notifyResponseListener(
- const sp<ARTSPResponse> &response) {
+status_t ARTSPConnection::findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const {
+ *index = 0;
+
ssize_t i = response->mHeaders.indexOfKey("cseq");
if (i < 0) {
- return true;
+ // This is an unsolicited server->client message; flag it with a
+ // negative index so the caller does not look up a pending request.
+ *index = -1;
+ return OK;
}
AString value = response->mHeaders.valueAt(i);
unsigned long cseq;
if (!ParseSingleUnsignedLong(value.c_str(), &cseq)) {
- return false;
+ return ERROR_MALFORMED;
}
i = mPendingRequests.indexOfKey(cseq);
if (i < 0) {
- // Unsolicited response?
- TRESPASS();
+ return -ENOENT;
+ }
+
+ *index = i;
+
+ return OK;
+}
+
+bool ARTSPConnection::notifyResponseListener(
+ const sp<ARTSPResponse> &response) {
+ ssize_t i;
+ status_t err = findPendingRequest(response, &i);
+
+ if (err == OK && i < 0) {
+ // An unsolicited server response is not a problem.
+ return true;
+ }
+
+ if (err != OK) {
+ return false;
}
sp<AMessage> reply = mPendingRequests.valueAt(i);
@@ -660,4 +749,160 @@ bool ARTSPConnection::notifyResponseListener(
return true;
}
+bool ARTSPConnection::parseAuthMethod(const sp<ARTSPResponse> &response) {
+ ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
+
+ if (i < 0) {
+ return false;
+ }
+
+ AString value = response->mHeaders.valueAt(i);
+
+ if (!strncmp(value.c_str(), "Basic", 5)) {
+ mAuthType = BASIC;
+ } else {
+#if !defined(HAVE_ANDROID_OS)
+ // We don't have access to the MD5 implementation on the simulator,
+ // so we won't support digest authentication.
+ return false;
+#endif
+
+ CHECK(!strncmp(value.c_str(), "Digest", 6));
+ mAuthType = DIGEST;
+
+ i = value.find("nonce=");
+ CHECK_GE(i, 0);
+ CHECK_EQ(value.c_str()[i + 6], '\"');
+ ssize_t j = value.find("\"", i + 7);
+ CHECK_GE(j, 0);
+
+ mNonce.setTo(value, i + 7, j - i - 7);
+ }
+
+ return true;
+}
+
+#if defined(HAVE_ANDROID_OS)
+static void H(const AString &s, AString *out) {
+ out->clear();
+
+ MD5_CTX m;
+ MD5_Init(&m);
+ MD5_Update(&m, s.c_str(), s.size());
+
+ uint8_t key[16];
+ MD5_Final(key, &m);
+
+ for (size_t i = 0; i < 16; ++i) {
+ char nibble = key[i] >> 4;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+
+ nibble = key[i] & 0x0f;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+ }
+}
+#endif
+
+static void GetMethodAndURL(
+ const AString &request, AString *method, AString *url) {
+ ssize_t space1 = request.find(" ");
+ CHECK_GE(space1, 0);
+
+ ssize_t space2 = request.find(" ", space1 + 1);
+ CHECK_GE(space2, 0);
+
+ method->setTo(request, 0, space1);
+ url->setTo(request, space1 + 1, space2 - space1);
+}
+
+void ARTSPConnection::addAuthentication(AString *request) {
+ if (mAuthType == NONE) {
+ return;
+ }
+
+ // Find the boundary between headers and the body.
+ ssize_t i = request->find("\r\n\r\n");
+ CHECK_GE(i, 0);
+
+ if (mAuthType == BASIC) {
+ AString tmp;
+ tmp.append(mUser);
+ tmp.append(":");
+ tmp.append(mPass);
+
+ AString out;
+ encodeBase64(tmp.c_str(), tmp.size(), &out);
+
+ AString fragment;
+ fragment.append("Authorization: Basic ");
+ fragment.append(out);
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+
+ return;
+ }
+
+#if defined(HAVE_ANDROID_OS)
+ CHECK_EQ((int)mAuthType, (int)DIGEST);
+
+ AString method, url;
+ GetMethodAndURL(*request, &method, &url);
+
+ AString A1;
+ A1.append(mUser);
+ A1.append(":");
+ A1.append("Streaming Server");
+ A1.append(":");
+ A1.append(mPass);
+
+ AString A2;
+ A2.append(method);
+ A2.append(":");
+ A2.append(url);
+
+ AString HA1, HA2;
+ H(A1, &HA1);
+ H(A2, &HA2);
+
+ AString tmp;
+ tmp.append(HA1);
+ tmp.append(":");
+ tmp.append(mNonce);
+ tmp.append(":");
+ tmp.append(HA2);
+
+ AString digest;
+ H(tmp, &digest);
+
+ AString fragment;
+ fragment.append("Authorization: Digest ");
+ fragment.append("nonce=\"");
+ fragment.append(mNonce);
+ fragment.append("\", ");
+ fragment.append("username=\"");
+ fragment.append(mUser);
+ fragment.append("\", ");
+ fragment.append("uri=\"");
+ fragment.append(url);
+ fragment.append("\", ");
+ fragment.append("response=\"");
+ fragment.append(digest);
+ fragment.append("\"");
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+#endif
+}
+
} // namespace android
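An illustrative check (not part of the patch) of what the extended ParseURL() is expected to return for a URL that embeds credentials; the URL and the CHECKs below are examples only.

#include "ARTSPConnection.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>

void parseUrlExample() {
    android::AString host, path, user, pass;
    unsigned port;

    bool ok = android::ARTSPConnection::ParseURL(
            "rtsp://alice:secret@example.com:8554/live.sdp",
            &host, &port, &path, &user, &pass);

    CHECK(ok);
    CHECK(host == "example.com");
    CHECK_EQ(port, 8554u);
    CHECK(path == "/live.sdp");
    CHECK(user == "alice");
    CHECK(pass == "secret");
}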
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 96e0d5b..19be2a6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -42,6 +42,10 @@ struct ARTSPConnection : public AHandler {
void observeBinaryData(const sp<AMessage> &reply);
+ static bool ParseURL(
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass);
+
protected:
virtual ~ARTSPConnection();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -62,9 +66,18 @@ private:
kWhatObserveBinaryData = 'obin',
};
+ enum AuthType {
+ NONE,
+ BASIC,
+ DIGEST
+ };
+
static const int64_t kSelectTimeoutUs;
State mState;
+ AString mUser, mPass;
+ AuthType mAuthType;
+ AString mNonce;
int mSocket;
int32_t mConnectionID;
int32_t mNextCSeq;
@@ -90,8 +103,11 @@ private:
sp<ABuffer> receiveBinaryData();
bool notifyResponseListener(const sp<ARTSPResponse> &response);
- static bool ParseURL(
- const char *url, AString *host, unsigned *port, AString *path);
+ bool parseAuthMethod(const sp<ARTSPResponse> &response);
+ void addAuthentication(AString *request);
+
+ status_t findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const;
static bool ParseSingleUnsignedLong(
const char *from, unsigned long *x);
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 612caff..547fbab 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -53,21 +53,30 @@ bool ASessionDescription::parse(const void *data, size_t size) {
mFormats.push(AString("[root]"));
AString desc((const char *)data, size);
- LOGI("%s", desc.c_str());
size_t i = 0;
for (;;) {
- ssize_t eolPos = desc.find("\r\n", i);
+ ssize_t eolPos = desc.find("\n", i);
+
if (eolPos < 0) {
break;
}
- AString line(desc, i, eolPos - i);
+ AString line;
+ if ((size_t)eolPos > i && desc.c_str()[eolPos - 1] == '\r') {
+ // We accept both '\n' and '\r\n' line endings; if it's
+ // the latter, strip the '\r' as well.
+ line.setTo(desc, i, eolPos - i - 1);
+ } else {
+ line.setTo(desc, i, eolPos - i);
+ }
if (line.size() < 2 || line.c_str()[1] != '=') {
return false;
}
+ LOGI("%s", line.c_str());
+
switch (line.c_str()[0]) {
case 'v':
{
@@ -141,7 +150,7 @@ bool ASessionDescription::parse(const void *data, size_t size) {
}
}
- i = eolPos + 2;
+ i = eolPos + 1;
}
return true;
@@ -245,7 +254,7 @@ bool ASessionDescription::getDurationUs(int64_t *durationUs) const {
return false;
}
- if (value == "npt=now-") {
+ if (value == "npt=now-" || value == "npt=0-") {
return false;
}
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 081ae32..0bbadc1 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -23,6 +23,7 @@ LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/frameworks/base/media/libstagefright/include \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_rtsp
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6943608..9bb8c46 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -96,6 +96,7 @@ struct MyHandler : public AHandler {
mNetLooper(new ALooper),
mConn(new ARTSPConnection),
mRTPConn(new ARTPConnection),
+ mOriginalSessionURL(url),
mSessionURL(url),
mSetupTracksSuccessful(false),
mSeekPending(false),
@@ -113,6 +114,23 @@ struct MyHandler : public AHandler {
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
PRIORITY_HIGHEST);
+
+ // Strip any authentication info from the session url; we don't
+ // want to transmit user/pass in cleartext.
+ AString host, path, user, pass;
+ unsigned port;
+ if (ARTSPConnection::ParseURL(
+ mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+ && user.size() > 0) {
+ mSessionURL.clear();
+ mSessionURL.append("rtsp://");
+ mSessionURL.append(host);
+ mSessionURL.append(":");
+ mSessionURL.append(StringPrintf("%u", port));
+ mSessionURL.append(path);
+
+ LOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ }
}
void connect(const sp<AMessage> &doneMsg) {
@@ -126,7 +144,7 @@ struct MyHandler : public AHandler {
mConn->observeBinaryData(notify);
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
}
void disconnect(const sp<AMessage> &doneMsg) {
@@ -312,7 +330,7 @@ struct MyHandler : public AHandler {
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
} else {
(new AMessage('quit', id()))->post();
}
@@ -922,7 +940,7 @@ struct MyHandler : public AHandler {
CHECK(GetAttribute(range.c_str(), "npt", &val));
float npt1, npt2;
- if (val == "now-") {
+ if (val == "now-" || val == "0-") {
// This is a live stream and therefore not seekable.
return;
} else {
@@ -992,6 +1010,7 @@ private:
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
sp<ASessionDescription> mSessionDesc;
+ AString mOriginalSessionURL; // This one still has user:pass@
AString mSessionURL;
AString mBaseURL;
AString mSessionID;
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..0794ad1
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ YUVImage.cpp \
+ YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+ : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = rect.top; y < rect.bottom; ++y) {
+ for (int32_t x = rect.left; x < rect.right; ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage) {
+
+ // Try fast copy first
+ if (YUVImage::fastCopyRectangle(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, mYUVImage)) {
+ return;
+ }
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+ for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+ int32_t srcX = srcStartX + offsetX;
+ int32_t srcY = srcStartY + offsetY;
+
+ int32_t destX = destStartX + offsetX;
+ int32_t destY = destStartY + offsetY;
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage) {
+ // TODO: Add a low pass filter for downsampling.
+
+ // Check that srcImage is big enough to fill mYUVImage.
+ CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+ CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ int32_t srcY = srcOffsetY;
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ int32_t srcX = srcOffsetX;
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+ srcX += skipX;
+ }
+ srcY += skipY;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+
+ size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+ uint8_t *buffer = new uint8_t[numberOfBytes];
+ mBuffer = buffer;
+ mOwnBuffer = true;
+
+ initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+ mBuffer = buffer;
+ mOwnBuffer = false;
+
+ initializeYUVPointers();
+}
+
+//static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ int32_t numberOfPixels = width*height;
+ size_t numberOfBytes = 0;
+ if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+ // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+ numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+ } else {
+ LOGE("Format not supported");
+ }
+ return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+ int32_t numberOfPixels = mWidth * mHeight;
+
+ if (mYUVFormat == YUV420Planar) {
+ mYdata = (uint8_t *)mBuffer;
+ mUdata = mYdata + numberOfPixels;
+ mVdata = mUdata + (numberOfPixels >> 2);
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // U and V channels are interleaved as VUVUVU.
+ // So V data starts at the end of Y channel and
+ // U data starts right after V's start.
+ mYdata = (uint8_t *)mBuffer;
+ mVdata = mYdata + numberOfPixels;
+ mUdata = mVdata + 1;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+ return true;
+}
+
+YUVImage::~YUVImage() {
+ if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+ *yOffset = y*mWidth + x;
+
+ int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+ if (mYUVFormat == YUV420Planar) {
+ *uOffset = uvOffset;
+ *vOffset = uvOffset;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uOffset = 2*uvOffset;
+ *vOffset = 2*uvOffset;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const {
+ *yDataOffsetIncrement = mWidth;
+
+ int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+ if (mYUVFormat == YUV420Planar) {
+ *uDataOffsetIncrement = uvDataOffsetIncrement;
+ *vDataOffsetIncrement = uvDataOffsetIncrement;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+ return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+ return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+ return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+ int32_t yOffset;
+ int32_t uOffset;
+ int32_t vOffset;
+ if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+ *yAddr = getYAddress(yOffset);
+ *uAddr = getUAddress(uOffset);
+ *vAddr = getVAddress(vOffset);
+
+ return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+ return (x >= 0 && x < mWidth &&
+ y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yPtr = *yAddr;
+ *uPtr = *uAddr;
+ *vPtr = *vAddr;
+
+ return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yAddr = yValue;
+ *uAddr = uValue;
+ *vAddr = vValue;
+
+ return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420Planar);
+ CHECK(destImage.mYUVFormat == YUV420Planar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr += ySrcOffsetIncrement;
+ yDestAddr += yDestOffsetIncrement;
+ }
+ }
+
+ // Copy U
+ {
+ size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+ uint8_t *uSrcAddr = uSrcAddrBase;
+ uint8_t *uDestAddr = uDestAddrBase;
+ // Every other row has an entry for U/V channel values. Hence only
+ // go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+ uSrcAddr += uSrcOffsetIncrement;
+ uDestAddr += uDestOffsetIncrement;
+ }
+ }
+
+ // Copy V
+ {
+ size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+ CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+ yDestAddr = yDestAddr + yDestOffsetIncrement;
+ }
+ }
+
+ // Copy UV
+ {
+ // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+ size_t numberOfUVBytesPerRow = (size_t) width;
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+ if (srcImage.mYUVFormat == YUV420Planar) {
+ fastCopyRectangle420Planar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+ fastCopyRectangle420SemiPlanar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ }
+ return true;
+ }
+ return false;
+}
+
+uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {
+ CHECK(maxValue >= minValue);
+
+ if (v < minValue) return minValue;
+ else if (v > maxValue) return maxValue;
+ else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const {
+ *r = yValue + (1.370705 * (vValue-128));
+ *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+ *b = yValue + (1.732446 * (uValue-128));
+
+ *r = clamp(*r, 0, 255);
+ *g = clamp(*g, 0, 255);
+ *b = clamp(*b, 0, 255);
+}
+
+bool YUVImage::writeToPPM(const char *filename) const {
+ FILE *fp = fopen(filename, "w");
+ if (fp == NULL) {
+ return false;
+ }
+ fprintf(fp, "P3\n");
+ fprintf(fp, "%d %d\n", mWidth, mHeight);
+ fprintf(fp, "255\n");
+ for (int32_t y = 0; y < mHeight; ++y) {
+ for (int32_t x = 0; x < mWidth; ++x) {
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+ getPixelValue(x, y, &yValue, &uValue, &vValue);
+
+ uint8_t rValue;
+ uint8_t gValue;
+ uint8_t bValue;
+ yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+ fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+ }
+ }
+ fclose(fp);
+ return true;
+}
+
+} // namespace android
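A minimal usage sketch (not part of the patch) tying the two new classes together: allocate two YUV420-planar images, fill the source through a canvas, and copy a rectangle into the destination. Sizes and sample values are arbitrary, and the sketch assumes the YUVFormat enumerators are members of YUVImage as declared in its header (not shown here).

#include <media/stagefright/YUVCanvas.h>
#include <media/stagefright/YUVImage.h>
#include <ui/Rect.h>

void yuvCopyExample() {
    using namespace android;

    YUVImage src(YUVImage::YUV420Planar, 320, 240);
    YUVImage dst(YUVImage::YUV420Planar, 320, 240);

    YUVCanvas srcCanvas(src);
    srcCanvas.FillYUV(128, 64, 192);  // arbitrary Y/U/V values

    YUVCanvas dstCanvas(dst);
    // Copies the top-left quadrant of src to (0, 0) in dst; since both images
    // are YUV420Planar this takes the fastCopyRectangle() path.
    dstCanvas.CopyImageRect(Rect(0, 0, 160, 120), 0, 0, src);
}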
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
new file mode 100644
index 0000000..7502f6e
--- /dev/null
+++ b/media/mtp/Android.mk
@@ -0,0 +1,78 @@
+#
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ MtpClient.cpp \
+ MtpCursor.cpp \
+ MtpDataPacket.cpp \
+ MtpDebug.cpp \
+ MtpDevice.cpp \
+ MtpEventPacket.cpp \
+ MtpDeviceInfo.cpp \
+ MtpObjectInfo.cpp \
+ MtpPacket.cpp \
+ MtpProperty.cpp \
+ MtpRequestPacket.cpp \
+ MtpResponsePacket.cpp \
+ MtpServer.cpp \
+ MtpStorageInfo.cpp \
+ MtpStringBuffer.cpp \
+ MtpStorage.cpp \
+ MtpUtils.cpp \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
+
+include $(BUILD_STATIC_LIBRARY)
+
+endif
+
+ifeq ($(HOST_OS),linux)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ MtpClient.cpp \
+ MtpCursor.cpp \
+ MtpDataPacket.cpp \
+ MtpDebug.cpp \
+ MtpDevice.cpp \
+ MtpEventPacket.cpp \
+ MtpDeviceInfo.cpp \
+ MtpObjectInfo.cpp \
+ MtpPacket.cpp \
+ MtpProperty.cpp \
+ MtpRequestPacket.cpp \
+ MtpResponsePacket.cpp \
+ MtpStorageInfo.cpp \
+ MtpStringBuffer.cpp \
+ MtpStorage.cpp \
+ MtpUtils.cpp \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_HOST
+
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+endif
diff --git a/media/mtp/MtpClient.cpp b/media/mtp/MtpClient.cpp
new file mode 100644
index 0000000..ceb6a43
--- /dev/null
+++ b/media/mtp/MtpClient.cpp
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpClient"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpDevice.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+#include <linux/version.h>
+#if LINUX_VERSION_CODE > KERNEL_VERSION(2, 6, 20)
+#include <linux/usb/ch9.h>
+#else
+#include <linux/usb_ch9.h>
+#endif
+
+namespace android {
+
+static bool isMtpDevice(uint16_t vendor, uint16_t product) {
+ // Sandisk Sansa Fuze
+ if (vendor == 0x0781 && product == 0x74c2)
+ return true;
+ // Samsung YP-Z5
+ if (vendor == 0x04e8 && product == 0x503c)
+ return true;
+ return false;
+}
+
+class MtpClientThread : public Thread {
+private:
+ MtpClient* mClient;
+
+public:
+ MtpClientThread(MtpClient* client)
+ : mClient(client)
+ {
+ }
+
+ virtual bool threadLoop() {
+ return mClient->threadLoop();
+ }
+};
+
+
+MtpClient::MtpClient()
+ : mThread(NULL),
+ mUsbHostContext(NULL),
+ mDone(false)
+{
+}
+
+MtpClient::~MtpClient() {
+ usb_host_cleanup(mUsbHostContext);
+}
+
+bool MtpClient::start() {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mThread)
+ return true;
+
+ mUsbHostContext = usb_host_init();
+ if (!mUsbHostContext)
+ return false;
+
+ mThread = new MtpClientThread(this);
+ mThread->run("MtpClientThread");
+ // wait for the thread to do initial device discovery before returning
+ mThreadStartCondition.wait(mMutex);
+
+ return true;
+}
+
+void MtpClient::stop() {
+ mDone = true;
+}
+
+MtpDevice* MtpClient::getDevice(int id) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (device->getID() == id)
+ return device;
+ }
+ return NULL;
+}
+
+bool MtpClient::usbDeviceAdded(const char *devname) {
+ struct usb_descriptor_header* desc;
+ struct usb_descriptor_iter iter;
+
+ struct usb_device *device = usb_device_open(devname);
+ if (!device) {
+ LOGE("usb_device_open failed\n");
+ return mDone;
+ }
+
+ usb_descriptor_iter_init(device, &iter);
+
+ while ((desc = usb_descriptor_iter_next(&iter)) != NULL) {
+ if (desc->bDescriptorType == USB_DT_INTERFACE) {
+ struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc;
+
+ if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE &&
+ interface->bInterfaceSubClass == 1 && // Still Image Capture
+ interface->bInterfaceProtocol == 1) // Picture Transfer Protocol (PIMA 15470)
+ {
+ LOGD("Found camera: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else if (interface->bInterfaceClass == 0xFF &&
+ interface->bInterfaceSubClass == 0xFF &&
+ interface->bInterfaceProtocol == 0) {
+ char* interfaceName = usb_device_get_string(device, interface->iInterface);
+ if (!interfaceName || strcmp(interfaceName, "MTP"))
+ continue;
+ // Looks like an android style MTP device
+ LOGD("Found MTP device: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else {
+ // look for special cased devices based on vendor/product ID
+ // we are doing this mainly for testing purposes
+ uint16_t vendor = usb_device_get_vendor_id(device);
+ uint16_t product = usb_device_get_product_id(device);
+ if (!isMtpDevice(vendor, product)) {
+ // not an MTP or PTP device
+ continue;
+ }
+ // request MTP OS string and descriptor
+ // some music players need to see this before entering MTP mode.
+ char buffer[256];
+ memset(buffer, 0, sizeof(buffer));
+ int ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD,
+ USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE,
+ 0, sizeof(buffer), buffer);
+ printf("usb_device_send_control returned %d errno: %d\n", ret, errno);
+ if (ret > 0) {
+ printf("got MTP string %s\n", buffer);
+ ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1,
+ 0, 4, sizeof(buffer), buffer);
+ printf("OS descriptor got %d\n", ret);
+ } else {
+ printf("no MTP string\n");
+ }
+ }
+
+ // if we got here, then we have a likely MTP or PTP device
+
+ // interface should be followed by three endpoints
+ struct usb_endpoint_descriptor *ep;
+ struct usb_endpoint_descriptor *ep_in_desc = NULL;
+ struct usb_endpoint_descriptor *ep_out_desc = NULL;
+ struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+ for (int i = 0; i < 3; i++) {
+ ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+                if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
+                    LOGE("endpoints not found\n");
+                    usb_device_close(device);
+                    return mDone;
+                }
+ if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
+ if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
+ ep_in_desc = ep;
+ else
+ ep_out_desc = ep;
+ } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT &&
+ ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) {
+ ep_intr_desc = ep;
+ }
+ }
+            if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) {
+                LOGE("endpoints not found\n");
+                usb_device_close(device);
+                return mDone;
+            }
+
+ struct usb_endpoint *ep_in = usb_endpoint_open(device, ep_in_desc);
+ struct usb_endpoint *ep_out = usb_endpoint_open(device, ep_out_desc);
+ struct usb_endpoint *ep_intr = usb_endpoint_open(device, ep_intr_desc);
+
+            if (usb_device_claim_interface(device, interface->bInterfaceNumber)) {
+                LOGE("usb_device_claim_interface failed errno: %d\n", errno);
+                usb_endpoint_close(ep_in);
+                usb_endpoint_close(ep_out);
+                usb_endpoint_close(ep_intr);
+                usb_device_close(device);
+                return mDone;
+            }
+
+ MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber,
+ ep_in, ep_out, ep_intr);
+ mDeviceList.add(mtpDevice);
+ mtpDevice->initialize();
+ deviceAdded(mtpDevice);
+ return mDone;
+ }
+ }
+
+ usb_device_close(device);
+ return mDone;
+}
+
+bool MtpClient::usbDeviceRemoved(const char *devname) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (!strcmp(devname, device->getDeviceName())) {
+ deviceRemoved(device);
+ mDeviceList.removeAt(i);
+ delete device;
+ LOGD("Camera removed!\n");
+ break;
+ }
+ }
+ return mDone;
+}
+
+bool MtpClient::usbDiscoveryDone() {
+ Mutex::Autolock autoLock(mMutex);
+ mThreadStartCondition.signal();
+ return mDone;
+}
+
+bool MtpClient::threadLoop() {
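+    // runs the usbhost event loop until one of the callbacks below reports that we are done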
+ usb_host_run(mUsbHostContext, usb_device_added, usb_device_removed, usb_discovery_done, this);
+ return false;
+}
+
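+// static trampolines registered with usb_host_run(); client_data is the MtpClient instance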
+int MtpClient::usb_device_added(const char *devname, void* client_data) {
+ LOGD("usb_device_added %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceAdded(devname);
+}
+
+int MtpClient::usb_device_removed(const char *devname, void* client_data) {
+ LOGD("usb_device_removed %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceRemoved(devname);
+}
+
+int MtpClient::usb_discovery_done(void* client_data) {
+ LOGD("usb_discovery_done\n");
+ return ((MtpClient *)client_data)->usbDiscoveryDone();
+}
+
+} // namespace android
diff --git a/media/mtp/MtpClient.h b/media/mtp/MtpClient.h
new file mode 100644
index 0000000..fa5c527
--- /dev/null
+++ b/media/mtp/MtpClient.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CLIENT_H
+#define _MTP_CLIENT_H
+
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_host_context;
+
+namespace android {
+
+class MtpClientThread;
+
+class MtpClient {
+private:
+ MtpDeviceList mDeviceList;
+ MtpClientThread* mThread;
+ Condition mThreadStartCondition;
+ Mutex mMutex;
+ struct usb_host_context* mUsbHostContext;
+ bool mDone;
+
+public:
+ MtpClient();
+ virtual ~MtpClient();
+
+ bool start();
+ void stop();
+
+ inline MtpDeviceList& getDeviceList() { return mDeviceList; }
+ MtpDevice* getDevice(int id);
+
+
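+    // notifications for subclasses when MTP devices are attached or removed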
+ virtual void deviceAdded(MtpDevice *device) = 0;
+ virtual void deviceRemoved(MtpDevice *device) = 0;
+
+private:
+ // these return true if we should stop monitoring USB and clean up
+ bool usbDeviceAdded(const char *devname);
+ bool usbDeviceRemoved(const char *devname);
+ bool usbDiscoveryDone();
+
+ friend class MtpClientThread;
+ bool threadLoop();
+ static int usb_device_added(const char *devname, void* client_data);
+ static int usb_device_removed(const char *devname, void* client_data);
+ static int usb_discovery_done(void* client_data);
+};
+
+}; // namespace android
+
+#endif // _MTP_CLIENT_H
diff --git a/media/mtp/MtpCursor.cpp b/media/mtp/MtpCursor.cpp
new file mode 100644
index 0000000..35d90dc
--- /dev/null
+++ b/media/mtp/MtpCursor.cpp
@@ -0,0 +1,461 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpCursor"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpCursor.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpStorageInfo.h"
+
+#include <string.h>
+
+#include "binder/CursorWindow.h"
+
+namespace android {
+
+/* Device Column IDs */
+/* These must match the values in MtpCursor.java */
+#define DEVICE_ROW_ID 1
+#define DEVICE_MANUFACTURER 2
+#define DEVICE_MODEL 3
+
+/* Storage Column IDs */
+/* These must match the values in MtpCursor.java */
+#define STORAGE_ROW_ID 101
+#define STORAGE_IDENTIFIER 102
+#define STORAGE_DESCRIPTION 103
+
+/* Object Column IDs */
+/* These must match the values in MtpCursor.java */
+#define OBJECT_ROW_ID 201
+#define OBJECT_STORAGE_ID 202
+#define OBJECT_FORMAT 203
+#define OBJECT_PROTECTION_STATUS 204
+#define OBJECT_SIZE 205
+#define OBJECT_THUMB_FORMAT 206
+#define OBJECT_THUMB_SIZE 207
+#define OBJECT_THUMB_WIDTH 208
+#define OBJECT_THUMB_HEIGHT 209
+#define OBJECT_IMAGE_WIDTH 210
+#define OBJECT_IMAGE_HEIGHT 211
+#define OBJECT_IMAGE_DEPTH 212
+#define OBJECT_PARENT 213
+#define OBJECT_ASSOCIATION_TYPE 214
+#define OBJECT_ASSOCIATION_DESC 215
+#define OBJECT_SEQUENCE_NUMBER 216
+#define OBJECT_NAME 217
+#define OBJECT_DATE_CREATED 218
+#define OBJECT_DATE_MODIFIED 219
+#define OBJECT_KEYWORDS 220
+#define OBJECT_THUMB 221
+
+MtpCursor::MtpCursor(MtpClient* client, int queryType, int deviceID,
+                    MtpStorageID storageID, MtpObjectHandle objectID,
+                    int columnCount, int* columns)
+    :   mClient(client),
+        mQueryType(queryType),
+        mDeviceID(deviceID),
+        mStorageID(storageID),
+        mObjectID(objectID),
+        mColumnCount(columnCount),
+        mColumns(NULL)
+{
+ if (columns) {
+ mColumns = new int[columnCount];
+ memcpy(mColumns, columns, columnCount * sizeof(int));
+ }
+}
+
+MtpCursor::~MtpCursor() {
+ delete[] mColumns;
+}
+
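+// Fills the cursor window with rows for the current query, returning the number of rows added.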
+int MtpCursor::fillWindow(CursorWindow* window, int startPos) {
+ LOGD("MtpCursor::fillWindow mQueryType: %d\n", mQueryType);
+
+ switch (mQueryType) {
+ case DEVICE:
+ return fillDevices(window, startPos);
+ case DEVICE_ID:
+ return fillDevice(window, startPos);
+ case STORAGE:
+ return fillStorages(window, startPos);
+ case STORAGE_ID:
+ return fillStorage(window, startPos);
+ case OBJECT:
+ return fillObjects(window, 0, startPos);
+ case OBJECT_ID:
+ return fillObject(window, startPos);
+ case STORAGE_CHILDREN:
+ return fillObjects(window, -1, startPos);
+ case OBJECT_CHILDREN:
+            return fillObjects(window, mObjectID, startPos);
+ default:
+ LOGE("MtpCursor::fillWindow: unknown query type %d\n", mQueryType);
+ return 0;
+ }
+}
+
+int MtpCursor::fillDevices(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDeviceList& deviceList = mClient->getDeviceList();
+ for (int i = 0; i < deviceList.size(); i++) {
+ MtpDevice* device = deviceList[i];
+ if (fillDevice(window, device, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ return count;
+}
+
+int MtpCursor::fillDevice(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillDevice(window, device, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int MtpCursor::fillStorages(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpStorageIDList* storageIDs = device->getStorageIDs();
+ if (!storageIDs)
+ return 0;
+
+ for (int i = 0; i < storageIDs->size(); i++) {
+ MtpStorageID storageID = (*storageIDs)[i];
+ if (fillStorage(window, device, storageID, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete storageIDs;
+ return count;
+}
+
+int MtpCursor::fillStorage(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillStorage(window, device, mStorageID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int MtpCursor::fillObjects(CursorWindow* window, int parent, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpObjectHandleList* handles = device->getObjectHandles(mStorageID, 0, parent);
+ if (!handles)
+ return 0;
+
+ for (int i = 0; i < handles->size(); i++) {
+ MtpObjectHandle handle = (*handles)[i];
+ if (fillObject(window, device, handle, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete handles;
+ return count;
+}
+
+int MtpCursor::fillObject(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+    if (device && fillObject(window, device, mObjectID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+bool MtpCursor::fillDevice(CursorWindow* window, MtpDevice* device, int row) {
+ MtpDeviceInfo* deviceInfo = device->getDeviceInfo();
+ if (!deviceInfo)
+ return false;
+ if (!prepareRow(window))
+ return false;
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case DEVICE_ROW_ID:
+ if (!putLong(window, device->getID(), row, i))
+ return false;
+ break;
+ case DEVICE_MANUFACTURER:
+ if (!putString(window, deviceInfo->mManufacturer, row, i))
+ return false;
+ break;
+ case DEVICE_MODEL:
+ if (!putString(window, deviceInfo->mModel, row, i))
+ return false;
+ break;
+ default:
+ LOGE("fillDevice: unknown column %d\n", mColumns[i]);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool MtpCursor::fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row) {
+
+LOGD("fillStorage %d\n", storageID);
+
+ MtpStorageInfo* storageInfo = device->getStorageInfo(storageID);
+ if (!storageInfo)
+ return false;
+ if (!prepareRow(window)) {
+ delete storageInfo;
+ return false;
+ }
+
+ const char* text;
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case STORAGE_ROW_ID:
+ if (!putLong(window, storageID, row, i))
+ goto fail;
+ break;
+ case STORAGE_IDENTIFIER:
+ text = storageInfo->mVolumeIdentifier;
+ if (!text || !text[0])
+ text = "Camera Storage";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ case STORAGE_DESCRIPTION:
+ text = storageInfo->mStorageDescription;
+ if (!text || !text[0])
+ text = "Storage Description";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillStorage: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete storageInfo;
+ return true;
+
+fail:
+ delete storageInfo;
+ return false;
+}
+
+bool MtpCursor::fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row) {
+
+ MtpObjectInfo* objectInfo = device->getObjectInfo(objectID);
+ if (!objectInfo)
+ return false;
+ // objectInfo->print();
+ if (!prepareRow(window)) {
+ delete objectInfo;
+ return false;
+ }
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case OBJECT_ROW_ID:
+ if (!putLong(window, objectID, row, i))
+ goto fail;
+ break;
+ case OBJECT_STORAGE_ID:
+ if (!putLong(window, objectInfo->mStorageID, row, i))
+ goto fail;
+ break;
+ case OBJECT_FORMAT:
+ if (!putLong(window, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_PROTECTION_STATUS:
+ if (!putLong(window, objectInfo->mProtectionStatus, row, i))
+ goto fail;
+ break;
+ case OBJECT_SIZE:
+ if (!putLong(window, objectInfo->mCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_FORMAT:
+ if (!putLong(window, objectInfo->mThumbFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_SIZE:
+ if (!putLong(window, objectInfo->mThumbCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_WIDTH:
+ if (!putLong(window, objectInfo->mThumbPixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_HEIGHT:
+ if (!putLong(window, objectInfo->mThumbPixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_WIDTH:
+ if (!putLong(window, objectInfo->mImagePixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_HEIGHT:
+ if (!putLong(window, objectInfo->mImagePixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_DEPTH:
+ if (!putLong(window, objectInfo->mImagePixDepth, row, i))
+ goto fail;
+ break;
+ case OBJECT_PARENT:
+ if (!putLong(window, objectInfo->mParent, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_TYPE:
+ if (!putLong(window, objectInfo->mAssociationType, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_DESC:
+ if (!putLong(window, objectInfo->mAssociationDesc, row, i))
+ goto fail;
+ break;
+ case OBJECT_SEQUENCE_NUMBER:
+ if (!putLong(window, objectInfo->mSequenceNumber, row, i))
+ goto fail;
+ break;
+ case OBJECT_NAME:
+ if (!putString(window, objectInfo->mName, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_CREATED:
+ if (!putLong(window, objectInfo->mDateCreated, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_MODIFIED:
+ if (!putLong(window, objectInfo->mDateModified, row, i))
+ goto fail;
+ break;
+ case OBJECT_KEYWORDS:
+ if (!putString(window, objectInfo->mKeywords, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB:
+ if (!putThumbnail(window, objectID, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillObject: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete objectInfo;
+ return true;
+
+fail:
+ delete objectInfo;
+ return false;
+}
+
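+// Sets the column count on the window and allocates a new row, returning false if either step fails.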
+bool MtpCursor::prepareRow(CursorWindow* window) {
+ if (!window->setNumColumns(mColumnCount)) {
+ LOGE("Failed to change column count from %d to %d", window->getNumColumns(), mColumnCount);
+ return false;
+ }
+ field_slot_t * fieldDir = window->allocRow();
+ if (!fieldDir) {
+ LOGE("Failed allocating fieldDir");
+ return false;
+ }
+ return true;
+}
+
+
+bool MtpCursor::putLong(CursorWindow* window, int64_t value, int row, int column) {
+ if (!window->putLong(row, column, value)) {
+ window->freeLastRow();
+ LOGE("Failed allocating space for a long in column %d", column);
+ return false;
+ }
+ return true;
+}
+
+bool MtpCursor::putString(CursorWindow* window, const char* text, int row, int column) {
+ int size = strlen(text) + 1;
+ int offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %u bytes for text/blob %s", size, text);
+ return false;
+ }
+ window->copyIn(offset, (const uint8_t*)text, size);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_STRING;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+bool MtpCursor::putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ void* thumbnail;
+ int size, offset;
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ thumbnail = NULL;
+ size = offset = 0;
+ } else {
+ thumbnail = device->getThumbnail(objectID, size);
+
+ LOGV("putThumbnail: %p, size: %d\n", thumbnail, size);
+ offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %u bytes for thumbnail", size);
+ return false;
+ }
+ }
+ if (thumbnail)
+ window->copyIn(offset, (const uint8_t*)thumbnail, size);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_BLOB;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+} // namespace android
diff --git a/media/mtp/MtpCursor.h b/media/mtp/MtpCursor.h
new file mode 100644
index 0000000..2e03c29
--- /dev/null
+++ b/media/mtp/MtpCursor.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CURSOR_H
+#define _MTP_CURSOR_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class CursorWindow;
+
+class MtpCursor {
+private:
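+    // query types handled by fillWindow()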
+ enum {
+ DEVICE = 1,
+ DEVICE_ID = 2,
+ STORAGE = 3,
+ STORAGE_ID = 4,
+ OBJECT = 5,
+ OBJECT_ID = 6,
+ STORAGE_CHILDREN = 7,
+ OBJECT_CHILDREN = 8,
+ };
+
+ MtpClient* mClient;
+ int mQueryType;
+ int mDeviceID;
+ MtpStorageID mStorageID;
+    MtpObjectHandle mObjectID;
+ int mColumnCount;
+ int* mColumns;
+
+public:
+ MtpCursor(MtpClient* client, int queryType, int deviceID,
+ MtpStorageID storageID, MtpObjectHandle objectID,
+ int columnCount, int* columns);
+ virtual ~MtpCursor();
+
+ int fillWindow(CursorWindow* window, int startPos);
+
+private:
+ int fillDevices(CursorWindow* window, int startPos);
+ int fillDevice(CursorWindow* window, int startPos);
+ int fillStorages(CursorWindow* window, int startPos);
+ int fillStorage(CursorWindow* window, int startPos);
+ int fillObjects(CursorWindow* window, int parent, int startPos);
+ int fillObject(CursorWindow* window, int startPos);
+
+ bool fillDevice(CursorWindow* window, MtpDevice* device, int startPos);
+ bool fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row);
+ bool fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row);
+
+ bool prepareRow(CursorWindow* window);
+ bool putLong(CursorWindow* window, int64_t value, int row, int column);
+ bool putString(CursorWindow* window, const char* text, int row, int column);
+ bool putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column);
+};
+
+}; // namespace android
+
+#endif // _MTP_CURSOR_H
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
new file mode 100644
index 0000000..ec78ff0
--- /dev/null
+++ b/media/mtp/MtpDataPacket.cpp
@@ -0,0 +1,486 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDataPacket"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include <usbhost/usbhost.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDataPacket::MtpDataPacket()
+ : MtpPacket(512),
+ mOffset(MTP_CONTAINER_HEADER_SIZE)
+{
+}
+
+MtpDataPacket::~MtpDataPacket() {
+}
+
+void MtpDataPacket::reset() {
+ MtpPacket::reset();
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+}
+
+void MtpDataPacket::setOperationCode(MtpOperationCode code) {
+ MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+void MtpDataPacket::setTransactionID(MtpTransactionID id) {
+ MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
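+// MTP data is little-endian, so multi-byte values are decoded least-significant byte first.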
+uint16_t MtpDataPacket::getUInt16() {
+ int offset = mOffset;
+ uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += 2;
+ return result;
+}
+
+uint32_t MtpDataPacket::getUInt32() {
+ int offset = mOffset;
+ uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ ((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
+ mOffset += 4;
+ return result;
+}
+
+uint64_t MtpDataPacket::getUInt64() {
+ int offset = mOffset;
+ uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
+ ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
+ ((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
+ mOffset += 8;
+ return result;
+}
+
+void MtpDataPacket::getUInt128(uint128_t& value) {
+ value[0] = getUInt32();
+ value[1] = getUInt32();
+ value[2] = getUInt32();
+ value[3] = getUInt32();
+}
+
+void MtpDataPacket::getString(MtpStringBuffer& string)
+{
+ string.readFromPacket(this);
+}
+
+Int8List* MtpDataPacket::getAInt8() {
+ Int8List* result = new Int8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt8());
+ return result;
+}
+
+UInt8List* MtpDataPacket::getAUInt8() {
+ UInt8List* result = new UInt8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt8());
+ return result;
+}
+
+Int16List* MtpDataPacket::getAInt16() {
+ Int16List* result = new Int16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt16());
+ return result;
+}
+
+UInt16List* MtpDataPacket::getAUInt16() {
+ UInt16List* result = new UInt16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt16());
+ return result;
+}
+
+Int32List* MtpDataPacket::getAInt32() {
+ Int32List* result = new Int32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt32());
+ return result;
+}
+
+UInt32List* MtpDataPacket::getAUInt32() {
+ UInt32List* result = new UInt32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt32());
+ return result;
+}
+
+Int64List* MtpDataPacket::getAInt64() {
+ Int64List* result = new Int64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt64());
+ return result;
+}
+
+UInt64List* MtpDataPacket::getAUInt64() {
+ UInt64List* result = new UInt64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt64());
+ return result;
+}
+
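+// The put methods append values at mOffset, growing the buffer and the packet size as needed.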
+void MtpDataPacket::putInt8(int8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt8(uint8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt16(int16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt16(uint16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt32(int32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt32(uint32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt64(int64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt64(uint64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt128(const int128_t& value) {
+ putInt32(value[0]);
+ putInt32(value[1]);
+ putInt32(value[2]);
+ putInt32(value[3]);
+}
+
+void MtpDataPacket::putUInt128(const uint128_t& value) {
+ putUInt32(value[0]);
+ putUInt32(value[1]);
+ putUInt32(value[2]);
+ putUInt32(value[3]);
+}
+
+void MtpDataPacket::putInt128(int64_t value) {
+ putInt64(value);
+ putInt64(value < 0 ? -1 : 0);
+}
+
+void MtpDataPacket::putUInt128(uint64_t value) {
+ putUInt64(value);
+ putUInt64(0);
+}
+
+void MtpDataPacket::putAInt8(const int8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt8(*values++);
+}
+
+void MtpDataPacket::putAUInt8(const uint8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt8(*values++);
+}
+
+void MtpDataPacket::putAInt16(const int16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const uint16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const UInt16List* values) {
+ size_t count = (values ? values->size() : 0);
+ putUInt32(count);
+ for (size_t i = 0; i < count; i++)
+ putUInt16((*values)[i]);
+}
+
+void MtpDataPacket::putAInt32(const int32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const uint32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const UInt32List* list) {
+ if (!list) {
+ putEmptyArray();
+ } else {
+ size_t size = list->size();
+ putUInt32(size);
+ for (size_t i = 0; i < size; i++)
+ putUInt32((*list)[i]);
+ }
+}
+
+void MtpDataPacket::putAInt64(const int64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt64(*values++);
+}
+
+void MtpDataPacket::putAUInt64(const uint64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt64(*values++);
+}
+
+void MtpDataPacket::putString(const MtpStringBuffer& string) {
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const char* s) {
+ MtpStringBuffer string(s);
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const uint16_t* string) {
+ int count = 0;
+ for (int i = 0; i < 256; i++) {
+ if (string[i])
+ count++;
+ else
+ break;
+ }
+ putUInt8(count > 0 ? count + 1 : 0);
+ for (int i = 0; i < count; i++)
+ putUInt16(string[i]);
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ putUInt16(0);
+}
+
+#ifdef MTP_DEVICE
+int MtpDataPacket::read(int fd) {
+ // first read the header
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret != MTP_CONTAINER_HEADER_SIZE)
+ return -1;
+ // then the following data
+ int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ int remaining = total - MTP_CONTAINER_HEADER_SIZE;
+ ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining);
+ if (ret != remaining)
+ return -1;
+
+ mPacketSize = total;
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+ return total;
+}
+
+int MtpDataPacket::readDataHeader(int fd) {
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret > 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+
+int MtpDataPacket::write(int fd) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ dump();
+ // send header separately from data
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::writeDataHeader(int fd, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+#endif // MTP_DEVICE
+
+#ifdef MTP_HOST
+int MtpDataPacket::read(struct usb_endpoint *ep) {
+ // first read the header
+ int length = transfer(ep, mBuffer, mBufferSize);
+ if (length >= MTP_CONTAINER_HEADER_SIZE) {
+ // look at the length field to see if the data spans multiple packets
+ uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ while (totalLength > length) {
+ allocate(length + mAllocationIncrement);
+ int ret = transfer(ep, mBuffer + length, mAllocationIncrement);
+ if (ret >= 0)
+ length += ret;
+ else {
+ length = ret;
+ break;
+ }
+ }
+ }
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::readData(struct usb_endpoint *ep, void* buffer, int length) {
+ int packetSize = usb_endpoint_max_packet(ep);
+ int read = 0;
+ while (read < length) {
+ int ret = transfer(ep, (char *)buffer + read, packetSize);
+ if (ret < 0) {
+printf("MtpDataPacket::readData returning %d\n", ret);
+ return ret;
+ }
+ read += ret;
+ }
+printf("MtpDataPacket::readData returning %d\n", read);
+ return read;
+}
+
+int MtpDataPacket::readDataHeader(struct usb_endpoint *ep) {
+ int length = transfer(ep, mBuffer, usb_endpoint_max_packet(ep));
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::writeDataHeader(struct usb_endpoint *ep, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+
+ // send header separately from data
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = transfer(ep, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep, void* buffer, uint32_t length) {
+    int ret = 0;
+    int packetSize = usb_endpoint_max_packet(ep);
+    uint8_t* data = (uint8_t*)buffer;
+    while (length > 0) {
+        int write = (length > (uint32_t)packetSize ? packetSize : length);
+        // assign to the outer ret so a transfer failure is reported by the return value
+        ret = transfer(ep, data, write);
+        if (ret < 0)
+            break;
+        // advance past the bytes that have already been sent
+        data += ret;
+        length -= ret;
+    }
+    return (ret < 0 ? ret : 0);
+}
+
+#endif // MTP_HOST
+
+void* MtpDataPacket::getData(int& outLength) const {
+ int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+ if (length > 0) {
+ void* result = malloc(length);
+ if (result) {
+ memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length);
+ outLength = length;
+ return result;
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
new file mode 100644
index 0000000..fab6a07
--- /dev/null
+++ b/media/mtp/MtpDataPacket.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATA_PACKET_H
+#define _MTP_DATA_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpStringBuffer;
+
+class MtpDataPacket : public MtpPacket {
+private:
+ // current offset for get/put methods
+ int mOffset;
+
+public:
+ MtpDataPacket();
+ virtual ~MtpDataPacket();
+
+ virtual void reset();
+
+ void setOperationCode(MtpOperationCode code);
+ void setTransactionID(MtpTransactionID id);
+
+ inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
+ inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
+ uint16_t getUInt16();
+ inline int16_t getInt16() { return (int16_t)getUInt16(); }
+ uint32_t getUInt32();
+ inline int32_t getInt32() { return (int32_t)getUInt32(); }
+ uint64_t getUInt64();
+ inline int64_t getInt64() { return (int64_t)getUInt64(); }
+ void getUInt128(uint128_t& value);
+ inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
+ void getString(MtpStringBuffer& string);
+
+ Int8List* getAInt8();
+ UInt8List* getAUInt8();
+ Int16List* getAInt16();
+ UInt16List* getAUInt16();
+ Int32List* getAInt32();
+ UInt32List* getAUInt32();
+ Int64List* getAInt64();
+ UInt64List* getAUInt64();
+
+ void putInt8(int8_t value);
+ void putUInt8(uint8_t value);
+ void putInt16(int16_t value);
+ void putUInt16(uint16_t value);
+ void putInt32(int32_t value);
+ void putUInt32(uint32_t value);
+ void putInt64(int64_t value);
+ void putUInt64(uint64_t value);
+ void putInt128(const int128_t& value);
+ void putUInt128(const uint128_t& value);
+ void putInt128(int64_t value);
+ void putUInt128(uint64_t value);
+
+ void putAInt8(const int8_t* values, int count);
+ void putAUInt8(const uint8_t* values, int count);
+ void putAInt16(const int16_t* values, int count);
+ void putAUInt16(const uint16_t* values, int count);
+ void putAUInt16(const UInt16List* values);
+ void putAInt32(const int32_t* values, int count);
+ void putAUInt32(const uint32_t* values, int count);
+ void putAUInt32(const UInt32List* list);
+ void putAInt64(const int64_t* values, int count);
+ void putAUInt64(const uint64_t* values, int count);
+ void putString(const MtpStringBuffer& string);
+ void putString(const char* string);
+ void putString(const uint16_t* string);
+ inline void putEmptyString() { putUInt8(0); }
+ inline void putEmptyArray() { putUInt32(0); }
+
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+ int readDataHeader(int fd);
+
+ // write our data to the given file descriptor
+ int write(int fd);
+ int writeDataHeader(int fd, uint32_t length);
+#endif
+
+#ifdef MTP_HOST
+ int read(struct usb_endpoint *ep);
+ int readData(struct usb_endpoint *ep, void* buffer, int length);
+ int readDataHeader(struct usb_endpoint *ep);
+
+ int writeDataHeader(struct usb_endpoint *ep, uint32_t length);
+ int write(struct usb_endpoint *ep);
+ int write(struct usb_endpoint *ep, void* buffer, uint32_t length);
+#endif
+
+ inline bool hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
+ void* getData(int& outLength) const;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATA_PACKET_H
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h
new file mode 100644
index 0000000..c8cb016
--- /dev/null
+++ b/media/mtp/MtpDatabase.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATABASE_H
+#define _MTP_DATABASE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+class MtpProperty;
+
+class MtpDatabase {
+public:
+ virtual ~MtpDatabase() {}
+
+ // called from SendObjectInfo to reserve a database entry for the incoming file
+ virtual MtpObjectHandle beginSendObject(const char* path,
+ MtpObjectFormat format,
+ MtpObjectHandle parent,
+ MtpStorageID storage,
+ uint64_t size,
+ time_t modified) = 0;
+
+ // called to report success or failure of the SendObject file transfer
+ // success should signal a notification of the new object's creation,
+ // failure should remove the database entry created in beginSendObject
+ virtual void endSendObject(const char* path,
+ MtpObjectHandle handle,
+ MtpObjectFormat format,
+ bool succeeded) = 0;
+
+ virtual MtpObjectHandleList* getObjectList(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+ virtual int getNumObjects(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+    // caller should delete[] the results from these
+ // results can be NULL
+ virtual MtpObjectFormatList* getSupportedPlaybackFormats() = 0;
+ virtual MtpObjectFormatList* getSupportedCaptureFormats() = 0;
+ virtual MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format) = 0;
+ virtual MtpDevicePropertyList* getSupportedDeviceProperties() = 0;
+
+ virtual MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode resetDeviceProperty(MtpDeviceProperty property) = 0;
+
+ virtual MtpResponseCode getObjectInfo(MtpObjectHandle handle,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle,
+ MtpString& filePath,
+ int64_t& fileLength) = 0;
+
+ virtual MtpResponseCode deleteFile(MtpObjectHandle handle) = 0;
+
+ virtual MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle) = 0;
+
+ virtual MtpResponseCode setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references) = 0;
+
+ virtual MtpProperty* getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format) = 0;
+
+ virtual MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property) = 0;
+
+ virtual void sessionStarted() = 0;
+
+ virtual void sessionEnded() = 0;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATABASE_H
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
new file mode 100644
index 0000000..d6b107d
--- /dev/null
+++ b/media/mtp/MtpDebug.cpp
@@ -0,0 +1,387 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MtpDebug.h"
+
+namespace android {
+
+struct CodeEntry {
+ const char* name;
+ uint16_t code;
+};
+
+static const CodeEntry sOperationCodes[] = {
+ { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
+ { "MTP_OPERATION_OPEN_SESSION", 0x1002 },
+ { "MTP_OPERATION_CLOSE_SESSION", 0x1003 },
+ { "MTP_OPERATION_GET_STORAGE_IDS", 0x1004 },
+ { "MTP_OPERATION_GET_STORAGE_INFO", 0x1005 },
+ { "MTP_OPERATION_GET_NUM_OBJECTS", 0x1006 },
+ { "MTP_OPERATION_GET_OBJECT_HANDLES", 0x1007 },
+ { "MTP_OPERATION_GET_OBJECT_INFO", 0x1008 },
+ { "MTP_OPERATION_GET_OBJECT", 0x1009 },
+ { "MTP_OPERATION_GET_THUMB", 0x100A },
+ { "MTP_OPERATION_DELETE_OBJECT", 0x100B },
+ { "MTP_OPERATION_SEND_OBJECT_INFO", 0x100C },
+ { "MTP_OPERATION_SEND_OBJECT", 0x100D },
+ { "MTP_OPERATION_INITIATE_CAPTURE", 0x100E },
+ { "MTP_OPERATION_FORMAT_STORE", 0x100F },
+ { "MTP_OPERATION_RESET_DEVICE", 0x1010 },
+ { "MTP_OPERATION_SELF_TEST", 0x1011 },
+ { "MTP_OPERATION_SET_OBJECT_PROTECTION", 0x1012 },
+ { "MTP_OPERATION_POWER_DOWN", 0x1013 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_DESC", 0x1014 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_VALUE", 0x1015 },
+ { "MTP_OPERATION_SET_DEVICE_PROP_VALUE", 0x1016 },
+ { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE", 0x1017 },
+ { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE", 0x1018 },
+ { "MTP_OPERATION_MOVE_OBJECT", 0x1019 },
+ { "MTP_OPERATION_COPY_OBJECT", 0x101A },
+ { "MTP_OPERATION_GET_PARTIAL_OBJECT", 0x101B },
+ { "MTP_OPERATION_INITIATE_OPEN_CAPTURE", 0x101C },
+ { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED", 0x9801 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_DESC", 0x9802 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_VALUE", 0x9803 },
+ { "MTP_OPERATION_SET_OBJECT_PROP_VALUE", 0x9804 },
+ { "MTP_OPERATION_GET_OBJECT_REFERENCES", 0x9810 },
+ { "MTP_OPERATION_SET_OBJECT_REFERENCES", 0x9811 },
+ { "MTP_OPERATION_SKIP", 0x9820 },
+ { 0, 0 },
+};
+
+static const CodeEntry sFormatCodes[] = {
+ { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
+ { "MTP_FORMAT_UNDEFINED", 0x3000 },
+ { "MTP_FORMAT_ASSOCIATION", 0x3001 },
+ { "MTP_FORMAT_SCRIPT", 0x3002 },
+ { "MTP_FORMAT_EXECUTABLE", 0x3003 },
+ { "MTP_FORMAT_TEXT", 0x3004 },
+ { "MTP_FORMAT_HTML", 0x3005 },
+ { "MTP_FORMAT_DPOF", 0x3006 },
+ { "MTP_FORMAT_AIFF", 0x3007 },
+ { "MTP_FORMAT_WAV", 0x3008 },
+ { "MTP_FORMAT_MP3", 0x3009 },
+ { "MTP_FORMAT_AVI", 0x300A },
+ { "MTP_FORMAT_MPEG", 0x300B },
+ { "MTP_FORMAT_ASF", 0x300C },
+ { "MTP_FORMAT_DEFINED", 0x3800 },
+ { "MTP_FORMAT_EXIF_JPEG", 0x3801 },
+ { "MTP_FORMAT_TIFF_EP", 0x3802 },
+ { "MTP_FORMAT_FLASHPIX", 0x3803 },
+ { "MTP_FORMAT_BMP", 0x3804 },
+ { "MTP_FORMAT_CIFF", 0x3805 },
+ { "MTP_FORMAT_GIF", 0x3807 },
+ { "MTP_FORMAT_JFIF", 0x3808 },
+ { "MTP_FORMAT_CD", 0x3809 },
+ { "MTP_FORMAT_PICT", 0x380A },
+ { "MTP_FORMAT_PNG", 0x380B },
+ { "MTP_FORMAT_TIFF", 0x380D },
+ { "MTP_FORMAT_TIFF_IT", 0x380E },
+ { "MTP_FORMAT_JP2", 0x380F },
+ { "MTP_FORMAT_JPX", 0x3810 },
+ { "MTP_FORMAT_UNDEFINED_FIRMWARE", 0xB802 },
+ { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT", 0xB881 },
+ { "MTP_FORMAT_UNDEFINED_AUDIO", 0xB900 },
+ { "MTP_FORMAT_WMA", 0xB901 },
+ { "MTP_FORMAT_OGG", 0xB902 },
+ { "MTP_FORMAT_AAC", 0xB903 },
+ { "MTP_FORMAT_AUDIBLE", 0xB904 },
+ { "MTP_FORMAT_FLAC", 0xB906 },
+ { "MTP_FORMAT_UNDEFINED_VIDEO", 0xB980 },
+ { "MTP_FORMAT_WMV", 0xB981 },
+ { "MTP_FORMAT_MP4_CONTAINER", 0xB982 },
+ { "MTP_FORMAT_MP2", 0xB983 },
+ { "MTP_FORMAT_3GP_CONTAINER", 0xB984 },
+ { "MTP_FORMAT_UNDEFINED_COLLECTION", 0xBA00 },
+ { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM", 0xBA01 },
+ { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM", 0xBA02 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM", 0xBA03 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM", 0xBA04 },
+ { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST", 0xBA05 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP", 0xBA06 },
+ { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER", 0xBA07 },
+ { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION", 0xBA08 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST", 0xBA09 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST", 0xBA0A },
+ { "MTP_FORMAT_ABSTRACT_MEDIACAST", 0xBA0B },
+ { "MTP_FORMAT_WPL_PLAYLIST", 0xBA10 },
+ { "MTP_FORMAT_M3U_PLAYLIST", 0xBA11 },
+ { "MTP_FORMAT_MPL_PLAYLIST", 0xBA12 },
+ { "MTP_FORMAT_ASX_PLAYLIST", 0xBA13 },
+ { "MTP_FORMAT_PLS_PLAYLIST", 0xBA14 },
+ { "MTP_FORMAT_UNDEFINED_DOCUMENT", 0xBA80 },
+ { "MTP_FORMAT_ABSTRACT_DOCUMENT", 0xBA81 },
+ { "MTP_FORMAT_XML_DOCUMENT", 0xBA82 },
+ { "MTP_FORMAT_MS_WORD_DOCUMENT", 0xBA83 },
+ { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT", 0xBA84 },
+ { "MTP_FORMAT_MS_EXCEL_SPREADSHEET", 0xBA85 },
+ { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION", 0xBA86 },
+ { "MTP_FORMAT_UNDEFINED_MESSAGE", 0xBB00 },
+ { "MTP_FORMAT_ABSTRACT_MESSSAGE", 0xBB01 },
+ { "MTP_FORMAT_UNDEFINED_CONTACT", 0xBB80 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT", 0xBB81 },
+ { "MTP_FORMAT_VCARD_2", 0xBB82 },
+ { 0, 0 },
+};
+
+static const CodeEntry sObjectPropCodes[] = {
+ { "MTP_PROPERTY_STORAGE_ID", 0xDC01 },
+ { "MTP_PROPERTY_OBJECT_FORMAT", 0xDC02 },
+ { "MTP_PROPERTY_PROTECTION_STATUS", 0xDC03 },
+ { "MTP_PROPERTY_OBJECT_SIZE", 0xDC04 },
+ { "MTP_PROPERTY_ASSOCIATION_TYPE", 0xDC05 },
+ { "MTP_PROPERTY_ASSOCIATION_DESC", 0xDC06 },
+ { "MTP_PROPERTY_OBJECT_FILE_NAME", 0xDC07 },
+ { "MTP_PROPERTY_DATE_CREATED", 0xDC08 },
+ { "MTP_PROPERTY_DATE_MODIFIED", 0xDC09 },
+ { "MTP_PROPERTY_KEYWORDS", 0xDC0A },
+ { "MTP_PROPERTY_PARENT_OBJECT", 0xDC0B },
+ { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS", 0xDC0C },
+ { "MTP_PROPERTY_HIDDEN", 0xDC0D },
+ { "MTP_PROPERTY_SYSTEM_OBJECT", 0xDC0E },
+ { "MTP_PROPERTY_PERSISTENT_UID", 0xDC41 },
+ { "MTP_PROPERTY_SYNC_ID", 0xDC42 },
+ { "MTP_PROPERTY_PROPERTY_BAG", 0xDC43 },
+ { "MTP_PROPERTY_NAME", 0xDC44 },
+ { "MTP_PROPERTY_CREATED_BY", 0xDC45 },
+ { "MTP_PROPERTY_ARTIST", 0xDC46 },
+ { "MTP_PROPERTY_DATE_AUTHORED", 0xDC47 },
+ { "MTP_PROPERTY_DESCRIPTION", 0xDC48 },
+ { "MTP_PROPERTY_URL_REFERENCE", 0xDC49 },
+ { "MTP_PROPERTY_LANGUAGE_LOCALE", 0xDC4A },
+ { "MTP_PROPERTY_COPYRIGHT_INFORMATION", 0xDC4B },
+ { "MTP_PROPERTY_SOURCE", 0xDC4C },
+ { "MTP_PROPERTY_ORIGIN_LOCATION", 0xDC4D },
+ { "MTP_PROPERTY_DATE_ADDED", 0xDC4E },
+ { "MTP_PROPERTY_NON_CONSUMABLE", 0xDC4F },
+ { "MTP_PROPERTY_CORRUPT_UNPLAYABLE", 0xDC50 },
+ { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER", 0xDC51 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT", 0xDC81 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE", 0xDC82 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT", 0xDC83 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH", 0xDC84 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION", 0xDC85 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA", 0xDC86 },
+ { "MTP_PROPERTY_WIDTH", 0xDC87 },
+ { "MTP_PROPERTY_HEIGHT", 0xDC88 },
+ { "MTP_PROPERTY_DURATION", 0xDC89 },
+ { "MTP_PROPERTY_RATING", 0xDC8A },
+ { "MTP_PROPERTY_TRACK", 0xDC8B },
+ { "MTP_PROPERTY_GENRE", 0xDC8C },
+ { "MTP_PROPERTY_CREDITS", 0xDC8D },
+ { "MTP_PROPERTY_LYRICS", 0xDC8E },
+ { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID", 0xDC8F },
+ { "MTP_PROPERTY_PRODUCED_BY", 0xDC90 },
+ { "MTP_PROPERTY_USE_COUNT", 0xDC91 },
+ { "MTP_PROPERTY_SKIP_COUNT", 0xDC92 },
+ { "MTP_PROPERTY_LAST_ACCESSED", 0xDC93 },
+ { "MTP_PROPERTY_PARENTAL_RATING", 0xDC94 },
+ { "MTP_PROPERTY_META_GENRE", 0xDC95 },
+ { "MTP_PROPERTY_COMPOSER", 0xDC96 },
+ { "MTP_PROPERTY_EFFECTIVE_RATING", 0xDC97 },
+ { "MTP_PROPERTY_SUBTITLE", 0xDC98 },
+ { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE", 0xDC99 },
+ { "MTP_PROPERTY_ALBUM_NAME", 0xDC9A },
+ { "MTP_PROPERTY_ALBUM_ARTIST", 0xDC9B },
+ { "MTP_PROPERTY_MOOD", 0xDC9C },
+ { "MTP_PROPERTY_DRM_STATUS", 0xDC9D },
+ { "MTP_PROPERTY_SUB_DESCRIPTION", 0xDC9E },
+ { "MTP_PROPERTY_IS_CROPPED", 0xDCD1 },
+ { "MTP_PROPERTY_IS_COLOUR_CORRECTED", 0xDCD2 },
+ { "MTP_PROPERTY_IMAGE_BIT_DEPTH", 0xDCD3 },
+ { "MTP_PROPERTY_F_NUMBER", 0xDCD4 },
+ { "MTP_PROPERTY_EXPOSURE_TIME", 0xDCD5 },
+ { "MTP_PROPERTY_EXPOSURE_INDEX", 0xDCD6 },
+ { "MTP_PROPERTY_TOTAL_BITRATE", 0xDE91 },
+ { "MTP_PROPERTY_BITRATE_TYPE", 0xDE92 },
+ { "MTP_PROPERTY_SAMPLE_RATE", 0xDE93 },
+ { "MTP_PROPERTY_NUMBER_OF_CHANNELS", 0xDE94 },
+ { "MTP_PROPERTY_AUDIO_BIT_DEPTH", 0xDE95 },
+ { "MTP_PROPERTY_SCAN_TYPE", 0xDE97 },
+ { "MTP_PROPERTY_AUDIO_WAVE_CODEC", 0xDE99 },
+ { "MTP_PROPERTY_AUDIO_BITRATE", 0xDE9A },
+ { "MTP_PROPERTY_VIDEO_FOURCC_CODEC", 0xDE9B },
+ { "MTP_PROPERTY_VIDEO_BITRATE", 0xDE9C },
+ { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS", 0xDE9D },
+ { "MTP_PROPERTY_KEYFRAME_DISTANCE", 0xDE9E },
+ { "MTP_PROPERTY_BUFFER_SIZE", 0xDE9F },
+ { "MTP_PROPERTY_ENCODING_QUALITY", 0xDEA0 },
+ { "MTP_PROPERTY_ENCODING_PROFILE", 0xDEA1 },
+ { "MTP_PROPERTY_DISPLAY_NAME", 0xDCE0 },
+ { "MTP_PROPERTY_BODY_TEXT", 0xDCE1 },
+ { "MTP_PROPERTY_SUBJECT", 0xDCE2 },
+ { "MTP_PROPERTY_PRIORITY", 0xDCE3 },
+ { "MTP_PROPERTY_GIVEN_NAME", 0xDD00 },
+ { "MTP_PROPERTY_MIDDLE_NAMES", 0xDD01 },
+ { "MTP_PROPERTY_FAMILY_NAME", 0xDD02 },
+ { "MTP_PROPERTY_PREFIX", 0xDD03 },
+ { "MTP_PROPERTY_SUFFIX", 0xDD04 },
+ { "MTP_PROPERTY_PHONETIC_GIVEN_NAME", 0xDD05 },
+ { "MTP_PROPERTY_PHONETIC_FAMILY_NAME", 0xDD06 },
+ { "MTP_PROPERTY_EMAIL_PRIMARY", 0xDD07 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_1", 0xDD08 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_2", 0xDD09 },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_1", 0xDD0A },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_2", 0xDD0B },
+ { "MTP_PROPERTY_EMAIL_OTHERS", 0xDD0C },
+ { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY", 0xDD0D },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL", 0xDD0E },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2", 0xDD0F },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS", 0xDD10 },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2", 0xDD11 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE", 0xDD12 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2", 0xDD13 },
+ { "MTP_PROPERTY_FAX_NUMBER_PRIMARY", 0xDD14 },
+ { "MTP_PROPERTY_FAX_NUMBER_PERSONAL", 0xDD15 },
+ { "MTP_PROPERTY_FAX_NUMBER_BUSINESS", 0xDD16 },
+ { "MTP_PROPERTY_PAGER_NUMBER", 0xDD17 },
+ { "MTP_PROPERTY_PHONE_NUMBER_OTHERS", 0xDD18 },
+ { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS", 0xDD19 },
+ { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS", 0xDD1A },
+ { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS", 0xDD1B },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS", 0xDD1C },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2", 0xDD1D },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3", 0xDD1E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL", 0xDD1F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1", 0xDD20 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2", 0xDD21 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY", 0xDD22 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION", 0xDD23 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE", 0xDD24 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY", 0xDD25 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL", 0xDD26 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1", 0xDD27 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2", 0xDD28 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY", 0xDD29 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION", 0xDD2A },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE", 0xDD2B },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY", 0xDD2C },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL", 0xDD2D },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1", 0xDD2E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2", 0xDD2F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY", 0xDD30 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION", 0xDD31 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE", 0xDD32 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY", 0xDD33 },
+ { "MTP_PROPERTY_ORGANIZATION_NAME", 0xDD34 },
+ { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME", 0xDD35 },
+ { "MTP_PROPERTY_ROLE", 0xDD36 },
+ { "MTP_PROPERTY_BIRTHDATE", 0xDD37 },
+ { "MTP_PROPERTY_MESSAGE_TO", 0xDD40 },
+ { "MTP_PROPERTY_MESSAGE_CC", 0xDD41 },
+ { "MTP_PROPERTY_MESSAGE_BCC", 0xDD42 },
+ { "MTP_PROPERTY_MESSAGE_READ", 0xDD43 },
+ { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME", 0xDD44 },
+ { "MTP_PROPERTY_MESSAGE_SENDER", 0xDD45 },
+ { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME", 0xDD50 },
+ { "MTP_PROPERTY_ACTIVITY_END_TIME", 0xDD51 },
+ { "MTP_PROPERTY_ACTIVITY_LOCATION", 0xDD52 },
+ { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES", 0xDD54 },
+ { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES", 0xDD55 },
+ { "MTP_PROPERTY_ACTIVITY_RESOURCES", 0xDD56 },
+ { "MTP_PROPERTY_ACTIVITY_ACCEPTED", 0xDD57 },
+ { "MTP_PROPERTY_ACTIVITY_TENTATIVE", 0xDD58 },
+ { "MTP_PROPERTY_ACTIVITY_DECLINED", 0xDD59 },
+ { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME", 0xDD5A },
+ { "MTP_PROPERTY_ACTIVITY_OWNER", 0xDD5B },
+ { "MTP_PROPERTY_ACTIVITY_STATUS", 0xDD5C },
+ { "MTP_PROPERTY_OWNER", 0xDD5D },
+ { "MTP_PROPERTY_EDITOR", 0xDD5E },
+ { "MTP_PROPERTY_WEBMASTER", 0xDD5F },
+ { "MTP_PROPERTY_URL_SOURCE", 0xDD60 },
+ { "MTP_PROPERTY_URL_DESTINATION", 0xDD61 },
+ { "MTP_PROPERTY_TIME_BOOKMARK", 0xDD62 },
+ { "MTP_PROPERTY_OBJECT_BOOKMARK", 0xDD63 },
+ { "MTP_PROPERTY_BYTE_BOOKMARK", 0xDD64 },
+ { "MTP_PROPERTY_LAST_BUILD_DATE", 0xDD70 },
+ { "MTP_PROPERTY_TIME_TO_LIVE", 0xDD71 },
+ { "MTP_PROPERTY_MEDIA_GUID", 0xDD72 },
+ { 0, 0 },
+};
+
+static const CodeEntry sDevicePropCodes[] = {
+ { "MTP_DEVICE_PROPERTY_UNDEFINED", 0x5000 },
+ { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL", 0x5001 },
+ { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE", 0x5002 },
+ { "MTP_DEVICE_PROPERTY_IMAGE_SIZE", 0x5003 },
+ { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING", 0x5004 },
+ { "MTP_DEVICE_PROPERTY_WHITE_BALANCE", 0x5005 },
+ { "MTP_DEVICE_PROPERTY_RGB_GAIN", 0x5006 },
+ { "MTP_DEVICE_PROPERTY_F_NUMBER", 0x5007 },
+ { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH", 0x5008 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE", 0x5009 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_MODE", 0x500A },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE", 0x500B },
+ { "MTP_DEVICE_PROPERTY_FLASH_MODE", 0x500C },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME", 0x500D },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE", 0x500E },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX", 0x500F },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION", 0x5010 },
+ { "MTP_DEVICE_PROPERTY_DATETIME", 0x5011 },
+ { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY", 0x5012 },
+ { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE", 0x5013 },
+ { "MTP_DEVICE_PROPERTY_CONTRAST", 0x5014 },
+ { "MTP_DEVICE_PROPERTY_SHARPNESS", 0x5015 },
+ { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM", 0x5016 },
+ { "MTP_DEVICE_PROPERTY_EFFECT_MODE", 0x5017 },
+ { "MTP_DEVICE_PROPERTY_BURST_NUMBER", 0x5018 },
+ { "MTP_DEVICE_PROPERTY_BURST_INTERVAL", 0x5019 },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER", 0x501A },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL", 0x501B },
+ { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE", 0x501C },
+ { "MTP_DEVICE_PROPERTY_UPLOAD_URL", 0x501D },
+ { "MTP_DEVICE_PROPERTY_ARTIST", 0x501E },
+ { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO", 0x501F },
+ { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER", 0xD401 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME", 0xD402 },
+ { "MTP_DEVICE_PROPERTY_VOLUME", 0xD403 },
+ { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED", 0xD404 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_ICON", 0xD405 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE", 0xD410 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT", 0xD411 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX", 0xD412 },
+ { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO", 0xD406 },
+ { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE", 0xD407 },
+ { 0, 0 },
+};
+
+static const char* getCodeName(uint16_t code, const CodeEntry* table) {
+ const CodeEntry* entry = table;
+ while (entry->name) {
+ if (entry->code == code)
+ return entry->name;
+ entry++;
+ }
+ return "UNKNOWN";
+}
+
+const char* MtpDebug::getOperationCodeName(MtpOperationCode code) {
+ return getCodeName(code, sOperationCodes);
+}
+
+const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) {
+ return getCodeName(code, sFormatCodes);
+}
+
+const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) {
+ return getCodeName(code, sObjectPropCodes);
+}
+
+const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) {
+ return getCodeName(code, sDevicePropCodes);
+}
+
+} // namespace android
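
The debug helpers above resolve a 16-bit code to a human-readable name by scanning a sentinel-terminated name/code table. For reference only (not part of this patch), a minimal standalone sketch of the same lookup pattern; the two operation codes shown follow the PTP/MTP numbering used elsewhere in this change:

    #include <cstdint>
    #include <cstdio>

    struct CodeEntry {
        const char* name;
        uint16_t    code;
    };

    // Illustrative subset of an operation-code table; the real tables above are much larger.
    static const CodeEntry kOps[] = {
        { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
        { "MTP_OPERATION_OPEN_SESSION",    0x1002 },
        { nullptr, 0 },   // sentinel, same role as the { 0, 0 } terminators above
    };

    // Linear scan until the sentinel; unknown codes fall through to "UNKNOWN".
    static const char* codeName(uint16_t code, const CodeEntry* table) {
        for (const CodeEntry* e = table; e->name; ++e)
            if (e->code == code)
                return e->name;
        return "UNKNOWN";
    }

    int main() {
        std::printf("%s\n", codeName(0x1002, kOps));   // prints MTP_OPERATION_OPEN_SESSION
        return 0;
    }
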
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
new file mode 100644
index 0000000..5b53e31
--- /dev/null
+++ b/media/mtp/MtpDebug.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEBUG_H
+#define _MTP_DEBUG_H
+
+// #define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDebug {
+public:
+ static const char* getOperationCodeName(MtpOperationCode code);
+ static const char* getFormatCodeName(MtpObjectFormat code);
+ static const char* getObjectPropCodeName(MtpPropertyCode code);
+ static const char* getDevicePropCodeName(MtpPropertyCode code);
+};
+
+}; // namespace android
+
+#endif // _MTP_DEBUG_H
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
new file mode 100644
index 0000000..fca0142
--- /dev/null
+++ b/media/mtp/MtpDevice.cpp
@@ -0,0 +1,496 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDevice"
+
+#include "MtpDebug.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpProperty.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <endian.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpDevice::MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr)
+ : mDevice(device),
+ mInterface(interface),
+ mEndpointIn(ep_in),
+ mEndpointOut(ep_out),
+ mEndpointIntr(ep_intr),
+ mDeviceInfo(NULL),
+ mID(usb_device_get_unique_id(device)),
+ mSessionID(0),
+ mTransactionID(0)
+{
+}
+
+MtpDevice::~MtpDevice() {
+ close();
+ for (int i = 0; i < mDeviceProperties.size(); i++)
+ delete mDeviceProperties[i];
+}
+
+void MtpDevice::initialize() {
+ openSession();
+ mDeviceInfo = getDeviceInfo();
+ if (mDeviceInfo) {
+ mDeviceInfo->print();
+
+ if (mDeviceInfo->mDeviceProperties) {
+ int count = mDeviceInfo->mDeviceProperties->size();
+ for (int i = 0; i < count; i++) {
+ MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+ MtpProperty* property = getDevicePropDesc(propCode);
+ if (property) {
+ property->print();
+ mDeviceProperties.push(property);
+ }
+ }
+ }
+ }
+}
+
+void MtpDevice::close() {
+ if (mDevice) {
+ usb_device_release_interface(mDevice, mInterface);
+ usb_device_close(mDevice);
+ mDevice = NULL;
+ }
+}
+
+const char* MtpDevice::getDeviceName() {
+ if (mDevice)
+ return usb_device_get_name(mDevice);
+ else
+ return "???";
+}
+
+bool MtpDevice::openSession() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSessionID = 0;
+ mTransactionID = 0;
+ MtpSessionID newSession = 1;
+ mRequest.reset();
+ mRequest.setParameter(1, newSession);
+ if (!sendRequest(MTP_OPERATION_OPEN_SESSION))
+ return false;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN)
+ newSession = mResponse.getParameter(1);
+ else if (ret != MTP_RESPONSE_OK)
+ return false;
+
+ mSessionID = newSession;
+ mTransactionID = 1;
+ return true;
+}
+
+bool MtpDevice::closeSession() {
+ // FIXME
+ return true;
+}
+
+MtpDeviceInfo* MtpDevice::getDeviceInfo() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpDeviceInfo* info = new MtpDeviceInfo;
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpStorageIDList* MtpDevice::getStorageIDs() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpStorageInfo* info = new MtpStorageInfo(storageID);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
+ MtpObjectFormat format, MtpObjectHandle parent) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ mRequest.setParameter(2, format);
+ mRequest.setParameter(3, parent);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ // FIXME - we might want to add some caching here
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpObjectInfo* info = new MtpObjectInfo(handle);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getData(outLength);
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ MtpObjectHandle parent = info->mParent;
+ if (parent == 0)
+ parent = MTP_PARENT_ROOT;
+
+ mRequest.setParameter(1, info->mStorageID);
+    mRequest.setParameter(2, parent);
+
+ mData.putUInt32(info->mStorageID);
+ mData.putUInt16(info->mFormat);
+ mData.putUInt16(info->mProtectionStatus);
+ mData.putUInt32(info->mCompressedSize);
+ mData.putUInt16(info->mThumbFormat);
+ mData.putUInt32(info->mThumbCompressedSize);
+ mData.putUInt32(info->mThumbPixWidth);
+ mData.putUInt32(info->mThumbPixHeight);
+ mData.putUInt32(info->mImagePixWidth);
+ mData.putUInt32(info->mImagePixHeight);
+ mData.putUInt32(info->mImagePixDepth);
+ mData.putUInt32(info->mParent);
+ mData.putUInt16(info->mAssociationType);
+ mData.putUInt32(info->mAssociationDesc);
+ mData.putUInt32(info->mSequenceNumber);
+ mData.putString(info->mName);
+
+ char created[100], modified[100];
+ formatDateTime(info->mDateCreated, created, sizeof(created));
+ formatDateTime(info->mDateModified, modified, sizeof(modified));
+
+ mData.putString(created);
+ mData.putString(modified);
+ if (info->mKeywords)
+ mData.putString(info->mKeywords);
+ else
+ mData.putEmptyString();
+
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ info->mStorageID = mResponse.getParameter(1);
+ info->mParent = mResponse.getParameter(2);
+ info->mHandle = mResponse.getParameter(3);
+ return info->mHandle;
+ }
+ }
+ return (MtpObjectHandle)-1;
+}
+
+bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) {
+ Mutex::Autolock autoLock(mMutex);
+
+ int remaining = info->mCompressedSize;
+ mRequest.reset();
+ mRequest.setParameter(1, info->mHandle);
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+ // send data header
+ writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
+
+ char buffer[65536];
+ while (remaining > 0) {
+ int count = read(srcFD, buffer, sizeof(buffer));
+ if (count > 0) {
+ int written = mData.write(mEndpointOut, buffer, count);
+ // FIXME check error
+ remaining -= count;
+ } else {
+ break;
+ }
+ }
+ }
+ MtpResponseCode ret = readResponse();
+ return (remaining == 0 && ret == MTP_RESPONSE_OK);
+}
+
+bool MtpDevice::deleteObject(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK)
+ return true;
+ }
+ return false;
+}
+
+MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    if (info) {
+        MtpObjectHandle parent = info->mParent;
+        delete info;        // getObjectInfo() allocates; free it to avoid leaking
+        return parent;
+    }
+    return -1;
+}
+
+MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    if (info) {
+        MtpStorageID storageID = info->mStorageID;
+        delete info;        // avoid leaking the MtpObjectInfo
+        return storageID;
+    }
+    return -1;
+}
+
+MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, code);
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpProperty* property = new MtpProperty;
+ property->read(mData);
+ return property;
+ }
+ return NULL;
+}
+
+class ReadObjectThread : public Thread {
+private:
+ MtpDevice* mDevice;
+ MtpObjectHandle mHandle;
+ int mObjectSize;
+ void* mInitialData;
+ int mInitialDataLength;
+ int mFD;
+
+public:
+    ReadObjectThread(MtpDevice* device, MtpObjectHandle handle, int objectSize)
+        : mDevice(device),
+          mHandle(handle),
+          mObjectSize(objectSize),
+          mInitialData(NULL),
+          mInitialDataLength(0),
+          mFD(-1)   // so the destructor's close(mFD) check is safe if init() fails
+    {
+    }
+
+ virtual ~ReadObjectThread() {
+ if (mFD >= 0)
+ close(mFD);
+ free(mInitialData);
+ }
+
+ // returns file descriptor
+ int init() {
+ mDevice->mRequest.reset();
+ mDevice->mRequest.setParameter(1, mHandle);
+ if (mDevice->sendRequest(MTP_OPERATION_GET_OBJECT)
+ && mDevice->mData.readDataHeader(mDevice->mEndpointIn)) {
+
+ // mData will contain header and possibly the beginning of the object data
+ mInitialData = mDevice->mData.getData(mInitialDataLength);
+
+ // create a pipe for the client to read from
+ int pipefd[2];
+ if (pipe(pipefd) < 0) {
+ LOGE("pipe failed (%s)", strerror(errno));
+ return -1;
+ }
+
+ mFD = pipefd[1];
+ return pipefd[0];
+ } else {
+ return -1;
+ }
+ }
+
+ virtual bool threadLoop() {
+ int remaining = mObjectSize;
+ if (mInitialData) {
+ write(mFD, mInitialData, mInitialDataLength);
+ remaining -= mInitialDataLength;
+ free(mInitialData);
+ mInitialData = NULL;
+ }
+
+ char buffer[16384];
+ while (remaining > 0) {
+ int readSize = (remaining > sizeof(buffer) ? sizeof(buffer) : remaining);
+ int count = mDevice->mData.readData(mDevice->mEndpointIn, buffer, readSize);
+            if (count >= 0) {
+                int written = write(mFD, buffer, count);
+ // FIXME check error
+ remaining -= count;
+ } else {
+ break;
+ }
+ }
+
+ MtpResponseCode ret = mDevice->readResponse();
+ mDevice->mMutex.unlock();
+ return false;
+ }
+};
+
+ // returns the file descriptor for a pipe to read the object's data
+int MtpDevice::readObject(MtpObjectHandle handle, int objectSize) {
+ mMutex.lock();
+
+ ReadObjectThread* thread = new ReadObjectThread(this, handle, objectSize);
+ int fd = thread->init();
+ if (fd < 0) {
+ delete thread;
+ mMutex.unlock();
+ } else {
+ thread->run("ReadObjectThread");
+ }
+ return fd;
+}
+
+bool MtpDevice::sendRequest(MtpOperationCode operation) {
+ LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation));
+ mRequest.setOperationCode(operation);
+ if (mTransactionID > 0)
+ mRequest.setTransactionID(mTransactionID++);
+ int ret = mRequest.write(mEndpointOut);
+ mRequest.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::sendData() {
+ LOGV("sendData\n");
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ int ret = mData.write(mEndpointOut);
+ mData.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::readData() {
+ mData.reset();
+ int ret = mData.read(mEndpointIn);
+ LOGV("readData returned %d\n", ret);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mData.dump();
+ return true;
+ }
+ else {
+ LOGV("readResponse failed\n");
+ return false;
+ }
+}
+
+bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(mRequest.getTransactionID());
+ return (!mData.writeDataHeader(mEndpointOut, dataLength));
+}
+
+MtpResponseCode MtpDevice::readResponse() {
+ LOGV("readResponse\n");
+ int ret = mResponse.read(mEndpointIn);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mResponse.dump();
+ return mResponse.getResponseCode();
+ }
+ else {
+ LOGD("readResponse failed\n");
+ return -1;
+ }
+}
+
+} // namespace android
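
readObject() above hands back the read end of a pipe while a ReadObjectThread streams the object's payload into the write end, so a caller can treat the transfer as an ordinary file descriptor. A hedged consumption sketch; 'mtp', 'handle', and the destination path are assumptions for illustration:

    // Sketch: copy one MTP object into a local file via the pipe from readObject().
    #include <fcntl.h>
    #include <unistd.h>

    void copyObject(MtpDevice& mtp, MtpObjectHandle handle, const char* destPath) {
        MtpObjectInfo* info = mtp.getObjectInfo(handle);
        if (!info)
            return;
        int pipeFd = mtp.readObject(handle, info->mCompressedSize);
        if (pipeFd >= 0) {
            int outFd = open(destPath, O_WRONLY | O_CREAT | O_TRUNC, 0644);
            char buf[16384];
            int n;
            while ((n = read(pipeFd, buf, sizeof(buf))) > 0)
                write(outFd, buf, n);       // error handling elided in this sketch
            close(outFd);
            close(pipeFd);
        }
        delete info;
    }
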
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
new file mode 100644
index 0000000..57f492f
--- /dev/null
+++ b/media/mtp/MtpDevice.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_H
+#define _MTP_DEVICE_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_device;
+
+namespace android {
+
+class MtpDeviceInfo;
+class MtpObjectInfo;
+class MtpStorageInfo;
+
+class MtpDevice {
+private:
+ struct usb_device* mDevice;
+ int mInterface;
+ struct usb_endpoint* mEndpointIn;
+ struct usb_endpoint* mEndpointOut;
+ struct usb_endpoint* mEndpointIntr;
+ MtpDeviceInfo* mDeviceInfo;
+ MtpPropertyList mDeviceProperties;
+
+ // a unique ID for the device
+ int mID;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // current transaction ID
+ MtpTransactionID mTransactionID;
+
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+
+ // to ensure only one MTP transaction at a time
+ Mutex mMutex;
+
+public:
+ MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr);
+ virtual ~MtpDevice();
+
+ inline int getID() const { return mID; }
+
+ void initialize();
+ void close();
+ const char* getDeviceName();
+
+ bool openSession();
+ bool closeSession();
+
+ MtpDeviceInfo* getDeviceInfo();
+ MtpStorageIDList* getStorageIDs();
+ MtpStorageInfo* getStorageInfo(MtpStorageID storageID);
+ MtpObjectHandleList* getObjectHandles(MtpStorageID storageID, MtpObjectFormat format, MtpObjectHandle parent);
+ MtpObjectInfo* getObjectInfo(MtpObjectHandle handle);
+ void* getThumbnail(MtpObjectHandle handle, int& outLength);
+ MtpObjectHandle sendObjectInfo(MtpObjectInfo* info);
+ bool sendObject(MtpObjectInfo* info, int srcFD);
+ bool deleteObject(MtpObjectHandle handle);
+ MtpObjectHandle getParent(MtpObjectHandle handle);
+ MtpObjectHandle getStorageID(MtpObjectHandle handle);
+
+ MtpProperty* getDevicePropDesc(MtpDeviceProperty code);
+
+ // returns the file descriptor for a pipe to read the object's data
+ int readObject(MtpObjectHandle handle, int objectSize);
+
+private:
+ friend class ReadObjectThread;
+
+ bool sendRequest(MtpOperationCode operation);
+ bool sendData();
+ bool readData();
+ bool writeDataHeader(MtpOperationCode operation, int dataLength);
+ MtpResponseCode readResponse();
+
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_H
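
Taken together, the public interface above supports a simple host-side browse loop once libusbhost enumeration has produced the usb_device and its bulk-in, bulk-out, and interrupt endpoints (that discovery code is assumed here and not shown):

    // Sketch only: 'dev', 'interfaceNum', 'epIn', 'epOut', 'epIntr' come from
    // libusbhost enumeration of an MTP interface (not shown).
    MtpDevice mtp(dev, interfaceNum, epIn, epOut, epIntr);
    mtp.initialize();                              // opens the session, caches device info

    MtpStorageIDList* storages = mtp.getStorageIDs();
    if (storages) {
        for (size_t i = 0; i < storages->size(); i++) {
            // List root-level objects on this storage unit (format 0 = any format).
            MtpObjectHandleList* handles =
                    mtp.getObjectHandles((*storages)[i], 0, MTP_PARENT_ROOT);
            if (!handles)
                continue;
            for (size_t j = 0; j < handles->size(); j++) {
                MtpObjectInfo* info = mtp.getObjectInfo((*handles)[j]);
                if (info) {
                    info->print();
                    delete info;
                }
            }
            delete handles;
        }
        delete storages;
    }
    mtp.close();
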
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
new file mode 100644
index 0000000..5a9322e
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDeviceInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpDeviceInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDeviceInfo::MtpDeviceInfo()
+ : mStandardVersion(0),
+ mVendorExtensionID(0),
+ mVendorExtensionVersion(0),
+ mVendorExtensionDesc(NULL),
+ mFunctionalCode(0),
+ mOperations(NULL),
+ mEvents(NULL),
+ mDeviceProperties(NULL),
+ mCaptureFormats(NULL),
+ mPlaybackFormats(NULL),
+ mManufacturer(NULL),
+ mModel(NULL),
+ mVersion(NULL),
+ mSerial(NULL)
+{
+}
+
+MtpDeviceInfo::~MtpDeviceInfo() {
+ if (mVendorExtensionDesc)
+ free(mVendorExtensionDesc);
+ delete mOperations;
+ delete mEvents;
+ delete mDeviceProperties;
+ delete mCaptureFormats;
+ delete mPlaybackFormats;
+ if (mManufacturer)
+ free(mManufacturer);
+ if (mModel)
+ free(mModel);
+ if (mVersion)
+ free(mVersion);
+ if (mSerial)
+ free(mSerial);
+}
+
+void MtpDeviceInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStandardVersion = packet.getUInt16();
+ mVendorExtensionID = packet.getUInt32();
+ mVendorExtensionVersion = packet.getUInt16();
+
+ packet.getString(string);
+ mVendorExtensionDesc = strdup((const char *)string);
+
+ mFunctionalCode = packet.getUInt16();
+ mOperations = packet.getAUInt16();
+ mEvents = packet.getAUInt16();
+ mDeviceProperties = packet.getAUInt16();
+ mCaptureFormats = packet.getAUInt16();
+ mPlaybackFormats = packet.getAUInt16();
+
+ packet.getString(string);
+ mManufacturer = strdup((const char *)string);
+ packet.getString(string);
+ mModel = strdup((const char *)string);
+ packet.getString(string);
+ mVersion = strdup((const char *)string);
+ packet.getString(string);
+ mSerial = strdup((const char *)string);
+}
+
+void MtpDeviceInfo::print() {
+ LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
+ mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
+ LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
new file mode 100644
index 0000000..2abaa10
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_INFO_H
+#define _MTP_DEVICE_INFO_H
+
+struct stat;
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpDeviceInfo {
+public:
+ uint16_t mStandardVersion;
+ uint32_t mVendorExtensionID;
+ uint16_t mVendorExtensionVersion;
+ char* mVendorExtensionDesc;
+ uint16_t mFunctionalCode;
+ UInt16List* mOperations;
+ UInt16List* mEvents;
+ MtpDevicePropertyList* mDeviceProperties;
+ MtpObjectFormatList* mCaptureFormats;
+ MtpObjectFormatList* mPlaybackFormats;
+ char* mManufacturer;
+ char* mModel;
+ char* mVersion;
+ char* mSerial;
+
+public:
+ MtpDeviceInfo();
+ virtual ~MtpDeviceInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_INFO_H
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
new file mode 100644
index 0000000..fc74542
--- /dev/null
+++ b/media/mtp/MtpEventPacket.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpEventPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+
+#ifdef MTP_DEVICE
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "MtpEventPacket.h"
+
+namespace android {
+
+MtpEventPacket::MtpEventPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpEventPacket::~MtpEventPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpEventPacket::write(int fd) {
+ struct mtp_event event;
+
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT);
+
+ event.data = mBuffer;
+ event.length = mPacketSize;
+ int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpEventPacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
new file mode 100644
index 0000000..30ae869
--- /dev/null
+++ b/media/mtp/MtpEventPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_EVENT_PACKET_H
+#define _MTP_EVENT_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpEventPacket : public MtpPacket {
+
+public:
+ MtpEventPacket();
+ virtual ~MtpEventPacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpEventCode getEventCode() const { return getContainerCode(); }
+ inline void setEventCode(MtpEventCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_EVENT_PACKET_H
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
new file mode 100644
index 0000000..ea68c3b
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpObjectInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpObjectInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle)
+ : mHandle(handle),
+ mStorageID(0),
+ mFormat(0),
+ mProtectionStatus(0),
+ mCompressedSize(0),
+ mThumbFormat(0),
+ mThumbCompressedSize(0),
+ mThumbPixWidth(0),
+ mThumbPixHeight(0),
+ mImagePixWidth(0),
+ mImagePixHeight(0),
+ mImagePixDepth(0),
+ mParent(0),
+ mAssociationType(0),
+ mAssociationDesc(0),
+ mSequenceNumber(0),
+ mName(NULL),
+ mDateCreated(0),
+ mDateModified(0),
+ mKeywords(NULL)
+{
+}
+
+MtpObjectInfo::~MtpObjectInfo() {
+ if (mName)
+ free(mName);
+ if (mKeywords)
+ free(mKeywords);
+}
+
+void MtpObjectInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+ time_t time;
+
+ mStorageID = packet.getUInt32();
+ mFormat = packet.getUInt16();
+ mProtectionStatus = packet.getUInt16();
+ mCompressedSize = packet.getUInt32();
+ mThumbFormat = packet.getUInt16();
+ mThumbCompressedSize = packet.getUInt32();
+ mThumbPixWidth = packet.getUInt32();
+ mThumbPixHeight = packet.getUInt32();
+ mImagePixWidth = packet.getUInt32();
+ mImagePixHeight = packet.getUInt32();
+ mImagePixDepth = packet.getUInt32();
+ mParent = packet.getUInt32();
+ mAssociationType = packet.getUInt16();
+ mAssociationDesc = packet.getUInt32();
+ mSequenceNumber = packet.getUInt32();
+
+ packet.getString(string);
+ mName = strdup((const char *)string);
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateCreated = time;
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateModified = time;
+
+ packet.getString(string);
+ mKeywords = strdup((const char *)string);
+}
+
+void MtpObjectInfo::print() {
+ LOGD("MtpObject Info %08X: %s\n", mHandle, mName);
+ LOGD(" mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n",
+ mStorageID, mFormat, mProtectionStatus);
+ LOGD(" mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n",
+            mCompressedSize, mThumbFormat, mThumbCompressedSize);
+ LOGD(" mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight);
+ LOGD(" mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n",
+ mImagePixWidth, mImagePixHeight, mImagePixDepth);
+ LOGD(" mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n",
+ mParent, mAssociationType, mAssociationDesc);
+ LOGD(" mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n",
+ mSequenceNumber, mDateCreated, mDateModified, mKeywords);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
new file mode 100644
index 0000000..c7a449c
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_OBJECT_INFO_H
+#define _MTP_OBJECT_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpObjectInfo {
+public:
+ MtpObjectHandle mHandle;
+ MtpStorageID mStorageID;
+ MtpObjectFormat mFormat;
+ uint16_t mProtectionStatus;
+ uint32_t mCompressedSize;
+ MtpObjectFormat mThumbFormat;
+ uint32_t mThumbCompressedSize;
+ uint32_t mThumbPixWidth;
+ uint32_t mThumbPixHeight;
+ uint32_t mImagePixWidth;
+ uint32_t mImagePixHeight;
+ uint32_t mImagePixDepth;
+ MtpObjectHandle mParent;
+ uint16_t mAssociationType;
+ uint32_t mAssociationDesc;
+ uint32_t mSequenceNumber;
+ char* mName;
+ time_t mDateCreated;
+ time_t mDateModified;
+ char* mKeywords;
+
+public:
+ MtpObjectInfo(MtpObjectHandle handle);
+ virtual ~MtpObjectInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_OBJECT_INFO_H
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
new file mode 100644
index 0000000..42bf8ba
--- /dev/null
+++ b/media/mtp/MtpPacket.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpPacket"
+
+#include "MtpDebug.h"
+#include "MtpPacket.h"
+#include "mtp.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpPacket::MtpPacket(int bufferSize)
+ : mBuffer(NULL),
+ mBufferSize(bufferSize),
+ mAllocationIncrement(bufferSize),
+ mPacketSize(0)
+{
+ mBuffer = (uint8_t *)malloc(bufferSize);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+}
+
+MtpPacket::~MtpPacket() {
+ if (mBuffer)
+ free(mBuffer);
+}
+
+void MtpPacket::reset() {
+ allocate(MTP_CONTAINER_HEADER_SIZE);
+ mPacketSize = MTP_CONTAINER_HEADER_SIZE;
+ memset(mBuffer, 0, mBufferSize);
+}
+
+void MtpPacket::allocate(int length) {
+ if (length > mBufferSize) {
+ int newLength = length + mAllocationIncrement;
+ mBuffer = (uint8_t *)realloc(mBuffer, newLength);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+ mBufferSize = newLength;
+ }
+}
+
+void MtpPacket::dump() {
+#define DUMP_BYTES_PER_ROW 16
+ char buffer[500];
+ char* bufptr = buffer;
+
+ for (int i = 0; i < mPacketSize; i++) {
+ sprintf(bufptr, "%02X ", mBuffer[i]);
+ bufptr += strlen(bufptr);
+ if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
+ LOGV("%s", buffer);
+ bufptr = buffer;
+ }
+ }
+ if (bufptr != buffer) {
+ // print last line
+ LOGV("%s", buffer);
+ }
+ LOGV("\n");
+}
+
+uint16_t MtpPacket::getUInt16(int offset) const {
+ return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+}
+
+uint32_t MtpPacket::getUInt32(int offset) const {
+ return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+ ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset];
+}
+
+void MtpPacket::putUInt16(int offset, uint16_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+}
+
+void MtpPacket::putUInt32(int offset, uint32_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+uint16_t MtpPacket::getContainerCode() const {
+ return getUInt16(MTP_CONTAINER_CODE_OFFSET);
+}
+
+void MtpPacket::setContainerCode(uint16_t code) {
+ putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+MtpTransactionID MtpPacket::getTransactionID() const {
+ return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET);
+}
+
+void MtpPacket::setTransactionID(MtpTransactionID id) {
+ putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint32_t MtpPacket::getParameter(int index) const {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpRequestPacket::getParameter", index);
+ return 0;
+ }
+ return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t));
+}
+
+void MtpPacket::setParameter(int index, uint32_t value) {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpResponsePacket::setParameter", index);
+ return;
+ }
+ int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
+ if (mPacketSize < offset + sizeof(uint32_t))
+ mPacketSize = offset + sizeof(uint32_t);
+ putUInt32(offset, value);
+}
+
+#ifdef MTP_HOST
+int MtpPacket::transfer(struct usb_endpoint *ep, void* buffer, int length) {
+ if (usb_endpoint_queue(ep, buffer, length)) {
+ LOGE("usb_endpoint_queue failed, errno: %d", errno);
+ return -1;
+ }
+ int ep_num;
+ return usb_endpoint_wait(usb_endpoint_get_device(ep), &ep_num);
+}
+#endif
+
+} // namespace android
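
The accessors above read and write multi-byte fields in the little-endian byte order required by the MTP/PTP container format. A self-contained sketch of the same packing, shown here only for reference:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Pack a 32-bit value little-endian at 'offset', as MtpPacket::putUInt32 does.
    static void putUInt32(uint8_t* buf, int offset, uint32_t value) {
        buf[offset + 0] = (uint8_t)(value & 0xFF);
        buf[offset + 1] = (uint8_t)((value >> 8) & 0xFF);
        buf[offset + 2] = (uint8_t)((value >> 16) & 0xFF);
        buf[offset + 3] = (uint8_t)((value >> 24) & 0xFF);
    }

    // Reassemble the value in the same order, as MtpPacket::getUInt32 does.
    static uint32_t getUInt32(const uint8_t* buf, int offset) {
        return (uint32_t)buf[offset] |
               ((uint32_t)buf[offset + 1] << 8) |
               ((uint32_t)buf[offset + 2] << 16) |
               ((uint32_t)buf[offset + 3] << 24);
    }

    int main() {
        uint8_t buf[8];
        std::memset(buf, 0, sizeof(buf));
        putUInt32(buf, 0, 0x12345678);
        // Bytes land as 78 56 34 12; reading them back restores the value.
        std::printf("%08X\n", getUInt32(buf, 0));
        return 0;
    }
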
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
new file mode 100644
index 0000000..9c8d6da
--- /dev/null
+++ b/media/mtp/MtpPacket.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PACKET_H
+#define _MTP_PACKET_H
+
+#include "MtpTypes.h"
+
+struct usb_endpoint;
+
+namespace android {
+
+class MtpPacket {
+
+protected:
+ uint8_t* mBuffer;
+ // current size of the buffer
+ int mBufferSize;
+ // number of bytes to add when resizing the buffer
+ int mAllocationIncrement;
+ // size of the data in the packet
+ int mPacketSize;
+
+public:
+ MtpPacket(int bufferSize);
+ virtual ~MtpPacket();
+
+ // sets packet size to the default container size and sets buffer to zero
+ virtual void reset();
+
+ void allocate(int length);
+ void dump();
+
+ uint16_t getContainerCode() const;
+ void setContainerCode(uint16_t code);
+
+ MtpTransactionID getTransactionID() const;
+ void setTransactionID(MtpTransactionID id);
+
+ uint32_t getParameter(int index) const;
+ void setParameter(int index, uint32_t value);
+
+#ifdef MTP_HOST
+ int transfer(struct usb_endpoint *ep, void* buffer, int length);
+#endif
+
+protected:
+ uint16_t getUInt16(int offset) const;
+ uint32_t getUInt32(int offset) const;
+ void putUInt16(int offset, uint16_t value);
+ void putUInt32(int offset, uint32_t value);
+};
+
+}; // namespace android
+
+#endif // _MTP_PACKET_H
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
new file mode 100644
index 0000000..bbd0237
--- /dev/null
+++ b/media/mtp/MtpProperty.cpp
@@ -0,0 +1,356 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpProperty"
+
+#include "MtpDataPacket.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpProperty::MtpProperty()
+ : mCode(0),
+ mType(0),
+ mWriteable(false),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(0),
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+}
+
+MtpProperty::MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable,
+ int defaultValue)
+ : mCode(propCode),
+ mType(type),
+ mWriteable(writeable),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(0),
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+
+ if (defaultValue) {
+ switch (type) {
+ case MTP_TYPE_INT8:
+ mDefaultValue.u.i8 = defaultValue;
+ break;
+ case MTP_TYPE_UINT8:
+ mDefaultValue.u.u8 = defaultValue;
+ break;
+ case MTP_TYPE_INT16:
+ mDefaultValue.u.i16 = defaultValue;
+ break;
+ case MTP_TYPE_UINT16:
+ mDefaultValue.u.u16 = defaultValue;
+ break;
+ case MTP_TYPE_INT32:
+ mDefaultValue.u.i32 = defaultValue;
+ break;
+ case MTP_TYPE_UINT32:
+ mDefaultValue.u.u32 = defaultValue;
+ break;
+ case MTP_TYPE_INT64:
+ mDefaultValue.u.i64 = defaultValue;
+ break;
+ case MTP_TYPE_UINT64:
+ mDefaultValue.u.u64 = defaultValue;
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::MtpProperty", type);
+ }
+ }
+}
+
+MtpProperty::~MtpProperty() {
+ if (mType == MTP_TYPE_STR) {
+ // free all strings
+ free(mDefaultValue.str);
+ free(mCurrentValue.str);
+ free(mMinimumValue.str);
+ free(mMaximumValue.str);
+ if (mDefaultArrayValues) {
+ for (int i = 0; i < mDefaultArrayLength; i++)
+ free(mDefaultArrayValues[i].str);
+ }
+ if (mCurrentArrayValues) {
+ for (int i = 0; i < mCurrentArrayLength; i++)
+ free(mCurrentArrayValues[i].str);
+ }
+ if (mEnumValues) {
+ for (int i = 0; i < mEnumLength; i++)
+ free(mEnumValues[i].str);
+ }
+ }
+ delete[] mDefaultArrayValues;
+ delete[] mCurrentArrayValues;
+ delete[] mEnumValues;
+}
+
+void MtpProperty::read(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ mCode = packet.getUInt16();
+ mType = packet.getUInt16();
+ mWriteable = (packet.getUInt8() == 1);
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
+ if (deviceProp)
+ mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ break;
+ default:
+ readValue(packet, mDefaultValue);
+ if (deviceProp)
+ readValue(packet, mCurrentValue);
+ }
+ if (!deviceProp)
+ mGroupCode = packet.getUInt32();
+ mFormFlag = packet.getUInt8();
+
+ if (mFormFlag == kFormRange) {
+ readValue(packet, mMinimumValue);
+ readValue(packet, mMaximumValue);
+ readValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ mEnumLength = packet.getUInt16();
+ mEnumValues = new MtpPropertyValue[mEnumLength];
+ for (int i = 0; i < mEnumLength; i++)
+ readValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::write(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ packet.putUInt16(mCode);
+ packet.putUInt16(mType);
+ packet.putUInt8(mWriteable ? 1 : 0);
+
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength);
+ if (deviceProp)
+ writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength);
+ break;
+ default:
+ writeValue(packet, mDefaultValue);
+ if (deviceProp)
+ writeValue(packet, mCurrentValue);
+ }
+    // mirror read(): the group code is only present for object properties,
+    // while the form flag is always present
+    if (!deviceProp)
+        packet.putUInt32(mGroupCode);
+    packet.putUInt8(mFormFlag);
+ if (mFormFlag == kFormRange) {
+ writeValue(packet, mMinimumValue);
+ writeValue(packet, mMaximumValue);
+ writeValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ packet.putUInt16(mEnumLength);
+ for (int i = 0; i < mEnumLength; i++)
+ writeValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::setDefaultValue(const uint16_t* string) {
+ free(mDefaultValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mDefaultValue.str = strdup(buffer);
+ }
+ else
+ mDefaultValue.str = NULL;
+}
+
+void MtpProperty::setCurrentValue(const uint16_t* string) {
+ free(mCurrentValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mCurrentValue.str = strdup(buffer);
+ }
+ else
+ mCurrentValue.str = NULL;
+}
+
+void MtpProperty::print() {
+ LOGV("MtpProperty %04X\n", mCode);
+ LOGV(" type %04X\n", mType);
+ LOGV(" writeable %s\n", (mWriteable ? "true" : "false"));
+}
+
+void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ value.u.i8 = packet.getInt8();
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ value.u.u8 = packet.getUInt8();
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ value.u.i16 = packet.getInt16();
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ value.u.u16 = packet.getUInt16();
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ value.u.i32 = packet.getInt32();
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ value.u.u32 = packet.getUInt32();
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ value.u.i64 = packet.getInt64();
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ value.u.u64 = packet.getUInt64();
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.getInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.getUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ packet.getString(stringBuffer);
+ value.str = strdup(stringBuffer);
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::readValue", mType);
+ }
+}
+
+void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ packet.putInt8(value.u.i8);
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ packet.putUInt8(value.u.u8);
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ packet.putInt16(value.u.i16);
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ packet.putUInt16(value.u.u16);
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ packet.putInt32(value.u.i32);
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ packet.putUInt32(value.u.u32);
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ packet.putInt64(value.u.i64);
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ packet.putUInt64(value.u.u64);
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.putInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.putUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ if (value.str)
+ packet.putString(value.str);
+ else
+ packet.putEmptyString();
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::writeValue", mType);
+ }
+}
+
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
+ length = packet.getUInt32();
+ if (length == 0)
+ return NULL;
+ MtpPropertyValue* result = new MtpPropertyValue[length];
+ for (int i = 0; i < length; i++)
+ readValue(packet, result[i]);
+ return result;
+}
+
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+ packet.putUInt32(length);
+ for (int i = 0; i < length; i++)
+ writeValue(packet, values[i]);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
new file mode 100644
index 0000000..98b465a
--- /dev/null
+++ b/media/mtp/MtpProperty.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PROPERTY_H
+#define _MTP_PROPERTY_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+struct MtpPropertyValue {
+ union {
+ int8_t i8;
+ uint8_t u8;
+ int16_t i16;
+ uint16_t u16;
+ int32_t i32;
+ uint32_t u32;
+ int64_t i64;
+ uint64_t u64;
+ int128_t i128;
+ uint128_t u128;
+ } u;
+ // string in UTF8 format
+ char* str;
+};
+
+class MtpProperty {
+public:
+ MtpPropertyCode mCode;
+ MtpDataType mType;
+ bool mWriteable;
+ MtpPropertyValue mDefaultValue;
+ MtpPropertyValue mCurrentValue;
+
+ // for array types
+ int mDefaultArrayLength;
+ MtpPropertyValue* mDefaultArrayValues;
+ int mCurrentArrayLength;
+ MtpPropertyValue* mCurrentArrayValues;
+
+ enum {
+ kFormNone = 0,
+ kFormRange = 1,
+ kFormEnum = 2,
+ };
+
+ uint32_t mGroupCode;
+ uint8_t mFormFlag;
+
+ // for range form
+ MtpPropertyValue mMinimumValue;
+ MtpPropertyValue mMaximumValue;
+ MtpPropertyValue mStepSize;
+
+ // for enum form
+ int mEnumLength;
+ MtpPropertyValue* mEnumValues;
+
+public:
+ MtpProperty();
+ MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable = false,
+ int defaultValue = 0);
+ virtual ~MtpProperty();
+
+ inline MtpPropertyCode getPropertyCode() const { return mCode; }
+
+ void read(MtpDataPacket& packet);
+ void write(MtpDataPacket& packet);
+
+ void setDefaultValue(const uint16_t* string);
+ void setCurrentValue(const uint16_t* string);
+
+ void print();
+
+ inline bool isDeviceProperty() const {
+ return ( ((mCode & 0xF000) == 0x5000)
+ || ((mCode & 0xF800) == 0xD000));
+ }
+
+private:
+ void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ void writeArrayValues(MtpDataPacket& packet,
+ MtpPropertyValue* values, int length);
+};
+
+}; // namespace android
+
+#endif // _MTP_PROPERTY_H
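
As a usage sketch (not part of the patch), a device-side responder to GetDevicePropDesc could build and serialize a range-constrained property as below; the MTP_DEVICE_PROPERTY_BATTERY_LEVEL and MTP_TYPE_UINT8 constant names are assumed to come from mtp.h:

    // Describe battery level as a read-only UINT8 device property with default 0,
    // constrained to the range 0..100 in steps of 1, then emit the dataset.
    MtpProperty* battery = new MtpProperty(MTP_DEVICE_PROPERTY_BATTERY_LEVEL,
                                           MTP_TYPE_UINT8,
                                           false /* writeable */,
                                           0     /* default */);
    battery->mFormFlag = MtpProperty::kFormRange;
    battery->mMinimumValue.u.u8 = 0;
    battery->mMaximumValue.u.u8 = 100;
    battery->mStepSize.u.u8     = 1;
    battery->mCurrentValue.u.u8 = 57;     // hypothetical current reading

    MtpDataPacket packet;
    battery->write(packet);               // serializes the DevicePropDesc dataset
    delete battery;
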
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
new file mode 100644
index 0000000..8ece580
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpRequestPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpRequestPacket.h"
+
+namespace android {
+
+MtpRequestPacket::MtpRequestPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpRequestPacket::~MtpRequestPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpRequestPacket::read(int fd) {
+ int ret = ::read(fd, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint (host mode)
+int MtpRequestPacket::write(struct usb_endpoint *ep)
+{
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND);
+ return transfer(ep, mBuffer, mPacketSize);
+}
+#endif
+
+} // namespace android
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
new file mode 100644
index 0000000..df518f2
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_REQUEST_PACKET_H
+#define _MTP_REQUEST_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpRequestPacket : public MtpPacket {
+
+public:
+ MtpRequestPacket();
+ virtual ~MtpRequestPacket();
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint
+ int write(struct usb_endpoint *ep);
+#endif
+
+ inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
+ inline void setOperationCode(MtpOperationCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_REQUEST_PACKET_H
diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp
new file mode 100644
index 0000000..3ef714e
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpResponsePacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpResponsePacket.h"
+
+namespace android {
+
+MtpResponsePacket::MtpResponsePacket()
+ : MtpPacket(512)
+{
+}
+
+MtpResponsePacket::~MtpResponsePacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpResponsePacket::write(int fd) {
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE);
+ int ret = ::write(fd, mBuffer, mPacketSize);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpResponsePacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h
new file mode 100644
index 0000000..373f8f9
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_RESPONSE_PACKET_H
+#define _MTP_RESPONSE_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpResponsePacket : public MtpPacket {
+
+public:
+ MtpResponsePacket();
+ virtual ~MtpResponsePacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpResponseCode getResponseCode() const { return getContainerCode(); }
+ inline void setResponseCode(MtpResponseCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_RESPONSE_PACKET_H
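
All of the packet classes above share the 12-byte MTP container header (length, type, code, transaction ID); the server loop that follows parses one such container per transfer. A standalone sketch of that header layout, with field meanings taken from the PTP/MTP container definition:

    #include <cstdint>
    #include <cstdio>

    // Assumed container header layout:
    //   bytes 0..3  length (uint32, little-endian)
    //   bytes 4..5  type (1=command, 2=data, 3=response, 4=event)
    //   bytes 6..7  code (operation/response/event code)
    //   bytes 8..11 transaction ID
    struct ContainerHeader {
        uint32_t length;
        uint16_t type;
        uint16_t code;
        uint32_t transactionID;
    };

    static ContainerHeader parseHeader(const uint8_t* buf) {
        ContainerHeader h;
        h.length        = buf[0] | (buf[1] << 8) | (buf[2] << 16) | ((uint32_t)buf[3] << 24);
        h.type          = buf[4] | (buf[5] << 8);
        h.code          = buf[6] | (buf[7] << 8);
        h.transactionID = buf[8] | (buf[9] << 8) | (buf[10] << 16) | ((uint32_t)buf[11] << 24);
        return h;
    }

    int main() {
        // A hypothetical OpenSession command container: length 16, type 1 (command),
        // code 0x1002, transaction 1, one 32-bit parameter (session ID 1).
        const uint8_t cmd[16] = { 16,0,0,0, 1,0, 0x02,0x10, 1,0,0,0, 1,0,0,0 };
        ContainerHeader h = parseHeader(cmd);
        std::printf("len=%u type=%u code=%04X tid=%u\n", h.length, h.type, h.code, h.transactionID);
        return 0;
    }
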
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
new file mode 100644
index 0000000..f74f395
--- /dev/null
+++ b/media/mtp/MtpServer.cpp
@@ -0,0 +1,797 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <dirent.h>
+
+#include <cutils/properties.h>
+
+#define LOG_TAG "MtpServer"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpProperty.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpStringBuffer.h"
+
+#include <linux/usb/f_mtp.h>
+
+namespace android {
+
+static const MtpOperationCode kSupportedOperationCodes[] = {
+ MTP_OPERATION_GET_DEVICE_INFO,
+ MTP_OPERATION_OPEN_SESSION,
+ MTP_OPERATION_CLOSE_SESSION,
+ MTP_OPERATION_GET_STORAGE_IDS,
+ MTP_OPERATION_GET_STORAGE_INFO,
+ MTP_OPERATION_GET_NUM_OBJECTS,
+ MTP_OPERATION_GET_OBJECT_HANDLES,
+ MTP_OPERATION_GET_OBJECT_INFO,
+ MTP_OPERATION_GET_OBJECT,
+// MTP_OPERATION_GET_THUMB,
+ MTP_OPERATION_DELETE_OBJECT,
+ MTP_OPERATION_SEND_OBJECT_INFO,
+ MTP_OPERATION_SEND_OBJECT,
+// MTP_OPERATION_INITIATE_CAPTURE,
+// MTP_OPERATION_FORMAT_STORE,
+// MTP_OPERATION_RESET_DEVICE,
+// MTP_OPERATION_SELF_TEST,
+// MTP_OPERATION_SET_OBJECT_PROTECTION,
+// MTP_OPERATION_POWER_DOWN,
+ MTP_OPERATION_GET_DEVICE_PROP_DESC,
+ MTP_OPERATION_GET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_SET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_RESET_DEVICE_PROP_VALUE,
+// MTP_OPERATION_TERMINATE_OPEN_CAPTURE,
+// MTP_OPERATION_MOVE_OBJECT,
+// MTP_OPERATION_COPY_OBJECT,
+// MTP_OPERATION_GET_PARTIAL_OBJECT,
+// MTP_OPERATION_INITIATE_OPEN_CAPTURE,
+ MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED,
+ MTP_OPERATION_GET_OBJECT_PROP_DESC,
+ MTP_OPERATION_GET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_SET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_GET_OBJECT_REFERENCES,
+ MTP_OPERATION_SET_OBJECT_REFERENCES,
+// MTP_OPERATION_SKIP,
+};
+
+static const MtpEventCode kSupportedEventCodes[] = {
+ MTP_EVENT_OBJECT_ADDED,
+ MTP_EVENT_OBJECT_REMOVED,
+};
+
+MtpServer::MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm)
+ : mFD(fd),
+ mDatabase(database),
+ mFileGroup(fileGroup),
+ mFilePermission(filePerm),
+ mDirectoryPermission(directoryPerm),
+ mSessionID(0),
+ mSessionOpen(false),
+ mSendObjectHandle(kInvalidObjectHandle),
+ mSendObjectFormat(0),
+ mSendObjectFileSize(0)
+{
+}
+
+MtpServer::~MtpServer() {
+}
+
+void MtpServer::addStorage(const char* filePath) {
+ int index = mStorages.size() + 1;
+ index |= index << 16; // set high and low part to our index
+ MtpStorage* storage = new MtpStorage(index, filePath, mDatabase);
+ addStorage(storage);
+}
+
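The storage ID computed above replicates the 16-bit physical storage number into both halves of the 32-bit MTP StorageID, so the first storage added becomes 0x00010001, the second 0x00020002, and so on. A minimal stand-alone illustration (not part of the patch):

    #include <stdint.h>
    #include <stdio.h>

    int main() {
        for (uint32_t n = 1; n <= 3; n++) {
            uint32_t index = n;
            index |= index << 16;          // same computation as MtpServer::addStorage
            printf("storage #%u -> StorageID 0x%08X\n", n, index);
        }
        return 0;                          // prints 0x00010001, 0x00020002, 0x00030003
    }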
+MtpStorage* MtpServer::getStorage(MtpStorageID id) {
+ for (int i = 0; i < mStorages.size(); i++) {
+ MtpStorage* storage = mStorages[i];
+ if (storage->getStorageID() == id)
+ return storage;
+ }
+ return NULL;
+}
+
+void MtpServer::run() {
+ int fd = mFD;
+
+ LOGV("MtpServer::run fd: %d\n", fd);
+
+ while (1) {
+ int ret = mRequest.read(fd);
+ if (ret < 0) {
+ LOGE("request read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpTransactionID transaction = mRequest.getTransactionID();
+
+ LOGV("operation: %s", MtpDebug::getOperationCodeName(operation));
+ mRequest.dump();
+
+ // FIXME need to generalize this (a table-driven alternative is sketched after this function)
+ bool dataIn = (operation == MTP_OPERATION_SEND_OBJECT_INFO
+ || operation == MTP_OPERATION_SET_OBJECT_REFERENCES
+ || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE
+ || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE);
+ if (dataIn) {
+ int ret = mData.read(fd);
+ if (ret < 0) {
+ LOGE("data read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ LOGV("received data:");
+ mData.dump();
+ } else {
+ mData.reset();
+ }
+
+ if (handleRequest()) {
+ if (!dataIn && mData.hasData()) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(transaction);
+ LOGV("sending data:");
+ mData.dump();
+ ret = mData.write(fd);
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ }
+
+ mResponse.setTransactionID(transaction);
+ LOGV("sending response %04X", mResponse.getResponseCode());
+ ret = mResponse.write(fd);
+ mResponse.dump();
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ } else {
+ LOGV("skipping response\n");
+ }
+ }
+
+ if (mSessionOpen)
+ mDatabase->sessionEnded();
+}
+
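The dataIn test in run() above is marked with a FIXME; one possible generalization, sketched here only as a suggestion and not part of the patch, is a table of the operations that carry a host-to-device data phase. It assumes the MTP_OPERATION_* constants from mtp.h, as already used in this file:

    #include <stddef.h>
    #include <stdint.h>
    #include "mtp.h"   // MTP_OPERATION_* codes

    static const uint16_t kDataInOperations[] = {
        MTP_OPERATION_SEND_OBJECT_INFO,
        MTP_OPERATION_SET_OBJECT_REFERENCES,
        MTP_OPERATION_SET_OBJECT_PROP_VALUE,
        MTP_OPERATION_SET_DEVICE_PROP_VALUE,
    };

    static bool operationHasDataIn(uint16_t operation) {
        for (size_t i = 0; i < sizeof(kDataInOperations) / sizeof(kDataInOperations[0]); i++) {
            if (kDataInOperations[i] == operation)
                return true;
        }
        return false;
    }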
+void MtpServer::sendObjectAdded(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectAdded %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_ADDED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
+void MtpServer::sendObjectRemoved(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectRemoved %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_REMOVED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
+bool MtpServer::handleRequest() {
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpResponseCode response;
+
+ mResponse.reset();
+
+ if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
+ // FIXME - need to delete mSendObjectHandle from the database
+ LOGE("expected SendObject after SendObjectInfo");
+ mSendObjectHandle = kInvalidObjectHandle;
+ }
+
+ switch (operation) {
+ case MTP_OPERATION_GET_DEVICE_INFO:
+ response = doGetDeviceInfo();
+ break;
+ case MTP_OPERATION_OPEN_SESSION:
+ response = doOpenSession();
+ break;
+ case MTP_OPERATION_CLOSE_SESSION:
+ response = doCloseSession();
+ break;
+ case MTP_OPERATION_GET_STORAGE_IDS:
+ response = doGetStorageIDs();
+ break;
+ case MTP_OPERATION_GET_STORAGE_INFO:
+ response = doGetStorageInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED:
+ response = doGetObjectPropsSupported();
+ break;
+ case MTP_OPERATION_GET_OBJECT_HANDLES:
+ response = doGetObjectHandles();
+ break;
+ case MTP_OPERATION_GET_NUM_OBJECTS:
+ response = doGetNumObjects();
+ break;
+ case MTP_OPERATION_GET_OBJECT_REFERENCES:
+ response = doGetObjectReferences();
+ break;
+ case MTP_OPERATION_SET_OBJECT_REFERENCES:
+ response = doSetObjectReferences();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_VALUE:
+ response = doGetObjectPropValue();
+ break;
+ case MTP_OPERATION_SET_OBJECT_PROP_VALUE:
+ response = doSetObjectPropValue();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_VALUE:
+ response = doGetDevicePropValue();
+ break;
+ case MTP_OPERATION_SET_DEVICE_PROP_VALUE:
+ response = doSetDevicePropValue();
+ break;
+ case MTP_OPERATION_RESET_DEVICE_PROP_VALUE:
+ response = doResetDevicePropValue();
+ break;
+ case MTP_OPERATION_GET_OBJECT_INFO:
+ response = doGetObjectInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT:
+ response = doGetObject();
+ break;
+ case MTP_OPERATION_SEND_OBJECT_INFO:
+ response = doSendObjectInfo();
+ break;
+ case MTP_OPERATION_SEND_OBJECT:
+ response = doSendObject();
+ break;
+ case MTP_OPERATION_DELETE_OBJECT:
+ response = doDeleteObject();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_DESC:
+ response = doGetObjectPropDesc();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_DESC:
+ response = doGetDevicePropDesc();
+ break;
+ default:
+ LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+ response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
+ break;
+ }
+
+ if (response == MTP_RESPONSE_TRANSACTION_CANCELLED)
+ return false;
+ mResponse.setResponseCode(response);
+ return true;
+}
+
+MtpResponseCode MtpServer::doGetDeviceInfo() {
+ MtpStringBuffer string;
+ char prop_value[PROPERTY_VALUE_MAX];
+
+ MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats();
+ MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats();
+ MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties();
+
+ // fill in device info
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ mData.putUInt32(6); // MTP Vendor Extension ID
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ string.set("microsoft.com: 1.0;");
+ mData.putString(string); // MTP Extensions
+ mData.putUInt16(0); //Functional Mode
+ mData.putAUInt16(kSupportedOperationCodes,
+ sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported
+ mData.putAUInt16(kSupportedEventCodes,
+ sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported
+ mData.putAUInt16(deviceProperties); // Device Properties Supported
+ mData.putAUInt16(captureFormats); // Capture Formats
+ mData.putAUInt16(playbackFormats); // Playback Formats
+ // FIXME
+ string.set("Google, Inc.");
+ mData.putString(string); // Manufacturer
+
+ property_get("ro.product.model", prop_value, "MTP Device");
+ string.set(prop_value);
+ mData.putString(string); // Model
+ string.set("1.0");
+ mData.putString(string); // Device Version
+
+ property_get("ro.serialno", prop_value, "????????");
+ string.set(prop_value);
+ mData.putString(string); // Serial Number
+
+ delete playbackFormats;
+ delete captureFormats;
+ delete deviceProperties;
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doOpenSession() {
+ if (mSessionOpen) {
+ mResponse.setParameter(1, mSessionID);
+ return MTP_RESPONSE_SESSION_ALREADY_OPEN;
+ }
+ mSessionID = mRequest.getParameter(1);
+ mSessionOpen = true;
+
+ mDatabase->sessionStarted();
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doCloseSession() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ mSessionID = 0;
+ mSessionOpen = false;
+ mDatabase->sessionEnded();
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageIDs() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+
+ int count = mStorages.size();
+ mData.putUInt32(count);
+ for (int i = 0; i < count; i++)
+ mData.putUInt32(mStorages[i]->getStorageID());
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageInfo() {
+ MtpStringBuffer string;
+
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID id = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(id);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ mData.putUInt16(storage->getType());
+ mData.putUInt16(storage->getFileSystemType());
+ mData.putUInt16(storage->getAccessCapability());
+ mData.putUInt64(storage->getMaxCapacity());
+ mData.putUInt64(storage->getFreeSpace());
+ mData.putUInt32(1024*1024*1024); // Free Space in Objects
+ string.set(storage->getDescription());
+ mData.putString(string);
+ mData.putEmptyString(); // Volume Identifier
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropsSupported() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpObjectFormat format = mRequest.getParameter(1);
+ MtpDevicePropertyList* properties = mDatabase->getSupportedObjectProperties(format);
+ mData.putAUInt16(properties);
+ delete properties;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectHandles() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+ mData.putAUInt32(handles);
+ delete handles;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetNumObjects() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ int count = mDatabase->getNumObjects(storageID, format, parent);
+ if (count >= 0) {
+ mResponse.setParameter(1, count);
+ return MTP_RESPONSE_OK;
+ } else {
+ mResponse.setParameter(1, 0);
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+
+ // FIXME - check for invalid object handle
+ MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle);
+ if (handles) {
+ mData.putAUInt32(handles);
+ delete handles;
+ } else {
+ mData.putEmptyArray();
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+ MtpObjectHandleList* references = mData.getAUInt32();
+ MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
+ delete references;
+ return result;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("GetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->getObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doSetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("SetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->setObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doGetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("GetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->getDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doSetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("SetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->setDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doResetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("ResetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->resetDeviceProperty(property);
+}
+
+MtpResponseCode MtpServer::doGetObjectInfo() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ return mDatabase->getObjectInfo(handle, mData);
+}
+
+MtpResponseCode MtpServer::doGetObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpString pathBuf;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+
+ const char* filePath = (const char *)pathBuf;
+ mtp_file_range mfr;
+ mfr.fd = open(filePath, O_RDONLY);
+ if (mfr.fd < 0) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ mfr.offset = 0;
+ mfr.length = fileLength;
+
+ // send data header
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE);
+
+ // then transfer the file
+ int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+ if (ret < 0) {
+ if (errno == ECANCELED)
+ return MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObjectInfo() {
+ MtpString path;
+ MtpStorageID storageID = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(storageID);
+ MtpObjectHandle parent = mRequest.getParameter(2);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ // special case the root
+ if (parent == MTP_PARENT_ROOT) {
+ path = storage->getPath();
+ parent = 0;
+ } else {
+ int64_t dummy;
+ int result = mDatabase->getObjectFilePath(parent, path, dummy);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+ }
+
+ // read only the fields we need
+ mData.getUInt32(); // storage ID
+ MtpObjectFormat format = mData.getUInt16();
+ mData.getUInt16(); // protection status
+ mSendObjectFileSize = mData.getUInt32();
+ mData.getUInt16(); // thumb format
+ mData.getUInt32(); // thumb compressed size
+ mData.getUInt32(); // thumb pix width
+ mData.getUInt32(); // thumb pix height
+ mData.getUInt32(); // image pix width
+ mData.getUInt32(); // image pix height
+ mData.getUInt32(); // image bit depth
+ mData.getUInt32(); // parent
+ uint16_t associationType = mData.getUInt16();
+ uint32_t associationDesc = mData.getUInt32(); // association desc
+ mData.getUInt32(); // sequence number
+ MtpStringBuffer name, created, modified;
+ mData.getString(name); // file name
+ mData.getString(created); // date created
+ mData.getString(modified); // date modified
+ // keywords follow
+
+ time_t modifiedTime;
+ if (!parseDateTime(modified, modifiedTime))
+ modifiedTime = 0;
+
+ if (path[path.size() - 1] != '/')
+ path += "/";
+ path += (const char *)name;
+
+ MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
+ format, parent, storageID, mSendObjectFileSize, modifiedTime);
+ if (handle == kInvalidObjectHandle) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ mode_t mask = umask(0);
+ int ret = mkdir((const char *)path, mDirectoryPermission);
+ umask(mask);
+ if (ret && errno != EEXIST)
+ return MTP_RESPONSE_GENERAL_ERROR;
+ chown((const char *)path, getuid(), mFileGroup);
+ } else {
+ mSendObjectFilePath = path;
+ // save the handle for the SendObject call, which should follow
+ mSendObjectHandle = handle;
+ mSendObjectFormat = format;
+ }
+
+ mResponse.setParameter(1, storageID);
+ mResponse.setParameter(2, parent);
+ mResponse.setParameter(3, handle);
+
+ return MTP_RESPONSE_OK;
+}
+
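For readability, the fixed-size portion of the ObjectInfo dataset that doSendObjectInfo() consumes field by field above is summarized below. The struct is documentation only; the dataset is read sequentially from the packet, and only the format, the compressed size, and the file name and modification date strings are actually used by the handler.

    #include <stdint.h>

    // Field order of the MTP ObjectInfo dataset, matching the reads above.
    struct ObjectInfoDatasetFields {
        uint32_t storageID;              // ignored; the request parameter is used instead
        uint16_t objectFormat;           // kept
        uint16_t protectionStatus;       // ignored
        uint32_t objectCompressedSize;   // kept as mSendObjectFileSize
        uint16_t thumbFormat;            // ignored
        uint32_t thumbCompressedSize;    // ignored
        uint32_t thumbPixWidth;          // ignored
        uint32_t thumbPixHeight;         // ignored
        uint32_t imagePixWidth;          // ignored
        uint32_t imagePixHeight;         // ignored
        uint32_t imageBitDepth;          // ignored
        uint32_t parentObject;           // ignored; the request parameter is used instead
        uint16_t associationType;
        uint32_t associationDesc;
        uint32_t sequenceNumber;         // ignored
        // Followed by three MTP strings (Filename, DateCreated, DateModified)
        // and finally Keywords, which the handler does not read.
    };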
+MtpResponseCode MtpServer::doSendObject() {
+ MtpResponseCode result = MTP_RESPONSE_OK;
+ mode_t mask;
+ int ret;
+
+ if (mSendObjectHandle == kInvalidObjectHandle) {
+ LOGE("Expected SendObjectInfo before SendObject");
+ result = MTP_RESPONSE_NO_VALID_OBJECT_INFO;
+ goto done;
+ }
+
+ // read the header
+ ret = mData.readDataHeader(mFD);
+ // FIXME - check for errors here.
+
+ // reset so we don't attempt to send this back
+ mData.reset();
+
+ mtp_file_range mfr;
+ mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission);
+ if (mfr.fd < 0) {
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ goto done;
+ }
+ fchown(mfr.fd, getuid(), mFileGroup);
+ // set permissions
+ mask = umask(0);
+ fchmod(mfr.fd, mFilePermission);
+ umask(mask);
+
+ mfr.offset = 0;
+ mfr.length = mSendObjectFileSize;
+
+ // transfer the file
+ ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+
+ LOGV("MTP_RECEIVE_FILE returned %d", ret);
+
+ if (ret < 0) {
+ unlink(mSendObjectFilePath);
+ if (errno == ECANCELED)
+ result = MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ }
+
+done:
+ mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
+ result == MTP_RESPONSE_OK);
+ mSendObjectHandle = kInvalidObjectHandle;
+ mSendObjectFormat = 0;
+ return result;
+}
+
+static void deleteRecursive(const char* path) {
+ char pathbuf[PATH_MAX];
+ int pathLength = strlen(path);
+ if (pathLength >= sizeof(pathbuf) - 1) {
+ LOGE("path too long: %s\n", path);
+ return;
+ }
+ strcpy(pathbuf, path);
+ if (pathbuf[pathLength - 1] != '/') {
+ pathbuf[pathLength++] = '/';
+ }
+ char* fileSpot = pathbuf + pathLength;
+ int pathRemaining = sizeof(pathbuf) - pathLength - 1;
+
+ DIR* dir = opendir(path);
+ if (!dir) {
+ LOGE("opendir %s failed: %s", path, strerror(errno));
+ return;
+ }
+
+ struct dirent* entry;
+ while ((entry = readdir(dir))) {
+ const char* name = entry->d_name;
+
+ // ignore "." and ".."
+ if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+ continue;
+ }
+
+ int nameLength = strlen(name);
+ if (nameLength > pathRemaining) {
+ LOGE("path %s/%s too long\n", path, name);
+ continue;
+ }
+ strcpy(fileSpot, name);
+
+ if (entry->d_type == DT_DIR) {
+ deleteRecursive(pathbuf);
+ rmdir(pathbuf);
+ } else {
+ unlink(pathbuf);
+ }
+ }
+ closedir(dir);
+}
+
+static void deletePath(const char* path) {
+ struct stat statbuf;
+ if (stat(path, &statbuf) == 0) {
+ if (S_ISDIR(statbuf.st_mode)) {
+ deleteRecursive(path);
+ rmdir(path);
+ } else {
+ unlink(path);
+ }
+ } else {
+ LOGE("deletePath stat failed for %s: %s", path, strerror(errno));
+ }
+}
+
+MtpResponseCode MtpServer::doDeleteObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ // FIXME - support deleting all objects if handle is 0xFFFFFFFF
+ // FIXME - implement deleting objects by format
+
+ MtpString filePath;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, filePath, fileLength);
+ if (result == MTP_RESPONSE_OK) {
+ LOGV("deleting %s", (const char *)filePath);
+ deletePath((const char *)filePath);
+ return mDatabase->deleteFile(handle);
+ } else {
+ return result;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ MtpObjectProperty propCode = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ LOGD("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
+ MtpDebug::getFormatCodeName(format));
+ MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format);
+ if (!property)
+ return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ MtpDeviceProperty propCode = mRequest.getParameter(1);
+ LOGD("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
+ MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
+ if (!property)
+ return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+} // namespace android
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
new file mode 100644
index 0000000..68a6564
--- /dev/null
+++ b/media/mtp/MtpServer.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_SERVER_H
+#define _MTP_SERVER_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpEventPacket.h"
+#include "mtp.h"
+
+#include "MtpUtils.h"
+
+namespace android {
+
+class MtpDatabase;
+class MtpStorage;
+
+class MtpServer {
+
+private:
+ // file descriptor for MTP kernel driver
+ int mFD;
+
+ MtpDatabase* mDatabase;
+
+ // group to own new files and folders
+ int mFileGroup;
+ // permissions for new files and directories
+ int mFilePermission;
+ int mDirectoryPermission;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // true if we have an open session and mSessionID is valid
+ bool mSessionOpen;
+
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+ MtpEventPacket mEvent;
+
+ MtpStorageList mStorages;
+
+ // handle for new object, set by SendObjectInfo and used by SendObject
+ MtpObjectHandle mSendObjectHandle;
+ MtpObjectFormat mSendObjectFormat;
+ MtpString mSendObjectFilePath;
+ size_t mSendObjectFileSize;
+
+public:
+ MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm);
+ virtual ~MtpServer();
+
+ void addStorage(const char* filePath);
+ inline void addStorage(MtpStorage* storage) { mStorages.push(storage); }
+ MtpStorage* getStorage(MtpStorageID id);
+ void run();
+
+ void sendObjectAdded(MtpObjectHandle handle);
+ void sendObjectRemoved(MtpObjectHandle handle);
+
+private:
+ bool handleRequest();
+
+ MtpResponseCode doGetDeviceInfo();
+ MtpResponseCode doOpenSession();
+ MtpResponseCode doCloseSession();
+ MtpResponseCode doGetStorageIDs();
+ MtpResponseCode doGetStorageInfo();
+ MtpResponseCode doGetObjectPropsSupported();
+ MtpResponseCode doGetObjectHandles();
+ MtpResponseCode doGetNumObjects();
+ MtpResponseCode doGetObjectReferences();
+ MtpResponseCode doSetObjectReferences();
+ MtpResponseCode doGetObjectPropValue();
+ MtpResponseCode doSetObjectPropValue();
+ MtpResponseCode doGetDevicePropValue();
+ MtpResponseCode doSetDevicePropValue();
+ MtpResponseCode doResetDevicePropValue();
+ MtpResponseCode doGetObjectInfo();
+ MtpResponseCode doGetObject();
+ MtpResponseCode doSendObjectInfo();
+ MtpResponseCode doSendObject();
+ MtpResponseCode doDeleteObject();
+ MtpResponseCode doGetObjectPropDesc();
+ MtpResponseCode doGetDevicePropDesc();
+};
+
+}; // namespace android
+
+#endif // _MTP_SERVER_H
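A minimal usage sketch of the class declared above, assuming an MtpDatabase implementation and an MTP gadget character device; the device node path, storage path, group ID, and permission values here are placeholders and not part of the patch:

    #include <fcntl.h>
    #include <unistd.h>

    #include "MtpDatabase.h"
    #include "MtpServer.h"

    void runMtpSession(MtpDatabase* database) {
        int fd = open("/dev/mtp_usb", O_RDWR);   // hypothetical device node
        if (fd < 0)
            return;
        // group, file permission, and directory permission are example values
        MtpServer server(fd, database, 1023 /* media_rw */, 0664, 0775);
        server.addStorage("/sdcard");            // hypothetical storage root
        server.run();                            // blocks until the session ends
        close(fd);
    }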
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
new file mode 100644
index 0000000..eccf186
--- /dev/null
+++ b/media/mtp/MtpStorage.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorage"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpStorage.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/statfs.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+#include <string.h>
+#include <stdio.h>
+#include <limits.h>
+
+namespace android {
+
+MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db)
+ : mStorageID(id),
+ mFilePath(filePath),
+ mDatabase(db),
+ mMaxCapacity(0)
+{
+ LOGD("MtpStorage id: %d path: %s\n", id, filePath);
+}
+
+MtpStorage::~MtpStorage() {
+}
+
+int MtpStorage::getType() const {
+ return MTP_STORAGE_FIXED_RAM;
+}
+
+int MtpStorage::getFileSystemType() const {
+ return MTP_STORAGE_FILESYSTEM_HIERARCHICAL;
+}
+
+int MtpStorage::getAccessCapability() const {
+ return MTP_STORAGE_READ_WRITE;
+}
+
+uint64_t MtpStorage::getMaxCapacity() {
+ if (mMaxCapacity == 0) {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize;
+ }
+ return mMaxCapacity;
+}
+
+uint64_t MtpStorage::getFreeSpace() {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ return (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
+}
+
+const char* MtpStorage::getDescription() const {
+ return "Device Storage";
+}
+
+} // namespace android
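A note on the arithmetic above: on builds where the statfs fields are only 32 bits wide, multiplying f_blocks by f_bsize without widening wraps around for volumes larger than 4 GB, which is why both operands are cast to uint64_t first. A small stand-alone illustration (not part of the patch):

    #include <stdint.h>
    #include <stdio.h>

    int main() {
        // Example: a 32 GiB volume with 4 KiB blocks.
        uint32_t f_blocks = 8 * 1024 * 1024;
        uint32_t f_bsize = 4096;

        uint32_t narrow = f_blocks * f_bsize;                     // wraps modulo 2^32 -> 0
        uint64_t wide = (uint64_t)f_blocks * (uint64_t)f_bsize;   // 34359738368 bytes

        printf("32-bit product: %u\n", narrow);
        printf("64-bit product: %llu\n", (unsigned long long)wide);
        return 0;
    }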
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
new file mode 100644
index 0000000..b13b926
--- /dev/null
+++ b/media/mtp/MtpStorage.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_H
+#define _MTP_STORAGE_H
+
+#include "mtp.h"
+
+namespace android {
+
+class MtpDatabase;
+
+class MtpStorage {
+
+private:
+ MtpStorageID mStorageID;
+ const char* mFilePath;
+ MtpDatabase* mDatabase;
+ uint64_t mMaxCapacity;
+
+public:
+ MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db);
+ virtual ~MtpStorage();
+
+ inline MtpStorageID getStorageID() const { return mStorageID; }
+ int getType() const;
+ int getFileSystemType() const;
+ int getAccessCapability() const;
+ uint64_t getMaxCapacity();
+ uint64_t getFreeSpace();
+ const char* getDescription() const;
+ inline const char* getPath() const { return mFilePath; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_H
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
new file mode 100644
index 0000000..ca64ac0
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorageInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStorageInfo::MtpStorageInfo(MtpStorageID id)
+ : mStorageID(id),
+ mStorageType(0),
+ mFileSystemType(0),
+ mAccessCapability(0),
+ mMaxCapacity(0),
+ mFreeSpaceBytes(0),
+ mFreeSpaceObjects(0),
+ mStorageDescription(NULL),
+ mVolumeIdentifier(NULL)
+{
+}
+
+MtpStorageInfo::~MtpStorageInfo() {
+ if (mStorageDescription)
+ free(mStorageDescription);
+ if (mVolumeIdentifier)
+ free(mVolumeIdentifier);
+}
+
+void MtpStorageInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStorageType = packet.getUInt16();
+ mFileSystemType = packet.getUInt16();
+ mAccessCapability = packet.getUInt16();
+ mMaxCapacity = packet.getUInt64();
+ mFreeSpaceBytes = packet.getUInt64();
+ mFreeSpaceObjects = packet.getUInt32();
+
+ packet.getString(string);
+ mStorageDescription = strdup((const char *)string);
+ packet.getString(string);
+ mVolumeIdentifier = strdup((const char *)string);
+}
+
+void MtpStorageInfo::print() {
+ LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
+ mStorageID, mStorageType, mFileSystemType, mAccessCapability);
+ LOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n",
+ mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
+ LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
+ mStorageDescription, mVolumeIdentifier);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
new file mode 100644
index 0000000..2cb626e
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_INFO_H
+#define _MTP_STORAGE_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpStorageInfo {
+public:
+ MtpStorageID mStorageID;
+ uint16_t mStorageType;
+ uint16_t mFileSystemType;
+ uint16_t mAccessCapability;
+ uint64_t mMaxCapacity;
+ uint64_t mFreeSpaceBytes;
+ uint32_t mFreeSpaceObjects;
+ char* mStorageDescription;
+ char* mVolumeIdentifier;
+
+public:
+ MtpStorageInfo(MtpStorageID id);
+ virtual ~MtpStorageInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_INFO_H
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
new file mode 100644
index 0000000..fe8cf04
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStringBuffer"
+
+#include <string.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStringBuffer::MtpStringBuffer()
+ : mCharCount(0),
+ mByteCount(1)
+{
+ mBuffer[0] = 0;
+}
+
+MtpStringBuffer::MtpStringBuffer(const char* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
+ : mCharCount(src.mCharCount),
+ mByteCount(src.mByteCount)
+{
+ memcpy(mBuffer, src.mBuffer, mByteCount);
+}
+
+
+MtpStringBuffer::~MtpStringBuffer() {
+}
+
+void MtpStringBuffer::set(const char* src) {
+ int length = strlen(src);
+ if (length >= sizeof(mBuffer))
+ length = sizeof(mBuffer) - 1;
+ memcpy(mBuffer, src, length);
+
+ // count the characters
+ int count = 0;
+ char ch;
+ while ((ch = *src++) != 0) {
+ if ((ch & 0x80) == 0) {
+ // single byte character
+ } else if ((ch & 0xE0) == 0xC0) {
+ // two byte character
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ } else if ((ch & 0xF0) == 0xE0) {
+ // 3 byte char
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ if (! *src++) {
+ // last character was truncated, so ignore last two bytes
+ length -= 2;
+ break;
+ }
+ }
+ count++;
+ }
+
+ mByteCount = length + 1;
+ mBuffer[length] = 0;
+ mCharCount = count;
+}
+
+void MtpStringBuffer::set(const uint16_t* src) {
+ int count = 0;
+ uint16_t ch;
+ uint8_t* dest = mBuffer;
+
+ while ((ch = *src++) != 0 && count < 255) {
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ count++;
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
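The conversion above covers code points up to U+FFFF with one-, two-, and three-byte UTF-8 sequences (surrogate pairs are not handled as single code points). A stand-alone sketch, not part of the patch, that mirrors the encoding and prints a few worked values:

    #include <stdint.h>
    #include <stdio.h>

    // Same branch structure as MtpStringBuffer::set(const uint16_t*).
    static int encodeUtf8(uint16_t ch, uint8_t out[3]) {
        if (ch >= 0x0800) {
            out[0] = 0xE0 | (ch >> 12);
            out[1] = 0x80 | ((ch >> 6) & 0x3F);
            out[2] = 0x80 | (ch & 0x3F);
            return 3;
        } else if (ch >= 0x80) {
            out[0] = 0xC0 | (ch >> 6);
            out[1] = 0x80 | (ch & 0x3F);
            return 2;
        }
        out[0] = (uint8_t)ch;
        return 1;
    }

    int main() {
        const uint16_t samples[] = { 0x0041, 0x00E9, 0x20AC };  // 'A', e-acute, euro sign
        for (int i = 0; i < 3; i++) {
            uint8_t bytes[3];
            int n = encodeUtf8(samples[i], bytes);
            printf("U+%04X ->", samples[i]);
            for (int j = 0; j < n; j++)
                printf(" %02X", bytes[j]);
            printf("\n");   // 41;  C3 A9;  E2 82 AC
        }
        return 0;
    }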
+void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ int count = packet->getUInt8();
+ uint8_t* dest = mBuffer;
+ for (int i = 0; i < count; i++) {
+ uint16_t ch = packet->getUInt16();
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
+ int count = mCharCount;
+ const uint8_t* src = mBuffer;
+ packet->putUInt8(count > 0 ? count + 1 : 0);
+
+ // expand utf8 to 16 bit chars
+ for (int i = 0; i < count; i++) {
+ uint16_t ch;
+ uint16_t ch1 = *src++;
+ if ((ch1 & 0x80) == 0) {
+ // single byte character
+ ch = ch1;
+ } else if ((ch1 & 0xE0) == 0xC0) {
+ // two byte character
+ uint16_t ch2 = *src++;
+ ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
+ } else {
+ // three byte character
+ uint16_t ch2 = *src++;
+ uint16_t ch3 = *src++;
+ ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+ }
+ packet->putUInt16(ch);
+ }
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ packet->putUInt16(0);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
new file mode 100644
index 0000000..cbc8307
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STRING_BUFFER_H
+#define _MTP_STRING_BUFFER_H
+
+#include <stdint.h>
+
+namespace android {
+
+class MtpDataPacket;
+
+// Represents a utf8 string, with a maximum of 255 characters
+class MtpStringBuffer {
+
+private:
+ // mBuffer contains string in UTF8 format
+ // maximum 3 bytes/character, with 1 extra for zero termination
+ uint8_t mBuffer[255 * 3 + 1];
+ int mCharCount;
+ int mByteCount;
+
+public:
+ MtpStringBuffer();
+ MtpStringBuffer(const char* src);
+ MtpStringBuffer(const uint16_t* src);
+ MtpStringBuffer(const MtpStringBuffer& src);
+ virtual ~MtpStringBuffer();
+
+ void set(const char* src);
+ void set(const uint16_t* src);
+
+ void readFromPacket(MtpDataPacket* packet);
+ void writeToPacket(MtpDataPacket* packet) const;
+
+ inline int getCharCount() const { return mCharCount; }
+ inline int getByteCount() const { return mByteCount; }
+
+ inline operator const char*() const { return (const char *)mBuffer; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STRING_BUFFER_H
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
new file mode 100644
index 0000000..720c854
--- /dev/null
+++ b/media/mtp/MtpTypes.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_TYPES_H
+#define _MTP_TYPES_H
+
+#include <stdint.h>
+#include "utils/String8.h"
+#include "utils/Vector.h"
+
+namespace android {
+
+typedef int32_t int128_t[4];
+typedef uint32_t uint128_t[4];
+
+typedef uint16_t MtpOperationCode;
+typedef uint16_t MtpResponseCode;
+typedef uint16_t MtpEventCode;
+typedef uint32_t MtpSessionID;
+typedef uint32_t MtpStorageID;
+typedef uint32_t MtpTransactionID;
+typedef uint16_t MtpPropertyCode;
+typedef uint16_t MtpDataType;
+typedef uint16_t MtpObjectFormat;
+typedef MtpPropertyCode MtpDeviceProperty;
+typedef MtpPropertyCode MtpObjectProperty;
+
+// object handles are unique across all storage but only within a single session.
+// object handles cannot be reused after an object is deleted.
+// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes.
+typedef uint32_t MtpObjectHandle;
+
+// Special values
+#define MTP_PARENT_ROOT 0xFFFFFFFF // parent is root of the storage
+#define kInvalidObjectHandle 0xFFFFFFFF
+
+class MtpStorage;
+class MtpDevice;
+class MtpProperty;
+
+typedef Vector<MtpStorage *> MtpStorageList;
+typedef Vector<MtpDevice*> MtpDeviceList;
+typedef Vector<MtpProperty*> MtpPropertyList;
+
+typedef Vector<uint8_t> UInt8List;
+typedef Vector<uint16_t> UInt16List;
+typedef Vector<uint32_t> UInt32List;
+typedef Vector<uint64_t> UInt64List;
+typedef Vector<int8_t> Int8List;
+typedef Vector<int16_t> Int16List;
+typedef Vector<int32_t> Int32List;
+typedef Vector<int64_t> Int64List;
+
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt16List MtpDevicePropertyList;
+typedef UInt16List MtpObjectFormatList;
+typedef UInt32List MtpObjectHandleList;
+typedef UInt32List MtpStorageIDList;
+
+typedef String8 MtpString;
+
+}; // namespace android
+
+#endif // _MTP_TYPES_H
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
new file mode 100644
index 0000000..ab01ef5
--- /dev/null
+++ b/media/mtp/MtpUtils.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpUtils"
+
+#include <stdio.h>
+#include <time.h>
+
+#include <cutils/tztime.h>
+#include "MtpUtils.h"
+
+namespace android {
+
+/*
+DateTime strings follow a compatible subset of the definition found in ISO 8601, and
+take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this
+representation, YYYY shall be replaced by the year, MM replaced by the month (01-12),
+DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date,
+hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the
+second (00-59). The ".s" is optional, and represents tenths of a second.
+*/
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds) {
+ int year, month, day, hour, minute, second;
+ struct tm tm;
+
+ if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d",
+ &year, &month, &day, &hour, &minute, &second) != 6)
+ return false;
+ const char* tail = dateTime + 15;
+ // skip optional tenth of second
+ if (tail[0] == '.' && tail[1])
+ tail += 2;
+ //FIXME - support +/-hhmm
+ bool useUTC = (tail[0] == 'Z');
+
+ // hack to compute timezone
+ time_t dummy = time(NULL);
+ localtime_r(&dummy, &tm);
+
+ tm.tm_sec = second;
+ tm.tm_min = minute;
+ tm.tm_hour = hour;
+ tm.tm_mday = day;
+ tm.tm_mon = month - 1; // struct tm months are 0-11
+ tm.tm_year = year - 1900;
+ tm.tm_wday = 0;
+ tm.tm_isdst = -1;
+ if (useUTC)
+ outSeconds = mktime(&tm);
+ else
+ outSeconds = mktime_tz(&tm, tm.tm_zone);
+
+ return true;
+}
+
+void formatDateTime(time_t seconds, char* buffer, int bufferLength) {
+ struct tm tm;
+
+ localtime_r(&seconds, &tm);
+ snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d",
+ tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
+}
+
+} // namespace android
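As a concrete example of the format described above, 2010-08-24 16:05:30 is rendered as "20100824T160530", optionally followed by tenths of a second and a 'Z' suffix for UTC. The portable sketch below formats the current time the same way using only the standard library; it is an illustration, not part of the patch:

    #include <stdio.h>
    #include <time.h>

    int main() {
        char buffer[32];
        time_t now = time(NULL);
        struct tm tm;
        localtime_r(&now, &tm);
        // strftime handles the 1-based month and the zero padding for us
        strftime(buffer, sizeof(buffer), "%Y%m%dT%H%M%S", &tm);
        printf("MTP DateTime: %s\n", buffer);
        return 0;
    }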
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
new file mode 100644
index 0000000..61f9055
--- /dev/null
+++ b/media/mtp/MtpUtils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_UTILS_H
+#define _MTP_UTILS_H
+
+#include <stdint.h>
+
+namespace android {
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds);
+void formatDateTime(time_t seconds, char* buffer, int bufferLength);
+
+}; // namespace android
+
+#endif // _MTP_UTILS_H
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
new file mode 100644
index 0000000..b7afa66
--- /dev/null
+++ b/media/mtp/mtp.h
@@ -0,0 +1,475 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_H
+#define _MTP_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#define MTP_STANDARD_VERSION 100
+
+// Container Types
+#define MTP_CONTAINER_TYPE_UNDEFINED 0
+#define MTP_CONTAINER_TYPE_COMMAND 1
+#define MTP_CONTAINER_TYPE_DATA 2
+#define MTP_CONTAINER_TYPE_RESPONSE 3
+#define MTP_CONTAINER_TYPE_EVENT 4
+
+// Container Offsets
+#define MTP_CONTAINER_LENGTH_OFFSET 0
+#define MTP_CONTAINER_TYPE_OFFSET 4
+#define MTP_CONTAINER_CODE_OFFSET 6
+#define MTP_CONTAINER_TRANSACTION_ID_OFFSET 8
+#define MTP_CONTAINER_PARAMETER_OFFSET 12
+#define MTP_CONTAINER_HEADER_SIZE 12
+
+// MTP Types
+#define MTP_TYPE_UNDEFINED 0x0000 // Undefined
+#define MTP_TYPE_INT8 0x0001 // Signed 8-bit integer
+#define MTP_TYPE_UINT8 0x0002 // Unsigned 8-bit integer
+#define MTP_TYPE_INT16 0x0003 // Signed 16-bit integer
+#define MTP_TYPE_UINT16 0x0004 // Unsigned 16-bit integer
+#define MTP_TYPE_INT32 0x0005 // Signed 32-bit integer
+#define MTP_TYPE_UINT32 0x0006 // Unsigned 32-bit integer
+#define MTP_TYPE_INT64 0x0007 // Signed 64-bit integer
+#define MTP_TYPE_UINT64 0x0008 // Unsigned 64-bit integer
+#define MTP_TYPE_INT128 0x0009 // Signed 128-bit integer
+#define MTP_TYPE_UINT128 0x000A // Unsigned 128-bit integer
+#define MTP_TYPE_AINT8 0x4001 // Array of signed 8-bit integers
+#define MTP_TYPE_AUINT8 0x4002 // Array of unsigned 8-bit integers
+#define MTP_TYPE_AINT16 0x4003 // Array of signed 16-bit integers
+#define MTP_TYPE_AUINT16 0x4004 // Array of unsigned 16-bit integers
+#define MTP_TYPE_AINT32 0x4005 // Array of signed 32-bit integers
+#define MTP_TYPE_AUINT32 0x4006 // Array of unsigned 32-bit integers
+#define MTP_TYPE_AINT64 0x4007 // Array of signed 64-bit integers
+#define MTP_TYPE_AUINT64 0x4008 // Array of unsigned 64-bit integers
+#define MTP_TYPE_AINT128 0x4009 // Array of signed 128-bit integers
+#define MTP_TYPE_AUINT128 0x400A // Array of unsigned 128-bit integers
+#define MTP_TYPE_STR 0xFFFF // Variable-length Unicode string
+
+// MTP Format Codes
+#define MTP_FORMAT_UNDEFINED 0x3000 // Undefined object
+#define MTP_FORMAT_ASSOCIATION 0x3001 // Association (for example, a folder)
+#define MTP_FORMAT_SCRIPT 0x3002 // Device model-specific script
+#define MTP_FORMAT_EXECUTABLE 0x3003 // Device model-specific binary executable
+#define MTP_FORMAT_TEXT 0x3004 // Text file
+#define MTP_FORMAT_HTML 0x3005 // Hypertext Markup Language file (text)
+#define MTP_FORMAT_DPOF 0x3006 // Digital Print Order Format file (text)
+#define MTP_FORMAT_AIFF 0x3007 // Audio clip
+#define MTP_FORMAT_WAV 0x3008 // Audio clip
+#define MTP_FORMAT_MP3 0x3009 // Audio clip
+#define MTP_FORMAT_AVI 0x300A // Video clip
+#define MTP_FORMAT_MPEG 0x300B // Video clip
+#define MTP_FORMAT_ASF 0x300C // Microsoft Advanced Streaming Format (video)
+#define MTP_FORMAT_DEFINED 0x3800 // Unknown image object
+#define MTP_FORMAT_EXIF_JPEG 0x3801 // Exchangeable File Format, JEIDA standard
+#define MTP_FORMAT_TIFF_EP 0x3802 // Tag Image File Format for Electronic Photography
+#define MTP_FORMAT_FLASHPIX 0x3803 // Structured Storage Image Format
+#define MTP_FORMAT_BMP 0x3804 // Microsoft Windows Bitmap file
+#define MTP_FORMAT_CIFF 0x3805 // Canon Camera Image File Format
+#define MTP_FORMAT_GIF 0x3807 // Graphics Interchange Format
+#define MTP_FORMAT_JFIF 0x3808 // JPEG File Interchange Format
+#define MTP_FORMAT_CD 0x3809 // PhotoCD Image Pac
+#define MTP_FORMAT_PICT 0x380A // Quickdraw Image Format
+#define MTP_FORMAT_PNG 0x380B // Portable Network Graphics
+#define MTP_FORMAT_TIFF 0x380D // Tag Image File Format
+#define MTP_FORMAT_TIFF_IT 0x380E // Tag Image File Format for Information Technology (graphic arts)
+#define MTP_FORMAT_JP2 0x380F // JPEG2000 Baseline File Format
+#define MTP_FORMAT_JPX 0x3810 // JPEG2000 Extended File Format
+#define MTP_FORMAT_UNDEFINED_FIRMWARE 0xB802
+#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT 0xB881
+#define MTP_FORMAT_UNDEFINED_AUDIO 0xB900
+#define MTP_FORMAT_WMA 0xB901
+#define MTP_FORMAT_OGG 0xB902
+#define MTP_FORMAT_AAC 0xB903
+#define MTP_FORMAT_AUDIBLE 0xB904
+#define MTP_FORMAT_FLAC 0xB906
+#define MTP_FORMAT_UNDEFINED_VIDEO 0xB980
+#define MTP_FORMAT_WMV 0xB981
+#define MTP_FORMAT_MP4_CONTAINER 0xB982 // ISO 14496-1
+#define MTP_FORMAT_MP2 0xB983
+#define MTP_FORMAT_3GP_CONTAINER 0xB984 // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - "Transparent end-to-end packet switched streaming service, 3GPP file format").
+#define MTP_FORMAT_UNDEFINED_COLLECTION 0xBA00
+#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM 0xBA01
+#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM 0xBA02
+#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM 0xBA03
+#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM 0xBA04
+#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST 0xBA05
+#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP 0xBA06
+#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER 0xBA07
+#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION 0xBA08
+#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST 0xBA09
+#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST 0xBA0A
+#define MTP_FORMAT_ABSTRACT_MEDIACAST 0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content
+#define MTP_FORMAT_WPL_PLAYLIST 0xBA10
+#define MTP_FORMAT_M3U_PLAYLIST 0xBA11
+#define MTP_FORMAT_MPL_PLAYLIST 0xBA12
+#define MTP_FORMAT_ASX_PLAYLIST 0xBA13
+#define MTP_FORMAT_PLS_PLAYLIST 0xBA14
+#define MTP_FORMAT_UNDEFINED_DOCUMENT 0xBA80
+#define MTP_FORMAT_ABSTRACT_DOCUMENT 0xBA81
+#define MTP_FORMAT_XML_DOCUMENT 0xBA82
+#define MTP_FORMAT_MS_WORD_DOCUMENT 0xBA83
+#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT 0xBA84
+#define MTP_FORMAT_MS_EXCEL_SPREADSHEET 0xBA85
+#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION 0xBA86
+#define MTP_FORMAT_UNDEFINED_MESSAGE 0xBB00
+#define MTP_FORMAT_ABSTRACT_MESSSAGE 0xBB01
+#define MTP_FORMAT_UNDEFINED_CONTACT 0xBB80
+#define MTP_FORMAT_ABSTRACT_CONTACT 0xBB81
+#define MTP_FORMAT_VCARD_2 0xBB82
+
+// MTP Object Property Codes
+#define MTP_PROPERTY_STORAGE_ID 0xDC01
+#define MTP_PROPERTY_OBJECT_FORMAT 0xDC02
+#define MTP_PROPERTY_PROTECTION_STATUS 0xDC03
+#define MTP_PROPERTY_OBJECT_SIZE 0xDC04
+#define MTP_PROPERTY_ASSOCIATION_TYPE 0xDC05
+#define MTP_PROPERTY_ASSOCIATION_DESC 0xDC06
+#define MTP_PROPERTY_OBJECT_FILE_NAME 0xDC07
+#define MTP_PROPERTY_DATE_CREATED 0xDC08
+#define MTP_PROPERTY_DATE_MODIFIED 0xDC09
+#define MTP_PROPERTY_KEYWORDS 0xDC0A
+#define MTP_PROPERTY_PARENT_OBJECT 0xDC0B
+#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS 0xDC0C
+#define MTP_PROPERTY_HIDDEN 0xDC0D
+#define MTP_PROPERTY_SYSTEM_OBJECT 0xDC0E
+#define MTP_PROPERTY_PERSISTENT_UID 0xDC41
+#define MTP_PROPERTY_SYNC_ID 0xDC42
+#define MTP_PROPERTY_PROPERTY_BAG 0xDC43
+#define MTP_PROPERTY_NAME 0xDC44
+#define MTP_PROPERTY_CREATED_BY 0xDC45
+#define MTP_PROPERTY_ARTIST 0xDC46
+#define MTP_PROPERTY_DATE_AUTHORED 0xDC47
+#define MTP_PROPERTY_DESCRIPTION 0xDC48
+#define MTP_PROPERTY_URL_REFERENCE 0xDC49
+#define MTP_PROPERTY_LANGUAGE_LOCALE 0xDC4A
+#define MTP_PROPERTY_COPYRIGHT_INFORMATION 0xDC4B
+#define MTP_PROPERTY_SOURCE 0xDC4C
+#define MTP_PROPERTY_ORIGIN_LOCATION 0xDC4D
+#define MTP_PROPERTY_DATE_ADDED 0xDC4E
+#define MTP_PROPERTY_NON_CONSUMABLE 0xDC4F
+#define MTP_PROPERTY_CORRUPT_UNPLAYABLE 0xDC50
+#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER 0xDC51
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT 0xDC81
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE 0xDC82
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT 0xDC83
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH 0xDC84
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION 0xDC85
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA 0xDC86
+#define MTP_PROPERTY_WIDTH 0xDC87
+#define MTP_PROPERTY_HEIGHT 0xDC88
+#define MTP_PROPERTY_DURATION 0xDC89
+#define MTP_PROPERTY_RATING 0xDC8A
+#define MTP_PROPERTY_TRACK 0xDC8B
+#define MTP_PROPERTY_GENRE 0xDC8C
+#define MTP_PROPERTY_CREDITS 0xDC8D
+#define MTP_PROPERTY_LYRICS 0xDC8E
+#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID 0xDC8F
+#define MTP_PROPERTY_PRODUCED_BY 0xDC90
+#define MTP_PROPERTY_USE_COUNT 0xDC91
+#define MTP_PROPERTY_SKIP_COUNT 0xDC92
+#define MTP_PROPERTY_LAST_ACCESSED 0xDC93
+#define MTP_PROPERTY_PARENTAL_RATING 0xDC94
+#define MTP_PROPERTY_META_GENRE 0xDC95
+#define MTP_PROPERTY_COMPOSER 0xDC96
+#define MTP_PROPERTY_EFFECTIVE_RATING 0xDC97
+#define MTP_PROPERTY_SUBTITLE 0xDC98
+#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE 0xDC99
+#define MTP_PROPERTY_ALBUM_NAME 0xDC9A
+#define MTP_PROPERTY_ALBUM_ARTIST 0xDC9B
+#define MTP_PROPERTY_MOOD 0xDC9C
+#define MTP_PROPERTY_DRM_STATUS 0xDC9D
+#define MTP_PROPERTY_SUB_DESCRIPTION 0xDC9E
+#define MTP_PROPERTY_IS_CROPPED 0xDCD1
+#define MTP_PROPERTY_IS_COLOUR_CORRECTED 0xDCD2
+#define MTP_PROPERTY_IMAGE_BIT_DEPTH 0xDCD3
+#define MTP_PROPERTY_F_NUMBER 0xDCD4
+#define MTP_PROPERTY_EXPOSURE_TIME 0xDCD5
+#define MTP_PROPERTY_EXPOSURE_INDEX 0xDCD6
+#define MTP_PROPERTY_TOTAL_BITRATE 0xDE91
+#define MTP_PROPERTY_BITRATE_TYPE 0xDE92
+#define MTP_PROPERTY_SAMPLE_RATE 0xDE93
+#define MTP_PROPERTY_NUMBER_OF_CHANNELS 0xDE94
+#define MTP_PROPERTY_AUDIO_BIT_DEPTH 0xDE95
+#define MTP_PROPERTY_SCAN_TYPE 0xDE97
+#define MTP_PROPERTY_AUDIO_WAVE_CODEC 0xDE99
+#define MTP_PROPERTY_AUDIO_BITRATE 0xDE9A
+#define MTP_PROPERTY_VIDEO_FOURCC_CODEC 0xDE9B
+#define MTP_PROPERTY_VIDEO_BITRATE 0xDE9C
+#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS 0xDE9D
+#define MTP_PROPERTY_KEYFRAME_DISTANCE 0xDE9E
+#define MTP_PROPERTY_BUFFER_SIZE 0xDE9F
+#define MTP_PROPERTY_ENCODING_QUALITY 0xDEA0
+#define MTP_PROPERTY_ENCODING_PROFILE 0xDEA1
+#define MTP_PROPERTY_DISPLAY_NAME 0xDCE0
+#define MTP_PROPERTY_BODY_TEXT 0xDCE1
+#define MTP_PROPERTY_SUBJECT 0xDCE2
+#define MTP_PROPERTY_PRIORITY 0xDCE3
+#define MTP_PROPERTY_GIVEN_NAME 0xDD00
+#define MTP_PROPERTY_MIDDLE_NAMES 0xDD01
+#define MTP_PROPERTY_FAMILY_NAME 0xDD02
+#define MTP_PROPERTY_PREFIX 0xDD03
+#define MTP_PROPERTY_SUFFIX 0xDD04
+#define MTP_PROPERTY_PHONETIC_GIVEN_NAME 0xDD05
+#define MTP_PROPERTY_PHONETIC_FAMILY_NAME 0xDD06
+#define MTP_PROPERTY_EMAIL_PRIMARY 0xDD07
+#define MTP_PROPERTY_EMAIL_PERSONAL_1 0xDD08
+#define MTP_PROPERTY_EMAIL_PERSONAL_2 0xDD09
+#define MTP_PROPERTY_EMAIL_BUSINESS_1 0xDD0A
+#define MTP_PROPERTY_EMAIL_BUSINESS_2 0xDD0B
+#define MTP_PROPERTY_EMAIL_OTHERS 0xDD0C
+#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY 0xDD0D
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL 0xDD0E
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2 0xDD0F
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS 0xDD10
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2 0xDD11
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE 0xDD12
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2 0xDD13
+#define MTP_PROPERTY_FAX_NUMBER_PRIMARY 0xDD14
+#define MTP_PROPERTY_FAX_NUMBER_PERSONAL 0xDD15
+#define MTP_PROPERTY_FAX_NUMBER_BUSINESS 0xDD16
+#define MTP_PROPERTY_PAGER_NUMBER 0xDD17
+#define MTP_PROPERTY_PHONE_NUMBER_OTHERS 0xDD18
+#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS 0xDD19
+#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS 0xDD1A
+#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS 0xDD1B
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS 0xDD1C
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2 0xDD1D
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3 0xDD1E
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL 0xDD1F
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1 0xDD20
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2 0xDD21
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY 0xDD22
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION 0xDD23
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE 0xDD24
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY 0xDD25
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL 0xDD26
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1 0xDD27
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2 0xDD28
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY 0xDD29
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION 0xDD2A
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE 0xDD2B
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY 0xDD2C
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL 0xDD2D
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1 0xDD2E
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2 0xDD2F
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY 0xDD30
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION 0xDD31
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE 0xDD32
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY 0xDD33
+#define MTP_PROPERTY_ORGANIZATION_NAME 0xDD34
+#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME 0xDD35
+#define MTP_PROPERTY_ROLE 0xDD36
+#define MTP_PROPERTY_BIRTHDATE 0xDD37
+#define MTP_PROPERTY_MESSAGE_TO 0xDD40
+#define MTP_PROPERTY_MESSAGE_CC 0xDD41
+#define MTP_PROPERTY_MESSAGE_BCC 0xDD42
+#define MTP_PROPERTY_MESSAGE_READ 0xDD43
+#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME 0xDD44
+#define MTP_PROPERTY_MESSAGE_SENDER 0xDD45
+#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME 0xDD50
+#define MTP_PROPERTY_ACTIVITY_END_TIME 0xDD51
+#define MTP_PROPERTY_ACTIVITY_LOCATION 0xDD52
+#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES 0xDD54
+#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES 0xDD55
+#define MTP_PROPERTY_ACTIVITY_RESOURCES 0xDD56
+#define MTP_PROPERTY_ACTIVITY_ACCEPTED 0xDD57
+#define MTP_PROPERTY_ACTIVITY_TENTATIVE 0xDD58
+#define MTP_PROPERTY_ACTIVITY_DECLINED 0xDD59
+#define MTP_PROPERTY_ACTIVITY_REMINDER_TIME 0xDD5A
+#define MTP_PROPERTY_ACTIVITY_OWNER 0xDD5B
+#define MTP_PROPERTY_ACTIVITY_STATUS 0xDD5C
+#define MTP_PROPERTY_OWNER 0xDD5D
+#define MTP_PROPERTY_EDITOR 0xDD5E
+#define MTP_PROPERTY_WEBMASTER 0xDD5F
+#define MTP_PROPERTY_URL_SOURCE 0xDD60
+#define MTP_PROPERTY_URL_DESTINATION 0xDD61
+#define MTP_PROPERTY_TIME_BOOKMARK 0xDD62
+#define MTP_PROPERTY_OBJECT_BOOKMARK 0xDD63
+#define MTP_PROPERTY_BYTE_BOOKMARK 0xDD64
+#define MTP_PROPERTY_LAST_BUILD_DATE 0xDD70
+#define MTP_PROPERTY_TIME_TO_LIVE 0xDD71
+#define MTP_PROPERTY_MEDIA_GUID 0xDD72
+
+// MTP Device Property Codes
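+// (0x50xx codes come from the base PTP specification; 0xD4xx codes are MTP extensions)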
+#define MTP_DEVICE_PROPERTY_UNDEFINED 0x5000
+#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL 0x5001
+#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE 0x5002
+#define MTP_DEVICE_PROPERTY_IMAGE_SIZE 0x5003
+#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING 0x5004
+#define MTP_DEVICE_PROPERTY_WHITE_BALANCE 0x5005
+#define MTP_DEVICE_PROPERTY_RGB_GAIN 0x5006
+#define MTP_DEVICE_PROPERTY_F_NUMBER 0x5007
+#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH 0x5008
+#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE 0x5009
+#define MTP_DEVICE_PROPERTY_FOCUS_MODE 0x500A
+#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE 0x500B
+#define MTP_DEVICE_PROPERTY_FLASH_MODE 0x500C
+#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME 0x500D
+#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE 0x500E
+#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX 0x500F
+#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION 0x5010
+#define MTP_DEVICE_PROPERTY_DATETIME 0x5011
+#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY 0x5012
+#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE 0x5013
+#define MTP_DEVICE_PROPERTY_CONTRAST 0x5014
+#define MTP_DEVICE_PROPERTY_SHARPNESS 0x5015
+#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM 0x5016
+#define MTP_DEVICE_PROPERTY_EFFECT_MODE 0x5017
+#define MTP_DEVICE_PROPERTY_BURST_NUMBER 0x5018
+#define MTP_DEVICE_PROPERTY_BURST_INTERVAL 0x5019
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER 0x501A
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL 0x501B
+#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE 0x501C
+#define MTP_DEVICE_PROPERTY_UPLOAD_URL 0x501D
+#define MTP_DEVICE_PROPERTY_ARTIST 0x501E
+#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO 0x501F
+#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER 0xD401
+#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME 0xD402
+#define MTP_DEVICE_PROPERTY_VOLUME 0xD403
+#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED 0xD404
+#define MTP_DEVICE_PROPERTY_DEVICE_ICON 0xD405
+#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE 0xD410
+#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT 0xD411
+#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX 0xD412
+#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO 0xD406
+#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE 0xD407
+
+// MTP Operation Codes
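+// (0x10xx codes come from the base PTP specification; 0x98xx codes are MTP extensions)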
+#define MTP_OPERATION_GET_DEVICE_INFO 0x1001
+#define MTP_OPERATION_OPEN_SESSION 0x1002
+#define MTP_OPERATION_CLOSE_SESSION 0x1003
+#define MTP_OPERATION_GET_STORAGE_IDS 0x1004
+#define MTP_OPERATION_GET_STORAGE_INFO 0x1005
+#define MTP_OPERATION_GET_NUM_OBJECTS 0x1006
+#define MTP_OPERATION_GET_OBJECT_HANDLES 0x1007
+#define MTP_OPERATION_GET_OBJECT_INFO 0x1008
+#define MTP_OPERATION_GET_OBJECT 0x1009
+#define MTP_OPERATION_GET_THUMB 0x100A
+#define MTP_OPERATION_DELETE_OBJECT 0x100B
+#define MTP_OPERATION_SEND_OBJECT_INFO 0x100C
+#define MTP_OPERATION_SEND_OBJECT 0x100D
+#define MTP_OPERATION_INITIATE_CAPTURE 0x100E
+#define MTP_OPERATION_FORMAT_STORE 0x100F
+#define MTP_OPERATION_RESET_DEVICE 0x1010
+#define MTP_OPERATION_SELF_TEST 0x1011
+#define MTP_OPERATION_SET_OBJECT_PROTECTION 0x1012
+#define MTP_OPERATION_POWER_DOWN 0x1013
+#define MTP_OPERATION_GET_DEVICE_PROP_DESC 0x1014
+#define MTP_OPERATION_GET_DEVICE_PROP_VALUE 0x1015
+#define MTP_OPERATION_SET_DEVICE_PROP_VALUE 0x1016
+#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE 0x1017
+#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE 0x1018
+#define MTP_OPERATION_MOVE_OBJECT 0x1019
+#define MTP_OPERATION_COPY_OBJECT 0x101A
+#define MTP_OPERATION_GET_PARTIAL_OBJECT 0x101B
+#define MTP_OPERATION_INITIATE_OPEN_CAPTURE 0x101C
+#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED 0x9801
+#define MTP_OPERATION_GET_OBJECT_PROP_DESC 0x9802
+#define MTP_OPERATION_GET_OBJECT_PROP_VALUE 0x9803
+#define MTP_OPERATION_SET_OBJECT_PROP_VALUE 0x9804
+#define MTP_OPERATION_GET_OBJECT_REFERENCES 0x9810
+#define MTP_OPERATION_SET_OBJECT_REFERENCES 0x9811
+#define MTP_OPERATION_SKIP 0x9820
+
+// MTP Response Codes
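+// (0x20xx codes come from the base PTP specification; 0xA8xx codes are MTP extensions)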
+#define MTP_RESPONSE_UNDEFINED 0x2000
+#define MTP_RESPONSE_OK 0x2001
+#define MTP_RESPONSE_GENERAL_ERROR 0x2002
+#define MTP_RESPONSE_SESSION_NOT_OPEN 0x2003
+#define MTP_RESPONSE_INVALID_TRANSACTION_ID 0x2004
+#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005
+#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED 0x2006
+#define MTP_RESPONSE_INCOMPLETE_TRANSFER 0x2007
+#define MTP_RESPONSE_INVALID_STORAGE_ID 0x2008
+#define MTP_RESPONSE_INVALID_OBJECT_HANDLE 0x2009
+#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED 0x200A
+#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE 0x200B
+#define MTP_RESPONSE_STORAGE_FULL 0x200C
+#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED 0x200D
+#define MTP_RESPONSE_STORE_READ_ONLY 0x200E
+#define MTP_RESPONSE_ACCESS_DENIED 0x200F
+#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT 0x2010
+#define MTP_RESPONSE_SELF_TEST_FAILED 0x2011
+#define MTP_RESPONSE_PARTIAL_DELETION 0x2012
+#define MTP_RESPONSE_STORE_NOT_AVAILABLE 0x2013
+#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED 0x2014
+#define MTP_RESPONSE_NO_VALID_OBJECT_INFO 0x2015
+#define MTP_RESPONSE_INVALID_CODE_FORMAT 0x2016
+#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE 0x2017
+#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED 0x2018
+#define MTP_RESPONSE_DEVICE_BUSY 0x2019
+#define MTP_RESPONSE_INVALID_PARENT_OBJECT 0x201A
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT 0x201B
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE 0x201C
+#define MTP_RESPONSE_INVALID_PARAMETER 0x201D
+#define MTP_RESPONSE_SESSION_ALREADY_OPEN 0x201E
+#define MTP_RESPONSE_TRANSACTION_CANCELLED 0x201F
+#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED 0x2020
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE 0xA801
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT 0xA802
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE 0xA803
+#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE 0xA804
+#define MTP_RESPONSE_GROUP_NOT_SUPPORTED 0xA805
+#define MTP_RESPONSE_INVALID_DATASET 0xA806
+#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED 0xA807
+#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED 0xA808
+#define MTP_RESPONSE_OBJECT_TOO_LARGE 0xA809
+#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED 0xA80A
+
+// MTP Event Codes
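+// (0x40xx codes come from the base PTP specification; 0xC8xx codes are MTP extensions)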
+#define MTP_EVENT_UNDEFINED 0x4000
+#define MTP_EVENT_CANCEL_TRANSACTION 0x4001
+#define MTP_EVENT_OBJECT_ADDED 0x4002
+#define MTP_EVENT_OBJECT_REMOVED 0x4003
+#define MTP_EVENT_STORE_ADDED 0x4004
+#define MTP_EVENT_STORE_REMOVED 0x4005
+#define MTP_EVENT_DEVICE_PROP_CHANGED 0x4006
+#define MTP_EVENT_OBJECT_INFO_CHANGED 0x4007
+#define MTP_EVENT_DEVICE_INFO_CHANGED 0x4008
+#define MTP_EVENT_REQUEST_OBJECT_TRANSFER 0x4009
+#define MTP_EVENT_STORE_FULL 0x400A
+#define MTP_EVENT_DEVICE_RESET 0x400B
+#define MTP_EVENT_STORAGE_INFO_CHANGED 0x400C
+#define MTP_EVENT_CAPTURE_COMPLETE 0x400D
+#define MTP_EVENT_UNREPORTED_STATUS 0x400E
+#define MTP_EVENT_OBJECT_PROP_CHANGED 0xC801
+#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED 0xC802
+#define MTP_EVENT_OBJECT_REFERENCES_CHANGED 0xC803
+
+// Storage Type
+#define MTP_STORAGE_FIXED_ROM 0x0001
+#define MTP_STORAGE_REMOVABLE_ROM 0x0002
+#define MTP_STORAGE_FIXED_RAM 0x0003
+#define MTP_STORAGE_REMOVABLE_RAM 0x0004
+
+// Storage File System
+#define MTP_STORAGE_FILESYSTEM_FLAT 0x0001
+#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL 0x0002
+#define MTP_STORAGE_FILESYSTEM_DCF 0x0003
+
+// Storage Access Capability
+#define MTP_STORAGE_READ_WRITE 0x0000
+#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE 0x0001
+#define MTP_STORAGE_READ_ONLY_WITH_DELETE 0x0002
+
+// Association Type
+#define MTP_ASSOCIATION_TYPE_UNDEFINED 0x0000
+#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER 0x0001
+
+#endif // _MTP_H
diff --git a/media/tests/CameraBrowser/Android.mk b/media/tests/CameraBrowser/Android.mk
new file mode 100644
index 0000000..1d81129
--- /dev/null
+++ b/media/tests/CameraBrowser/Android.mk
@@ -0,0 +1,10 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := $(call all-subdir-java-files)
+
+LOCAL_PACKAGE_NAME := CameraBrowser
+
+include $(BUILD_PACKAGE)
diff --git a/media/tests/CameraBrowser/AndroidManifest.xml b/media/tests/CameraBrowser/AndroidManifest.xml
new file mode 100644
index 0000000..eae0b01
--- /dev/null
+++ b/media/tests/CameraBrowser/AndroidManifest.xml
@@ -0,0 +1,28 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.android.camerabrowser">
+
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+
+ <application android:label="@string/app_label">
+ <activity android:name="CameraBrowser" android:label="Camera Browser">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+
+ <activity android:name="StorageBrowser" />
+ <activity android:name="ObjectBrowser" />
+ <activity android:name="ObjectViewer" />
+
+ <receiver android:name="UsbReceiver">
+ <intent-filter>
+ <action android:name="android.hardware.action.USB_CAMERA_ATTACHED" />
+ <data android:scheme="content"/>
+ </intent-filter>
+ </receiver>
+
+ </application>
+
+
+</manifest>
diff --git a/media/tests/CameraBrowser/res/layout/object_info.xml b/media/tests/CameraBrowser/res/layout/object_info.xml
new file mode 100644
index 0000000..ac210b9
--- /dev/null
+++ b/media/tests/CameraBrowser/res/layout/object_info.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+/*
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+-->
+<TableLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/object_info"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content">
+ <TableRow>
+ <TextView android:id="@+id/name_label"
+ android:text="@string/name_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/name"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/size_label"
+ android:text="@string/size_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/size"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/format_label"
+ android:text="@string/format_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/format"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/thumb_width_label"
+ android:text="@string/thumb_width_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/thumb_width"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/thumb_height_label"
+ android:text="@string/thumb_height_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/thumb_height"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/thumb_size_label"
+ android:text="@string/thumb_size_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/thumb_size"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/width_label"
+ android:text="@string/width_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/width"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/height_label"
+ android:text="@string/height_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/height"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/depth_label"
+ android:text="@string/depth_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/depth"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/sequence_label"
+ android:text="@string/sequence_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/sequence"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/created_label"
+ android:text="@string/created_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/created"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/modified_label"
+ android:text="@string/modified_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/modified"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <TextView android:id="@+id/keywords_label"
+ android:text="@string/keywords_label"
+ android:layout_gravity="right"
+ android:layout_marginRight="8dip"
+ style="@style/info_label" />
+
+ <TextView android:id="@+id/keywords"
+ style="@style/info_value" />
+ </TableRow>
+ <TableRow>
+ <ImageView android:id="@+id/thumbnail" />
+ </TableRow>
+</TableLayout>
+
diff --git a/media/tests/CameraBrowser/res/layout/object_list.xml b/media/tests/CameraBrowser/res/layout/object_list.xml
new file mode 100644
index 0000000..30c18bb
--- /dev/null
+++ b/media/tests/CameraBrowser/res/layout/object_list.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2010 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="horizontal"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent">
+
+ <ImageView android:id="@+id/thumbnail"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content" />
+
+ <TextView android:id="@+id/name"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textAppearance="?android:attr/textAppearanceLarge"
+ android:gravity="center_vertical"
+ android:paddingLeft="6dip"
+ android:minHeight="?android:attr/listPreferredItemHeight" />
+</LinearLayout>
diff --git a/media/tests/CameraBrowser/res/menu/object_menu.xml b/media/tests/CameraBrowser/res/menu/object_menu.xml
new file mode 100644
index 0000000..a0865f0
--- /dev/null
+++ b/media/tests/CameraBrowser/res/menu/object_menu.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2010 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<menu xmlns:android="http://schemas.android.com/apk/res/android">
+
+ <item android:id="@+id/save"
+ android:title="@string/save_item" />
+ <item android:id="@+id/delete"
+ android:title="@string/delete_item" />
+</menu>
diff --git a/media/tests/CameraBrowser/res/values/strings.xml b/media/tests/CameraBrowser/res/values/strings.xml
new file mode 100644
index 0000000..cd477f1
--- /dev/null
+++ b/media/tests/CameraBrowser/res/values/strings.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2010 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <string name="app_label">Camera Browser</string>
+
+ <!-- for object info -->
+ <string name="name_label">Name: </string>
+ <string name="size_label">Size: </string>
+ <string name="format_label">Format: </string>
+ <string name="thumb_width_label">Thumb Width: </string>
+ <string name="thumb_height_label">Thumb Height: </string>
+ <string name="thumb_size_label">Thumb Size: </string>
+ <string name="width_label">Width: </string>
+ <string name="height_label">Height: </string>
+ <string name="depth_label">Depth: </string>
+ <string name="sequence_label">Sequence: </string>
+ <string name="created_label">Created: </string>
+ <string name="modified_label">Modified: </string>
+ <string name="keywords_label">Keywords: </string>
+
+ <!-- menu items -->
+ <string name="save_item">Save</string>
+ <string name="delete_item">Delete</string>
+
+ <!-- toasts -->
+ <string name="object_saved_message">Object saved</string>
+ <string name="save_failed_message">Could not save object</string>
+ <string name="object_deleted_message">Object deleted</string>
+ <string name="delete_failed_message">Could not delete object</string>
+
+</resources>
diff --git a/media/tests/CameraBrowser/res/values/styles.xml b/media/tests/CameraBrowser/res/values/styles.xml
new file mode 100644
index 0000000..c869985
--- /dev/null
+++ b/media/tests/CameraBrowser/res/values/styles.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2008 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+ <style name="info_label">
+ <item name="android:layout_height">wrap_content</item>
+ <item name="android:layout_width">wrap_content</item>
+ <item name="android:textSize">14sp</item>
+ <item name="android:textStyle">bold</item>
+ <item name="android:paddingRight">4dip</item>
+ </style>
+
+ <style name="info_value">
+ <item name="android:layout_height">wrap_content</item>
+ <item name="android:layout_width">wrap_content</item>
+ <item name="android:textSize">14sp</item>
+ <item name="android:textStyle">normal</item>
+ </style>
+
+</resources>
+
diff --git a/media/tests/CameraBrowser/src/com/android/camerabrowser/CameraBrowser.java b/media/tests/CameraBrowser/src/com/android/camerabrowser/CameraBrowser.java
new file mode 100644
index 0000000..c04873a
--- /dev/null
+++ b/media/tests/CameraBrowser/src/com/android/camerabrowser/CameraBrowser.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camerabrowser;
+
+import android.app.ListActivity;
+import android.content.ContentResolver;
+import android.content.Intent;
+import android.database.ContentObserver;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.provider.Mtp;
+import android.util.Log;
+import android.view.View;
+import android.widget.ListAdapter;
+import android.widget.ListView;
+import android.widget.SimpleCursorAdapter;
+
+/**
+ * A list view displaying all connected cameras.
+ */
+public class CameraBrowser extends ListActivity {
+
+ private static final String TAG = "CameraBrowser";
+
+ private ListAdapter mAdapter;
+ private ContentResolver mResolver;
+ private DeviceObserver mDeviceObserver;
+ private Cursor mCursor;
+
+ private class DeviceObserver extends ContentObserver {
+ DeviceObserver(Handler handler) {
+ super(handler);
+ }
+
+ @Override
+ public void onChange(boolean selfChange) {
+ Log.d(TAG, "DeviceObserver.onChange");
+ if (mCursor != null) {
+ mCursor.requery();
+ }
+ }
+ }
+
+ private static final String[] DEVICE_COLUMNS =
+ new String[] { Mtp.Device._ID, Mtp.Device.MANUFACTURER, Mtp.Device.MODEL };
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ mResolver = getContentResolver();
+ mDeviceObserver = new DeviceObserver(new Handler());
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ Cursor c = getContentResolver().query(Mtp.Device.CONTENT_URI,
+ DEVICE_COLUMNS, null, null, null);
+ Log.d(TAG, "query returned " + c);
+ startManagingCursor(c);
+ mCursor = c;
+
+ // Map Cursor columns to views defined in simple_list_item_2.xml
+ mAdapter = new SimpleCursorAdapter(this,
+ android.R.layout.simple_list_item_2, c,
+ new String[] { Mtp.Device.MANUFACTURER, Mtp.Device.MODEL },
+ new int[] { android.R.id.text1, android.R.id.text2 });
+ setListAdapter(mAdapter);
+
+ // register for changes to the device list
+ mResolver.registerContentObserver(Mtp.Device.CONTENT_URI, true, mDeviceObserver);
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ mResolver.unregisterContentObserver(mDeviceObserver);
+ }
+
+ @Override
+ protected void onListItemClick(ListView l, View v, int position, long id) {
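+        // Drill down into the selected device; the browsing chain is
+        // CameraBrowser -> StorageBrowser -> ObjectBrowser -> ObjectViewer.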
+ Intent intent = new Intent(this, StorageBrowser.class);
+ intent.putExtra("device", (int)mAdapter.getItemId(position));
+ startActivity(intent);
+ }
+}
diff --git a/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectBrowser.java b/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectBrowser.java
new file mode 100644
index 0000000..6d34fd4
--- /dev/null
+++ b/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectBrowser.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camerabrowser;
+
+import android.app.ListActivity;
+import android.content.Context;
+import android.content.Intent;
+import android.database.Cursor;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.media.MtpConstants;
+import android.net.Uri;
+import android.os.Bundle;
+import android.provider.Mtp;
+import android.util.Log;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ImageView;
+import android.widget.ListView;
+import android.widget.ResourceCursorAdapter;
+import android.widget.TextView;
+
+/**
+ * A list view displaying all objects within a container (folder or storage unit).
+ */
+public class ObjectBrowser extends ListActivity {
+
+ private static final String TAG = "ObjectBrowser";
+
+ private Cursor mCursor;
+ private ObjectCursorAdapter mAdapter;
+ private int mDeviceID;
+ private long mStorageID;
+ private long mObjectID;
+
+ private static final String[] OBJECT_COLUMNS =
+ new String[] { Mtp.Object._ID, Mtp.Object.NAME, Mtp.Object.FORMAT, Mtp.Object.THUMB };
+
+ static final int ID_COLUMN = 0;
+ static final int NAME_COLUMN = 1;
+ static final int FORMAT_COLUMN = 2;
+ static final int THUMB_COLUMN = 3;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ mDeviceID = getIntent().getIntExtra("device", 0);
+ mStorageID = getIntent().getLongExtra("storage", 0);
+ mObjectID = getIntent().getLongExtra("object", 0);
+ if (mDeviceID != 0 && mStorageID != 0) {
+ Cursor c;
+ Uri uri;
+ if (mObjectID == 0) {
+ uri = Mtp.Object.getContentUriForStorageChildren(mDeviceID, mStorageID);
+ } else {
+ uri = Mtp.Object.getContentUriForObjectChildren(mDeviceID, mObjectID);
+ }
+ Log.d(TAG, "query " + uri);
+ c = getContentResolver().query(uri, OBJECT_COLUMNS, null, null, null);
+ startManagingCursor(c);
+ mCursor = c;
+
+            // Display the objects using our custom adapter and the object_list layout
+ mAdapter = new ObjectCursorAdapter(this, c);
+ setListAdapter(mAdapter);
+ }
+ }
+
+ @Override
+ protected void onListItemClick(ListView l, View v, int position, long id) {
+ long rowID = mAdapter.getItemId(position);
+ Cursor c = getContentResolver().query(
+ Mtp.Object.getContentUri(mDeviceID, rowID),
+ OBJECT_COLUMNS, null, null, null);
+        Log.d(TAG, "query returned " + c);
+        long format = 0;
+        if (c != null && c.moveToFirst()) {
+            long rowId = c.getLong(ID_COLUMN);
+            String name = c.getString(NAME_COLUMN);
+            format = c.getLong(FORMAT_COLUMN);
+            Log.d(TAG, "rowId: " + rowId + " name: " + name + " format: " + format);
+        }
+        if (c != null) {
+            c.close();
+        }
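+        // associations are folders, so drill down; otherwise display the object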
+ if (format == MtpConstants.FORMAT_ASSOCIATION) {
+ Intent intent = new Intent(this, ObjectBrowser.class);
+ intent.putExtra("device", mDeviceID);
+ intent.putExtra("storage", mStorageID);
+ intent.putExtra("object", rowID);
+ startActivity(intent);
+ } else {
+ Intent intent = new Intent(this, ObjectViewer.class);
+ intent.putExtra("device", mDeviceID);
+ intent.putExtra("storage", mStorageID);
+ intent.putExtra("object", rowID);
+ startActivity(intent);
+ }
+ }
+
+ private class ObjectCursorAdapter extends ResourceCursorAdapter {
+
+ public ObjectCursorAdapter(Context context, Cursor c) {
+ super(context, R.layout.object_list, c);
+ }
+
+ @Override
+ public void bindView(View view, Context context, Cursor cursor) {
+ ImageView thumbView = (ImageView)view.findViewById(R.id.thumbnail);
+ TextView nameView = (TextView)view.findViewById(R.id.name);
+
+ // get the thumbnail
+ byte[] thumbnail = cursor.getBlob(THUMB_COLUMN);
+ if (thumbnail != null) {
+ Bitmap bitmap = BitmapFactory.decodeByteArray(thumbnail, 0, thumbnail.length);
+ if (bitmap != null) {
+ thumbView.setImageBitmap(bitmap);
+ }
+ }
+
+ // get the name
+ String name = cursor.getString(NAME_COLUMN);
+ if (name == null) {
+ name = "";
+ }
+ nameView.setText(name);
+ }
+ }
+}
diff --git a/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectViewer.java b/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectViewer.java
new file mode 100644
index 0000000..9f2f98e
--- /dev/null
+++ b/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectViewer.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camerabrowser;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.database.Cursor;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.FileUtils;
+import android.os.ParcelFileDescriptor;
+import android.os.Process;
+import android.provider.Mtp;
+import android.util.Log;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+import android.view.View;
+import android.widget.ImageView;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.util.Calendar;
+import java.util.Date;
+
+/**
+ * A view to display the properties of an object.
+ */
+public class ObjectViewer extends Activity {
+
+ private static final String TAG = "ObjectViewer";
+
+ private int mDeviceID;
+ private long mStorageID;
+ private long mObjectID;
+
+ private static final String[] OBJECT_COLUMNS =
+ new String[] { Mtp.Object._ID,
+ Mtp.Object.NAME,
+ Mtp.Object.SIZE,
+ Mtp.Object.THUMB_WIDTH,
+ Mtp.Object.THUMB_HEIGHT,
+ Mtp.Object.THUMB_SIZE,
+ Mtp.Object.IMAGE_WIDTH,
+ Mtp.Object.IMAGE_HEIGHT,
+ Mtp.Object.IMAGE_DEPTH,
+ Mtp.Object.SEQUENCE_NUMBER,
+ Mtp.Object.DATE_CREATED,
+ Mtp.Object.DATE_MODIFIED,
+ Mtp.Object.KEYWORDS,
+ Mtp.Object.THUMB,
+ Mtp.Object.FORMAT,
+ };
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(R.layout.object_info);
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ mDeviceID = getIntent().getIntExtra("device", 0);
+ mStorageID = getIntent().getLongExtra("storage", 0);
+ mObjectID = getIntent().getLongExtra("object", 0);
+
+ if (mDeviceID != 0 && mObjectID != 0) {
+ Cursor c = getContentResolver().query(
+ Mtp.Object.getContentUri(mDeviceID, mObjectID),
+ OBJECT_COLUMNS, null, null, null);
+            if (c == null || !c.moveToFirst()) {
+                return;
+            }
+ TextView view = (TextView)findViewById(R.id.name);
+ view.setText(c.getString(1));
+ view = (TextView)findViewById(R.id.size);
+ view.setText(Long.toString(c.getLong(2)));
+ view = (TextView)findViewById(R.id.thumb_width);
+ view.setText(Long.toString(c.getLong(3)));
+ view = (TextView)findViewById(R.id.thumb_height);
+ view.setText(Long.toString(c.getLong(4)));
+ view = (TextView)findViewById(R.id.thumb_size);
+ view.setText(Long.toString(c.getLong(5)));
+ view = (TextView)findViewById(R.id.width);
+ view.setText(Long.toString(c.getLong(6)));
+ view = (TextView)findViewById(R.id.height);
+ view.setText(Long.toString(c.getLong(7)));
+ view = (TextView)findViewById(R.id.depth);
+ view.setText(Long.toString(c.getLong(8)));
+ view = (TextView)findViewById(R.id.sequence);
+ view.setText(Long.toString(c.getLong(9)));
+ view = (TextView)findViewById(R.id.created);
+ Date date = new Date(c.getLong(10) * 1000);
+ view.setText(date.toString());
+ view = (TextView)findViewById(R.id.modified);
+ date = new Date(c.getLong(11) * 1000);
+ view.setText(date.toString());
+ view = (TextView)findViewById(R.id.keywords);
+ view.setText(c.getString(12));
+ byte[] thumbnail = c.getBlob(13);
+ if (thumbnail != null) {
+ ImageView thumbView = (ImageView)findViewById(R.id.thumbnail);
+ Bitmap bitmap = BitmapFactory.decodeByteArray(thumbnail, 0, thumbnail.length);
+ if (bitmap != null) {
+ thumbView.setImageBitmap(bitmap);
+ }
+ }
+ view = (TextView)findViewById(R.id.format);
+            view.setText(Long.toHexString(c.getLong(14)).toUpperCase());
+            c.close();
+        }
+ }
+
+ @Override
+ public boolean onCreateOptionsMenu(Menu menu) {
+ MenuInflater inflater = getMenuInflater();
+ inflater.inflate(R.menu.object_menu, menu);
+ return true;
+ }
+
+ @Override
+ public boolean onPrepareOptionsMenu(Menu menu) {
+ MenuItem item = menu.findItem(R.id.save);
+ item.setEnabled(true);
+ item = menu.findItem(R.id.delete);
+ item.setEnabled(true);
+ return true;
+ }
+
+ @Override
+ public boolean onOptionsItemSelected(MenuItem item) {
+ switch (item.getItemId()) {
+ case R.id.save:
+ save();
+ return true;
+ case R.id.delete:
+ delete();
+ return true;
+ }
+ return false;
+ }
+
+ private static String getTimestamp() {
+ Calendar c = Calendar.getInstance();
+ c.setTimeInMillis(System.currentTimeMillis());
+ return String.format("%tY-%tm-%td-%tH-%tM-%tS", c, c, c, c, c, c);
+ }
+
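+    // Copies the object's data out of the MTP provider into the DCIM directory
+    // on external storage, then asks the media scanner to index the new file.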
+ private void save() {
+ boolean success = false;
+ Uri uri = Mtp.Object.getContentUri(mDeviceID, mObjectID);
+ File destFile = null;
+ ParcelFileDescriptor pfd = null;
+ FileInputStream fis = null;
+ FileOutputStream fos = null;
+
+ try {
+ pfd = getContentResolver().openFileDescriptor(uri, "r");
+ Log.d(TAG, "save got pfd " + pfd);
+ if (pfd != null) {
+ fis = new FileInputStream(pfd.getFileDescriptor());
+ Log.d(TAG, "save got fis " + fis);
+ File destDir = Environment.getExternalStoragePublicDirectory(
+ Environment.DIRECTORY_DCIM);
+ destDir.mkdirs();
+ destFile = new File(destDir, "CameraBrowser-" + getTimestamp() + ".jpeg");
+
+
+ Log.d(TAG, "save got destFile " + destFile);
+
+ if (destFile.exists()) {
+ destFile.delete();
+ }
+ fos = new FileOutputStream(destFile);
+
+ byte[] buffer = new byte[65536];
+ int bytesRead;
+ while ((bytesRead = fis.read(buffer)) >= 0) {
+ fos.write(buffer, 0, bytesRead);
+ }
+
+ // temporary workaround until we straighten out permissions in /data/media
+ FileUtils.setPermissions(destDir.getPath(), 0775, Process.myUid(), Process.SDCARD_RW_GID);
+ FileUtils.setPermissions(destFile.getPath(), 0664, Process.myUid(), Process.SDCARD_RW_GID);
+
+ success = true;
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Exception in ObjectView.save", e);
+ } finally {
+ if (fis != null) {
+ try {
+ fis.close();
+ } catch (Exception e) {
+ }
+ }
+ if (fos != null) {
+ try {
+ fos.close();
+ } catch (Exception e) {
+ }
+ }
+ if (pfd != null) {
+ try {
+ pfd.close();
+ } catch (Exception e) {
+ }
+ }
+ }
+
+ if (success) {
+ Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
+ intent.setData(Uri.fromFile(destFile));
+ sendBroadcast(intent);
+ Toast.makeText(this, R.string.object_saved_message, Toast.LENGTH_SHORT).show();
+ } else {
+ Toast.makeText(this, R.string.save_failed_message, Toast.LENGTH_SHORT).show();
+ }
+ }
+
+ private void delete() {
+ Uri uri = Mtp.Object.getContentUri(mDeviceID, mObjectID);
+
+ Log.d(TAG, "deleting " + uri);
+
+ int result = getContentResolver().delete(uri, null, null);
+ if (result > 0) {
+ Toast.makeText(this, R.string.object_deleted_message, Toast.LENGTH_SHORT).show();
+ finish();
+ } else {
+ Toast.makeText(this, R.string.delete_failed_message, Toast.LENGTH_SHORT).show();
+ }
+ }
+}
diff --git a/media/tests/CameraBrowser/src/com/android/camerabrowser/StorageBrowser.java b/media/tests/CameraBrowser/src/com/android/camerabrowser/StorageBrowser.java
new file mode 100644
index 0000000..63e036e
--- /dev/null
+++ b/media/tests/CameraBrowser/src/com/android/camerabrowser/StorageBrowser.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camerabrowser;
+
+import android.app.ListActivity;
+import android.content.Intent;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.Bundle;
+import android.provider.Mtp;
+import android.util.Log;
+import android.view.View;
+import android.widget.ListAdapter;
+import android.widget.ListView;
+import android.widget.SimpleCursorAdapter;
+
+/**
+ * A list view displaying all storage units on a device.
+ */
+public class StorageBrowser extends ListActivity {
+
+ private static final String TAG = "StorageBrowser";
+
+ private ListAdapter mAdapter;
+ private int mDeviceID;
+
+ private static final String[] STORAGE_COLUMNS =
+ new String[] { Mtp.Storage._ID, Mtp.Storage.DESCRIPTION };
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ mDeviceID = getIntent().getIntExtra("device", 0);
+ if (mDeviceID != 0) {
+ Cursor c = getContentResolver().query(Mtp.Storage.getContentUri(mDeviceID),
+ STORAGE_COLUMNS, null, null, null);
+ Log.d(TAG, "query returned " + c);
+ startManagingCursor(c);
+
+ // Map Cursor columns to views defined in simple_list_item_1.xml
+            mAdapter = new SimpleCursorAdapter(this,
+                    android.R.layout.simple_list_item_1, c,
+                    new String[] { Mtp.Storage.DESCRIPTION },
+                    new int[] { android.R.id.text1 });
+ setListAdapter(mAdapter);
+ }
+ }
+
+ @Override
+ protected void onListItemClick(ListView l, View v, int position, long id) {
+ Intent intent = new Intent(this, ObjectBrowser.class);
+ intent.putExtra("device", mDeviceID);
+ intent.putExtra("storage", mAdapter.getItemId(position));
+ startActivity(intent);
+ }
+}
diff --git a/media/tests/CameraBrowser/src/com/android/camerabrowser/UsbReceiver.java b/media/tests/CameraBrowser/src/com/android/camerabrowser/UsbReceiver.java
new file mode 100644
index 0000000..c05b239
--- /dev/null
+++ b/media/tests/CameraBrowser/src/com/android/camerabrowser/UsbReceiver.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camerabrowser;
+
+import android.content.Context;
+import android.content.Intent;
+import android.content.BroadcastReceiver;
+import android.hardware.Usb;
+import android.net.Uri;
+import android.util.Log;
+
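+/**
+ * Launches StorageBrowser when a USB camera is attached.
+ */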
+public class UsbReceiver extends BroadcastReceiver
+{
+ private static final String TAG = "UsbReceiver";
+
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ Log.d(TAG, "onReceive " + intent);
+ if (Usb.ACTION_USB_CAMERA_ATTACHED.equals(intent.getAction())) {
+ Uri uri = intent.getData();
+ intent = new Intent(context, StorageBrowser.class);
+ intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+ try {
+ // TODO - add a wrapper to Mtp.Device for this
+ int id = Integer.parseInt(uri.getPathSegments().get(1));
+ intent.putExtra("device", id);
+ context.startActivity(intent);
+ } catch (NumberFormatException e) {
+ Log.e(TAG, "bad device Uri " + uri);
+ }
+ }
+ }
+}
diff --git a/media/tests/mtp/Android.mk b/media/tests/mtp/Android.mk
new file mode 100644
index 0000000..a9074ed
--- /dev/null
+++ b/media/tests/mtp/Android.mk
@@ -0,0 +1,61 @@
+LOCAL_PATH:= $(call my-dir)
+
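+# Build the mtp command-line test tool for the target device.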
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ mtp.cpp \
+ MtpFile.cpp \
+
+LOCAL_C_INCLUDES += \
+ frameworks/base/media/mtp \
+
+LOCAL_CFLAGS := -DMTP_HOST
+
+LOCAL_MODULE := mtp
+
+LOCAL_STATIC_LIBRARIES := libmtp libusbhost libutils libcutils
+
+include $(BUILD_EXECUTABLE)
+
+endif
+
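+# Also build a host (Linux) copy of the tool; readline/history support is
+# enabled when the corresponding headers and libraries are found.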
+ifeq ($(HOST_OS),linux)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ mtp.cpp \
+ MtpFile.cpp \
+ ../../../libs/utils/RefBase.cpp \
+ ../../../libs/utils/SharedBuffer.cpp \
+ ../../../libs/utils/Threads.cpp \
+ ../../../libs/utils/VectorImpl.cpp \
+
+LOCAL_C_INCLUDES += \
+ frameworks/base/media/mtp \
+
+LOCAL_CFLAGS := -DMTP_HOST -g -O0
+
+have_readline := $(wildcard /usr/include/readline/readline.h)
+have_history := $(wildcard /usr/lib/libhistory*)
+ifneq ($(strip $(have_readline)),)
+LOCAL_CFLAGS += -DHAVE_READLINE=1
+endif
+
+LOCAL_LDLIBS += -lpthread
+ifneq ($(strip $(have_readline)),)
+LOCAL_LDLIBS += -lreadline -lncurses
+endif
+ifneq ($(strip $(have_history)),)
+LOCAL_LDLIBS += -lhistory
+endif
+
+LOCAL_MODULE := mtp
+
+LOCAL_STATIC_LIBRARIES := libmtp libusbhost libcutils
+
+include $(BUILD_HOST_EXECUTABLE)
+
+endif
diff --git a/media/tests/mtp/MtpFile.cpp b/media/tests/mtp/MtpFile.cpp
new file mode 100644
index 0000000..00d328e
--- /dev/null
+++ b/media/tests/mtp/MtpFile.cpp
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+
+#include "MtpClient.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpStorage.h"
+#include "MtpUtils.h"
+
+#include "MtpFile.h"
+
+namespace android {
+
+MtpClient* MtpFile::sClient = NULL;
+
+MtpFile::MtpFile(MtpDevice* device)
+ : mDevice(device),
+ mStorage(0),
+ mHandle(0)
+{
+}
+
+MtpFile::MtpFile(MtpDevice* device, MtpStorageID storage)
+ : mDevice(device),
+ mStorage(storage),
+ mHandle(0)
+{
+}
+
+MtpFile::MtpFile(MtpDevice* device, MtpStorageID storage, MtpObjectHandle handle)
+ : mDevice(device),
+ mStorage(storage),
+ mHandle(handle)
+{
+}
+
+MtpFile::MtpFile(MtpFile* file)
+ : mDevice(file->mDevice),
+ mStorage(file->mStorage),
+ mHandle(file->mHandle)
+{
+}
+
+MtpFile::~MtpFile() {
+}
+
+void MtpFile::print() {
+ if (mHandle) {
+
+ } else if (mStorage) {
+ printf("%x\n", mStorage);
+ } else {
+ int id = mDevice->getID();
+ MtpDeviceInfo* info = mDevice->getDeviceInfo();
+ if (info)
+ printf("%d\t%s %s %s\n", id, info->mManufacturer, info->mModel, info->mSerial);
+ else
+ printf("%d\t(no device info available)\n", id);
+ delete info;
+ }
+}
+
+MtpObjectInfo* MtpFile::getObjectInfo() {
+ return mDevice->getObjectInfo(mHandle);
+}
+
+void MtpFile::list() {
+ if (mStorage) {
+ MtpObjectHandleList* handles = mDevice->getObjectHandles(mStorage, 0,
+ (mHandle ? mHandle : -1));
+ if (handles) {
+ for (int i = 0; i < handles->size(); i++) {
+ MtpObjectHandle handle = (*handles)[i];
+ MtpObjectInfo* info = mDevice->getObjectInfo(handle);
+ if (info) {
+ char modified[100];
+ struct tm tm;
+
+ gmtime_r(&info->mDateModified, &tm);
+ strftime(modified, sizeof(modified), "%a %b %e %H:%M:%S GMT %Y", &tm);
+ printf("%s Handle: %d Format: %04X Size: %d Modified: %s\n",
+ info->mName, handle, info->mFormat, info->mCompressedSize, modified);
+ delete info;
+ }
+ }
+ delete handles;
+ }
+ } else {
+ // list storage units for device
+ MtpStorageIDList* storageList = mDevice->getStorageIDs();
+ for (int i = 0; i < storageList->size(); i++) {
+ MtpStorageID storageID = (*storageList)[i];
+ printf("%x\n", storageID);
+ }
+ }
+}
+
+void MtpFile::init(MtpClient* client) {
+ sClient = client;
+}
+
+MtpFile* MtpFile::parsePath(MtpFile* base, char* path) {
+ MtpDevice* device = NULL;
+ MtpStorageID storage = 0;
+ MtpObjectHandle handle = 0;
+
+ if (path[0] != '/' && base) {
+ device = base->mDevice;
+ storage = base->mStorage;
+ handle = base->mHandle;
+ }
+
+ // parse an absolute path
+ if (path[0] == '/')
+ path++;
+ char* tok = strtok(path, "/");
+ while (tok) {
+ if (storage) {
+ // find child of current handle
+ MtpObjectHandleList* handles = device->getObjectHandles(storage, 0,
+ (handle ? handle : -1));
+ MtpObjectHandle childHandle = 0;
+
+ if (handles) {
+ for (int i = 0; i < handles->size() && !childHandle; i++) {
+ MtpObjectHandle handle = (*handles)[i];
+ MtpObjectInfo* info = device->getObjectInfo(handle);
+ if (info && !strcmp(tok, info->mName))
+ childHandle = handle;
+ delete info;
+ }
+ delete handles;
+ }
+ if (childHandle)
+ handle = childHandle;
+ else
+ return NULL;
+ } else if (device) {
+ unsigned int id;
+ // find storage for the device
+ if (sscanf(tok, "%x", &id) == 1) {
+ MtpStorageIDList* storageList = device->getStorageIDs();
+ bool found = false;
+ for (int i = 0; i < storageList->size(); i++) {
+ if ((*storageList)[i] == id) {
+ found = true;
+ break;
+ }
+ }
+ if (found)
+ storage = id;
+ else
+ return NULL;
+ }
+ } else {
+ // find device
+ unsigned int id;
+ if (sscanf(tok, "%d", &id) == 1)
+ device = sClient->getDevice(id);
+ if (!device)
+ return NULL;
+ }
+
+ tok = strtok(NULL, "/");
+ }
+
+ if (device)
+ return new MtpFile(device, storage, handle);
+ else
+ return NULL;
+}
+
+}
diff --git a/media/tests/mtp/MtpFile.h b/media/tests/mtp/MtpFile.h
new file mode 100644
index 0000000..ab8762b
--- /dev/null
+++ b/media/tests/mtp/MtpFile.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_FILE_H
+#define _MTP_FILE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpClient;
+class MtpDevice;
+class MtpObjectInfo;
+
+// File-like abstraction for the interactive shell.
+// This can be used to represent an MTP device, storage unit or object
+// (either file or association).
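+// Paths have the form <device>/<storage>/<name>/..., where <device> is the
+// decimal device ID and <storage> is the hexadecimal storage ID
+// (for example "1/10001/DCIM" - values are illustrative).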
+class MtpFile {
+private:
+ MtpDevice* mDevice;
+ MtpStorageID mStorage;
+ MtpObjectHandle mHandle;
+ static MtpClient* sClient;
+
+public:
+ MtpFile(MtpDevice* device);
+ MtpFile(MtpDevice* device, MtpStorageID storage);
+ MtpFile(MtpDevice* device, MtpStorageID storage, MtpObjectHandle handle);
+ MtpFile(MtpFile* file);
+ virtual ~MtpFile();
+
+ MtpObjectInfo* getObjectInfo();
+ void print();
+ void list();
+
+ inline MtpDevice* getDevice() const { return mDevice; }
+
+ static void init(MtpClient* client);
+ static MtpFile* parsePath(MtpFile* base, char* path);
+};
+
+}
+
+#endif // _MTP_FILE_H
diff --git a/media/tests/mtp/mtp.cpp b/media/tests/mtp/mtp.cpp
new file mode 100644
index 0000000..3202cae
--- /dev/null
+++ b/media/tests/mtp/mtp.cpp
@@ -0,0 +1,370 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <unistd.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#if HAVE_READLINE
+#include <readline/readline.h>
+#include <readline/history.h>
+#endif
+
+#include "MtpClient.h"
+#include "MtpDevice.h"
+#include "MtpObjectInfo.h"
+
+#include "MtpFile.h"
+
+#define PROMPT "mtp> "
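+
+// Example interactive session (the device number, storage ID and file names
+// below are illustrative; actual values depend on the attached camera):
+//   mtp> ls                          list connected devices
+//   mtp> cd 1/10001                  enter storage 0x10001 on device 1
+//   mtp> ls                          list the objects in that storage
+//   mtp> get DSC_0001.JPG photo.jpg  copy an object to a local file
+//   mtp> put photo.jpg DCIM          upload a local file into a folder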
+
+using namespace android;
+
+static MtpClient* sClient = NULL;
+
+// current working directory information for interactive shell
+static MtpFile* sCurrentDirectory = NULL;
+
+static MtpFile* parse_path(char* path) {
+ return MtpFile::parsePath(sCurrentDirectory, path);
+}
+
+class MyClient : public MtpClient {
+private:
+ virtual void deviceAdded(MtpDevice *device) {
+ }
+
+ virtual void deviceRemoved(MtpDevice *device) {
+ }
+
+public:
+};
+
+static void init() {
+ sClient = new MyClient;
+ sClient->start();
+ MtpFile::init(sClient);
+}
+
+static int set_cwd(int argc, char* argv[]) {
+ if (argc != 1) {
+ fprintf(stderr, "cd should have one argument\n");
+ return -1;
+ }
+ if (!strcmp(argv[0], "/")) {
+ delete sCurrentDirectory;
+ sCurrentDirectory = NULL;
+ }
+ else {
+ MtpFile* file = parse_path(argv[0]);
+ if (file) {
+ delete sCurrentDirectory;
+ sCurrentDirectory = file;
+ } else {
+ fprintf(stderr, "could not find %s\n", argv[0]);
+ return -1;
+ }
+ }
+ return 0;
+}
+
+static void list_devices() {
+ // TODO - need to make sure the list will not change while iterating
+ MtpDeviceList& devices = sClient->getDeviceList();
+ for (int i = 0; i < devices.size(); i++) {
+ MtpDevice* device = devices[i];
+ MtpFile* file = new MtpFile(device);
+ file->print();
+ delete file;
+ }
+}
+
+static int list(int argc, char* argv[]) {
+ if (argc == 0) {
+ // list cwd
+ if (sCurrentDirectory) {
+ sCurrentDirectory->list();
+ } else {
+ list_devices();
+ }
+ }
+
+ for (int i = 0; i < argc; i++) {
+ char* path = argv[i];
+ if (!strcmp(path, "/")) {
+ list_devices();
+ } else {
+ MtpFile* file = parse_path(path);
+ if (!file) {
+ fprintf(stderr, "could not find %s\n", path);
+ return -1;
+ }
+ file->list();
+ }
+ }
+
+ return 0;
+}
+
+static int get_file(int argc, char* argv[]) {
+ int ret = -1;
+ int srcFD = -1;
+ int destFD = -1;
+ MtpFile* srcFile = NULL;
+ MtpObjectInfo* info = NULL;
+ char* dest;
+
+ if (argc < 1) {
+ fprintf(stderr, "not enough arguments\n");
+ return -1;
+ } else if (argc > 2) {
+ fprintf(stderr, "too many arguments\n");
+ return -1;
+ }
+
+ // find source object
+ char* src = argv[0];
+ srcFile = parse_path(src);
+ if (!srcFile) {
+ fprintf(stderr, "could not find %s\n", src);
+ return -1;
+ }
+ info = srcFile->getObjectInfo();
+ if (!info) {
+ fprintf(stderr, "could not find object info for %s\n", src);
+ goto fail;
+ }
+ if (info->mFormat == MTP_FORMAT_ASSOCIATION) {
+ fprintf(stderr, "copying directories not implemented yet\n");
+ goto fail;
+ }
+
+ dest = (argc > 1 ? argv[1] : info->mName);
+ destFD = open(dest, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+ if (destFD < 0) {
+ fprintf(stderr, "could not create %s\n", dest);
+ goto fail;
+ }
+ srcFD = srcFile->getDevice()->readObject(info->mHandle, info->mCompressedSize);
+ if (srcFD < 0)
+ goto fail;
+
+ char buffer[65536];
+ while (1) {
+ int count = read(srcFD, buffer, sizeof(buffer));
+ if (count <= 0)
+ break;
+ write(destFD, buffer, count);
+ }
+ // FIXME - error checking and reporting
+ ret = 0;
+
+fail:
+ delete srcFile;
+ delete info;
+ if (srcFD >= 0)
+ close(srcFD);
+ if (destFD >= 0)
+ close(destFD);
+ return ret;
+}
+
+static int put_file(int argc, char* argv[]) {
+ int ret = -1;
+ int srcFD = -1;
+ MtpFile* destFile = NULL;
+ MtpObjectInfo* srcInfo = NULL;
+ MtpObjectInfo* destInfo = NULL;
+ MtpObjectHandle handle;
+ struct stat statbuf;
+ const char* lastSlash;
+
+ if (argc < 1) {
+ fprintf(stderr, "not enough arguments\n");
+ return -1;
+ } else if (argc > 2) {
+ fprintf(stderr, "too many arguments\n");
+ return -1;
+ }
+ const char* src = argv[0];
+ srcFD = open(src, O_RDONLY);
+ if (srcFD < 0) {
+ fprintf(stderr, "could not open %s\n", src);
+ goto fail;
+ }
+ if (argc == 2) {
+ char* dest = argv[1];
+ destFile = parse_path(dest);
+ if (!destFile) {
+ fprintf(stderr, "could not find %s\n", dest);
+ goto fail;
+ }
+ } else {
+ if (!sCurrentDirectory) {
+ fprintf(stderr, "current working directory not set\n");
+ goto fail;
+ }
+ destFile = new MtpFile(sCurrentDirectory);
+ }
+
+ destInfo = destFile->getObjectInfo();
+ if (!destInfo) {
+        fprintf(stderr, "could not find object info for destination directory\n");
+ goto fail;
+ }
+ if (destInfo->mFormat != MTP_FORMAT_ASSOCIATION) {
+ fprintf(stderr, "destination not a directory\n");
+ goto fail;
+ }
+
+ if (fstat(srcFD, &statbuf))
+ goto fail;
+
+ srcInfo = new MtpObjectInfo(0);
+ srcInfo->mStorageID = destInfo->mStorageID;
+ srcInfo->mFormat = MTP_FORMAT_EXIF_JPEG; // FIXME
+ srcInfo->mCompressedSize = statbuf.st_size;
+ srcInfo->mParent = destInfo->mHandle;
+ lastSlash = strrchr(src, '/');
+ srcInfo->mName = strdup(lastSlash ? lastSlash + 1 : src);
+ srcInfo->mDateModified = statbuf.st_mtime;
+ handle = destFile->getDevice()->sendObjectInfo(srcInfo);
+ if (handle <= 0) {
+ printf("sendObjectInfo returned %04X\n", handle);
+ goto fail;
+ }
+ if (destFile->getDevice()->sendObject(srcInfo, srcFD))
+ ret = 0;
+
+fail:
+ delete destFile;
+ delete srcInfo;
+ delete destInfo;
+ if (srcFD >= 0)
+ close(srcFD);
+ printf("returning %d\n", ret);
+ return ret;
+}
+
+typedef int (* command_func)(int argc, char* argv[]);
+
+struct command_table_entry {
+ const char* name;
+ command_func func;
+};
+
+const command_table_entry command_list[] = {
+ { "cd", set_cwd },
+ { "ls", list },
+ { "get", get_file },
+ { "put", put_file },
+ { NULL, NULL },
+};
+
+
+static int do_command(int argc, char* argv[]) {
+ const command_table_entry* command = command_list;
+ const char* name = *argv++;
+ argc--;
+
+ while (command->name) {
+ if (!strcmp(command->name, name))
+ return command->func(argc, argv);
+ else
+ command++;
+ }
+ fprintf(stderr, "unknown command %s\n", name);
+ return -1;
+}
+
+static int shell() {
+ int argc;
+ int result = 0;
+#define MAX_ARGS 100
+ char* argv[MAX_ARGS];
+
+#if HAVE_READLINE
+ using_history();
+#endif
+
+ while (1) {
+#if HAVE_READLINE
+ char* line = readline(PROMPT);
+ if (!line) {
+ printf("\n");
+ exit(0);
+ }
+#else
+        char buffer[1000];
+        printf("%s", PROMPT);
+        char* line = buffer;
+
+        buffer[0] = 0;
+        // fgets returns NULL on end-of-file or error
+        if (!fgets(buffer, sizeof(buffer), stdin)) {
+            printf("\n");
+            exit(0);
+        }
+        int count = strlen(buffer);
+        if (count > 0 && line[count - 1] == '\n')
+            line[count - 1] = 0;
+#endif
+ char* tok = strtok(line, " \t\n\r");
+ if (!tok)
+ continue;
+ if (!strcmp(tok, "quit") || !strcmp(tok, "exit")) {
+ exit(0);
+ }
+#if HAVE_READLINE
+ add_history(line);
+#endif
+ argc = 0;
+ while (tok) {
+ if (argc + 1 == MAX_ARGS) {
+ fprintf(stderr, "too many arguments\n");
+ result = -1;
+ goto bottom_of_loop;
+ }
+
+ argv[argc++] = strdup(tok);
+ tok = strtok(NULL, " \t\n\r");
+ }
+
+ result = do_command(argc, argv);
+
+bottom_of_loop:
+ for (int i = 0; i < argc; i++)
+ free(argv[i]);
+#if HAVE_READLINE
+        free(line);
+#endif
+ }
+
+ return result;
+}
+
+int main(int argc, char* argv[]) {
+ init();
+
+ if (argc == 1)
+ return shell();
+ else
+ return do_command(argc - 1, argv + 1);
+}
diff --git a/media/tests/players/invoke_mock_media_player.cpp b/media/tests/players/invoke_mock_media_player.cpp
index b3cc8b6..53308be 100644
--- a/media/tests/players/invoke_mock_media_player.cpp
+++ b/media/tests/players/invoke_mock_media_player.cpp
@@ -26,6 +26,7 @@
using android::INVALID_OPERATION;
using android::ISurface;
+using android::Surface;
using android::MediaPlayerBase;
using android::OK;
using android::Parcel;
@@ -67,7 +68,8 @@ class Player: public MediaPlayerBase
}
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) {return OK;}
- virtual status_t setVideoSurface(const sp<ISurface>& surface) {return OK;}
+ virtual status_t setVideoISurface(const sp<ISurface>& surface) {return OK;}
+ virtual status_t setVideoSurface(const sp<Surface>& surface) {return OK;}
virtual status_t prepare() {return OK;}
virtual status_t prepareAsync() {return OK;}
virtual status_t start() {return OK;}