-rw-r--r--  api/14.txt | 4
-rw-r--r--  api/current.txt | 22
-rw-r--r--  core/java/android/app/ActivityThread.java | 7
-rw-r--r--  core/java/android/app/SearchDialog.java | 26
-rw-r--r--  core/java/android/app/Service.java | 6
-rw-r--r--  core/java/android/app/WallpaperManager.java | 8
-rw-r--r--  core/java/android/content/SearchRecentSuggestionsProvider.java | 6
-rw-r--r--  core/java/android/database/MatrixCursor.java | 6
-rw-r--r--  core/java/android/hardware/Camera.java | 18
-rw-r--r--  core/java/android/hardware/usb/UsbManager.java | 5
-rw-r--r--  core/java/android/service/wallpaper/WallpaperService.java | 4
-rw-r--r--  core/java/android/speech/tts/AudioPlaybackHandler.java | 36
-rw-r--r--  core/java/android/util/Config.java | 56
-rw-r--r--  core/java/android/util/Patterns.java | 12
-rw-r--r--  core/java/android/view/ActionMode.java | 30
-rw-r--r--  core/java/android/view/GLES20Canvas.java | 32
-rw-r--r--  core/java/android/view/HardwareRenderer.java | 24
-rw-r--r--  core/java/android/view/SurfaceView.java | 2
-rw-r--r--  core/java/android/view/TextureView.java | 95
-rw-r--r--  core/java/android/view/View.java | 38
-rw-r--r--  core/java/android/view/ViewDebug.java | 8
-rw-r--r--  core/java/android/view/ViewGroup.java | 60
-rw-r--r--  core/java/android/view/ViewRootImpl.java (renamed from core/java/android/view/ViewAncestor.java) | 50
-rw-r--r--  core/java/android/view/WindowManagerImpl.java | 51
-rw-r--r--  core/java/android/view/inputmethod/BaseInputConnection.java | 4
-rw-r--r--  core/java/android/view/inputmethod/InputMethodManager.java | 19
-rw-r--r--  core/java/android/webkit/BrowserFrame.java | 4
-rw-r--r--  core/java/android/webkit/WebViewCore.java | 4
-rw-r--r--  core/java/android/widget/Gallery.java | 183
-rw-r--r--  core/java/android/widget/GridLayout.java | 113
-rw-r--r--  core/java/android/widget/SearchView.java | 117
-rw-r--r--  core/java/android/widget/SuggestionsAdapter.java | 37
-rw-r--r--  core/java/android/widget/TextView.java | 6
-rw-r--r--  core/java/android/widget/ZoomButtonsController.java | 10
-rw-r--r--  core/java/com/android/internal/view/IInputMethodManager.aidl | 2
-rw-r--r--  core/java/com/android/internal/widget/PasswordEntryKeyboardHelper.java | 8
-rw-r--r--  core/jni/Android.mk | 1
-rw-r--r--  core/jni/android/graphics/TextLayoutCache.cpp | 43
-rw-r--r--  core/jni/android/graphics/TextLayoutCache.h | 6
-rwxr-xr-x  core/jni/android_bluetooth_BluetoothAudioGateway.cpp | 4
-rw-r--r--  core/jni/android_bluetooth_BluetoothSocket.cpp | 3
-rwxr-xr-x  core/jni/android_bluetooth_c.c | 31
-rw-r--r--  core/jni/android_bluetooth_c.h | 39
-rw-r--r--  core/jni/android_view_GLES20Canvas.cpp | 14
-rw-r--r--  core/jni/android_view_TextureView.cpp | 188
-rw-r--r--  core/res/res/layout/search_bar.xml | 1
-rw-r--r--  core/res/res/layout/search_dropdown_item_icons_2line.xml | 8
-rw-r--r--  core/res/res/layout/search_view.xml | 33
-rw-r--r--  core/res/res/values/dimens.xml | 12
-rw-r--r--  core/res/res/values/public.xml | 8
-rw-r--r--  core/res/res/values/styles.xml | 4
-rw-r--r--  core/tests/coretests/src/android/database/MatrixCursorTest.java | 24
-rw-r--r--  core/tests/coretests/src/android/util/JsonReaderTest.java | 2
-rw-r--r--  core/tests/coretests/src/android/util/PatternsTest.java | 7
-rw-r--r--  include/gui/SurfaceTextureClient.h | 78
-rw-r--r--  include/media/mediaplayer.h | 6
-rw-r--r--  include/surfaceflinger/Surface.h | 69
-rw-r--r--  libs/gui/Surface.cpp | 288
-rw-r--r--  libs/gui/SurfaceTexture.cpp | 8
-rw-r--r--  libs/gui/SurfaceTextureClient.cpp | 257
-rw-r--r--  libs/gui/tests/SurfaceTextureClient_test.cpp | 86
-rw-r--r--  libs/hwui/Caches.cpp | 34
-rw-r--r--  libs/hwui/Caches.h | 12
-rw-r--r--  libs/hwui/Debug.h | 3
-rw-r--r--  libs/ui/FramebufferNativeWindow.cpp | 4
-rw-r--r--  media/java/android/media/AudioTrack.java | 4
-rw-r--r--  media/libmedia/mediaplayer.cpp | 106
-rw-r--r--  media/libstagefright/codecs/aacenc/src/bit_cnt.c | 2
-rw-r--r--  media/libstagefright/codecs/aacenc/src/memalign.c | 9
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/cmnMemory.c | 73
-rw-r--r--  media/libstagefright/codecs/avc/dec/Android.mk | 55
-rw-r--r--  media/libstagefright/codecs/avc/dec/SoftAVC.cpp | 720
-rw-r--r--  media/libstagefright/codecs/avc/dec/SoftAVC.h | 109
-rw-r--r--  media/libstagefright/codecs/avc/dec/include/avcdec_api.h | 200
-rw-r--r--  media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h | 49
-rw-r--r--  media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h | 48
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp | 276
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp | 1036
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h | 125
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/avcdec_int.h | 88
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/avcdec_lib.h | 555
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/header.cpp | 1391
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/itrans.cpp | 307
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/pred_inter.cpp | 2329
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/pred_intra.cpp | 1786
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/residual.cpp | 523
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/slice.cpp | 772
-rw-r--r--  media/libstagefright/codecs/avc/dec/src/vlc.cpp | 815
-rw-r--r--  media/libstagefright/codecs/common/cmnMemory.c | 4
-rw-r--r--  media/libstagefright/codecs/common/include/voType.h | 2
-rw-r--r--  media/libstagefright/omx/SoftOMXPlugin.cpp | 1
-rw-r--r--  native/android/native_window.cpp | 34
-rw-r--r--  native/include/android/native_window.h | 8
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBarPolicy.java | 160
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp | 97
-rw-r--r--  services/camera/libcameraservice/CameraService.h | 4
-rw-r--r--  services/java/com/android/server/InputMethodManagerService.java | 36
-rw-r--r--  services/java/com/android/server/usb/UsbDeviceManager.java | 5
-rw-r--r--  services/java/com/android/server/wm/DragState.java | 34
-rw-r--r--  services/java/com/android/server/wm/InputMonitor.java | 46
-rw-r--r--  telephony/java/com/android/internal/telephony/BaseCommands.java | 28
-rw-r--r--  telephony/java/com/android/internal/telephony/TelephonyProperties.java | 9
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_a.png | bin 0 -> 1518 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_b.png | bin 0 -> 1373 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_c.png | bin 0 -> 1772 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_d.png | bin 0 -> 1382 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_e.png | bin 0 -> 518 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_f.png | bin 0 -> 510 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_g.png | bin 0 -> 1688 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_h.png | bin 0 -> 568 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_i.png | bin 0 -> 440 bytes
-rw-r--r--  tests/BiDiTests/res/drawable/alphabet_j.png | bin 0 -> 759 bytes
-rw-r--r--  tests/BiDiTests/res/layout/canvas2.xml | 66
-rw-r--r--  tests/BiDiTests/res/layout/gallery_ltr.xml | 30
-rw-r--r--  tests/BiDiTests/res/layout/gallery_rtl.xml | 30
-rw-r--r--  tests/BiDiTests/res/values/attrs.xml | 8
-rw-r--r--  tests/BiDiTests/res/values/strings.xml | 3
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java | 5
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestCanvas2.java | 36
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java | 68
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java | 35
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java | 35
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestView.java | 66
-rw-r--r--  tests/BiDiTests/src/com/android/bidi/BiDiTestViewDrawText.java | 66
-rw-r--r--  tests/GridLayoutTest/src/com/android/test/layout/Activity2.java | 4
-rw-r--r--  tests/HwAccelerationTest/AndroidManifest.xml | 9
-rw-r--r--  tests/HwAccelerationTest/src/com/android/test/hwui/CanvasTextureViewActivity.java | 120
-rw-r--r--  tools/aapt/AaptAssets.cpp | 27
-rw-r--r--  tools/aapt/AaptAssets.h | 21
-rw-r--r--  tools/aapt/Bundle.h | 11
-rw-r--r--  tools/aapt/Command.cpp | 43
-rw-r--r--  tools/aapt/Main.cpp | 15
-rw-r--r--  tools/aapt/Main.h | 1
-rw-r--r--  tools/aapt/Resource.cpp | 29
-rw-r--r--  tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeIInputMethodManager.java | 2
135 files changed, 2674 insertions(+), 12288 deletions(-)
diff --git a/api/14.txt b/api/14.txt
index d25d2b9..2a1bcac 100644
--- a/api/14.txt
+++ b/api/14.txt
@@ -19820,8 +19820,8 @@ package android.util {
field public static final java.util.regex.Pattern IP_ADDRESS;
field public static final java.util.regex.Pattern PHONE;
field public static final java.util.regex.Pattern TOP_LEVEL_DOMAIN;
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw])";
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw]))";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw])";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw]))";
field public static final java.util.regex.Pattern WEB_URL;
}
diff --git a/api/current.txt b/api/current.txt
index ecbc9d2..b462695 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -207,6 +207,7 @@ package android {
field public static final int actionModeCutDrawable = 16843537; // 0x1010311
field public static final int actionModePasteDrawable = 16843539; // 0x1010313
field public static final int actionModeSelectAllDrawable = 16843647; // 0x101037f
+ field public static final int actionModeStyle = 16843688; // 0x10103a8
field public static final int actionOverflowButtonStyle = 16843510; // 0x10102f6
field public static final int actionProviderClass = 16843677; // 0x101039d
field public static final int actionViewClass = 16843516; // 0x10102fc
@@ -8904,6 +8905,8 @@ package android.hardware {
method public final void takePicture(android.hardware.Camera.ShutterCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback);
method public final void takePicture(android.hardware.Camera.ShutterCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback);
method public final void unlock();
+ field public static final java.lang.String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";
+ field public static final java.lang.String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";
field public static final int CAMERA_ERROR_SERVER_DIED = 100; // 0x64
field public static final int CAMERA_ERROR_UNKNOWN = 1; // 0x1
}
@@ -20494,6 +20497,14 @@ package android.util {
ctor public Base64OutputStream(java.io.OutputStream, int);
}
+ public final deprecated class Config {
+ field public static final deprecated boolean DEBUG = false;
+ field public static final deprecated boolean LOGD = true;
+ field public static final deprecated boolean LOGV = false;
+ field public static final deprecated boolean PROFILE = false;
+ field public static final deprecated boolean RELEASE = true;
+ }
+
public class DebugUtils {
method public static boolean isObjectSelected(java.lang.Object);
}
@@ -20704,8 +20715,8 @@ package android.util {
field public static final java.util.regex.Pattern IP_ADDRESS;
field public static final java.util.regex.Pattern PHONE;
field public static final java.util.regex.Pattern TOP_LEVEL_DOMAIN;
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw])";
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw]))";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw])";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw]))";
field public static final java.util.regex.Pattern WEB_URL;
}
@@ -20922,11 +20933,13 @@ package android.view {
method public abstract android.view.Menu getMenu();
method public abstract android.view.MenuInflater getMenuInflater();
method public abstract java.lang.CharSequence getSubtitle();
+ method public java.lang.Object getTag();
method public abstract java.lang.CharSequence getTitle();
method public abstract void invalidate();
method public abstract void setCustomView(android.view.View);
method public abstract void setSubtitle(java.lang.CharSequence);
method public abstract void setSubtitle(int);
+ method public void setTag(java.lang.Object);
method public abstract void setTitle(java.lang.CharSequence);
method public abstract void setTitle(int);
}
@@ -22039,9 +22052,12 @@ package android.view {
method public android.graphics.SurfaceTexture getSurfaceTexture();
method public android.view.TextureView.SurfaceTextureListener getSurfaceTextureListener();
method public boolean isAvailable();
+ method public android.graphics.Canvas lockCanvas();
+ method public android.graphics.Canvas lockCanvas(android.graphics.Rect);
method protected final void onDraw(android.graphics.Canvas);
method public void setOpaque(boolean);
method public void setSurfaceTextureListener(android.view.TextureView.SurfaceTextureListener);
+ method public void unlockCanvasAndPost(android.graphics.Canvas);
}
public static abstract interface TextureView.SurfaceTextureListener {
@@ -23894,7 +23910,7 @@ package android.view.inputmethod {
method public boolean isWatchingCursor(android.view.View);
method public void restartInput(android.view.View);
method public void sendAppPrivateCommand(android.view.View, java.lang.String, android.os.Bundle);
- method public boolean setAdditionalInputMethodSubtypes(android.os.IBinder, android.view.inputmethod.InputMethodSubtype[]);
+ method public boolean setAdditionalInputMethodSubtypes(java.lang.String, android.view.inputmethod.InputMethodSubtype[]);
method public boolean setCurrentInputMethodSubtype(android.view.inputmethod.InputMethodSubtype);
method public void setInputMethod(android.os.IBinder, java.lang.String);
method public void setInputMethodAndSubtype(android.os.IBinder, java.lang.String, android.view.inputmethod.InputMethodSubtype);
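The setAdditionalInputMethodSubtypes() change above replaces the IBinder token with the input method's string id. A minimal caller sketch under the new signature (the helper class and method are illustrative, not part of this change):

    import android.content.Context;
    import android.view.inputmethod.InputMethodInfo;
    import android.view.inputmethod.InputMethodManager;
    import android.view.inputmethod.InputMethodSubtype;

    class SubtypeRegistrar {
        // Register extra subtypes for an IME using its string id
        // (InputMethodInfo#getId()) rather than an IBinder token.
        static void registerExtraSubtypes(Context context, InputMethodInfo imi,
                InputMethodSubtype[] subtypes) {
            InputMethodManager imm = (InputMethodManager)
                    context.getSystemService(Context.INPUT_METHOD_SERVICE);
            imm.setAdditionalInputMethodSubtypes(imi.getId(), subtypes);
        }
    }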
diff --git a/core/java/android/app/ActivityThread.java b/core/java/android/app/ActivityThread.java
index c6a746b..f6cd866 100644
--- a/core/java/android/app/ActivityThread.java
+++ b/core/java/android/app/ActivityThread.java
@@ -70,7 +70,7 @@ import android.view.HardwareRenderer;
import android.view.View;
import android.view.ViewDebug;
import android.view.ViewManager;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManagerImpl;
@@ -3558,6 +3558,7 @@ public final class ActivityThread {
}
final void handleTrimMemory(int level) {
+ WindowManagerImpl.getDefault().trimMemory(level);
}
private final void handleBindApplication(AppBindData data) {
@@ -4071,7 +4072,7 @@ public final class ActivityThread {
sThreadLocal.set(this);
mSystemThread = system;
if (!system) {
- ViewAncestor.addFirstDrawHandler(new Runnable() {
+ ViewRootImpl.addFirstDrawHandler(new Runnable() {
public void run() {
ensureJitEnabled();
}
@@ -4101,7 +4102,7 @@ public final class ActivityThread {
}
}
- ViewAncestor.addConfigCallback(new ComponentCallbacks() {
+ ViewRootImpl.addConfigCallback(new ComponentCallbacks() {
public void onConfigurationChanged(Configuration newConfig) {
synchronized (mPackages) {
// We need to apply this change to the resources
diff --git a/core/java/android/app/SearchDialog.java b/core/java/android/app/SearchDialog.java
index 42eda02..8e2d360 100644
--- a/core/java/android/app/SearchDialog.java
+++ b/core/java/android/app/SearchDialog.java
@@ -168,6 +168,7 @@ public class SearchDialog extends Dialog {
SearchBar searchBar = (SearchBar) findViewById(com.android.internal.R.id.search_bar);
searchBar.setSearchDialog(this);
mSearchView = (SearchView) findViewById(com.android.internal.R.id.search_view);
+ mSearchView.setIconified(false);
mSearchView.setOnCloseListener(mOnCloseListener);
mSearchView.setOnQueryTextListener(mOnQueryChangeListener);
mSearchView.setOnSuggestionListener(mOnSuggestionSelectionListener);
@@ -633,31 +634,6 @@ public class SearchDialog extends Dialog {
}
/**
- * Overrides the handling of the back key to move back to the previous
- * sources or dismiss the search dialog, instead of dismissing the input
- * method.
- */
- @Override
- public boolean dispatchKeyEventPreIme(KeyEvent event) {
- if (DBG)
- Log.d(LOG_TAG, "onKeyPreIme(" + event + ")");
- if (mSearchDialog != null && event.getKeyCode() == KeyEvent.KEYCODE_BACK) {
- KeyEvent.DispatcherState state = getKeyDispatcherState();
- if (state != null) {
- if (event.getAction() == KeyEvent.ACTION_DOWN && event.getRepeatCount() == 0) {
- state.startTracking(event, this);
- return true;
- } else if (event.getAction() == KeyEvent.ACTION_UP && !event.isCanceled()
- && state.isTracking(event)) {
- mSearchDialog.onBackPressed();
- return true;
- }
- }
- }
- return super.dispatchKeyEventPreIme(event);
- }
-
- /**
* Don't allow action modes in a SearchBar, it looks silly.
*/
@Override
diff --git a/core/java/android/app/Service.java b/core/java/android/app/Service.java
index c179b35..4c21d04 100644
--- a/core/java/android/app/Service.java
+++ b/core/java/android/app/Service.java
@@ -662,12 +662,6 @@ public abstract class Service extends ContextWrapper implements ComponentCallbac
protected void dump(FileDescriptor fd, PrintWriter writer, String[] args) {
writer.println("nothing to dump");
}
-
- @Override
- protected void finalize() throws Throwable {
- super.finalize();
- //Log.i("Service", "Finalizing Service: " + this);
- }
// ------------------ Internal API ------------------
diff --git a/core/java/android/app/WallpaperManager.java b/core/java/android/app/WallpaperManager.java
index 7fd5a7d..8472b31 100644
--- a/core/java/android/app/WallpaperManager.java
+++ b/core/java/android/app/WallpaperManager.java
@@ -40,7 +40,7 @@ import android.os.RemoteException;
import android.os.ServiceManager;
import android.util.DisplayMetrics;
import android.util.Log;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -592,7 +592,7 @@ public class WallpaperManager {
public void setWallpaperOffsets(IBinder windowToken, float xOffset, float yOffset) {
try {
//Log.v(TAG, "Sending new wallpaper offsets from app...");
- ViewAncestor.getWindowSession(mContext.getMainLooper()).setWallpaperPosition(
+ ViewRootImpl.getWindowSession(mContext.getMainLooper()).setWallpaperPosition(
windowToken, xOffset, yOffset, mWallpaperXStep, mWallpaperYStep);
//Log.v(TAG, "...app returning after sending offsets!");
} catch (RemoteException e) {
@@ -630,7 +630,7 @@ public class WallpaperManager {
int x, int y, int z, Bundle extras) {
try {
//Log.v(TAG, "Sending new wallpaper offsets from app...");
- ViewAncestor.getWindowSession(mContext.getMainLooper()).sendWallpaperCommand(
+ ViewRootImpl.getWindowSession(mContext.getMainLooper()).sendWallpaperCommand(
windowToken, action, x, y, z, extras, false);
//Log.v(TAG, "...app returning after sending offsets!");
} catch (RemoteException e) {
@@ -650,7 +650,7 @@ public class WallpaperManager {
*/
public void clearWallpaperOffsets(IBinder windowToken) {
try {
- ViewAncestor.getWindowSession(mContext.getMainLooper()).setWallpaperPosition(
+ ViewRootImpl.getWindowSession(mContext.getMainLooper()).setWallpaperPosition(
windowToken, -1, -1, -1, -1);
} catch (RemoteException e) {
// Ignore.
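The methods touched here are the public offset path that wallpaper-aware apps call; only the window-session lookup moves from ViewAncestor to ViewRootImpl. A usage sketch from an ordinary Activity, for reference (the activity and the 0.5/0.0 offsets are illustrative; the window token must belong to an attached window):

    import android.app.Activity;
    import android.app.WallpaperManager;

    public class WallpaperOffsetsExample extends Activity {
        @Override
        public void onAttachedToWindow() {
            super.onAttachedToWindow();
            // Tell the (possibly live) wallpaper where this window sits; offsets run 0..1.
            WallpaperManager wm = WallpaperManager.getInstance(this);
            wm.setWallpaperOffsets(getWindow().getDecorView().getWindowToken(), 0.5f, 0.0f);
        }
    }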
diff --git a/core/java/android/content/SearchRecentSuggestionsProvider.java b/core/java/android/content/SearchRecentSuggestionsProvider.java
index 3d89e92..e1a8d21 100644
--- a/core/java/android/content/SearchRecentSuggestionsProvider.java
+++ b/core/java/android/content/SearchRecentSuggestionsProvider.java
@@ -186,6 +186,9 @@ public class SearchRecentSuggestionsProvider extends ContentProvider {
mSuggestionProjection = new String [] {
"0 AS " + SearchManager.SUGGEST_COLUMN_FORMAT,
+ "'android.resource://system/"
+ + com.android.internal.R.drawable.ic_menu_recent_history + "' AS "
+ + SearchManager.SUGGEST_COLUMN_ICON_1,
"display1 AS " + SearchManager.SUGGEST_COLUMN_TEXT_1,
"display2 AS " + SearchManager.SUGGEST_COLUMN_TEXT_2,
"query AS " + SearchManager.SUGGEST_COLUMN_QUERY,
@@ -196,6 +199,9 @@ public class SearchRecentSuggestionsProvider extends ContentProvider {
mSuggestionProjection = new String [] {
"0 AS " + SearchManager.SUGGEST_COLUMN_FORMAT,
+ "'android.resource://system/"
+ + com.android.internal.R.drawable.ic_menu_recent_history + "' AS "
+ + SearchManager.SUGGEST_COLUMN_ICON_1,
"display1 AS " + SearchManager.SUGGEST_COLUMN_TEXT_1,
"query AS " + SearchManager.SUGGEST_COLUMN_QUERY,
"_id"
diff --git a/core/java/android/database/MatrixCursor.java b/core/java/android/database/MatrixCursor.java
index 5c1b968..6e68b6b 100644
--- a/core/java/android/database/MatrixCursor.java
+++ b/core/java/android/database/MatrixCursor.java
@@ -272,6 +272,12 @@ public class MatrixCursor extends AbstractCursor {
}
@Override
+ public byte[] getBlob(int column) {
+ Object value = get(column);
+ return (byte[]) value;
+ }
+
+ @Override
public int getType(int column) {
return DatabaseUtils.getTypeOfObject(get(column));
}
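With getBlob() implemented, byte[] values placed in a MatrixCursor row can be read back through the normal Cursor interface. A small round-trip sketch (class and column names are illustrative):

    import android.database.MatrixCursor;

    class MatrixCursorBlobExample {
        // Store a byte[] in a row and read it back via the new getBlob() override.
        static byte[] roundTrip(byte[] payload) {
            MatrixCursor cursor = new MatrixCursor(new String[] { "_id", "data" });
            cursor.addRow(new Object[] { 1, payload });
            cursor.moveToFirst();
            return cursor.getBlob(cursor.getColumnIndexOrThrow("data"));
        }
    }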
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index 7d67e11..a168260 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -16,6 +16,8 @@
package android.hardware;
+import android.annotation.SdkConstant;
+import android.annotation.SdkConstant.SdkConstantType;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
@@ -142,6 +144,22 @@ public class Camera {
private boolean mWithBuffer;
/**
+ * Broadcast Action: A new picture is taken by the camera, and the entry of
+ * the picture has been added to the media store.
+ * {@link android.content.Intent#getData} is URI of the picture.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";
+
+ /**
+ * Broadcast Action: A new video is recorded by the camera, and the entry
+ * of the video has been added to the media store.
+ * {@link android.content.Intent#getData} is URI of the video.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";
+
+ /**
* Returns the number of physical cameras available on this device.
*/
public native static int getNumberOfCameras();
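Any application can listen for the two broadcasts above once the capture has been added to the media store. A minimal receiver sketch (the class name and logging are illustrative; a manifest-declared receiver entry with the same action works equally well):

    import android.content.BroadcastReceiver;
    import android.content.Context;
    import android.content.Intent;
    import android.content.IntentFilter;
    import android.hardware.Camera;
    import android.net.Uri;
    import android.util.Log;

    public class NewPictureReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            Uri pictureUri = intent.getData(); // media store entry for the new picture
            Log.d("NewPictureReceiver", "New picture: " + pictureUri);
        }

        public static void register(Context context) {
            context.registerReceiver(new NewPictureReceiver(),
                    new IntentFilter(Camera.ACTION_NEW_PICTURE));
        }
    }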
diff --git a/core/java/android/hardware/usb/UsbManager.java b/core/java/android/hardware/usb/UsbManager.java
index 67d200c..b548623 100644
--- a/core/java/android/hardware/usb/UsbManager.java
+++ b/core/java/android/hardware/usb/UsbManager.java
@@ -409,9 +409,10 @@ public class UsbManager {
/**
* Sets the current USB function.
+ * If function is null, then the current function is set to the default function.
*
- * @param function name of the USB function
- * @param makeDefault true if this should be set as the default
+ * @param function name of the USB function, or null to restore the default function
+ * @param makeDefault true if the function should be set as the new default function
*
* {@hide}
*/
diff --git a/core/java/android/service/wallpaper/WallpaperService.java b/core/java/android/service/wallpaper/WallpaperService.java
index 8fc8b9d..c51ba2a 100644
--- a/core/java/android/service/wallpaper/WallpaperService.java
+++ b/core/java/android/service/wallpaper/WallpaperService.java
@@ -51,7 +51,7 @@ import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import android.view.WindowManager;
import android.view.WindowManagerImpl;
import android.view.WindowManagerPolicy;
@@ -650,7 +650,7 @@ public abstract class WallpaperService extends Service {
mWindowToken = wrapper.mWindowToken;
mSurfaceHolder.setSizeFromLayout();
mInitializing = true;
- mSession = ViewAncestor.getWindowSession(getMainLooper());
+ mSession = ViewRootImpl.getWindowSession(getMainLooper());
mWindow.setSession(mSession);
diff --git a/core/java/android/speech/tts/AudioPlaybackHandler.java b/core/java/android/speech/tts/AudioPlaybackHandler.java
index 96864c4..255b333 100644
--- a/core/java/android/speech/tts/AudioPlaybackHandler.java
+++ b/core/java/android/speech/tts/AudioPlaybackHandler.java
@@ -418,22 +418,30 @@ class AudioPlaybackHandler {
if (DBG) Log.d(TAG, "handleSynthesisDone()");
final AudioTrack audioTrack = params.getAudioTrack();
- try {
- if (audioTrack != null) {
- if (DBG) Log.d(TAG, "Waiting for audio track to complete : " +
- audioTrack.hashCode());
- blockUntilDone(params);
- if (DBG) Log.d(TAG, "Releasing audio track [" + audioTrack.hashCode() + "]");
- // The last call to AudioTrack.write( ) will return only after
- // all data from the audioTrack has been sent to the mixer, so
- // it's safe to release at this point.
- audioTrack.release();
- }
- } finally {
+ if (audioTrack == null) {
+ return;
+ }
+
+ if (DBG) Log.d(TAG, "Waiting for audio track to complete : " +
+ audioTrack.hashCode());
+ blockUntilDone(params);
+ if (DBG) Log.d(TAG, "Releasing audio track [" + audioTrack.hashCode() + "]");
+
+ // The last call to AudioTrack.write( ) will return only after
+ // all data from the audioTrack has been sent to the mixer, so
+ // it's safe to release at this point. Make sure release() and the call
+ // that set the audio track to null are performed atomically.
+ synchronized (this) {
+ // Never allow the audioTrack to be observed in a state where
+ // it is released but non null. The only case this might happen
+ // is in the various stopFoo methods that call AudioTrack#stop from
+ // different threads, but they are synchronized on AudioPlayBackHandler#this
+ // too.
+ audioTrack.release();
params.setAudioTrack(null);
- params.getDispatcher().dispatchUtteranceCompleted();
- mLastSynthesisRequest = null;
}
+ params.getDispatcher().dispatchUtteranceCompleted();
+ mLastSynthesisRequest = null;
}
private static void blockUntilDone(SynthesisMessageParams params) {
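The rewrite above keeps AudioTrack#release() and the null-ing of the track inside one monitor, so no other thread can ever observe a released but non-null track. The same invariant in isolation looks roughly like this (the holder class is illustrative, not framework code):

    import android.media.AudioTrack;

    class TrackHolder {
        private AudioTrack mAudioTrack;   // shared between playback and stop threads

        synchronized void setTrack(AudioTrack track) {
            mAudioTrack = track;
        }

        // Release and clear under the same lock so the track is never seen
        // in a released-but-non-null state.
        synchronized void releaseTrack() {
            if (mAudioTrack != null) {
                mAudioTrack.release();
                mAudioTrack = null;
            }
        }
    }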
diff --git a/core/java/android/util/Config.java b/core/java/android/util/Config.java
new file mode 100644
index 0000000..70dc9aa
--- /dev/null
+++ b/core/java/android/util/Config.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.util;
+
+/**
+ * @deprecated This class is not useful, it just returns the same value for
+ * all constants, and has always done this. Do not use it.
+ */
+@Deprecated
+public final class Config {
+ /** @hide */ public Config() {}
+
+ /**
+ * @deprecated Always false.
+ */
+ @Deprecated
+ public static final boolean DEBUG = false;
+
+ /**
+ * @deprecated Always true.
+ */
+ @Deprecated
+ public static final boolean RELEASE = true;
+
+ /**
+ * @deprecated Always false.
+ */
+ @Deprecated
+ public static final boolean PROFILE = false;
+
+ /**
+ * @deprecated Always false.
+ */
+ @Deprecated
+ public static final boolean LOGV = false;
+
+ /**
+ * @deprecated Always true.
+ */
+ @Deprecated
+ public static final boolean LOGD = true;
+}
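Because every constant in the deprecated Config class is fixed, code that gates logging on it never changes behavior at runtime; Log.isLoggable() is the usual replacement. A hedged migration sketch (tag and class are illustrative):

    import android.util.Config;
    import android.util.Log;

    class LoggingExample {
        private static final String TAG = "LoggingExample";
        // Old style: Config.LOGV is always false, so this branch is dead code.
        private static final boolean OLD_VERBOSE = Config.LOGV;
        // Replacement: toggled per tag at runtime, e.g.
        // `adb shell setprop log.tag.LoggingExample VERBOSE`.
        private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);

        static void report(String message) {
            if (VERBOSE) {
                Log.v(TAG, message);
            }
        }
    }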
diff --git a/core/java/android/util/Patterns.java b/core/java/android/util/Patterns.java
index 3bcd266..152827d 100644
--- a/core/java/android/util/Patterns.java
+++ b/core/java/android/util/Patterns.java
@@ -25,9 +25,9 @@ import java.util.regex.Pattern;
public class Patterns {
/**
* Regular expression to match all IANA top-level domains.
- * List accurate as of 2010/05/06. List taken from:
+ * List accurate as of 2011/07/18. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
- * This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
+ * This pattern is auto-generated by frameworks/ex/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR =
"((aero|arpa|asia|a[cdefgilmnoqrstuwxz])"
@@ -53,7 +53,7 @@ public class Patterns {
+ "|u[agksyz]"
+ "|v[aceginu]"
+ "|w[fs]"
- + "|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)"
+ + "|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)"
+ "|y[et]"
+ "|z[amw])";
@@ -65,9 +65,9 @@ public class Patterns {
/**
* Regular expression to match all IANA top-level domains for WEB_URL.
- * List accurate as of 2010/05/06. List taken from:
+ * List accurate as of 2011/07/18. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
- * This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
+ * This pattern is auto-generated by frameworks/ex/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL =
"(?:"
@@ -94,7 +94,7 @@ public class Patterns {
+ "|u[agksyz]"
+ "|v[aceginu]"
+ "|w[fs]"
- + "|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)"
+ + "|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)"
+ "|y[et]"
+ "|z[amw]))";
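The refreshed lists feed Patterns.TOP_LEVEL_DOMAIN and Patterns.WEB_URL, so hosts under the newly added IDN TLDs now match; callers do not change. A typical consumer, for reference (the helper class is illustrative):

    import android.util.Patterns;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.regex.Matcher;

    class LinkScanner {
        // Collect every web URL found in a piece of text.
        static List<String> findUrls(CharSequence text) {
            List<String> urls = new ArrayList<String>();
            Matcher matcher = Patterns.WEB_URL.matcher(text);
            while (matcher.find()) {
                urls.add(matcher.group());
            }
            return urls;
        }
    }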
diff --git a/core/java/android/view/ActionMode.java b/core/java/android/view/ActionMode.java
index bfafa98..e954983 100644
--- a/core/java/android/view/ActionMode.java
+++ b/core/java/android/view/ActionMode.java
@@ -23,6 +23,36 @@ package android.view;
* Examples of good action modes include selection modes, search, content editing, etc.
*/
public abstract class ActionMode {
+ private Object mTag;
+
+ /**
+ * Set a tag object associated with this ActionMode.
+ *
+ * <p>Like the tag available to views, this allows applications to associate arbitrary
+ * data with an ActionMode for later reference.
+ *
+ * @param tag Tag to associate with this ActionMode
+ *
+ * @see #getTag()
+ */
+ public void setTag(Object tag) {
+ mTag = tag;
+ }
+
+ /**
+ * Retrieve the tag object associated with this ActionMode.
+ *
+ * <p>Like the tag available to views, this allows applications to associate arbitrary
+ * data with an ActionMode for later reference.
+ *
+ * @return Tag associated with this ActionMode
+ *
+ * @see #setTag(Object)
+ */
+ public Object getTag() {
+ return mTag;
+ }
+
/**
* Set the title of the action mode. This method will have no visible effect if
* a custom view has been set.
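A sketch of the new tag accessors used from an ActionMode.Callback (the stored item id is an arbitrary illustrative payload):

    import android.view.ActionMode;
    import android.view.Menu;
    import android.view.MenuItem;

    class SelectionCallback implements ActionMode.Callback {
        private final long mSelectedItemId;

        SelectionCallback(long selectedItemId) {
            mSelectedItemId = selectedItemId;
        }

        @Override
        public boolean onCreateActionMode(ActionMode mode, Menu menu) {
            mode.setTag(mSelectedItemId);        // stash data on the mode itself
            return true;
        }

        @Override
        public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
            return false;
        }

        @Override
        public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
            Long itemId = (Long) mode.getTag();  // read it back when an action fires
            return itemId != null;
        }

        @Override
        public void onDestroyActionMode(ActionMode mode) {
        }
    }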
diff --git a/core/java/android/view/GLES20Canvas.java b/core/java/android/view/GLES20Canvas.java
index 4987e2f..80244bb 100644
--- a/core/java/android/view/GLES20Canvas.java
+++ b/core/java/android/view/GLES20Canvas.java
@@ -286,6 +286,38 @@ class GLES20Canvas extends HardwareCanvas {
private static native boolean nCallDrawGLFunction(int renderer, int drawGLFunction);
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Memory
+ ///////////////////////////////////////////////////////////////////////////
+
+ /**
+ * @see #flushCaches(int)
+ */
+ public static final int FLUSH_CACHES_MODERATE = 0;
+
+ /**
+ * @see #flushCaches(int)
+ */
+ public static final int FLUSH_CACHES_FULL = 1;
+
+ /**
+ * Flush caches to reclaim as much memory as possible. The amount of memory
+ * to reclaim is indicated by the level parameter.
+ *
+ * The level can be one of {@link #FLUSH_CACHES_MODERATE} or
+ * {@link #FLUSH_CACHES_FULL}.
+ *
+ * @param level Hint about the amount of memory to reclaim
+ *
+ * @hide
+ */
+ public static void flushCaches(int level) {
+ nFlushCaches(level);
+ }
+
+ private static native void nFlushCaches(int level);
+
///////////////////////////////////////////////////////////////////////////
// Display list
///////////////////////////////////////////////////////////////////////////
diff --git a/core/java/android/view/HardwareRenderer.java b/core/java/android/view/HardwareRenderer.java
index 011e44c..9a2564f 100644
--- a/core/java/android/view/HardwareRenderer.java
+++ b/core/java/android/view/HardwareRenderer.java
@@ -17,6 +17,7 @@
package android.view;
+import android.content.ComponentCallbacks;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
@@ -263,6 +264,18 @@ public abstract class HardwareRenderer {
}
/**
+ * Invoke this method when the system is running out of memory. This
+ * method will attempt to recover as much memory as possible, based on
+ * the specified hint.
+ *
+ * @param level Hint about the amount of memory that should be trimmed,
+ * see {@link android.content.ComponentCallbacks}
+ */
+ static void trimMemory(int level) {
+ Gl20Renderer.flushCaches(level);
+ }
+
+ /**
* Indicates whether hardware acceleration is currently enabled.
*
* @return True if hardware acceleration is in use, false otherwise.
@@ -858,5 +871,16 @@ public abstract class HardwareRenderer {
}
return null;
}
+
+ static void flushCaches(int level) {
+ switch (level) {
+ case ComponentCallbacks.TRIM_MEMORY_MODERATE:
+ GLES20Canvas.flushCaches(GLES20Canvas.FLUSH_CACHES_MODERATE);
+ break;
+ case ComponentCallbacks.TRIM_MEMORY_COMPLETE:
+ GLES20Canvas.flushCaches(GLES20Canvas.FLUSH_CACHES_FULL);
+ break;
+ }
+ }
}
}
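ActivityThread.handleTrimMemory() now forwards the trim level into WindowManagerImpl, which ends in the cache flush above; applications receive the same levels through onTrimMemory(). A hedged app-side sketch, assuming the ComponentCallbacks#onTrimMemory callback these constants belong to in this snapshot (the cache field is illustrative):

    import android.app.Activity;
    import android.content.ComponentCallbacks;

    public class TrimAwareActivity extends Activity {
        private Object mThumbnailCache;   // illustrative in-memory cache

        @Override
        public void onTrimMemory(int level) {
            super.onTrimMemory(level);
            // Same constants HardwareRenderer.flushCaches() switches on: drop
            // app-side caches so the process shrinks along with the GL caches.
            if (level == ComponentCallbacks.TRIM_MEMORY_MODERATE
                    || level == ComponentCallbacks.TRIM_MEMORY_COMPLETE) {
                mThumbnailCache = null;
            }
        }
    }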
diff --git a/core/java/android/view/SurfaceView.java b/core/java/android/view/SurfaceView.java
index 764899f..cbdb38e 100644
--- a/core/java/android/view/SurfaceView.java
+++ b/core/java/android/view/SurfaceView.java
@@ -426,7 +426,7 @@ public class SurfaceView extends View {
if (!mHaveFrame) {
return;
}
- ViewAncestor viewRoot = (ViewAncestor) getRootView().getParent();
+ ViewRootImpl viewRoot = (ViewRootImpl) getRootView().getParent();
if (viewRoot != null) {
mTranslator = viewRoot.mTranslator;
}
diff --git a/core/java/android/view/TextureView.java b/core/java/android/view/TextureView.java
index d656f31..96d6f09 100644
--- a/core/java/android/view/TextureView.java
+++ b/core/java/android/view/TextureView.java
@@ -20,6 +20,7 @@ import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
+import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.util.AttributeSet;
import android.util.Log;
@@ -107,6 +108,14 @@ public class TextureView extends View {
private SurfaceTexture.OnFrameAvailableListener mUpdateListener;
+ private Canvas mCanvas;
+ private int mSaveCount;
+
+ private final Object[] mNativeWindowLock = new Object[0];
+ // Used from native code, do not write!
+ @SuppressWarnings({"UnusedDeclaration"})
+ private int mNativeWindow;
+
/**
* Creates a new TextureView.
*
@@ -190,7 +199,11 @@ public class TextureView extends View {
mListener.onSurfaceTextureDestroyed(mSurface);
}
- mLayer.destroy();
+ synchronized (mNativeWindowLock) {
+ nDestroyNativeWindow();
+ }
+
+ mLayer.destroy();
mSurface = null;
mLayer = null;
}
@@ -274,6 +287,7 @@ public class TextureView extends View {
mLayer = mAttachInfo.mHardwareRenderer.createHardwareLayer(mOpaque);
mSurface = mAttachInfo.mHardwareRenderer.createSurfaceTexture(mLayer);
nSetDefaultBufferSize(mSurface, getWidth(), getHeight());
+ nCreateNativeWindow(mSurface);
mUpdateListener = new SurfaceTexture.OnFrameAvailableListener() {
@Override
@@ -431,6 +445,79 @@ public class TextureView extends View {
}
/**
+ * <p>Start editing the pixels in the surface. The returned Canvas can be used
+ * to draw into the surface's bitmap. A null is returned if the surface has
+ * not been created or otherwise cannot be edited. You will usually need
+ * to implement
+ * {@link SurfaceTextureListener#onSurfaceTextureAvailable(android.graphics.SurfaceTexture, int, int)}
+ * to find out when the Surface is available for use.</p>
+ *
+ * <p>The content of the Surface is never preserved between unlockCanvas()
+ * and lockCanvas(), for this reason, every pixel within the Surface area
+ * must be written. The only exception to this rule is when a dirty
+ * rectangle is specified, in which case, non-dirty pixels will be
+ * preserved.</p>
+ *
+ * @return A Canvas used to draw into the surface.
+ *
+ * @see #lockCanvas(android.graphics.Rect)
+ * @see #unlockCanvasAndPost(android.graphics.Canvas)
+ */
+ public Canvas lockCanvas() {
+ return lockCanvas(null);
+ }
+
+ /**
+ * Just like {@link #lockCanvas()} but allows specification of a dirty
+ * rectangle. Every pixel within that rectangle must be written; however
+ * pixels outside the dirty rectangle will be preserved by the next call
+ * to lockCanvas().
+ *
+ * @param dirty Area of the surface that will be modified.
+
+ * @return A Canvas used to draw into the surface.
+ *
+ * @see #lockCanvas()
+ * @see #unlockCanvasAndPost(android.graphics.Canvas)
+ */
+ public Canvas lockCanvas(Rect dirty) {
+ if (!isAvailable()) return null;
+
+ if (mCanvas == null) {
+ mCanvas = new Canvas();
+ }
+
+ synchronized (mNativeWindowLock) {
+ nLockCanvas(mNativeWindow, mCanvas, dirty);
+ }
+ mSaveCount = mCanvas.save();
+
+ return mCanvas;
+ }
+
+ /**
+ * Finish editing pixels in the surface. After this call, the surface's
+ * current pixels will be shown on the screen, but its content is lost,
+ * in particular there is no guarantee that the content of the Surface
+ * will remain unchanged when lockCanvas() is called again.
+ *
+ * @param canvas The Canvas previously returned by lockCanvas()
+ *
+ * @see #lockCanvas()
+ * @see #lockCanvas(android.graphics.Rect)
+ */
+ public void unlockCanvasAndPost(Canvas canvas) {
+ if (mCanvas != null && canvas == mCanvas) {
+ canvas.restoreToCount(mSaveCount);
+ mSaveCount = 0;
+
+ synchronized (mNativeWindowLock) {
+ nUnlockCanvasAndPost(mNativeWindow, mCanvas);
+ }
+ }
+ }
+
+ /**
* Returns the {@link SurfaceTexture} used by this view. This method
* may return null if the view is not attached to a window or if the surface
* texture has not been initialized yet.
@@ -506,6 +593,12 @@ public class TextureView extends View {
public void onSurfaceTextureUpdated(SurfaceTexture surface);
}
+ private native void nCreateNativeWindow(SurfaceTexture surface);
+ private native void nDestroyNativeWindow();
+
private static native void nSetDefaultBufferSize(SurfaceTexture surfaceTexture,
int width, int height);
+
+ private static native void nLockCanvas(int nativeWindow, Canvas canvas, Rect dirty);
+ private static native void nUnlockCanvasAndPost(int nativeWindow, Canvas canvas);
}
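A sketch of drawing one software frame through the new lockCanvas()/unlockCanvasAndPost() pair (the painter class and drawing content are illustrative; callers typically drive it from SurfaceTextureListener#onSurfaceTextureAvailable or from a render thread of their own):

    import android.graphics.Canvas;
    import android.graphics.Color;
    import android.graphics.Paint;
    import android.view.TextureView;

    class TextureViewPainter {
        private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);

        void drawFrame(TextureView view) {
            if (!view.isAvailable()) {
                return;   // lockCanvas() returns null before the surface exists
            }
            Canvas canvas = view.lockCanvas();   // no dirty rect: repaint every pixel
            if (canvas == null) {
                return;
            }
            try {
                canvas.drawColor(Color.BLACK);
                mPaint.setColor(Color.WHITE);
                canvas.drawCircle(canvas.getWidth() / 2f, canvas.getHeight() / 2f, 80f, mPaint);
            } finally {
                view.unlockCanvasAndPost(canvas);   // push the frame to the SurfaceTexture
            }
        }
    }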
diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java
index 4385c2f..59cb216 100644
--- a/core/java/android/view/View.java
+++ b/core/java/android/view/View.java
@@ -5043,9 +5043,9 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
}
/** Gets the ViewAncestor, or null if not attached. */
- /*package*/ ViewAncestor getViewAncestor() {
+ /*package*/ ViewRootImpl getViewRootImpl() {
View root = getRootView();
- return root != null ? (ViewAncestor)root.getParent() : null;
+ return root != null ? (ViewRootImpl)root.getParent() : null;
}
/**
@@ -5061,7 +5061,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
public final boolean requestFocusFromTouch() {
// Leave touch mode if we need to
if (isInTouchMode()) {
- ViewAncestor viewRoot = getViewAncestor();
+ ViewRootImpl viewRoot = getViewRootImpl();
if (viewRoot != null) {
viewRoot.ensureTouchMode(false);
}
@@ -5653,7 +5653,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
if (mAttachInfo != null) {
return mAttachInfo.mInTouchMode;
} else {
- return ViewAncestor.isInTouchMode();
+ return ViewRootImpl.isInTouchMode();
}
}
@@ -8254,7 +8254,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
handler = attachInfo.mHandler;
} else {
// Assume that post will succeed later
- ViewAncestor.getRunQueue().post(action);
+ ViewRootImpl.getRunQueue().post(action);
return true;
}
@@ -8284,7 +8284,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
handler = attachInfo.mHandler;
} else {
// Assume that post will succeed later
- ViewAncestor.getRunQueue().postDelayed(action, delayMillis);
+ ViewRootImpl.getRunQueue().postDelayed(action, delayMillis);
return true;
}
@@ -8308,7 +8308,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
handler = attachInfo.mHandler;
} else {
// Assume that post will succeed later
- ViewAncestor.getRunQueue().removeCallbacks(action);
+ ViewRootImpl.getRunQueue().removeCallbacks(action);
return true;
}
@@ -10756,12 +10756,14 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
// Remember our drawn bit
int drawn = mPrivateFlags & DRAWN;
- // Invalidate our old position
- invalidate(true);
-
-
int oldWidth = mRight - mLeft;
int oldHeight = mBottom - mTop;
+ int newWidth = right - left;
+ int newHeight = bottom - top;
+ boolean sizeChanged = (newWidth != oldWidth) || (newHeight != oldHeight);
+
+ // Invalidate our old position
+ invalidate(sizeChanged);
mLeft = left;
mTop = top;
@@ -10770,10 +10772,8 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
mPrivateFlags |= HAS_BOUNDS;
- int newWidth = right - left;
- int newHeight = bottom - top;
- if (newWidth != oldWidth || newHeight != oldHeight) {
+ if (sizeChanged) {
if ((mPrivateFlags & PIVOT_EXPLICITLY_SET) == 0) {
// A change in dimension means an auto-centered pivot point changes, too
mMatrixDirty = true;
@@ -10788,7 +10788,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
// before this call to setFrame came in, thereby clearing
// the DRAWN bit.
mPrivateFlags |= DRAWN;
- invalidate(true);
+ invalidate(sizeChanged);
// parent display list may need to be recreated based on a change in the bounds
// of any child
invalidateParentCaches();
@@ -11580,9 +11580,9 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
viewParent = view.mParent;
}
- if (viewParent instanceof ViewAncestor) {
+ if (viewParent instanceof ViewRootImpl) {
// *cough*
- final ViewAncestor vr = (ViewAncestor)viewParent;
+ final ViewRootImpl vr = (ViewRootImpl)viewParent;
location[1] -= vr.mCurScrollY;
}
}
@@ -12709,7 +12709,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
surface.unlockCanvasAndPost(canvas);
}
- final ViewAncestor root = getViewAncestor();
+ final ViewRootImpl root = getViewRootImpl();
// Cache the local state object for delivery with DragEvents
root.setLocalDragState(myLocalState);
@@ -13916,7 +13916,7 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
Canvas mCanvas;
/**
- * A Handler supplied by a view's {@link android.view.ViewAncestor}. This
+ * A Handler supplied by a view's {@link android.view.ViewRootImpl}. This
* handler can be used to pump events in the UI events queue.
*/
final Handler mHandler;
diff --git a/core/java/android/view/ViewDebug.java b/core/java/android/view/ViewDebug.java
index f7f5a21..b85159b 100644
--- a/core/java/android/view/ViewDebug.java
+++ b/core/java/android/view/ViewDebug.java
@@ -366,7 +366,7 @@ public class ViewDebug {
}
private static BufferedWriter sHierarchyTraces;
- private static ViewAncestor sHierarhcyRoot;
+ private static ViewRootImpl sHierarhcyRoot;
private static String sHierarchyTracePrefix;
/**
@@ -415,7 +415,7 @@ public class ViewDebug {
* @hide
*/
public static long getViewAncestorInstanceCount() {
- return Debug.countInstancesOfClass(ViewAncestor.class);
+ return Debug.countInstancesOfClass(ViewRootImpl.class);
}
/**
@@ -748,7 +748,7 @@ public class ViewDebug {
return;
}
- sHierarhcyRoot = (ViewAncestor) view.getRootView().getParent();
+ sHierarhcyRoot = (ViewRootImpl) view.getRootView().getParent();
}
/**
@@ -1100,7 +1100,7 @@ public class ViewDebug {
private static void outputDisplayList(View root, String parameter) throws IOException {
final View view = findView(root, parameter);
- view.getViewAncestor().outputDisplayList(view);
+ view.getViewRootImpl().outputDisplayList(view);
}
private static void capture(View root, final OutputStream clientStream, String parameter)
diff --git a/core/java/android/view/ViewGroup.java b/core/java/android/view/ViewGroup.java
index c333c0a..5624eb4 100644
--- a/core/java/android/view/ViewGroup.java
+++ b/core/java/android/view/ViewGroup.java
@@ -954,7 +954,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
final float tx = event.mX;
final float ty = event.mY;
- ViewAncestor root = getViewAncestor();
+ ViewRootImpl root = getViewRootImpl();
// Dispatch down the view hierarchy
switch (event.mAction) {
@@ -3183,6 +3183,24 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
}
/**
+ * @hide
+ */
+ protected void onViewAdded(View child) {
+ if (mOnHierarchyChangeListener != null) {
+ mOnHierarchyChangeListener.onChildViewAdded(this, child);
+ }
+ }
+
+ /**
+ * @hide
+ */
+ protected void onViewRemoved(View child) {
+ if (mOnHierarchyChangeListener != null) {
+ mOnHierarchyChangeListener.onChildViewRemoved(this, child);
+ }
+ }
+
+ /**
* Adds a view during layout. This is useful if in your onLayout() method,
* you need to add more views (as does the list view for example).
*
@@ -3283,9 +3301,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
ai.mKeepScreenOn = lastKeepOn;
}
- if (mOnHierarchyChangeListener != null) {
- mOnHierarchyChangeListener.onChildViewAdded(this, child);
- }
+ onViewAdded(child);
if ((child.mViewFlags & DUPLICATE_PARENT_STATE) == DUPLICATE_PARENT_STATE) {
mGroupFlags |= FLAG_NOTIFY_CHILDREN_ON_DRAWABLE_STATE_CHANGE;
@@ -3486,9 +3502,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
view.dispatchDetachedFromWindow();
}
- if (mOnHierarchyChangeListener != null) {
- mOnHierarchyChangeListener.onChildViewRemoved(this, view);
- }
+ onViewRemoved(view);
needGlobalAttributesUpdate(false);
@@ -3533,8 +3547,6 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
}
private void removeViewsInternal(int start, int count) {
- final OnHierarchyChangeListener onHierarchyChangeListener = mOnHierarchyChangeListener;
- final boolean notifyListener = onHierarchyChangeListener != null;
final View focused = mFocused;
final boolean detach = mAttachInfo != null;
View clearChildFocus = null;
@@ -3563,9 +3575,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
needGlobalAttributesUpdate(false);
- if (notifyListener) {
- onHierarchyChangeListener.onChildViewRemoved(this, view);
- }
+ onViewRemoved(view);
}
removeFromArray(start, count);
@@ -3603,8 +3613,6 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
final View[] children = mChildren;
mChildrenCount = 0;
- final OnHierarchyChangeListener listener = mOnHierarchyChangeListener;
- final boolean notify = listener != null;
final View focused = mFocused;
final boolean detach = mAttachInfo != null;
View clearChildFocus = null;
@@ -3630,9 +3638,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
view.dispatchDetachedFromWindow();
}
- if (notify) {
- listener.onChildViewRemoved(this, view);
- }
+ onViewRemoved(view);
view.mParent = null;
children[i] = null;
@@ -3672,9 +3678,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
child.dispatchDetachedFromWindow();
}
- if (mOnHierarchyChangeListener != null) {
- mOnHierarchyChangeListener.onChildViewRemoved(this, child);
- }
+ onViewRemoved(child);
}
/**
@@ -3839,13 +3843,13 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
if (drawAnimation) {
if (view != null) {
view.mPrivateFlags |= DRAW_ANIMATION;
- } else if (parent instanceof ViewAncestor) {
- ((ViewAncestor) parent).mIsAnimating = true;
+ } else if (parent instanceof ViewRootImpl) {
+ ((ViewRootImpl) parent).mIsAnimating = true;
}
}
- if (parent instanceof ViewAncestor) {
- ((ViewAncestor) parent).invalidate();
+ if (parent instanceof ViewRootImpl) {
+ ((ViewRootImpl) parent).invalidate();
parent = null;
} else if (view != null) {
if ((view.mPrivateFlags & DRAWN) == DRAWN ||
@@ -3902,8 +3906,8 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
if (drawAnimation) {
if (view != null) {
view.mPrivateFlags |= DRAW_ANIMATION;
- } else if (parent instanceof ViewAncestor) {
- ((ViewAncestor) parent).mIsAnimating = true;
+ } else if (parent instanceof ViewRootImpl) {
+ ((ViewRootImpl) parent).mIsAnimating = true;
}
}
@@ -4431,7 +4435,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
// If this group is dirty, check that the parent is dirty as well
if ((mPrivateFlags & DIRTY_MASK) != 0) {
final ViewParent parent = getParent();
- if (parent != null && !(parent instanceof ViewAncestor)) {
+ if (parent != null && !(parent instanceof ViewRootImpl)) {
if ((((View) parent).mPrivateFlags & DIRTY_MASK) == 0) {
result = false;
android.util.Log.d(ViewDebug.CONSISTENCY_LOG_TAG,
@@ -5000,7 +5004,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
* @hide
*/
public void requestTransitionStart(LayoutTransition transition) {
- ViewAncestor viewAncestor = getViewAncestor();
+ ViewRootImpl viewAncestor = getViewRootImpl();
viewAncestor.requestTransitionStart(transition);
}
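
The new onViewAdded()/onViewRemoved() hooks give ViewGroup subclasses a single place to react to children being added or removed, while the base implementations keep firing the OnHierarchyChangeListener callbacks; the GridLayout change later in this diff switches to them. A sketch of a hypothetical subclass using the hooks (illustrative only; at this revision the methods are @hide, so this assumes framework-internal code):

    import android.content.Context;
    import android.view.View;
    import android.widget.FrameLayout;

    class ChildCountingLayout extends FrameLayout {
        private int mChildTally;                    // hypothetical bookkeeping

        ChildCountingLayout(Context context) {
            super(context);
        }

        @Override
        protected void onViewAdded(View child) {
            super.onViewAdded(child);               // keep the hierarchy-change listener working
            mChildTally++;
        }

        @Override
        protected void onViewRemoved(View child) {
            super.onViewRemoved(child);
            mChildTally--;
        }
    }
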
diff --git a/core/java/android/view/ViewAncestor.java b/core/java/android/view/ViewRootImpl.java
index ac73611..470493d 100644
--- a/core/java/android/view/ViewAncestor.java
+++ b/core/java/android/view/ViewRootImpl.java
@@ -92,7 +92,7 @@ import java.util.List;
* {@hide}
*/
@SuppressWarnings({"EmptyCatchBlock", "PointlessBooleanExpression"})
-public final class ViewAncestor extends Handler implements ViewParent,
+public final class ViewRootImpl extends Handler implements ViewParent,
View.AttachInfo.Callbacks, HardwareRenderer.HardwareDrawCallbacks {
private static final String TAG = "ViewAncestor";
private static final boolean DBG = false;
@@ -303,7 +303,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
}
- public ViewAncestor(Context context) {
+ public ViewRootImpl(Context context) {
super();
if (MEASURE_LATENCY) {
@@ -3807,14 +3807,14 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
static class InputMethodCallback extends IInputMethodCallback.Stub {
- private WeakReference<ViewAncestor> mViewAncestor;
+ private WeakReference<ViewRootImpl> mViewAncestor;
- public InputMethodCallback(ViewAncestor viewAncestor) {
- mViewAncestor = new WeakReference<ViewAncestor>(viewAncestor);
+ public InputMethodCallback(ViewRootImpl viewAncestor) {
+ mViewAncestor = new WeakReference<ViewRootImpl>(viewAncestor);
}
public void finishedEvent(int seq, boolean handled) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchFinishedEvent(seq, handled);
}
@@ -3826,15 +3826,15 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
static class W extends IWindow.Stub {
- private final WeakReference<ViewAncestor> mViewAncestor;
+ private final WeakReference<ViewRootImpl> mViewAncestor;
- W(ViewAncestor viewAncestor) {
- mViewAncestor = new WeakReference<ViewAncestor>(viewAncestor);
+ W(ViewRootImpl viewAncestor) {
+ mViewAncestor = new WeakReference<ViewRootImpl>(viewAncestor);
}
public void resized(int w, int h, Rect coveredInsets, Rect visibleInsets,
boolean reportDraw, Configuration newConfig) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchResized(w, h, coveredInsets, visibleInsets, reportDraw,
newConfig);
@@ -3842,21 +3842,21 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
public void dispatchAppVisibility(boolean visible) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchAppVisibility(visible);
}
}
public void dispatchGetNewSurface() {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchGetNewSurface();
}
}
public void windowFocusChanged(boolean hasFocus, boolean inTouchMode) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.windowFocusChanged(hasFocus, inTouchMode);
}
@@ -3872,7 +3872,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
public void executeCommand(String command, String parameters, ParcelFileDescriptor out) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
final View view = viewAncestor.mView;
if (view != null) {
@@ -3903,7 +3903,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
public void closeSystemDialogs(String reason) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchCloseSystemDialogs(reason);
}
@@ -3931,14 +3931,14 @@ public final class ViewAncestor extends Handler implements ViewParent,
/* Drag/drop */
public void dispatchDragEvent(DragEvent event) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchDragEvent(event);
}
}
public void dispatchSystemUiVisibilityChanged(int visibility) {
- final ViewAncestor viewAncestor = mViewAncestor.get();
+ final ViewRootImpl viewAncestor = mViewAncestor.get();
if (viewAncestor != null) {
viewAncestor.dispatchSystemUiVisibilityChanged(visibility);
}
@@ -4269,7 +4269,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
if (!registered) {
mAttachInfo.mAccessibilityWindowId =
mAccessibilityManager.addAccessibilityInteractionConnection(mWindow,
- new AccessibilityInteractionConnection(ViewAncestor.this));
+ new AccessibilityInteractionConnection(ViewRootImpl.this));
}
}
@@ -4289,10 +4289,10 @@ public final class ViewAncestor extends Handler implements ViewParent,
*/
final class AccessibilityInteractionConnection
extends IAccessibilityInteractionConnection.Stub {
- private final WeakReference<ViewAncestor> mViewAncestor;
+ private final WeakReference<ViewRootImpl> mViewAncestor;
- AccessibilityInteractionConnection(ViewAncestor viewAncestor) {
- mViewAncestor = new WeakReference<ViewAncestor>(viewAncestor);
+ AccessibilityInteractionConnection(ViewRootImpl viewAncestor) {
+ mViewAncestor = new WeakReference<ViewRootImpl>(viewAncestor);
}
public void findAccessibilityNodeInfoByAccessibilityId(int accessibilityId,
@@ -4421,7 +4421,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
try {
FindByAccessibilitytIdPredicate predicate = mFindByAccessibilityIdPredicate;
predicate.init(accessibilityId);
- View root = ViewAncestor.this.mView;
+ View root = ViewRootImpl.this.mView;
View target = root.findViewByPredicate(predicate);
if (target != null && target.isShown()) {
info = target.createAccessibilityNodeInfo();
@@ -4453,7 +4453,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
AccessibilityNodeInfo info = null;
try {
- View root = ViewAncestor.this.mView;
+ View root = ViewRootImpl.this.mView;
View target = root.findViewById(viewId);
if (target != null && target.isShown()) {
info = target.createAccessibilityNodeInfo();
@@ -4499,7 +4499,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
if (accessibilityViewId != View.NO_ID) {
root = findViewByAccessibilityId(accessibilityViewId);
} else {
- root = ViewAncestor.this.mView;
+ root = ViewRootImpl.this.mView;
}
if (root == null || !root.isShown()) {
@@ -4624,7 +4624,7 @@ public final class ViewAncestor extends Handler implements ViewParent,
}
private View findViewByAccessibilityId(int accessibilityId) {
- View root = ViewAncestor.this.mView;
+ View root = ViewRootImpl.this.mView;
if (root == null) {
return null;
}
diff --git a/core/java/android/view/WindowManagerImpl.java b/core/java/android/view/WindowManagerImpl.java
index 54e7c04..a451bb5 100644
--- a/core/java/android/view/WindowManagerImpl.java
+++ b/core/java/android/view/WindowManagerImpl.java
@@ -16,18 +16,16 @@
package android.view;
-import java.util.HashMap;
-
import android.content.res.CompatibilityInfo;
import android.content.res.Configuration;
import android.graphics.PixelFormat;
import android.os.IBinder;
import android.util.AndroidRuntimeException;
import android.util.Log;
-import android.util.Slog;
-import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
+import java.util.HashMap;
+
final class WindowLeaked extends AndroidRuntimeException {
public WindowLeaked(String msg) {
super(msg);
@@ -80,7 +78,7 @@ public class WindowManagerImpl implements WindowManager {
public static final int ADD_PERMISSION_DENIED = -8;
private View[] mViews;
- private ViewAncestor[] mRoots;
+ private ViewRootImpl[] mRoots;
private WindowManager.LayoutParams[] mParams;
private final static Object sLock = new Object();
@@ -204,7 +202,7 @@ public class WindowManagerImpl implements WindowManager {
final WindowManager.LayoutParams wparams
= (WindowManager.LayoutParams)params;
- ViewAncestor root;
+ ViewRootImpl root;
View panelParentView = null;
synchronized (this) {
@@ -241,7 +239,7 @@ public class WindowManagerImpl implements WindowManager {
}
}
- root = new ViewAncestor(view.getContext());
+ root = new ViewRootImpl(view.getContext());
root.mAddNesting = 1;
if (cih == null) {
root.mCompatibilityInfo = new CompatibilityInfoHolder();
@@ -254,7 +252,7 @@ public class WindowManagerImpl implements WindowManager {
if (mViews == null) {
index = 1;
mViews = new View[1];
- mRoots = new ViewAncestor[1];
+ mRoots = new ViewRootImpl[1];
mParams = new WindowManager.LayoutParams[1];
} else {
index = mViews.length + 1;
@@ -262,7 +260,7 @@ public class WindowManagerImpl implements WindowManager {
mViews = new View[index];
System.arraycopy(old, 0, mViews, 0, index-1);
old = mRoots;
- mRoots = new ViewAncestor[index];
+ mRoots = new ViewRootImpl[index];
System.arraycopy(old, 0, mRoots, 0, index-1);
old = mParams;
mParams = new WindowManager.LayoutParams[index];
@@ -290,7 +288,7 @@ public class WindowManagerImpl implements WindowManager {
synchronized (this) {
int index = findViewLocked(view, true);
- ViewAncestor root = mRoots[index];
+ ViewRootImpl root = mRoots[index];
mParams[index] = wparams;
root.setLayoutParams(wparams, false);
}
@@ -312,7 +310,7 @@ public class WindowManagerImpl implements WindowManager {
public void removeViewImmediate(View view) {
synchronized (this) {
int index = findViewLocked(view, true);
- ViewAncestor root = mRoots[index];
+ ViewRootImpl root = mRoots[index];
View curView = root.getView();
root.mAddNesting = 0;
@@ -328,7 +326,7 @@ public class WindowManagerImpl implements WindowManager {
}
View removeViewLocked(int index) {
- ViewAncestor root = mRoots[index];
+ ViewRootImpl root = mRoots[index];
View view = root.getView();
// Don't really remove until we have matched all calls to add().
@@ -356,7 +354,7 @@ public class WindowManagerImpl implements WindowManager {
removeItem(tmpViews, mViews, index);
mViews = tmpViews;
- ViewAncestor[] tmpRoots = new ViewAncestor[count-1];
+ ViewRootImpl[] tmpRoots = new ViewRootImpl[count-1];
removeItem(tmpRoots, mRoots, index);
mRoots = tmpRoots;
@@ -383,7 +381,7 @@ public class WindowManagerImpl implements WindowManager {
//Log.i("foo", "@ " + i + " token " + mParams[i].token
// + " view " + mRoots[i].getView());
if (token == null || mParams[i].token == token) {
- ViewAncestor root = mRoots[i];
+ ViewRootImpl root = mRoots[i];
root.mAddNesting = 1;
//Log.i("foo", "Force closing " + root);
@@ -402,7 +400,16 @@ public class WindowManagerImpl implements WindowManager {
}
}
}
-
+
+ /**
+ * @param level See {@link android.content.ComponentCallbacks}
+ */
+ public void trimMemory(int level) {
+ if (HardwareRenderer.isAvailable()) {
+ HardwareRenderer.trimMemory(level);
+ }
+ }
+
public void setStoppedState(IBinder token, boolean stopped) {
synchronized (this) {
if (mViews == null)
@@ -410,7 +417,7 @@ public class WindowManagerImpl implements WindowManager {
int count = mViews.length;
for (int i=0; i<count; i++) {
if (token == null || mParams[i].token == token) {
- ViewAncestor root = mRoots[i];
+ ViewRootImpl root = mRoots[i];
root.setStopped(stopped);
}
}
@@ -422,7 +429,7 @@ public class WindowManagerImpl implements WindowManager {
int count = mViews.length;
config = new Configuration(config);
for (int i=0; i<count; i++) {
- ViewAncestor root = mRoots[i];
+ ViewRootImpl root = mRoots[i];
root.requestUpdateConfiguration(config);
}
}
@@ -430,13 +437,13 @@ public class WindowManagerImpl implements WindowManager {
public WindowManager.LayoutParams getRootViewLayoutParameter(View view) {
ViewParent vp = view.getParent();
- while (vp != null && !(vp instanceof ViewAncestor)) {
+ while (vp != null && !(vp instanceof ViewRootImpl)) {
vp = vp.getParent();
}
if (vp == null) return null;
- ViewAncestor vr = (ViewAncestor)vp;
+ ViewRootImpl vr = (ViewRootImpl)vp;
int N = mRoots.length;
for (int i = 0; i < N; ++i) {
@@ -456,8 +463,7 @@ public class WindowManagerImpl implements WindowManager {
return new Display(Display.DEFAULT_DISPLAY, null);
}
- private static void removeItem(Object[] dst, Object[] src, int index)
- {
+ private static void removeItem(Object[] dst, Object[] src, int index) {
if (dst.length > 0) {
if (index > 0) {
System.arraycopy(src, 0, dst, 0, index);
@@ -468,8 +474,7 @@ public class WindowManagerImpl implements WindowManager {
}
}
- private int findViewLocked(View view, boolean required)
- {
+ private int findViewLocked(View view, boolean required) {
synchronized (this) {
final int count = mViews != null ? mViews.length : 0;
for (int i=0; i<count; i++) {
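
The trimMemory(int) method added to WindowManagerImpl above forwards memory-pressure levels to the hardware renderer, and only when hardware rendering is available. A hypothetical caller (the wiring shown here is an assumption for illustration, not taken from this change):

    import android.view.WindowManagerImpl;

    class TrimMemoryExample {
        // level is a memory-trim level as documented for ComponentCallbacks
        // (see the javadoc on trimMemory above).
        static void forward(WindowManagerImpl wm, int level) {
            wm.trimMemory(level);                   // no-op when the hardware renderer is unavailable
        }
    }
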
diff --git a/core/java/android/view/inputmethod/BaseInputConnection.java b/core/java/android/view/inputmethod/BaseInputConnection.java
index abe3c2c..5ec1ec3 100644
--- a/core/java/android/view/inputmethod/BaseInputConnection.java
+++ b/core/java/android/view/inputmethod/BaseInputConnection.java
@@ -34,7 +34,7 @@ import android.util.LogPrinter;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;
import android.view.View;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
class ComposingText implements NoCopySpan {
}
@@ -502,7 +502,7 @@ public class BaseInputConnection implements InputConnection {
}
}
if (h != null) {
- h.sendMessage(h.obtainMessage(ViewAncestor.DISPATCH_KEY_FROM_IME,
+ h.sendMessage(h.obtainMessage(ViewRootImpl.DISPATCH_KEY_FROM_IME,
event));
}
}
diff --git a/core/java/android/view/inputmethod/InputMethodManager.java b/core/java/android/view/inputmethod/InputMethodManager.java
index a1a7281..da5baf8 100644
--- a/core/java/android/view/inputmethod/InputMethodManager.java
+++ b/core/java/android/view/inputmethod/InputMethodManager.java
@@ -26,6 +26,7 @@ import com.android.internal.view.IInputMethodSession;
import com.android.internal.view.InputBindResult;
import android.content.Context;
+import android.content.pm.PackageManager;
import android.graphics.Rect;
import android.os.Bundle;
import android.os.Handler;
@@ -42,7 +43,7 @@ import android.util.Printer;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import java.io.FileDescriptor;
import java.io.PrintWriter;
@@ -655,7 +656,7 @@ public final class InputMethodManager {
if (vh != null) {
// This will result in a call to reportFinishInputConnection()
// below.
- vh.sendMessage(vh.obtainMessage(ViewAncestor.FINISH_INPUT_CONNECTION,
+ vh.sendMessage(vh.obtainMessage(ViewRootImpl.FINISH_INPUT_CONNECTION,
mServedInputConnection));
}
}
@@ -1112,9 +1113,9 @@ public final class InputMethodManager {
void scheduleCheckFocusLocked(View view) {
Handler vh = view.getHandler();
- if (vh != null && !vh.hasMessages(ViewAncestor.CHECK_FOCUS)) {
+ if (vh != null && !vh.hasMessages(ViewRootImpl.CHECK_FOCUS)) {
// This will result in a call to checkFocus() below.
- vh.sendMessage(vh.obtainMessage(ViewAncestor.CHECK_FOCUS));
+ vh.sendMessage(vh.obtainMessage(ViewRootImpl.CHECK_FOCUS));
}
}
@@ -1580,16 +1581,16 @@ public final class InputMethodManager {
}
/**
- * Set additional input method subtypes.
- * @param imeToken Supplies the identifying token given to an input method.
+ * Set additional input method subtypes. Only a process that shares the same uid as the IME
+ * can add additional input method subtypes to the IME.
+ * @param imiId Id of the InputMethodInfo to which the additional input method subtypes will be added.
* @param subtypes subtypes will be added as additional subtypes of the current input method.
* @return true if the additional input method subtypes are successfully added.
*/
- public boolean setAdditionalInputMethodSubtypes(
- IBinder imeToken, InputMethodSubtype[] subtypes) {
+ public boolean setAdditionalInputMethodSubtypes(String imiId, InputMethodSubtype[] subtypes) {
synchronized (mH) {
try {
- return mService.setAdditionalInputMethodSubtypes(imeToken, subtypes);
+ return mService.setAdditionalInputMethodSubtypes(imiId, subtypes);
} catch (RemoteException e) {
Log.w(TAG, "IME died: " + mCurId, e);
return false;
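
The signature change above keys additional subtypes on an InputMethodInfo id instead of the IME token, and the new javadoc restricts callers to processes sharing the IME's uid. A hypothetical call from such a process (the subtype array is assumed to be built elsewhere):

    import android.view.inputmethod.InputMethodManager;
    import android.view.inputmethod.InputMethodSubtype;

    class AdditionalSubtypesExample {
        static boolean register(InputMethodManager imm, String imiId,
                InputMethodSubtype[] subtypes) {
            // Returns true if the additional subtypes were successfully added.
            return imm.setAdditionalInputMethodSubtypes(imiId, subtypes);
        }
    }
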
diff --git a/core/java/android/webkit/BrowserFrame.java b/core/java/android/webkit/BrowserFrame.java
index 5aa60f4..738bcb9 100644
--- a/core/java/android/webkit/BrowserFrame.java
+++ b/core/java/android/webkit/BrowserFrame.java
@@ -35,7 +35,7 @@ import android.os.Message;
import android.util.Log;
import android.util.TypedValue;
import android.view.Surface;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import android.view.WindowManager;
import junit.framework.Assert;
@@ -228,7 +228,7 @@ class BrowserFrame extends Handler {
sConfigCallback = new ConfigCallback(
(WindowManager) appContext.getSystemService(
Context.WINDOW_SERVICE));
- ViewAncestor.addConfigCallback(sConfigCallback);
+ ViewRootImpl.addConfigCallback(sConfigCallback);
}
sConfigCallback.addHandler(this);
diff --git a/core/java/android/webkit/WebViewCore.java b/core/java/android/webkit/WebViewCore.java
index 5414b79..d7a2526 100644
--- a/core/java/android/webkit/WebViewCore.java
+++ b/core/java/android/webkit/WebViewCore.java
@@ -2344,7 +2344,9 @@ public final class WebViewCore {
webViewWidth = mWebView.getViewWidth();
viewportWidth = (int) (webViewWidth / adjust);
if (viewportWidth == 0) {
- Log.w(LOGTAG, "Can't get the viewWidth after the first layout");
+ if (DebugFlags.WEB_VIEW_CORE) {
+ Log.v(LOGTAG, "Can't get the viewWidth yet");
+ }
}
} else {
webViewWidth = Math.round(viewportWidth * mCurrentViewScale);
diff --git a/core/java/android/widget/Gallery.java b/core/java/android/widget/Gallery.java
index 0ffd087..3f5b571 100644
--- a/core/java/android/widget/Gallery.java
+++ b/core/java/android/widget/Gallery.java
@@ -16,28 +16,28 @@
package android.widget;
-import com.android.internal.R;
-
import android.annotation.Widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
+import android.view.ContextMenu.ContextMenuInfo;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.HapticFeedbackConstants;
import android.view.KeyEvent;
import android.view.MotionEvent;
+import android.view.SoundEffectConstants;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
-import android.view.SoundEffectConstants;
-import android.view.ContextMenu.ContextMenuInfo;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.animation.Transformation;
+import com.android.internal.R;
+
/**
* A view that shows items in a center-locked, horizontally scrolling list.
* <p>
@@ -122,6 +122,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
* in the future. It will also trigger a selection changed.
*/
private Runnable mDisableSuppressSelectionChangedRunnable = new Runnable() {
+ @Override
public void run() {
mSuppressSelectionChanged = false;
selectionChanged();
@@ -171,6 +172,12 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
* drag sends many onScrolls).
*/
private boolean mIsFirstScroll;
+
+ /**
+ * If true, mFirstPosition is the position of the rightmost child, and
+ * the children are ordered right to left.
+ */
+ private boolean mIsRtl = true;
public Gallery(Context context) {
this(context, null);
@@ -418,7 +425,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
}
int getLimitedMotionScrollAmount(boolean motionToLeft, int deltaX) {
- int extremeItemPosition = motionToLeft ? mItemCount - 1 : 0;
+ int extremeItemPosition = motionToLeft != mIsRtl ? mItemCount - 1 : 0;
View extremeChild = getChildAt(extremeItemPosition - mFirstPosition);
if (extremeChild == null) {
@@ -490,31 +497,40 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
if (toLeft) {
final int galleryLeft = mPaddingLeft;
for (int i = 0; i < numChildren; i++) {
- final View child = getChildAt(i);
+ int n = mIsRtl ? (numChildren - 1 - i) : i;
+ final View child = getChildAt(n);
if (child.getRight() >= galleryLeft) {
break;
} else {
+ start = n;
count++;
- mRecycler.put(firstPosition + i, child);
+ mRecycler.put(firstPosition + n, child);
}
}
+ if (!mIsRtl) {
+ start = 0;
+ }
} else {
final int galleryRight = getWidth() - mPaddingRight;
for (int i = numChildren - 1; i >= 0; i--) {
- final View child = getChildAt(i);
+ int n = mIsRtl ? numChildren - 1 - i : i;
+ final View child = getChildAt(n);
if (child.getLeft() <= galleryRight) {
break;
} else {
- start = i;
+ start = n;
count++;
- mRecycler.put(firstPosition + i, child);
+ mRecycler.put(firstPosition + n, child);
}
}
+ if (mIsRtl) {
+ start = 0;
+ }
}
detachViewsFromParent(start, count);
- if (toLeft) {
+ if (toLeft != mIsRtl) {
mFirstPosition += count;
}
}
@@ -614,6 +630,8 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
@Override
void layout(int delta, boolean animate) {
+ mIsRtl = isLayoutRtl();
+
int childrenLeft = mSpinnerPadding.left;
int childrenWidth = mRight - mLeft - mSpinnerPadding.left - mSpinnerPadding.right;
@@ -676,6 +694,45 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
}
private void fillToGalleryLeft() {
+ if (mIsRtl) {
+ fillToGalleryLeftRtl();
+ } else {
+ fillToGalleryLeftLtr();
+ }
+ }
+
+ private void fillToGalleryLeftRtl() {
+ int itemSpacing = mSpacing;
+ int galleryLeft = mPaddingLeft;
+ int numChildren = getChildCount();
+ int numItems = mItemCount;
+
+ // Set state for initial iteration
+ View prevIterationView = getChildAt(numChildren - 1);
+ int curPosition;
+ int curRightEdge;
+
+ if (prevIterationView != null) {
+ curPosition = mFirstPosition + numChildren;
+ curRightEdge = prevIterationView.getLeft() - itemSpacing;
+ } else {
+ // No children available!
+ mFirstPosition = curPosition = mItemCount - 1;
+ curRightEdge = mRight - mLeft - mPaddingRight;
+ mShouldStopFling = true;
+ }
+
+ while (curRightEdge > galleryLeft && curPosition < mItemCount) {
+ prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
+ curRightEdge, false);
+
+ // Set state for next iteration
+ curRightEdge = prevIterationView.getLeft() - itemSpacing;
+ curPosition++;
+ }
+ }
+
+ private void fillToGalleryLeftLtr() {
int itemSpacing = mSpacing;
int galleryLeft = mPaddingLeft;
@@ -708,6 +765,45 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
}
private void fillToGalleryRight() {
+ if (mIsRtl) {
+ fillToGalleryRightRtl();
+ } else {
+ fillToGalleryRightLtr();
+ }
+ }
+
+ private void fillToGalleryRightRtl() {
+ int itemSpacing = mSpacing;
+ int galleryRight = mRight - mLeft - mPaddingRight;
+
+ // Set state for initial iteration
+ View prevIterationView = getChildAt(0);
+ int curPosition;
+ int curLeftEdge;
+
+ if (prevIterationView != null) {
+ curPosition = mFirstPosition - 1;
+ curLeftEdge = prevIterationView.getRight() + itemSpacing;
+ } else {
+ curPosition = 0;
+ curLeftEdge = mPaddingLeft;
+ mShouldStopFling = true;
+ }
+
+ while (curLeftEdge < galleryRight && curPosition >= 0) {
+ prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
+ curLeftEdge, true);
+
+ // Remember some state
+ mFirstPosition = curPosition;
+
+ // Set state for next iteration
+ curLeftEdge = prevIterationView.getRight() + itemSpacing;
+ curPosition--;
+ }
+ }
+
+ private void fillToGalleryRightLtr() {
int itemSpacing = mSpacing;
int galleryRight = mRight - mLeft - mPaddingRight;
int numChildren = getChildCount();
@@ -745,18 +841,16 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
*
* @param position Position in the gallery for the view to obtain
* @param offset Offset from the selected position
- * @param x X-coordintate indicating where this view should be placed. This
+ * @param x X-coordinate indicating where this view should be placed. This
* will either be the left or right edge of the view, depending on
- * the fromLeft paramter
- * @param fromLeft Are we posiitoning views based on the left edge? (i.e.,
+ * the fromLeft parameter
+ * @param fromLeft Are we positioning views based on the left edge? (i.e.,
* building from left to right)?
* @return A view that has been added to the gallery
*/
- private View makeAndAddView(int position, int offset, int x,
- boolean fromLeft) {
+ private View makeAndAddView(int position, int offset, int x, boolean fromLeft) {
View child;
-
if (!mDataChanged) {
child = mRecycler.get(position);
if (child != null) {
@@ -786,27 +880,26 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
/**
* Helper for makeAndAddView to set the position of a view and fill out its
- * layout paramters.
+ * layout parameters.
*
* @param child The view to position
* @param offset Offset from the selected position
- * @param x X-coordintate indicating where this view should be placed. This
+ * @param x X-coordinate indicating where this view should be placed. This
* will either be the left or right edge of the view, depending on
- * the fromLeft paramter
- * @param fromLeft Are we posiitoning views based on the left edge? (i.e.,
+ * the fromLeft parameter
+ * @param fromLeft Are we positioning views based on the left edge? (i.e.,
* building from left to right)?
*/
private void setUpChild(View child, int offset, int x, boolean fromLeft) {
// Respect layout params that are already in the view. Otherwise
// make some up...
- Gallery.LayoutParams lp = (Gallery.LayoutParams)
- child.getLayoutParams();
+ Gallery.LayoutParams lp = (Gallery.LayoutParams) child.getLayoutParams();
if (lp == null) {
lp = (Gallery.LayoutParams) generateDefaultLayoutParams();
}
- addViewInLayout(child, fromLeft ? -1 : 0, lp);
+ addViewInLayout(child, fromLeft != mIsRtl ? -1 : 0, lp);
child.setSelected(offset == 0);
@@ -883,9 +976,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
return retValue;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onSingleTapUp(MotionEvent e) {
if (mDownTouchPosition >= 0) {
@@ -905,9 +996,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
return false;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
if (!mShouldCallbackDuringFling) {
@@ -926,9 +1015,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
return true;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
if (localLOGV) Log.v(TAG, String.valueOf(e2.getX() - e1.getX()));
@@ -967,9 +1054,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
return true;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onDown(MotionEvent e) {
// Kill any existing fling/scroll
@@ -1009,9 +1094,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
onUp();
}
- /**
- * {@inheritDoc}
- */
+ @Override
public void onLongPress(MotionEvent e) {
if (mDownTouchPosition < 0) {
@@ -1025,9 +1108,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
// Unused methods from GestureDetector.OnGestureListener below
- /**
- * {@inheritDoc}
- */
+ @Override
public void onShowPress(MotionEvent e) {
}
@@ -1164,6 +1245,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
dispatchPress(mSelectedChild);
postDelayed(new Runnable() {
+ @Override
public void run() {
dispatchUnpress();
}
@@ -1278,10 +1360,10 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
// Draw the selected child last
return selectedIndex;
} else if (i >= selectedIndex) {
- // Move the children to the right of the selected child earlier one
+ // Move the children after the selected child earlier one
return i + 1;
} else {
- // Keep the children to the left of the selected child the same
+ // Keep the children before the selected child the same
return i;
}
}
@@ -1306,7 +1388,6 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
* Responsible for fling behavior. Use {@link #startUsingVelocity(int)} to
* initiate a fling. Each frame of the fling is handled in {@link #run()}.
* A FlingRunnable will keep re-posting itself until the fling is done.
- *
*/
private class FlingRunnable implements Runnable {
/**
@@ -1365,6 +1446,7 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
if (scrollIntoSlots) scrollIntoSlots();
}
+ @Override
public void run() {
if (mItemCount == 0) {
@@ -1384,15 +1466,17 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
// Pretend that each frame of a fling scroll is a touch scroll
if (delta > 0) {
- // Moving towards the left. Use first view as mDownTouchPosition
- mDownTouchPosition = mFirstPosition;
+ // Moving towards the left. Use leftmost view as mDownTouchPosition
+ mDownTouchPosition = mIsRtl ? (mFirstPosition + getChildCount() - 1) :
+ mFirstPosition;
// Don't fling more than 1 screen
delta = Math.min(getWidth() - mPaddingLeft - mPaddingRight - 1, delta);
} else {
- // Moving towards the right. Use last view as mDownTouchPosition
+ // Moving towards the right. Use rightmost view as mDownTouchPosition
int offsetToLast = getChildCount() - 1;
- mDownTouchPosition = mFirstPosition + offsetToLast;
+ mDownTouchPosition = mIsRtl ? mFirstPosition :
+ (mFirstPosition + getChildCount() - 1);
// Don't fling more than 1 screen
delta = Math.max(-(getWidth() - mPaddingRight - mPaddingLeft - 1), delta);
@@ -1414,7 +1498,6 @@ public class Gallery extends AbsSpinner implements GestureDetector.OnGestureList
* Gallery extends LayoutParams to provide a place to hold current
* Transformation information along with previous position/transformation
* info.
- *
*/
public static class LayoutParams extends ViewGroup.LayoutParams {
public LayoutParams(Context c, AttributeSet attrs) {
diff --git a/core/java/android/widget/GridLayout.java b/core/java/android/widget/GridLayout.java
index b9eb5ff..46fe407 100644
--- a/core/java/android/widget/GridLayout.java
+++ b/core/java/android/widget/GridLayout.java
@@ -28,7 +28,7 @@ import android.util.Pair;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
-import com.android.internal.R.styleable;
+import com.android.internal.R;
import java.lang.reflect.Array;
import java.util.ArrayList;
@@ -167,7 +167,7 @@ public class GridLayout extends ViewGroup {
// Misc constants
private static final String TAG = GridLayout.class.getName();
- static final boolean DEBUG = false;
+ static boolean DEBUG = false;
private static final int PRF = 1;
// Defaults
@@ -178,19 +178,17 @@ public class GridLayout extends ViewGroup {
private static final boolean DEFAULT_ORDER_PRESERVED = false;
private static final int DEFAULT_ALIGNMENT_MODE = ALIGN_MARGINS;
private static final int DEFAULT_CONTAINER_MARGIN = 0;
- private static final int DEFAULT_MARGIN = 8;
- private static final int DEFAULT_CONTAINER_PADDING = 16;
private static final int MAX_SIZE = 100000;
// TypedArray indices
- private static final int ORIENTATION = styleable.GridLayout_orientation;
- private static final int ROW_COUNT = styleable.GridLayout_rowCount;
- private static final int COLUMN_COUNT = styleable.GridLayout_columnCount;
- private static final int USE_DEFAULT_MARGINS = styleable.GridLayout_useDefaultMargins;
- private static final int ALIGNMENT_MODE = styleable.GridLayout_alignmentMode;
- private static final int ROW_ORDER_PRESERVED = styleable.GridLayout_rowOrderPreserved;
- private static final int COLUMN_ORDER_PRESERVED = styleable.GridLayout_columnOrderPreserved;
+ private static final int ORIENTATION = R.styleable.GridLayout_orientation;
+ private static final int ROW_COUNT = R.styleable.GridLayout_rowCount;
+ private static final int COLUMN_COUNT = R.styleable.GridLayout_columnCount;
+ private static final int USE_DEFAULT_MARGINS = R.styleable.GridLayout_useDefaultMargins;
+ private static final int ALIGNMENT_MODE = R.styleable.GridLayout_alignmentMode;
+ private static final int ROW_ORDER_PRESERVED = R.styleable.GridLayout_rowOrderPreserved;
+ private static final int COLUMN_ORDER_PRESERVED = R.styleable.GridLayout_columnOrderPreserved;
// Instance variables
@@ -201,6 +199,7 @@ public class GridLayout extends ViewGroup {
private boolean mUseDefaultMargins = DEFAULT_USE_DEFAULT_MARGINS;
private int mAlignmentMode = DEFAULT_ALIGNMENT_MODE;
private int mDefaultGravity = Gravity.NO_GRAVITY;
+ private int mDefaultGap;
// Constructors
@@ -212,7 +211,8 @@ public class GridLayout extends ViewGroup {
if (DEBUG) {
setWillNotDraw(false);
}
- TypedArray a = context.obtainStyledAttributes(attrs, styleable.GridLayout);
+ mDefaultGap = context.getResources().getDimensionPixelOffset(R.dimen.default_gap);
+ TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.GridLayout);
try {
setRowCount(a.getInt(ROW_COUNT, DEFAULT_COUNT));
setColumnCount(a.getInt(COLUMN_COUNT, DEFAULT_COUNT));
@@ -382,7 +382,7 @@ public class GridLayout extends ViewGroup {
public void setUseDefaultMargins(boolean useDefaultMargins) {
mUseDefaultMargins = useDefaultMargins;
if (useDefaultMargins) {
- int padding = DEFAULT_CONTAINER_PADDING;
+ int padding = mDefaultGap;
setPadding(padding, padding, padding, padding);
}
requestLayout();
@@ -538,7 +538,7 @@ public class GridLayout extends ViewGroup {
}
private int getDefaultMargin(View c, boolean horizontal, boolean leading) {
- return DEFAULT_MARGIN;
+ return mDefaultGap / 2;
}
private int getDefaultMargin(View c, boolean isAtEdge, boolean horizontal, boolean leading) {
@@ -757,33 +757,21 @@ public class GridLayout extends ViewGroup {
// Add/remove
+ /**
+ * @hide
+ */
@Override
- public void addView(View child, int index, ViewGroup.LayoutParams params) {
- super.addView(child, index, params);
- invalidateStructure();
- }
-
- @Override
- public void removeView(View view) {
- super.removeView(view);
- invalidateStructure();
- }
-
- @Override
- public void removeViewInLayout(View view) {
- super.removeViewInLayout(view);
- invalidateStructure();
- }
-
- @Override
- public void removeViewsInLayout(int start, int count) {
- super.removeViewsInLayout(start, count);
+ protected void onViewAdded(View child) {
+ super.onViewAdded(child);
invalidateStructure();
}
+ /**
+ * @hide
+ */
@Override
- public void removeViewAt(int index) {
- super.removeViewAt(index);
+ protected void onViewRemoved(View child) {
+ super.onViewRemoved(child);
invalidateStructure();
}
@@ -1596,8 +1584,8 @@ public class GridLayout extends ViewGroup {
* each cell group. The fundamental parameters associated with each cell group are
* gathered into their vertical and horizontal components and stored
* in the {@link #rowSpec} and {@link #columnSpec} layout parameters.
- * {@link android.widget.GridLayout.Spec Specs} are immutable structures and may be shared between the layout
- * parameters of different children.
+ * {@link android.widget.GridLayout.Spec Specs} are immutable structures
+ * and may be shared between the layout parameters of different children.
* <p>
* The row and column specs contain the leading and trailing indices along each axis
* and together specify the four grid indices that delimit the cells of this cell group.
@@ -1667,24 +1655,25 @@ public class GridLayout extends ViewGroup {
// TypedArray indices
- private static final int MARGIN = styleable.ViewGroup_MarginLayout_layout_margin;
- private static final int LEFT_MARGIN = styleable.ViewGroup_MarginLayout_layout_marginLeft;
- private static final int TOP_MARGIN = styleable.ViewGroup_MarginLayout_layout_marginTop;
- private static final int RIGHT_MARGIN = styleable.ViewGroup_MarginLayout_layout_marginRight;
+ private static final int MARGIN = R.styleable.ViewGroup_MarginLayout_layout_margin;
+ private static final int LEFT_MARGIN = R.styleable.ViewGroup_MarginLayout_layout_marginLeft;
+ private static final int TOP_MARGIN = R.styleable.ViewGroup_MarginLayout_layout_marginTop;
+ private static final int RIGHT_MARGIN =
+ R.styleable.ViewGroup_MarginLayout_layout_marginRight;
private static final int BOTTOM_MARGIN =
- styleable.ViewGroup_MarginLayout_layout_marginBottom;
+ R.styleable.ViewGroup_MarginLayout_layout_marginBottom;
- private static final int COLUMN = styleable.GridLayout_Layout_layout_column;
- private static final int COLUMN_SPAN = styleable.GridLayout_Layout_layout_columnSpan;
+ private static final int COLUMN = R.styleable.GridLayout_Layout_layout_column;
+ private static final int COLUMN_SPAN = R.styleable.GridLayout_Layout_layout_columnSpan;
private static final int COLUMN_FLEXIBILITY =
- styleable.GridLayout_Layout_layout_columnFlexibility;
+ R.styleable.GridLayout_Layout_layout_columnFlexibility;
- private static final int ROW = styleable.GridLayout_Layout_layout_row;
- private static final int ROW_SPAN = styleable.GridLayout_Layout_layout_rowSpan;
+ private static final int ROW = R.styleable.GridLayout_Layout_layout_row;
+ private static final int ROW_SPAN = R.styleable.GridLayout_Layout_layout_rowSpan;
private static final int ROW_FLEXIBILITY =
- styleable.GridLayout_Layout_layout_rowFlexibility;
+ R.styleable.GridLayout_Layout_layout_rowFlexibility;
- private static final int GRAVITY = styleable.GridLayout_Layout_layout_gravity;
+ private static final int GRAVITY = R.styleable.GridLayout_Layout_layout_gravity;
// Instance variables
@@ -1804,7 +1793,8 @@ public class GridLayout extends ViewGroup {
// This method could be parametrized and moved into MarginLayout.
private void reInitSuper(Context context, AttributeSet attrs) {
- TypedArray a = context.obtainStyledAttributes(attrs, styleable.ViewGroup_MarginLayout);
+ TypedArray a =
+ context.obtainStyledAttributes(attrs, R.styleable.ViewGroup_MarginLayout);
try {
int margin = a.getDimensionPixelSize(MARGIN, DEFAULT_MARGIN);
@@ -1840,7 +1830,7 @@ public class GridLayout extends ViewGroup {
}
private void init(Context context, AttributeSet attrs, int defaultGravity) {
- TypedArray a = context.obtainStyledAttributes(attrs, styleable.GridLayout_Layout);
+ TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.GridLayout_Layout);
try {
int gravity = a.getInt(GRAVITY, defaultGravity);
@@ -2293,25 +2283,6 @@ public class GridLayout extends ViewGroup {
}
/**
- * Temporary backward compatibility class for Launcher - to avoid
- * dependent multi-project commit. This class will be deleted after
- * AppsCustomizePagedView is updated to new API.
- *
- * @hide
- */
- @Deprecated
- public static class Group extends Spec {
- /**
- * @deprecated Please replace with {@link #spec(int, int, Alignment)}
- * @hide
- */
- @Deprecated
- public Group(int start, int size, Alignment alignment) {
- super(start, size, alignment, UNDEFINED_FLEXIBILITY);
- }
- }
-
- /**
* Return a Spec, {@code spec}, where:
* <ul>
* <li> {@code spec.span = [start, start + size]} </li>
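
The deprecated Group compatibility class removed above is replaced by the spec(int, int, Alignment) factory referenced in its deprecation note. A sketch of the replacement usage (the grid, child, and indices are hypothetical, and the CENTER/FILL alignments are assumed to be available as in the public GridLayout API):

    import android.view.View;
    import android.widget.GridLayout;

    class GridLayoutSpecExample {
        // Places a child at row 0 (span 1), columns 1-2 (span 2), using Spec
        // objects where the removed Group class would have been used.
        static void addCell(GridLayout grid, View child) {
            GridLayout.LayoutParams lp = new GridLayout.LayoutParams(
                    GridLayout.spec(0, 1, GridLayout.CENTER),   // row spec
                    GridLayout.spec(1, 2, GridLayout.FILL));    // column spec
            grid.addView(child, lp);
        }
    }
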
diff --git a/core/java/android/widget/SearchView.java b/core/java/android/widget/SearchView.java
index f3bda43..b2d1a1e 100644
--- a/core/java/android/widget/SearchView.java
+++ b/core/java/android/widget/SearchView.java
@@ -18,8 +18,6 @@ package android.widget;
import static android.widget.SuggestionsAdapter.getColumnString;
-import com.android.internal.R;
-
import android.app.PendingIntent;
import android.app.SearchManager;
import android.app.SearchableInfo;
@@ -39,10 +37,14 @@ import android.net.Uri;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.text.Editable;
+import android.text.Spannable;
+import android.text.SpannableStringBuilder;
import android.text.TextUtils;
import android.text.TextWatcher;
+import android.text.style.ImageSpan;
import android.util.AttributeSet;
import android.util.Log;
+import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
@@ -51,6 +53,8 @@ import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.TextView.OnEditorActionListener;
+import com.android.internal.R;
+
import java.util.WeakHashMap;
/**
@@ -87,6 +91,8 @@ public class SearchView extends LinearLayout {
private View mSearchEditFrame;
private View mVoiceButton;
private SearchAutoComplete mQueryTextView;
+ private View mDropDownAnchor;
+ private ImageView mSearchHintIcon;
private boolean mSubmitButtonEnabled;
private CharSequence mQueryHint;
private boolean mQueryRefinement;
@@ -195,6 +201,7 @@ public class SearchView extends LinearLayout {
mSubmitButton = findViewById(R.id.search_go_btn);
mCloseButton = (ImageView) findViewById(R.id.search_close_btn);
mVoiceButton = findViewById(R.id.search_voice_btn);
+ mSearchHintIcon = (ImageView) findViewById(R.id.search_mag_icon);
mSearchButton.setOnClickListener(mOnClickListener);
mCloseButton.setOnClickListener(mOnClickListener);
@@ -244,7 +251,20 @@ public class SearchView extends LinearLayout {
mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+ mDropDownAnchor = findViewById(mQueryTextView.getDropDownAnchor());
+ if (mDropDownAnchor != null) {
+ mDropDownAnchor.addOnLayoutChangeListener(new OnLayoutChangeListener() {
+ @Override
+ public void onLayoutChange(View v, int left, int top, int right, int bottom,
+ int oldLeft, int oldTop, int oldRight, int oldBottom) {
+ adjustDropDownSizeAndPosition();
+ }
+
+ });
+ }
+
updateViewsVisibility(mIconifiedByDefault);
+ updateQueryHint();
}
/**
@@ -263,7 +283,7 @@ public class SearchView extends LinearLayout {
}
// Cache the voice search capability
mVoiceButtonEnabled = hasVoiceSearch();
- updateViewsVisibility(mIconifiedByDefault);
+ updateViewsVisibility(isIconified());
}
/**
@@ -300,7 +320,6 @@ public class SearchView extends LinearLayout {
mQueryTextView.clearFocus();
setImeVisibility(false);
mClearingFocus = false;
- updateViewsVisibility(mIconifiedByDefault);
}
/**
@@ -555,6 +574,7 @@ public class SearchView extends LinearLayout {
mSearchButton.setVisibility(visCollapsed);
updateSubmitButton(hasText);
mSearchEditFrame.setVisibility(collapsed ? GONE : VISIBLE);
+ mSearchHintIcon.setVisibility(mIconifiedByDefault ? GONE : VISIBLE);
updateCloseButton();
updateVoiceButton(!hasText);
updateSubmitArea();
@@ -822,9 +842,29 @@ public class SearchView extends LinearLayout {
return result;
}
+ private int getSearchIconId() {
+ TypedValue outValue = new TypedValue();
+ getContext().getTheme().resolveAttribute(com.android.internal.R.attr.searchViewSearchIcon,
+ outValue, true);
+ return outValue.resourceId;
+ }
+
+ private CharSequence getDecoratedHint(CharSequence hintText) {
+ // If the field is always expanded, then don't add the search icon to the hint
+ if (!mIconifiedByDefault) return hintText;
+
+ SpannableStringBuilder ssb = new SpannableStringBuilder("   "); // for the icon
+ ssb.append(hintText);
+ Drawable searchIcon = getContext().getResources().getDrawable(getSearchIconId());
+ int textSize = (int) (mQueryTextView.getTextSize() * 1.25);
+ searchIcon.setBounds(0, 0, textSize, textSize);
+ ssb.setSpan(new ImageSpan(searchIcon), 1, 2, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
+ return ssb;
+ }
+
private void updateQueryHint() {
if (mQueryHint != null) {
- mQueryTextView.setHint(mQueryHint);
+ mQueryTextView.setHint(getDecoratedHint(mQueryHint));
} else if (mSearchable != null) {
CharSequence hint = null;
int hintId = mSearchable.getHintId();
@@ -832,8 +872,10 @@ public class SearchView extends LinearLayout {
hint = getContext().getString(hintId);
}
if (hint != null) {
- mQueryTextView.setHint(hint);
+ mQueryTextView.setHint(getDecoratedHint(hint));
}
+ } else {
+ mQueryTextView.setHint(getDecoratedHint(""));
}
}
@@ -922,9 +964,13 @@ public class SearchView extends LinearLayout {
CharSequence text = mQueryTextView.getText();
if (TextUtils.isEmpty(text)) {
if (mIconifiedByDefault) {
- // query field already empty, hide the keyboard and remove focus
- clearFocus();
- setImeVisibility(false);
+ // If the app doesn't override the close behavior
+ if (mOnCloseListener == null || !mOnCloseListener.onClose()) {
+ // hide the keyboard and remove focus
+ clearFocus();
+ // collapse the search field
+ updateViewsVisibility(true);
+ }
}
} else {
mQueryTextView.setText("");
@@ -932,10 +978,6 @@ public class SearchView extends LinearLayout {
setImeVisibility(true);
}
- if (mIconifiedByDefault && (mOnCloseListener == null || !mOnCloseListener.onClose())) {
- updateViewsVisibility(mIconifiedByDefault);
- setImeVisibility(false);
- }
}
private void onSearchClicked() {
@@ -975,6 +1017,28 @@ public class SearchView extends LinearLayout {
updateFocusedState(mQueryTextView.hasFocus());
}
+ @Override
+ protected void onAttachedToWindow() {
+ super.onAttachedToWindow();
+ }
+
+ private void adjustDropDownSizeAndPosition() {
+ if (mDropDownAnchor.getWidth() > 1) {
+ Resources res = getContext().getResources();
+ int anchorPadding = mSearchPlate.getPaddingLeft();
+ Rect dropDownPadding = new Rect();
+ int iconOffset = mIconifiedByDefault
+ ? res.getDimensionPixelSize(R.dimen.dropdownitem_icon_width)
+ + res.getDimensionPixelSize(R.dimen.dropdownitem_text_padding_left)
+ : 0;
+ mQueryTextView.getDropDownBackground().getPadding(dropDownPadding);
+ mQueryTextView.setDropDownHorizontalOffset(-(dropDownPadding.left + iconOffset)
+ + anchorPadding);
+ mQueryTextView.setDropDownWidth(mDropDownAnchor.getWidth() + dropDownPadding.left
+ + dropDownPadding.right + iconOffset - (anchorPadding));
+ }
+ }
+
private boolean onItemClicked(int position, int actionKey, String actionMsg) {
if (mOnSuggestionListener == null
|| !mOnSuggestionListener.onSuggestionClick(position)) {
@@ -1393,5 +1457,32 @@ public class SearchView extends LinearLayout {
public boolean enoughToFilter() {
return mThreshold <= 0 || super.enoughToFilter();
}
+
+ @Override
+ public boolean onKeyPreIme(int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ // Special case for the back key: we do not even try to send it
+ // to the drop down list; instead, we consume it immediately
+ if (event.getAction() == KeyEvent.ACTION_DOWN && event.getRepeatCount() == 0) {
+ KeyEvent.DispatcherState state = getKeyDispatcherState();
+ if (state != null) {
+ state.startTracking(event, this);
+ }
+ return true;
+ } else if (event.getAction() == KeyEvent.ACTION_UP) {
+ KeyEvent.DispatcherState state = getKeyDispatcherState();
+ if (state != null) {
+ state.handleUpEvent(event);
+ }
+ if (event.isTracking() && !event.isCanceled()) {
+ mSearchView.clearFocus();
+ mSearchView.setImeVisibility(false);
+ return true;
+ }
+ }
+ }
+ return super.onKeyPreIme(keyCode, event);
+ }
+
}
}
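
With the close-button change above, an empty, iconified-by-default SearchView clears focus and collapses only when no OnCloseListener is set or the listener returns false. A hypothetical listener that keeps the field expanded by consuming the close event:

    import android.widget.SearchView;

    class KeepExpandedExample {
        static void install(SearchView searchView) {
            searchView.setOnCloseListener(new SearchView.OnCloseListener() {
                @Override
                public boolean onClose() {
                    return true;    // consume the event: skip the default clear-focus/collapse path
                }
            });
        }
    }
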
diff --git a/core/java/android/widget/SuggestionsAdapter.java b/core/java/android/widget/SuggestionsAdapter.java
index 2cfc016..9e32c9a 100644
--- a/core/java/android/widget/SuggestionsAdapter.java
+++ b/core/java/android/widget/SuggestionsAdapter.java
@@ -16,8 +16,6 @@
package android.widget;
-import com.android.internal.R;
-
import android.app.SearchDialog;
import android.app.SearchManager;
import android.app.SearchableInfo;
@@ -47,6 +45,8 @@ import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnClickListener;
+import com.android.internal.R;
+
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
@@ -88,8 +88,8 @@ class SuggestionsAdapter extends ResourceCursorAdapter implements OnClickListene
private int mIconName2Col = INVALID_INDEX;
private int mFlagsCol = INVALID_INDEX;
- private final Runnable mStartSpinnerRunnable;
- private final Runnable mStopSpinnerRunnable;
+ // private final Runnable mStartSpinnerRunnable;
+ // private final Runnable mStopSpinnerRunnable;
/**
* The amount of time we delay in the filter when the user presses the delete key.
@@ -113,17 +113,18 @@ class SuggestionsAdapter extends ResourceCursorAdapter implements OnClickListene
mOutsideDrawablesCache = outsideDrawablesCache;
- mStartSpinnerRunnable = new Runnable() {
- public void run() {
- // mSearchView.setWorking(true); // TODO:
- }
- };
- mStopSpinnerRunnable = new Runnable() {
- public void run() {
- // mSearchView.setWorking(false); // TODO:
- }
- };
+ // mStartSpinnerRunnable = new Runnable() {
+ // public void run() {
+ // // mSearchView.setWorking(true); // TODO:
+ // }
+ // };
+ //
+ // mStopSpinnerRunnable = new Runnable() {
+ // public void run() {
+ // // mSearchView.setWorking(false); // TODO:
+ // }
+ // };
// delay 500ms when deleting
getFilter().setDelayer(new Filter.Delayer() {
@@ -341,10 +342,10 @@ class SuggestionsAdapter extends ResourceCursorAdapter implements OnClickListene
}
if (views.mIcon1 != null) {
- setViewDrawable(views.mIcon1, getIcon1(cursor));
+ setViewDrawable(views.mIcon1, getIcon1(cursor), View.INVISIBLE);
}
if (views.mIcon2 != null) {
- setViewDrawable(views.mIcon2, getIcon2(cursor));
+ setViewDrawable(views.mIcon2, getIcon2(cursor), View.GONE);
}
if (mQueryRefinement == REFINE_ALL
|| (mQueryRefinement == REFINE_BY_ENTRY
@@ -414,13 +415,13 @@ class SuggestionsAdapter extends ResourceCursorAdapter implements OnClickListene
* Sets the drawable in an image view, makes sure the view is only visible if there
* is a drawable.
*/
- private void setViewDrawable(ImageView v, Drawable drawable) {
+ private void setViewDrawable(ImageView v, Drawable drawable, int nullVisibility) {
// Set the icon even if the drawable is null, since we need to clear any
// previous icon.
v.setImageDrawable(drawable);
if (drawable == null) {
- v.setVisibility(View.GONE);
+ v.setVisibility(nullVisibility);
} else {
v.setVisibility(View.VISIBLE);
diff --git a/core/java/android/widget/TextView.java b/core/java/android/widget/TextView.java
index 769f5e3..66a07d3 100644
--- a/core/java/android/widget/TextView.java
+++ b/core/java/android/widget/TextView.java
@@ -117,7 +117,7 @@ import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import android.view.ViewConfiguration;
import android.view.ViewDebug;
import android.view.ViewGroup;
@@ -3731,13 +3731,13 @@ public class TextView extends View implements ViewTreeObserver.OnPreDrawListener
Handler h = getHandler();
if (h != null) {
long eventTime = SystemClock.uptimeMillis();
- h.sendMessage(h.obtainMessage(ViewAncestor.DISPATCH_KEY_FROM_IME,
+ h.sendMessage(h.obtainMessage(ViewRootImpl.DISPATCH_KEY_FROM_IME,
new KeyEvent(eventTime, eventTime,
KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_ENTER, 0, 0,
KeyCharacterMap.VIRTUAL_KEYBOARD, 0,
KeyEvent.FLAG_SOFT_KEYBOARD | KeyEvent.FLAG_KEEP_TOUCH_MODE
| KeyEvent.FLAG_EDITOR_ACTION)));
- h.sendMessage(h.obtainMessage(ViewAncestor.DISPATCH_KEY_FROM_IME,
+ h.sendMessage(h.obtainMessage(ViewRootImpl.DISPATCH_KEY_FROM_IME,
new KeyEvent(SystemClock.uptimeMillis(), eventTime,
KeyEvent.ACTION_UP, KeyEvent.KEYCODE_ENTER, 0, 0,
KeyCharacterMap.VIRTUAL_KEYBOARD, 0,
diff --git a/core/java/android/widget/ZoomButtonsController.java b/core/java/android/widget/ZoomButtonsController.java
index 9e37c7b..f3d891d 100644
--- a/core/java/android/widget/ZoomButtonsController.java
+++ b/core/java/android/widget/ZoomButtonsController.java
@@ -33,7 +33,7 @@ import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewParent;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.view.WindowManager.LayoutParams;
@@ -501,7 +501,7 @@ public class ZoomButtonsController implements View.OnTouchListener {
} else {
- ViewAncestor viewRoot = getOwnerViewAncestor();
+ ViewRootImpl viewRoot = getOwnerViewRootImpl();
if (viewRoot != null) {
viewRoot.dispatchKey(event);
}
@@ -526,15 +526,15 @@ public class ZoomButtonsController implements View.OnTouchListener {
}
}
- private ViewAncestor getOwnerViewAncestor() {
+ private ViewRootImpl getOwnerViewRootImpl() {
View rootViewOfOwner = mOwnerView.getRootView();
if (rootViewOfOwner == null) {
return null;
}
ViewParent parentOfRootView = rootViewOfOwner.getParent();
- if (parentOfRootView instanceof ViewAncestor) {
- return (ViewAncestor) parentOfRootView;
+ if (parentOfRootView instanceof ViewRootImpl) {
+ return (ViewRootImpl) parentOfRootView;
} else {
return null;
}
diff --git a/core/java/com/android/internal/view/IInputMethodManager.aidl b/core/java/com/android/internal/view/IInputMethodManager.aidl
index 812f92b..ce0299c 100644
--- a/core/java/com/android/internal/view/IInputMethodManager.aidl
+++ b/core/java/com/android/internal/view/IInputMethodManager.aidl
@@ -68,5 +68,5 @@ interface IInputMethodManager {
boolean setCurrentInputMethodSubtype(in InputMethodSubtype subtype);
boolean switchToLastInputMethod(in IBinder token);
boolean setInputMethodEnabled(String id, boolean enabled);
- boolean setAdditionalInputMethodSubtypes(in IBinder token, in InputMethodSubtype[] subtypes);
+ boolean setAdditionalInputMethodSubtypes(String id, in InputMethodSubtype[] subtypes);
}
diff --git a/core/java/com/android/internal/widget/PasswordEntryKeyboardHelper.java b/core/java/com/android/internal/widget/PasswordEntryKeyboardHelper.java
index 3070e3e..fb33748 100644
--- a/core/java/com/android/internal/widget/PasswordEntryKeyboardHelper.java
+++ b/core/java/com/android/internal/widget/PasswordEntryKeyboardHelper.java
@@ -29,7 +29,7 @@ import android.util.Log;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;
import android.view.View;
-import android.view.ViewAncestor;
+import android.view.ViewRootImpl;
import com.android.internal.R;
public class PasswordEntryKeyboardHelper implements OnKeyboardActionListener {
@@ -150,7 +150,7 @@ public class PasswordEntryKeyboardHelper implements OnKeyboardActionListener {
KeyEvent event = events[i];
event = KeyEvent.changeFlags(event, event.getFlags()
| KeyEvent.FLAG_SOFT_KEYBOARD | KeyEvent.FLAG_KEEP_TOUCH_MODE);
- handler.sendMessage(handler.obtainMessage(ViewAncestor.DISPATCH_KEY, event));
+ handler.sendMessage(handler.obtainMessage(ViewRootImpl.DISPATCH_KEY, event));
}
}
}
@@ -158,11 +158,11 @@ public class PasswordEntryKeyboardHelper implements OnKeyboardActionListener {
public void sendDownUpKeyEvents(int keyEventCode) {
long eventTime = SystemClock.uptimeMillis();
Handler handler = mTargetView.getHandler();
- handler.sendMessage(handler.obtainMessage(ViewAncestor.DISPATCH_KEY_FROM_IME,
+ handler.sendMessage(handler.obtainMessage(ViewRootImpl.DISPATCH_KEY_FROM_IME,
new KeyEvent(eventTime, eventTime, KeyEvent.ACTION_DOWN, keyEventCode, 0, 0,
KeyCharacterMap.VIRTUAL_KEYBOARD, 0,
KeyEvent.FLAG_SOFT_KEYBOARD|KeyEvent.FLAG_KEEP_TOUCH_MODE)));
- handler.sendMessage(handler.obtainMessage(ViewAncestor.DISPATCH_KEY_FROM_IME,
+ handler.sendMessage(handler.obtainMessage(ViewRootImpl.DISPATCH_KEY_FROM_IME,
new KeyEvent(eventTime, eventTime, KeyEvent.ACTION_UP, keyEventCode, 0, 0,
KeyCharacterMap.VIRTUAL_KEYBOARD, 0,
KeyEvent.FLAG_SOFT_KEYBOARD|KeyEvent.FLAG_KEEP_TOUCH_MODE)));
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index 06dc083..514e59d 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -139,6 +139,7 @@ LOCAL_SRC_FILES:= \
android_bluetooth_common.cpp \
android_bluetooth_BluetoothAudioGateway.cpp \
android_bluetooth_BluetoothSocket.cpp \
+ android_bluetooth_c.c \
android_server_BluetoothService.cpp \
android_server_BluetoothEventLoop.cpp \
android_server_BluetoothA2dpService.cpp \
diff --git a/core/jni/android/graphics/TextLayoutCache.cpp b/core/jni/android/graphics/TextLayoutCache.cpp
index 6a13876..30fe298 100644
--- a/core/jni/android/graphics/TextLayoutCache.cpp
+++ b/core/jni/android/graphics/TextLayoutCache.cpp
@@ -323,9 +323,7 @@ size_t TextLayoutCacheValue::getSize() {
void TextLayoutCacheValue::setupShaperItem(HB_ShaperItem* shaperItem, HB_FontRec* font,
FontData* fontData, SkPaint* paint, const UChar* chars, size_t start, size_t count,
- size_t contextCount, int dirFlags) {
- bool isRTL = dirFlags & 0x1;
-
+ size_t contextCount, bool isRTL) {
font->klass = &harfbuzzSkiaClass;
font->userData = 0;
// The values which harfbuzzSkiaClass returns are already scaled to
@@ -374,10 +372,10 @@ void TextLayoutCacheValue::setupShaperItem(HB_ShaperItem* shaperItem, HB_FontRec
void TextLayoutCacheValue::shapeWithHarfbuzz(HB_ShaperItem* shaperItem, HB_FontRec* font,
FontData* fontData, SkPaint* paint, const UChar* chars, size_t start, size_t count,
- size_t contextCount, int dirFlags) {
+ size_t contextCount, bool isRTL) {
// Setup Harfbuzz Shaper
setupShaperItem(shaperItem, font, fontData, paint, chars, start, count,
- contextCount, dirFlags);
+ contextCount, isRTL);
// Shape
resetGlyphArrays(shaperItem);
@@ -430,7 +428,7 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
LOGD("computeValuesWithHarfbuzz -- forcing run with LTR=%d RTL=%d",
forceLTR, forceRTL);
#endif
- computeRunValuesWithHarfbuzz(paint, chars, start, count, contextCount, dirFlags,
+ computeRunValuesWithHarfbuzz(paint, chars, start, count, contextCount, forceRTL,
outAdvances, outTotalAdvance, outGlyphs, outGlyphsCount);
if (forceRTL && *outGlyphsCount > 1) {
@@ -451,10 +449,15 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
LOGD("computeValuesWithHarfbuzz -- dirFlags=%d run-count=%d paraDir=%d", dirFlags, rc, paraDir);
#endif
if (rc == 1 || !U_SUCCESS(status)) {
+ bool isRTL = (paraDir == 1);
+#if DEBUG_GLYPHS
+ LOGD("computeValuesWithHarfbuzz -- processing SINGLE run "
+ "-- run-start=%d run-len=%d isRTL=%d", start, count, isRTL);
+#endif
computeRunValuesWithHarfbuzz(paint, chars, start, count, contextCount,
- dirFlags, outAdvances, outTotalAdvance, outGlyphs, outGlyphsCount);
+ isRTL, outAdvances, outTotalAdvance, outGlyphs, outGlyphsCount);
- if (dirFlags == 1 && *outGlyphsCount > 1) {
+ if (isRTL && *outGlyphsCount > 1) {
reverseGlyphArray(*outGlyphs, *outGlyphsCount);
}
} else {
@@ -485,14 +488,14 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
lengthRun = endRun - startRun;
- int newFlags = (runDir == UBIDI_RTL) ? kDirection_RTL : kDirection_LTR;
+ bool isRTL = (runDir == UBIDI_RTL);
jfloat runTotalAdvance = 0;
#if DEBUG_GLYPHS
- LOGD("computeValuesWithHarfbuzz -- run-start=%d run-len=%d newFlags=%d",
- startRun, lengthRun, newFlags);
+ LOGD("computeValuesWithHarfbuzz -- run-start=%d run-len=%d isRTL=%d",
+ startRun, lengthRun, isRTL);
#endif
computeRunValuesWithHarfbuzz(paint, chars, startRun,
- lengthRun, contextCount, newFlags,
+ lengthRun, contextCount, isRTL,
outAdvances, &runTotalAdvance,
&runGlyphs, &runGlyphsCount);
@@ -506,7 +509,7 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
LOGD(" -- glyphs[%d]=%d", j, runGlyphs[j]);
}
#endif
- glyphRuns.push(GlyphRun(runGlyphs, runGlyphsCount, newFlags));
+ glyphRuns.push(GlyphRun(runGlyphs, runGlyphsCount, isRTL));
}
*outGlyphs = new jchar[*outGlyphsCount];
@@ -528,13 +531,15 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
ubidi_close(bidi);
} else {
// Cannot run BiDi, just consider one Run
+ bool isRTL = (bidiReq == 1) || (bidiReq == UBIDI_DEFAULT_RTL);
#if DEBUG_GLYPHS
- LOGD("computeValuesWithHarfbuzz -- cannot run BiDi, considering only one Run");
+ LOGD("computeValuesWithHarfbuzz -- cannot run BiDi, considering a SINGLE Run "
+ "-- run-start=%d run-len=%d isRTL=%d", start, count, isRTL);
#endif
- computeRunValuesWithHarfbuzz(paint, chars, start, count, contextCount, dirFlags,
+ computeRunValuesWithHarfbuzz(paint, chars, start, count, contextCount, isRTL,
outAdvances, outTotalAdvance, outGlyphs, outGlyphsCount);
- if (dirFlags == 1 && *outGlyphsCount > 1) {
+ if (isRTL && *outGlyphsCount > 1) {
reverseGlyphArray(*outGlyphs, *outGlyphsCount);
}
}
@@ -545,17 +550,15 @@ void TextLayoutCacheValue::computeValuesWithHarfbuzz(SkPaint* paint, const UChar
}
void TextLayoutCacheValue::computeRunValuesWithHarfbuzz(SkPaint* paint, const UChar* chars,
- size_t start, size_t count, size_t contextCount, int dirFlags,
+ size_t start, size_t count, size_t contextCount, bool isRTL,
jfloat* outAdvances, jfloat* outTotalAdvance,
jchar** outGlyphs, size_t* outGlyphsCount) {
- bool isRTL = dirFlags & 0x1;
-
HB_ShaperItem shaperItem;
HB_FontRec font;
FontData fontData;
shapeWithHarfbuzz(&shaperItem, &font, &fontData, paint, chars, start, count,
- contextCount, dirFlags);
+ contextCount, isRTL);
#if DEBUG_GLYPHS
LOGD("HARFBUZZ -- num_glypth=%d - kerning_applied=%d", shaperItem.num_glyphs,
diff --git a/core/jni/android/graphics/TextLayoutCache.h b/core/jni/android/graphics/TextLayoutCache.h
index 690caac..10dee87 100644
--- a/core/jni/android/graphics/TextLayoutCache.h
+++ b/core/jni/android/graphics/TextLayoutCache.h
@@ -128,11 +128,11 @@ public:
static void setupShaperItem(HB_ShaperItem* shaperItem, HB_FontRec* font, FontData* fontData,
SkPaint* paint, const UChar* chars, size_t start, size_t count, size_t contextCount,
- int dirFlags);
+ bool isRTL);
static void shapeWithHarfbuzz(HB_ShaperItem* shaperItem, HB_FontRec* font, FontData* fontData,
SkPaint* paint, const UChar* chars, size_t start, size_t count, size_t contextCount,
- int dirFlags);
+ bool isRTL);
static void computeValuesWithHarfbuzz(SkPaint* paint, const UChar* chars, size_t start,
size_t count, size_t contextCount, int dirFlags,
@@ -179,7 +179,7 @@ private:
static void resetGlyphArrays(HB_ShaperItem* shaperItem);
static void computeRunValuesWithHarfbuzz(SkPaint* paint, const UChar* chars, size_t start,
- size_t count, size_t contextCount, int dirFlags,
+ size_t count, size_t contextCount, bool isRTL,
jfloat* outAdvances, jfloat* outTotalAdvance,
jchar** outGlyphs, size_t* outGlyphsCount);
}; // TextLayoutCacheValue
diff --git a/core/jni/android_bluetooth_BluetoothAudioGateway.cpp b/core/jni/android_bluetooth_BluetoothAudioGateway.cpp
index cb742a3..29c9c2d 100755
--- a/core/jni/android_bluetooth_BluetoothAudioGateway.cpp
+++ b/core/jni/android_bluetooth_BluetoothAudioGateway.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "BluetoothAudioGateway.cpp"
#include "android_bluetooth_common.h"
+#include "android_bluetooth_c.h"
#include "android_runtime/AndroidRuntime.h"
#include "JNIHelp.h"
#include "jni.h"
@@ -491,7 +492,8 @@ static int setup_listening_socket(int dev, int channel) {
}
laddr.rc_family = AF_BLUETOOTH;
- memcpy(&laddr.rc_bdaddr, BDADDR_ANY, sizeof(bdaddr_t));
+ bdaddr_t any = android_bluetooth_bdaddr_any();
+ memcpy(&laddr.rc_bdaddr, &any, sizeof(bdaddr_t));
laddr.rc_channel = channel;
if (bind(sk, (struct sockaddr *)&laddr, sizeof(laddr)) < 0) {
diff --git a/core/jni/android_bluetooth_BluetoothSocket.cpp b/core/jni/android_bluetooth_BluetoothSocket.cpp
index d09c4e9..4c84324 100644
--- a/core/jni/android_bluetooth_BluetoothSocket.cpp
+++ b/core/jni/android_bluetooth_BluetoothSocket.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "BluetoothSocket.cpp"
#include "android_bluetooth_common.h"
+#include "android_bluetooth_c.h"
#include "android_runtime/AndroidRuntime.h"
#include "JNIHelp.h"
#include "utils/Log.h"
@@ -245,7 +246,7 @@ static int bindListenNative(JNIEnv *env, jobject obj) {
jint type;
socklen_t addr_sz;
struct sockaddr *addr;
- bdaddr_t bdaddr = *BDADDR_ANY;
+ bdaddr_t bdaddr = android_bluetooth_bdaddr_any();
struct asocket *s = get_socketData(env, obj);
if (!s)
diff --git a/core/jni/android_bluetooth_c.c b/core/jni/android_bluetooth_c.c
new file mode 100755
index 0000000..b4c6727
--- /dev/null
+++ b/core/jni/android_bluetooth_c.c
@@ -0,0 +1,31 @@
+/*
+** Copyright 2011, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifdef HAVE_BLUETOOTH
+
+#include "android_bluetooth_c.h"
+
+/*
+ * A C helper for creating a bdaddr_t object with the value BDADDR_ANY.
+ * We have to do this in C because the macro BDADDR_ANY in bluetooth.h
+ * is not valid C++ code.
+ */
+bdaddr_t android_bluetooth_bdaddr_any(void)
+{
+ bdaddr_t any = *BDADDR_ANY;
+ return any;
+}
+#endif
diff --git a/core/jni/android_bluetooth_c.h b/core/jni/android_bluetooth_c.h
new file mode 100644
index 0000000..e890244
--- /dev/null
+++ b/core/jni/android_bluetooth_c.h
@@ -0,0 +1,39 @@
+/*
+** Copyright 2010, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_BLUETOOTH_C_H
+#define ANDROID_BLUETOOTH_C_H
+#ifdef HAVE_BLUETOOTH
+
+#include <bluetooth/bluetooth.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * A C helper for creating a bdaddr_t object with the value BDADDR_ANY.
+ * We have to do this in C because the macro BDADDR_ANY in bluetooth.h
+ * is not valid C++ code.
+ */
+bdaddr_t android_bluetooth_bdaddr_any(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /*HAVE_BLUETOOTH*/
+#endif /*ANDROID_BLUETOOTH_C_H*/
diff --git a/core/jni/android_view_GLES20Canvas.cpp b/core/jni/android_view_GLES20Canvas.cpp
index 681f43f..b0c2f2c 100644
--- a/core/jni/android_view_GLES20Canvas.cpp
+++ b/core/jni/android_view_GLES20Canvas.cpp
@@ -127,6 +127,13 @@ static void android_view_GLES20Canvas_disableVsync(JNIEnv* env, jobject clazz) {
}
}
+static void android_view_GLES20Canvas_flushCaches(JNIEnv* env, jobject clazz,
+ Caches::FlushMode mode) {
+ if (Caches::hasInstance()) {
+ Caches::getInstance().flush(mode);
+ }
+}
+
// ----------------------------------------------------------------------------
// Constructors
// ----------------------------------------------------------------------------
@@ -735,6 +742,7 @@ static JNINativeMethod gMethods[] = {
{ "nIsBackBufferPreserved", "()Z", (void*) android_view_GLES20Canvas_isBackBufferPreserved },
{ "nPreserveBackBuffer", "()Z", (void*) android_view_GLES20Canvas_preserveBackBuffer },
{ "nDisableVsync", "()V", (void*) android_view_GLES20Canvas_disableVsync },
+ { "nFlushCaches", "(I)V", (void*) android_view_GLES20Canvas_flushCaches },
{ "nCreateRenderer", "()I", (void*) android_view_GLES20Canvas_createRenderer },
{ "nDestroyRenderer", "(I)V", (void*) android_view_GLES20Canvas_destroyRenderer },
@@ -859,10 +867,8 @@ int register_android_view_GLES20Canvas(JNIEnv* env) {
const char* const kActivityThreadPathName = "android/app/ActivityThread";
-int register_android_app_ActivityThread(JNIEnv* env)
-{
- return AndroidRuntime::registerNativeMethods(
- env, kActivityThreadPathName,
+int register_android_app_ActivityThread(JNIEnv* env) {
+ return AndroidRuntime::registerNativeMethods(env, kActivityThreadPathName,
gActivityThreadMethods, NELEM(gActivityThreadMethods));
}
diff --git a/core/jni/android_view_TextureView.cpp b/core/jni/android_view_TextureView.cpp
index b046b23..9484c6b 100644
--- a/core/jni/android_view_TextureView.cpp
+++ b/core/jni/android_view_TextureView.cpp
@@ -19,11 +19,48 @@
#include <android_runtime/AndroidRuntime.h>
#include <android_runtime/android_graphics_SurfaceTexture.h>
+#include <ui/Region.h>
+#include <ui/Rect.h>
+
#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+
+#include <SkBitmap.h>
+#include <SkCanvas.h>
namespace android {
// ----------------------------------------------------------------------------
+// JNI Glue
+// ----------------------------------------------------------------------------
+
+static struct {
+ jmethodID set;
+ jfieldID left;
+ jfieldID top;
+ jfieldID right;
+ jfieldID bottom;
+} gRectClassInfo;
+
+static struct {
+ jfieldID nativeCanvas;
+ jfieldID surfaceFormat;
+} gCanvasClassInfo;
+
+static struct {
+ jfieldID nativeWindow;
+} gTextureViewClassInfo;
+
+#define GET_INT(object, field) \
+ env->GetIntField(object, field)
+
+#define SET_INT(object, field, value) \
+ env->SetIntField(object, field, value)
+
+#define INVOKEV(object, method, ...) \
+ env->CallVoidMethod(object, method, __VA_ARGS__)
+
+// ----------------------------------------------------------------------------
// Native layer
// ----------------------------------------------------------------------------
@@ -34,6 +71,118 @@ static void android_view_TextureView_setDefaultBufferSize(JNIEnv* env, jobject,
surfaceTexture->setDefaultBufferSize(width, height);
}
+static inline SkBitmap::Config convertPixelFormat(int32_t format) {
+ switch (format) {
+ case WINDOW_FORMAT_RGBA_8888:
+ return SkBitmap::kARGB_8888_Config;
+ case WINDOW_FORMAT_RGBX_8888:
+ return SkBitmap::kARGB_8888_Config;
+ case WINDOW_FORMAT_RGB_565:
+ return SkBitmap::kRGB_565_Config;
+ default:
+ return SkBitmap::kNo_Config;
+ }
+}
+
+/**
+ * This is a private API, and this implementation is also provided in the NDK.
+ * However, the NDK links against android_runtime, which means that using the
+ * NDK implementation would create a circular dependency between the libraries.
+ */
+static int32_t native_window_lock(ANativeWindow* window, ANativeWindow_Buffer* outBuffer,
+ Rect* inOutDirtyBounds) {
+ return window->perform(window, NATIVE_WINDOW_LOCK, outBuffer, inOutDirtyBounds);
+}
+
+static int32_t native_window_unlockAndPost(ANativeWindow* window) {
+ return window->perform(window, NATIVE_WINDOW_UNLOCK_AND_POST);
+}
+
+static void android_view_TextureView_createNativeWindow(JNIEnv* env, jobject textureView,
+ jobject surface) {
+
+ sp<SurfaceTexture> surfaceTexture(SurfaceTexture_getSurfaceTexture(env, surface));
+ sp<ANativeWindow> window = new SurfaceTextureClient(surfaceTexture);
+
+ window->incStrong(0);
+ SET_INT(textureView, gTextureViewClassInfo.nativeWindow, jint(window.get()));
+}
+
+static void android_view_TextureView_destroyNativeWindow(JNIEnv* env, jobject textureView) {
+
+ ANativeWindow* nativeWindow = (ANativeWindow*)
+ GET_INT(textureView, gTextureViewClassInfo.nativeWindow);
+
+ if (nativeWindow) {
+ sp<ANativeWindow> window(nativeWindow);
+ window->decStrong(0);
+ SET_INT(textureView, gTextureViewClassInfo.nativeWindow, 0);
+ }
+}
+
+static void android_view_TextureView_lockCanvas(JNIEnv* env, jobject,
+ jint nativeWindow, jobject canvas, jobject dirtyRect) {
+
+ if (!nativeWindow) {
+ return;
+ }
+
+ ANativeWindow_Buffer buffer;
+
+ Rect rect;
+ if (dirtyRect) {
+ rect.left = GET_INT(dirtyRect, gRectClassInfo.left);
+ rect.top = GET_INT(dirtyRect, gRectClassInfo.top);
+ rect.right = GET_INT(dirtyRect, gRectClassInfo.right);
+ rect.bottom = GET_INT(dirtyRect, gRectClassInfo.bottom);
+ } else {
+ rect.set(Rect(0x3FFF, 0x3FFF));
+ }
+
+ sp<ANativeWindow> window((ANativeWindow*) nativeWindow);
+ native_window_lock(window.get(), &buffer, &rect);
+
+ ssize_t bytesCount = buffer.stride * bytesPerPixel(buffer.format);
+
+ SkBitmap bitmap;
+ bitmap.setConfig(convertPixelFormat(buffer.format), buffer.width, buffer.height, bytesCount);
+
+ if (buffer.format == WINDOW_FORMAT_RGBX_8888) {
+ bitmap.setIsOpaque(true);
+ }
+
+ if (buffer.width > 0 && buffer.height > 0) {
+ bitmap.setPixels(buffer.bits);
+ } else {
+ bitmap.setPixels(NULL);
+ }
+
+ SET_INT(canvas, gCanvasClassInfo.surfaceFormat, buffer.format);
+ SkCanvas* nativeCanvas = (SkCanvas*) GET_INT(canvas, gCanvasClassInfo.nativeCanvas);
+ nativeCanvas->setBitmapDevice(bitmap);
+
+ SkRect clipRect;
+ clipRect.set(rect.left, rect.top, rect.right, rect.bottom);
+ nativeCanvas->clipRect(clipRect);
+
+ if (dirtyRect) {
+ INVOKEV(dirtyRect, gRectClassInfo.set,
+ int(rect.left), int(rect.top), int(rect.right), int(rect.bottom));
+ }
+}
+
+static void android_view_TextureView_unlockCanvasAndPost(JNIEnv* env, jobject,
+ jint nativeWindow, jobject canvas) {
+
+ SkCanvas* nativeCanvas = (SkCanvas*) GET_INT(canvas, gCanvasClassInfo.nativeCanvas);
+ nativeCanvas->setBitmapDevice(SkBitmap());
+
+ if (nativeWindow) {
+ sp<ANativeWindow> window((ANativeWindow*) nativeWindow);
+ native_window_unlockAndPost(window.get());
+ }
+}
+
// ----------------------------------------------------------------------------
// JNI Glue
// ----------------------------------------------------------------------------
@@ -42,10 +191,47 @@ const char* const kClassPathName = "android/view/TextureView";
static JNINativeMethod gMethods[] = {
{ "nSetDefaultBufferSize", "(Landroid/graphics/SurfaceTexture;II)V",
- (void*) android_view_TextureView_setDefaultBufferSize }
+ (void*) android_view_TextureView_setDefaultBufferSize },
+
+ { "nCreateNativeWindow", "(Landroid/graphics/SurfaceTexture;)V",
+ (void*) android_view_TextureView_createNativeWindow },
+ { "nDestroyNativeWindow", "()V",
+ (void*) android_view_TextureView_destroyNativeWindow },
+
+ { "nLockCanvas", "(ILandroid/graphics/Canvas;Landroid/graphics/Rect;)V",
+ (void*) android_view_TextureView_lockCanvas },
+ { "nUnlockCanvasAndPost", "(ILandroid/graphics/Canvas;)V",
+ (void*) android_view_TextureView_unlockCanvasAndPost },
};
+#define FIND_CLASS(var, className) \
+ var = env->FindClass(className); \
+ LOG_FATAL_IF(!var, "Unable to find class " className);
+
+#define GET_METHOD_ID(var, clazz, methodName, methodDescriptor) \
+ var = env->GetMethodID(clazz, methodName, methodDescriptor); \
+ LOG_FATAL_IF(!var, "Unable to find method " methodName);
+
+#define GET_FIELD_ID(var, clazz, fieldName, fieldDescriptor) \
+ var = env->GetFieldID(clazz, fieldName, fieldDescriptor); \
+ LOG_FATAL_IF(!var, "Unable to find field " fieldName);
+
int register_android_view_TextureView(JNIEnv* env) {
+ jclass clazz;
+ FIND_CLASS(clazz, "android/graphics/Rect");
+ GET_METHOD_ID(gRectClassInfo.set, clazz, "set", "(IIII)V");
+ GET_FIELD_ID(gRectClassInfo.left, clazz, "left", "I");
+ GET_FIELD_ID(gRectClassInfo.top, clazz, "top", "I");
+ GET_FIELD_ID(gRectClassInfo.right, clazz, "right", "I");
+ GET_FIELD_ID(gRectClassInfo.bottom, clazz, "bottom", "I");
+
+ FIND_CLASS(clazz, "android/graphics/Canvas");
+ GET_FIELD_ID(gCanvasClassInfo.nativeCanvas, clazz, "mNativeCanvas", "I");
+ GET_FIELD_ID(gCanvasClassInfo.surfaceFormat, clazz, "mSurfaceFormat", "I");
+
+ FIND_CLASS(clazz, "android/view/TextureView");
+ GET_FIELD_ID(gTextureViewClassInfo.nativeWindow, clazz, "mNativeWindow", "I");
+
return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}
diff --git a/core/res/res/layout/search_bar.xml b/core/res/res/layout/search_bar.xml
index 790ac6b..f6b5b53 100644
--- a/core/res/res/layout/search_bar.xml
+++ b/core/res/res/layout/search_bar.xml
@@ -66,7 +66,6 @@
android:layout_height="wrap_content"
android:layout_weight="1"
android:maxWidth="600dip"
- android:iconifiedByDefault="false"
android:layout_gravity="center_vertical"
/>
diff --git a/core/res/res/layout/search_dropdown_item_icons_2line.xml b/core/res/res/layout/search_dropdown_item_icons_2line.xml
index 53906f9..acef2cc 100644
--- a/core/res/res/layout/search_dropdown_item_icons_2line.xml
+++ b/core/res/res/layout/search_dropdown_item_icons_2line.xml
@@ -19,21 +19,21 @@
-->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
- android:paddingLeft="4dip"
- android:paddingRight="2dip"
+ android:paddingLeft="@dimen/dropdownitem_text_padding_left"
+ android:paddingRight="4dip"
android:layout_width="match_parent"
android:layout_height="?android:attr/searchResultListItemHeight" >
<!-- Icons come first in the layout, since their placement doesn't depend on
the placement of the text views. -->
<ImageView android:id="@android:id/icon1"
- android:layout_width="48dip"
+ android:layout_width="@dimen/dropdownitem_icon_width"
android:layout_height="48dip"
android:scaleType="centerInside"
android:layout_alignParentLeft="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true"
- android:visibility="gone" />
+ android:visibility="invisible" />
<ImageView android:id="@+id/edit_query"
android:layout_width="48dip"
diff --git a/core/res/res/layout/search_view.xml b/core/res/res/layout/search_view.xml
index fee27eb..6b70d8d 100644
--- a/core/res/res/layout/search_view.xml
+++ b/core/res/res/layout/search_view.xml
@@ -22,6 +22,8 @@
android:id="@+id/search_bar"
android:layout_width="match_parent"
android:layout_height="match_parent"
+ android:paddingLeft="8dip"
+ android:paddingRight="8dip"
android:orientation="horizontal"
>
@@ -30,7 +32,7 @@
android:id="@+id/search_badge"
android:layout_width="wrap_content"
android:layout_height="match_parent"
- android:layout_gravity="center_vertical"
+ android:gravity="center_vertical"
android:layout_marginBottom="2dip"
android:drawablePadding="0dip"
android:textAppearance="?android:attr/textAppearanceMedium"
@@ -54,12 +56,21 @@
android:layout_height="wrap_content"
android:layout_weight="1"
android:layout_gravity="center_vertical"
- android:layout_marginLeft="8dip"
- android:layout_marginRight="8dip"
android:layout_marginTop="4dip"
android:layout_marginBottom="4dip"
android:orientation="horizontal">
+ <ImageView
+ android:id="@+id/search_mag_icon"
+ android:layout_width="@dimen/dropdownitem_icon_width"
+ android:layout_height="wrap_content"
+ android:scaleType="centerInside"
+ android:layout_marginLeft="@dimen/dropdownitem_text_padding_left"
+ android:layout_gravity="center_vertical"
+ android:src="?android:attr/searchViewSearchIcon"
+ android:visibility="gone"
+ />
+
<!-- Inner layout contains the app icon, button(s) and EditText -->
<LinearLayout
android:id="@+id/search_plate"
@@ -70,14 +81,6 @@
android:orientation="horizontal"
android:background="?android:attr/searchViewTextField">
- <ImageView
- android:id="@+id/search_app_icon"
- android:layout_width="wrap_content"
- android:layout_height="match_parent"
- android:layout_gravity="center_vertical"
- android:src="?android:attr/searchViewSearchIcon"
- />
-
<view class="android.widget.SearchView$SearchAutoComplete"
android:id="@+id/search_src_text"
android:layout_height="36dip"
@@ -85,8 +88,8 @@
android:layout_weight="1"
android:minWidth="@dimen/search_view_text_min_width"
android:layout_gravity="bottom"
- android:paddingLeft="8dip"
- android:paddingRight="6dip"
+ android:paddingLeft="@dimen/dropdownitem_text_padding_left"
+ android:paddingRight="@dimen/dropdownitem_text_padding_right"
android:singleLine="true"
android:ellipsize="end"
android:background="@null"
@@ -100,7 +103,7 @@
<ImageView
android:id="@+id/search_close_btn"
- android:layout_width="wrap_content"
+ android:layout_width="@dimen/dropdownitem_icon_width"
android:layout_height="match_parent"
android:paddingLeft="8dip"
android:paddingRight="8dip"
@@ -131,7 +134,7 @@
android:visibility="gone"
android:focusable="true"
/>
-
+
<ImageView
android:id="@+id/search_voice_btn"
android:layout_width="wrap_content"
diff --git a/core/res/res/values/dimens.xml b/core/res/res/values/dimens.xml
index abc56ec..2ba4e66 100644
--- a/core/res/res/values/dimens.xml
+++ b/core/res/res/values/dimens.xml
@@ -138,4 +138,16 @@
<!-- Minimum popup width for selecting an activity in ActivityChooserDialog/ActivityChooserView. -->
<dimen name="activity_chooser_popup_min_width">200dip</dimen>
+ <!-- The default gap between components in a layout. -->
+ <dimen name="default_gap">16dip</dimen>
+
+ <!-- Text padding for dropdown items -->
+ <dimen name="dropdownitem_text_padding_left">6dip</dimen>
+
+ <!-- Text padding for dropdown items -->
+ <dimen name="dropdownitem_text_padding_right">6dip</dimen>
+
+ <!-- Width of the icon in a dropdown list -->
+ <dimen name="dropdownitem_icon_width">48dip</dimen>
+
</resources>
diff --git a/core/res/res/values/public.xml b/core/res/res/values/public.xml
index 6dedc83..f464623 100644
--- a/core/res/res/values/public.xml
+++ b/core/res/res/values/public.xml
@@ -1787,6 +1787,11 @@
<public type="attr" name="colorActivatedHighlight" />
<public type="attr" name="colorMultiSelectHighlight" />
+ <public type="attr" name="drawableStart" />
+ <public type="attr" name="drawableEnd" />
+
+ <public type="attr" name="actionModeStyle" />
+
<public type="style" name="TextAppearance.SuggestionHighlight" />
<public type="style" name="Theme.Holo.SplitActionBarWhenNarrow" />
<public type="style" name="Theme.Holo.Light.SplitActionBarWhenNarrow" />
@@ -1826,7 +1831,4 @@
<public type="color" name="holo_purple" />
<public type="color" name="holo_blue_bright" />
- <public type="attr" name="drawableStart" />
- <public type="attr" name="drawableEnd" />
-
</resources>
diff --git a/core/res/res/values/styles.xml b/core/res/res/values/styles.xml
index 5b5e7c3..d647467 100644
--- a/core/res/res/values/styles.xml
+++ b/core/res/res/values/styles.xml
@@ -563,8 +563,8 @@
<style name="Widget.DropDownItem">
<item name="android:textAppearance">@style/TextAppearance.Widget.DropDownItem</item>
- <item name="android:paddingLeft">6dip</item>
- <item name="android:paddingRight">6dip</item>
+ <item name="android:paddingLeft">@dimen/dropdownitem_text_padding_left</item>
+ <item name="android:paddingRight">@dimen/dropdownitem_text_padding_right</item>
<item name="android:gravity">center_vertical</item>
</style>
diff --git a/core/tests/coretests/src/android/database/MatrixCursorTest.java b/core/tests/coretests/src/android/database/MatrixCursorTest.java
index cddc6c4..cdab638 100644
--- a/core/tests/coretests/src/android/database/MatrixCursorTest.java
+++ b/core/tests/coretests/src/android/database/MatrixCursorTest.java
@@ -16,6 +16,8 @@
package android.database;
+import android.test.MoreAsserts;
+
import junit.framework.TestCase;
import java.util.*;
@@ -33,6 +35,7 @@ public class MatrixCursorTest extends TestCase {
cursor.moveToNext();
assertTrue(cursor.isNull(0));
assertNull(cursor.getString(0));
+ assertNull(cursor.getBlob(0));
assertEquals(0, cursor.getShort(0));
assertEquals(0, cursor.getInt(0));
assertEquals(0L, cursor.getLong(0));
@@ -49,7 +52,8 @@ public class MatrixCursorTest extends TestCase {
.add(2)
.add(3)
.add(4)
- .add(5);
+ .add(5)
+ .add(new byte[] {(byte) 0xaa, (byte) 0x55});
cursor.moveToNext();
@@ -61,7 +65,8 @@ public class MatrixCursorTest extends TestCase {
.add("2")
.add("3")
.add("4")
- .add("5");
+ .add("5")
+ .add(new byte[] {(byte) 0xaa, (byte) 0x55});
cursor.moveToNext();
checkValues(cursor);
@@ -73,7 +78,7 @@ public class MatrixCursorTest extends TestCase {
public void testAddArray() {
MatrixCursor cursor = newMatrixCursor();
- cursor.addRow(new Object[] { "a", 1, 2, 3, 4, 5 });
+ cursor.addRow(new Object[] { "a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55} });
cursor.moveToNext();
checkValues(cursor);
@@ -86,7 +91,7 @@ public class MatrixCursorTest extends TestCase {
public void testAddIterable() {
MatrixCursor cursor = newMatrixCursor();
- cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5));
+ cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55}));
cursor.moveToNext();
checkValues(cursor);
@@ -96,7 +101,8 @@ public class MatrixCursorTest extends TestCase {
} catch (IllegalArgumentException e) { /* expected */ }
try {
- cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5, "Too many!"));
+ cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5,
+ new byte[] {(byte) 0xaa, (byte) 0x55}, "Too many!"));
fail();
} catch (IllegalArgumentException e) { /* expected */ }
}
@@ -105,7 +111,7 @@ public class MatrixCursorTest extends TestCase {
MatrixCursor cursor = newMatrixCursor();
cursor.addRow(new NonIterableArrayList<Object>(
- Arrays.asList("a", 1, 2, 3, 4, 5)));
+ Arrays.asList("a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55})));
cursor.moveToNext();
checkValues(cursor);
@@ -116,7 +122,8 @@ public class MatrixCursorTest extends TestCase {
try {
cursor.addRow(new NonIterableArrayList<Object>(
- Arrays.asList("a", 1, 2, 3, 4, 5, "Too many!")));
+ Arrays.asList("a", 1, 2, 3, 4, 5,
+ new byte[] {(byte) 0xaa, (byte) 0x55}, "Too many!")));
fail();
} catch (IllegalArgumentException e) { /* expected */ }
}
@@ -137,7 +144,7 @@ public class MatrixCursorTest extends TestCase {
private MatrixCursor newMatrixCursor() {
return new MatrixCursor(new String[] {
- "string", "short", "int", "long", "float", "double" });
+ "string", "short", "int", "long", "float", "double", "blob" });
}
private void checkValues(MatrixCursor cursor) {
@@ -147,6 +154,7 @@ public class MatrixCursorTest extends TestCase {
assertEquals(3, cursor.getLong(3));
assertEquals(4.0f, cursor.getFloat(4));
assertEquals(5.0D, cursor.getDouble(5));
+ MoreAsserts.assertEquals(new byte[] {(byte) 0xaa, (byte) 0x55}, cursor.getBlob(6));
}
}
diff --git a/core/tests/coretests/src/android/util/JsonReaderTest.java b/core/tests/coretests/src/android/util/JsonReaderTest.java
index 440aeb5..0b50af3 100644
--- a/core/tests/coretests/src/android/util/JsonReaderTest.java
+++ b/core/tests/coretests/src/android/util/JsonReaderTest.java
@@ -858,7 +858,7 @@ public final class JsonReaderTest extends TestCase {
}
public void testFailWithPosition() throws IOException {
- testFailWithPosition("Expected literal value at line 6 column 3",
+ testFailWithPosition("Expected literal value at line 6 column 3",
"[\n\n\n\n\n0,}]");
}
diff --git a/core/tests/coretests/src/android/util/PatternsTest.java b/core/tests/coretests/src/android/util/PatternsTest.java
index aad3fe1..9519b9f 100644
--- a/core/tests/coretests/src/android/util/PatternsTest.java
+++ b/core/tests/coretests/src/android/util/PatternsTest.java
@@ -39,6 +39,10 @@ public class PatternsTest extends TestCase {
t = Patterns.TOP_LEVEL_DOMAIN.matcher("xn--0zwm56d").matches();
assertTrue("Missed valid TLD", t);
+ // One of the new internationalized top-level domains.
+ t = Patterns.TOP_LEVEL_DOMAIN.matcher("\uD55C\uAD6D").matches();
+ assertTrue("Missed valid TLD", t);
+
t = Patterns.TOP_LEVEL_DOMAIN.matcher("mem").matches();
assertFalse("Matched invalid TLD!", t);
@@ -80,6 +84,9 @@ public class PatternsTest extends TestCase {
assertTrue("Valid URL", t);
t = Patterns.WEB_URL.matcher("\uD604\uAE08\uC601\uC218\uC99D.kr").matches();
assertTrue("Valid URL", t);
+ // URL with international TLD.
+ t = Patterns.WEB_URL.matcher("\uB3C4\uBA54\uC778.\uD55C\uAD6D").matches();
+ assertTrue("Valid URL", t);
t = Patterns.WEB_URL.matcher("http://brainstormtech.blogs.fortune.cnn.com/2010/03/11/" +
"top-five-moments-from-eric-schmidt\u2019s-talk-in-abu-dhabi/").matches();
diff --git a/include/gui/SurfaceTextureClient.h b/include/gui/SurfaceTextureClient.h
index 5ec469e..cfe2aa1 100644
--- a/include/gui/SurfaceTextureClient.h
+++ b/include/gui/SurfaceTextureClient.h
@@ -21,6 +21,7 @@
#include <gui/SurfaceTexture.h>
#include <ui/egl/android_natives.h>
+#include <ui/Region.h>
#include <utils/RefBase.h>
#include <utils/threads.h>
@@ -37,29 +38,24 @@ public:
sp<ISurfaceTexture> getISurfaceTexture() const;
-private:
- friend class Surface;
+protected:
+ SurfaceTextureClient();
+ void setISurfaceTexture(const sp<ISurfaceTexture>& surfaceTexture);
+private:
// can't be copied
SurfaceTextureClient& operator = (const SurfaceTextureClient& rhs);
SurfaceTextureClient(const SurfaceTextureClient& rhs);
+ void init();
// ANativeWindow hooks
- static int cancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int dequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer);
- static int lockBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int perform(ANativeWindow* window, int operation, ...);
- static int query(const ANativeWindow* window, int what, int* value);
- static int queueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int setSwapInterval(ANativeWindow* window, int interval);
-
- int cancelBuffer(ANativeWindowBuffer* buffer);
- int dequeueBuffer(ANativeWindowBuffer** buffer);
- int lockBuffer(ANativeWindowBuffer* buffer);
- int perform(int operation, va_list args);
- int query(int what, int* value) const;
- int queueBuffer(ANativeWindowBuffer* buffer);
- int setSwapInterval(int interval);
+ static int hook_cancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
+ static int hook_dequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer);
+ static int hook_lockBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
+ static int hook_perform(ANativeWindow* window, int operation, ...);
+ static int hook_query(const ANativeWindow* window, int what, int* value);
+ static int hook_queueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
+ static int hook_setSwapInterval(ANativeWindow* window, int interval);
int dispatchConnect(va_list args);
int dispatchDisconnect(va_list args);
@@ -71,26 +67,38 @@ private:
int dispatchSetBuffersTimestamp(va_list args);
int dispatchSetCrop(va_list args);
int dispatchSetUsage(va_list args);
-
- int connect(int api);
- int disconnect(int api);
- int setBufferCount(int bufferCount);
- int setBuffersDimensions(int w, int h);
- int setBuffersFormat(int format);
- int setBuffersTransform(int transform);
- int setBuffersTimestamp(int64_t timestamp);
- int setCrop(Rect const* rect);
- int setUsage(uint32_t reqUsage);
-
- void freeAllBuffers();
- int getSlotFromBufferLocked(android_native_buffer_t* buffer) const;
-
- int getConnectedApi() const;
+ int dispatchLock(va_list args);
+ int dispatchUnlockAndPost(va_list args);
+
+protected:
+ virtual int cancelBuffer(ANativeWindowBuffer* buffer);
+ virtual int dequeueBuffer(ANativeWindowBuffer** buffer);
+ virtual int lockBuffer(ANativeWindowBuffer* buffer);
+ virtual int perform(int operation, va_list args);
+ virtual int query(int what, int* value) const;
+ virtual int queueBuffer(ANativeWindowBuffer* buffer);
+ virtual int setSwapInterval(int interval);
+
+ virtual int connect(int api);
+ virtual int disconnect(int api);
+ virtual int setBufferCount(int bufferCount);
+ virtual int setBuffersDimensions(int w, int h);
+ virtual int setBuffersFormat(int format);
+ virtual int setBuffersTransform(int transform);
+ virtual int setBuffersTimestamp(int64_t timestamp);
+ virtual int setCrop(Rect const* rect);
+ virtual int setUsage(uint32_t reqUsage);
+ virtual int lock(ANativeWindow_Buffer* outBuffer, ARect* inOutDirtyBounds);
+ virtual int unlockAndPost();
enum { MIN_UNDEQUEUED_BUFFERS = SurfaceTexture::MIN_UNDEQUEUED_BUFFERS };
enum { NUM_BUFFER_SLOTS = SurfaceTexture::NUM_BUFFER_SLOTS };
enum { DEFAULT_FORMAT = PIXEL_FORMAT_RGBA_8888 };
+private:
+ void freeAllBuffers();
+ int getSlotFromBufferLocked(android_native_buffer_t* buffer) const;
+
// mSurfaceTexture is the interface to the surface texture server. All
// operations on the surface texture client ultimately translate into
// interactions with the server using this interface.
@@ -145,6 +153,12 @@ private:
// variables of SurfaceTexture objects. It must be locked whenever the
// member variables are accessed.
mutable Mutex mMutex;
+
+ // must be used from the lock/unlock thread
+ sp<GraphicBuffer> mLockedBuffer;
+ sp<GraphicBuffer> mPostedBuffer;
+ mutable Region mOldDirtyRegion;
+ bool mConnectedToCpu;
};
}; // namespace android
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index ea5a9d3..1136f6c 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -25,6 +25,8 @@
#include <utils/KeyedVector.h>
#include <utils/String8.h>
+class ANativeWindow;
+
namespace android {
class Surface;
@@ -196,6 +198,8 @@ private:
status_t prepareAsync_l();
status_t getDuration_l(int *msec);
status_t setDataSource(const sp<IMediaPlayer>& player);
+ void disconnectNativeWindow();
+ status_t reset_l();
sp<IMediaPlayer> mPlayer;
thread_id_t mLockThreadId;
@@ -218,6 +222,8 @@ private:
int mVideoHeight;
int mAudioSessionId;
float mSendLevel;
+ sp<ANativeWindow> mConnectedWindow;
+ sp<IBinder> mConnectedWindowBinder;
};
}; // namespace android
diff --git a/include/surfaceflinger/Surface.h b/include/surfaceflinger/Surface.h
index dc2a845..c2a494d 100644
--- a/include/surfaceflinger/Surface.h
+++ b/include/surfaceflinger/Surface.h
@@ -28,6 +28,8 @@
#include <ui/Region.h>
#include <ui/egl/android_natives.h>
+#include <gui/SurfaceTextureClient.h>
+
#include <surfaceflinger/ISurface.h>
#include <surfaceflinger/ISurfaceComposerClient.h>
@@ -37,14 +39,9 @@ namespace android {
// ---------------------------------------------------------------------------
-class GraphicBuffer;
-class GraphicBufferMapper;
-class IOMX;
class ISurfaceTexture;
-class Rect;
class Surface;
class SurfaceComposerClient;
-class SurfaceTextureClient;
// ---------------------------------------------------------------------------
@@ -129,8 +126,7 @@ private:
// ---------------------------------------------------------------------------
-class Surface
- : public EGLNativeBase<ANativeWindow, Surface, RefBase>
+class Surface : public SurfaceTextureClient
{
public:
struct SurfaceInfo {
@@ -158,32 +154,14 @@ public:
sp<ISurfaceTexture> getSurfaceTexture();
// the lock/unlock APIs must be used from the same thread
- status_t lock(SurfaceInfo* info, bool blocking = true);
- status_t lock(SurfaceInfo* info, Region* dirty, bool blocking = true);
+ status_t lock(SurfaceInfo* info, Region* dirty = NULL);
status_t unlockAndPost();
sp<IBinder> asBinder() const;
private:
- /*
- * Android frameworks friends
- * (eventually this should go away and be replaced by proper APIs)
- */
- // camera and camcorder need access to the ISurface binder interface for preview
- friend class CameraService;
- friend class MediaRecorder;
- // MediaPlayer needs access to ISurface for display
- friend class MediaPlayer;
- friend class IOMX;
- friend class SoftwareRenderer;
// this is just to be able to write some unit tests
friend class Test;
- // videoEditor preview classes
- friend class VideoEditorPreviewController;
- friend class PreviewRenderer;
-
-private:
- friend class SurfaceComposerClient;
friend class SurfaceControl;
// can't be copied
@@ -194,62 +172,27 @@ private:
Surface(const Parcel& data, const sp<IBinder>& ref);
~Surface();
-
- /*
- * ANativeWindow hooks
- */
- static int setSwapInterval(ANativeWindow* window, int interval);
- static int dequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer);
- static int cancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int lockBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int queueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer);
- static int query(const ANativeWindow* window, int what, int* value);
- static int perform(ANativeWindow* window, int operation, ...);
-
- int setSwapInterval(int interval);
- int dequeueBuffer(ANativeWindowBuffer** buffer);
- int lockBuffer(ANativeWindowBuffer* buffer);
- int queueBuffer(ANativeWindowBuffer* buffer);
- int cancelBuffer(ANativeWindowBuffer* buffer);
- int query(int what, int* value) const;
- int perform(int operation, va_list args);
-
/*
* private stuff...
*/
void init();
status_t validate(bool inCancelBuffer = false) const;
- int getConnectedApi() const;
-
static void cleanCachedSurfacesLocked();
+ virtual int query(int what, int* value) const;
+
// constants
status_t mInitCheck;
sp<ISurface> mSurface;
- sp<SurfaceTextureClient> mSurfaceTextureClient;
uint32_t mIdentity;
PixelFormat mFormat;
uint32_t mFlags;
-
- // protected by mSurfaceLock. These are also used from lock/unlock
- // but in that case, they must be called form the same thread.
- mutable Region mDirtyRegion;
-
- // must be used from the lock/unlock thread
- sp<GraphicBuffer> mLockedBuffer;
- sp<GraphicBuffer> mPostedBuffer;
- mutable Region mOldDirtyRegion;
- bool mReserved;
// query() must be called from dequeueBuffer() thread
uint32_t mWidth;
uint32_t mHeight;
- // Inherently thread-safe
- mutable Mutex mSurfaceLock;
- mutable Mutex mApiLock;
-
// A cache of Surface objects that have been deserialized into this process.
static Mutex sCachedSurfacesLock;
static DefaultKeyedVector<wp<IBinder>, wp<Surface> > sCachedSurfaces;
diff --git a/libs/gui/Surface.cpp b/libs/gui/Surface.cpp
index 9185e1e..dabe643f 100644
--- a/libs/gui/Surface.cpp
+++ b/libs/gui/Surface.cpp
@@ -46,59 +46,6 @@
namespace android {
-// ----------------------------------------------------------------------
-
-static status_t copyBlt(
- const sp<GraphicBuffer>& dst,
- const sp<GraphicBuffer>& src,
- const Region& reg)
-{
- // src and dst with, height and format must be identical. no verification
- // is done here.
- status_t err;
- uint8_t const * src_bits = NULL;
- err = src->lock(GRALLOC_USAGE_SW_READ_OFTEN, reg.bounds(), (void**)&src_bits);
- LOGE_IF(err, "error locking src buffer %s", strerror(-err));
-
- uint8_t* dst_bits = NULL;
- err = dst->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, reg.bounds(), (void**)&dst_bits);
- LOGE_IF(err, "error locking dst buffer %s", strerror(-err));
-
- Region::const_iterator head(reg.begin());
- Region::const_iterator tail(reg.end());
- if (head != tail && src_bits && dst_bits) {
- const size_t bpp = bytesPerPixel(src->format);
- const size_t dbpr = dst->stride * bpp;
- const size_t sbpr = src->stride * bpp;
-
- while (head != tail) {
- const Rect& r(*head++);
- ssize_t h = r.height();
- if (h <= 0) continue;
- size_t size = r.width() * bpp;
- uint8_t const * s = src_bits + (r.left + src->stride * r.top) * bpp;
- uint8_t * d = dst_bits + (r.left + dst->stride * r.top) * bpp;
- if (dbpr==sbpr && size==sbpr) {
- size *= h;
- h = 1;
- }
- do {
- memcpy(d, s, size);
- d += dbpr;
- s += sbpr;
- } while (--h > 0);
- }
- }
-
- if (src_bits)
- src->unlock();
-
- if (dst_bits)
- dst->unlock();
-
- return err;
-}
-
// ============================================================================
// SurfaceControl
// ============================================================================
@@ -277,7 +224,8 @@ sp<Surface> SurfaceControl::getSurface() const
// ---------------------------------------------------------------------------
Surface::Surface(const sp<SurfaceControl>& surface)
- : mInitCheck(NO_INIT),
+ : SurfaceTextureClient(),
+ mInitCheck(NO_INIT),
mSurface(surface->mSurface),
mIdentity(surface->mIdentity),
mFormat(surface->mFormat), mFlags(surface->mFlags),
@@ -287,7 +235,8 @@ Surface::Surface(const sp<SurfaceControl>& surface)
}
Surface::Surface(const Parcel& parcel, const sp<IBinder>& ref)
- : mInitCheck(NO_INIT)
+ : SurfaceTextureClient(),
+ mInitCheck(NO_INIT)
{
mSurface = interface_cast<ISurface>(ref);
mIdentity = parcel.readInt32();
@@ -363,36 +312,21 @@ void Surface::cleanCachedSurfacesLocked() {
void Surface::init()
{
- ANativeWindow::setSwapInterval = setSwapInterval;
- ANativeWindow::dequeueBuffer = dequeueBuffer;
- ANativeWindow::cancelBuffer = cancelBuffer;
- ANativeWindow::lockBuffer = lockBuffer;
- ANativeWindow::queueBuffer = queueBuffer;
- ANativeWindow::query = query;
- ANativeWindow::perform = perform;
-
if (mSurface != NULL) {
sp<ISurfaceTexture> surfaceTexture(mSurface->getSurfaceTexture());
LOGE_IF(surfaceTexture==0, "got a NULL ISurfaceTexture from ISurface");
if (surfaceTexture != NULL) {
- mSurfaceTextureClient = new SurfaceTextureClient(surfaceTexture);
- mSurfaceTextureClient->setUsage(GraphicBuffer::USAGE_HW_RENDER);
+ setISurfaceTexture(surfaceTexture);
+ setUsage(GraphicBuffer::USAGE_HW_RENDER);
}
DisplayInfo dinfo;
SurfaceComposerClient::getDisplayInfo(0, &dinfo);
const_cast<float&>(ANativeWindow::xdpi) = dinfo.xdpi;
const_cast<float&>(ANativeWindow::ydpi) = dinfo.ydpi;
-
- const_cast<int&>(ANativeWindow::minSwapInterval) =
- mSurfaceTextureClient->minSwapInterval;
-
- const_cast<int&>(ANativeWindow::maxSwapInterval) =
- mSurfaceTextureClient->maxSwapInterval;
-
const_cast<uint32_t&>(ANativeWindow::flags) = 0;
- if (mSurfaceTextureClient != 0) {
+ if (surfaceTexture != NULL) {
mInitCheck = NO_ERROR;
}
}
@@ -402,7 +336,6 @@ Surface::~Surface()
{
// clear all references and trigger an IPC now, to make sure things
// happen without delay, since these resources are quite heavy.
- mSurfaceTextureClient.clear();
mSurface.clear();
IPCThreadState::self()->flushCommands();
}
@@ -431,77 +364,6 @@ sp<IBinder> Surface::asBinder() const {
// ----------------------------------------------------------------------------
-int Surface::setSwapInterval(ANativeWindow* window, int interval) {
- Surface* self = getSelf(window);
- return self->setSwapInterval(interval);
-}
-
-int Surface::dequeueBuffer(ANativeWindow* window,
- ANativeWindowBuffer** buffer) {
- Surface* self = getSelf(window);
- return self->dequeueBuffer(buffer);
-}
-
-int Surface::cancelBuffer(ANativeWindow* window,
- ANativeWindowBuffer* buffer) {
- Surface* self = getSelf(window);
- return self->cancelBuffer(buffer);
-}
-
-int Surface::lockBuffer(ANativeWindow* window,
- ANativeWindowBuffer* buffer) {
- Surface* self = getSelf(window);
- return self->lockBuffer(buffer);
-}
-
-int Surface::queueBuffer(ANativeWindow* window,
- ANativeWindowBuffer* buffer) {
- Surface* self = getSelf(window);
- return self->queueBuffer(buffer);
-}
-
-int Surface::query(const ANativeWindow* window,
- int what, int* value) {
- const Surface* self = getSelf(window);
- return self->query(what, value);
-}
-
-int Surface::perform(ANativeWindow* window,
- int operation, ...) {
- va_list args;
- va_start(args, operation);
- Surface* self = getSelf(window);
- int res = self->perform(operation, args);
- va_end(args);
- return res;
-}
-
-// ----------------------------------------------------------------------------
-
-int Surface::setSwapInterval(int interval) {
- return mSurfaceTextureClient->setSwapInterval(interval);
-}
-
-int Surface::dequeueBuffer(ANativeWindowBuffer** buffer) {
- status_t err = mSurfaceTextureClient->dequeueBuffer(buffer);
- if (err == NO_ERROR) {
- mDirtyRegion.set(buffer[0]->width, buffer[0]->height);
- }
- return err;
-}
-
-int Surface::cancelBuffer(ANativeWindowBuffer* buffer) {
- return mSurfaceTextureClient->cancelBuffer(buffer);
-}
-
-int Surface::lockBuffer(ANativeWindowBuffer* buffer) {
- return mSurfaceTextureClient->lockBuffer(buffer);
-}
-
-int Surface::queueBuffer(ANativeWindowBuffer* buffer) {
- return mSurfaceTextureClient->queueBuffer(buffer);
-}
-
int Surface::query(int what, int* value) const {
switch (what) {
case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
@@ -509,141 +371,39 @@ int Surface::query(int what, int* value) const {
*value = 1;
return NO_ERROR;
case NATIVE_WINDOW_CONCRETE_TYPE:
- // TODO: this is not needed anymore
*value = NATIVE_WINDOW_SURFACE;
return NO_ERROR;
}
- return mSurfaceTextureClient->query(what, value);
-}
-
-int Surface::perform(int operation, va_list args) {
- return mSurfaceTextureClient->perform(operation, args);
+ return SurfaceTextureClient::query(what, value);
}
// ----------------------------------------------------------------------------
-int Surface::getConnectedApi() const {
- return mSurfaceTextureClient->getConnectedApi();
-}
+status_t Surface::lock(SurfaceInfo* other, Region* dirtyIn) {
+ ANativeWindow_Buffer outBuffer;
-// ----------------------------------------------------------------------------
-
-status_t Surface::lock(SurfaceInfo* info, bool blocking) {
- return Surface::lock(info, NULL, blocking);
-}
-
-status_t Surface::lock(SurfaceInfo* other, Region* dirtyIn, bool blocking)
-{
- if (getConnectedApi()) {
- LOGE("Surface::lock(%p) failed. Already connected to another API",
- (ANativeWindow*)this);
- CallStack stack;
- stack.update();
- stack.dump("");
- return INVALID_OPERATION;
- }
-
- if (mApiLock.tryLock() != NO_ERROR) {
- LOGE("calling Surface::lock from different threads!");
- CallStack stack;
- stack.update();
- stack.dump("");
- return WOULD_BLOCK;
+ ARect temp;
+ ARect* inOutDirtyBounds = NULL;
+ if (dirtyIn) {
+ temp = dirtyIn->getBounds();
+ inOutDirtyBounds = &temp;
}
- /* Here we're holding mApiLock */
-
- if (mLockedBuffer != 0) {
- LOGE("Surface::lock failed, already locked");
- mApiLock.unlock();
- return INVALID_OPERATION;
- }
-
- // we're intending to do software rendering from this point
- mSurfaceTextureClient->setUsage(
- GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
+ status_t err = SurfaceTextureClient::lock(&outBuffer, inOutDirtyBounds);
- ANativeWindowBuffer* out;
- status_t err = mSurfaceTextureClient->dequeueBuffer(&out);
- LOGE_IF(err, "dequeueBuffer failed (%s)", strerror(-err));
if (err == NO_ERROR) {
- sp<GraphicBuffer> backBuffer(GraphicBuffer::getSelf(out));
- err = mSurfaceTextureClient->lockBuffer(backBuffer.get());
- LOGE_IF(err, "lockBuffer (handle=%p) failed (%s)",
- backBuffer->handle, strerror(-err));
- if (err == NO_ERROR) {
- const Rect bounds(backBuffer->width, backBuffer->height);
- const Region boundsRegion(bounds);
- Region scratch(boundsRegion);
- Region& newDirtyRegion(dirtyIn ? *dirtyIn : scratch);
- newDirtyRegion &= boundsRegion;
-
- // figure out if we can copy the frontbuffer back
- const sp<GraphicBuffer>& frontBuffer(mPostedBuffer);
- const bool canCopyBack = (frontBuffer != 0 &&
- backBuffer->width == frontBuffer->width &&
- backBuffer->height == frontBuffer->height &&
- backBuffer->format == frontBuffer->format &&
- !(mFlags & ISurfaceComposer::eDestroyBackbuffer));
-
- // the dirty region we report to surfaceflinger is the one
- // given by the user (as opposed to the one *we* return to the
- // user).
- mDirtyRegion = newDirtyRegion;
-
- if (canCopyBack) {
- // copy the area that is invalid and not repainted this round
- const Region copyback(mOldDirtyRegion.subtract(newDirtyRegion));
- if (!copyback.isEmpty())
- copyBlt(backBuffer, frontBuffer, copyback);
- } else {
- // if we can't copy-back anything, modify the user's dirty
- // region to make sure they redraw the whole buffer
- newDirtyRegion = boundsRegion;
- }
-
- // keep track of the are of the buffer that is "clean"
- // (ie: that will be redrawn)
- mOldDirtyRegion = newDirtyRegion;
-
- void* vaddr;
- status_t res = backBuffer->lock(
- GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
- newDirtyRegion.bounds(), &vaddr);
-
- LOGW_IF(res, "failed locking buffer (handle = %p)",
- backBuffer->handle);
-
- mLockedBuffer = backBuffer;
- other->w = backBuffer->width;
- other->h = backBuffer->height;
- other->s = backBuffer->stride;
- other->usage = backBuffer->usage;
- other->format = backBuffer->format;
- other->bits = vaddr;
- }
+ other->w = uint32_t(outBuffer.width);
+ other->h = uint32_t(outBuffer.height);
+ other->s = uint32_t(outBuffer.stride);
+ other->usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
+ other->format = uint32_t(outBuffer.format);
+ other->bits = outBuffer.bits;
}
- mApiLock.unlock();
return err;
}
-
-status_t Surface::unlockAndPost()
-{
- if (mLockedBuffer == 0) {
- LOGE("Surface::unlockAndPost failed, no locked buffer");
- return INVALID_OPERATION;
- }
-
- status_t err = mLockedBuffer->unlock();
- LOGE_IF(err, "failed unlocking buffer (%p)", mLockedBuffer->handle);
-
- err = mSurfaceTextureClient->queueBuffer(mLockedBuffer.get());
- LOGE_IF(err, "queueBuffer (handle=%p) failed (%s)",
- mLockedBuffer->handle, strerror(-err));
- mPostedBuffer = mLockedBuffer;
- mLockedBuffer = 0;
- return err;
+status_t Surface::unlockAndPost() {
+ return SurfaceTextureClient::unlockAndPost();
}
// ----------------------------------------------------------------------------
diff --git a/libs/gui/SurfaceTexture.cpp b/libs/gui/SurfaceTexture.cpp
index 1410481..a12d40a 100644
--- a/libs/gui/SurfaceTexture.cpp
+++ b/libs/gui/SurfaceTexture.cpp
@@ -495,7 +495,7 @@ status_t SurfaceTexture::setTransform(uint32_t transform) {
}
status_t SurfaceTexture::connect(int api) {
- LOGV("SurfaceTexture::connect");
+ LOGV("SurfaceTexture::connect(this=%p, %d)", this, api);
Mutex::Autolock lock(mMutex);
int err = NO_ERROR;
switch (api) {
@@ -504,6 +504,8 @@ status_t SurfaceTexture::connect(int api) {
case NATIVE_WINDOW_API_MEDIA:
case NATIVE_WINDOW_API_CAMERA:
if (mConnectedApi != NO_CONNECTED_API) {
+ LOGE("connect: already connected (cur=%d, req=%d)",
+ mConnectedApi, api);
err = -EINVAL;
} else {
mConnectedApi = api;
@@ -517,7 +519,7 @@ status_t SurfaceTexture::connect(int api) {
}
status_t SurfaceTexture::disconnect(int api) {
- LOGV("SurfaceTexture::disconnect");
+ LOGV("SurfaceTexture::disconnect(this=%p, %d)", this, api);
Mutex::Autolock lock(mMutex);
int err = NO_ERROR;
switch (api) {
@@ -528,6 +530,8 @@ status_t SurfaceTexture::disconnect(int api) {
if (mConnectedApi == api) {
mConnectedApi = NO_CONNECTED_API;
} else {
+ LOGE("disconnect: connected to another api (cur=%d, req=%d)",
+ mConnectedApi, api);
err = -EINVAL;
}
break;
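
The two LOGE lines above make the producer-API contract of SurfaceTexture explicit: connect() fails once another API is attached, and disconnect() fails for a mismatched API. A minimal sketch of that contract, not part of this change; the header name and the GL texture name are assumptions:

    #include <gui/SurfaceTexture.h>     // assumed header name for this sketch
    #include <system/window.h>          // NATIVE_WINDOW_API_* constants

    using namespace android;

    static void connectContractSketch() {
        sp<SurfaceTexture> st(new SurfaceTexture(1 /* GL texture name */));
        st->connect(NATIVE_WINDOW_API_MEDIA);     // NO_ERROR: nothing attached yet
        st->connect(NATIVE_WINDOW_API_CAMERA);    // -EINVAL, logs "already connected"
        st->disconnect(NATIVE_WINDOW_API_CAMERA); // -EINVAL, logs "connected to another api"
        st->disconnect(NATIVE_WINDOW_API_MEDIA);  // NO_ERROR: detaches the producer
    }
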
diff --git a/libs/gui/SurfaceTextureClient.cpp b/libs/gui/SurfaceTextureClient.cpp
index f39cabf..d5b7c89 100644
--- a/libs/gui/SurfaceTextureClient.cpp
+++ b/libs/gui/SurfaceTextureClient.cpp
@@ -24,24 +24,45 @@
namespace android {
SurfaceTextureClient::SurfaceTextureClient(
- const sp<ISurfaceTexture>& surfaceTexture):
- mSurfaceTexture(surfaceTexture), mAllocator(0), mReqWidth(0),
- mReqHeight(0), mReqFormat(0), mReqUsage(0),
- mTimestamp(NATIVE_WINDOW_TIMESTAMP_AUTO),
- mQueryWidth(0), mQueryHeight(0), mQueryFormat(0),
- mMutex() {
+ const sp<ISurfaceTexture>& surfaceTexture)
+{
+ SurfaceTextureClient::init();
+ SurfaceTextureClient::setISurfaceTexture(surfaceTexture);
+}
+
+SurfaceTextureClient::SurfaceTextureClient() {
+ SurfaceTextureClient::init();
+}
+
+void SurfaceTextureClient::init() {
// Initialize the ANativeWindow function pointers.
- ANativeWindow::setSwapInterval = setSwapInterval;
- ANativeWindow::dequeueBuffer = dequeueBuffer;
- ANativeWindow::cancelBuffer = cancelBuffer;
- ANativeWindow::lockBuffer = lockBuffer;
- ANativeWindow::queueBuffer = queueBuffer;
- ANativeWindow::query = query;
- ANativeWindow::perform = perform;
+ ANativeWindow::setSwapInterval = hook_setSwapInterval;
+ ANativeWindow::dequeueBuffer = hook_dequeueBuffer;
+ ANativeWindow::cancelBuffer = hook_cancelBuffer;
+ ANativeWindow::lockBuffer = hook_lockBuffer;
+ ANativeWindow::queueBuffer = hook_queueBuffer;
+ ANativeWindow::query = hook_query;
+ ANativeWindow::perform = hook_perform;
const_cast<int&>(ANativeWindow::minSwapInterval) = 0;
const_cast<int&>(ANativeWindow::maxSwapInterval) = 1;
+ mReqWidth = 0;
+ mReqHeight = 0;
+ mReqFormat = 0;
+ mReqUsage = 0;
+ mTimestamp = NATIVE_WINDOW_TIMESTAMP_AUTO;
+ mQueryWidth = 0;
+ mQueryHeight = 0;
+ mQueryFormat = 0;
+ mConnectedToCpu = false;
+}
+
+void SurfaceTextureClient::setISurfaceTexture(
+ const sp<ISurfaceTexture>& surfaceTexture)
+{
+ mSurfaceTexture = surfaceTexture;
+
// Get a reference to the allocator.
mAllocator = mSurfaceTexture->getAllocator();
}
@@ -50,42 +71,42 @@ sp<ISurfaceTexture> SurfaceTextureClient::getISurfaceTexture() const {
return mSurfaceTexture;
}
-int SurfaceTextureClient::setSwapInterval(ANativeWindow* window, int interval) {
+int SurfaceTextureClient::hook_setSwapInterval(ANativeWindow* window, int interval) {
SurfaceTextureClient* c = getSelf(window);
return c->setSwapInterval(interval);
}
-int SurfaceTextureClient::dequeueBuffer(ANativeWindow* window,
+int SurfaceTextureClient::hook_dequeueBuffer(ANativeWindow* window,
ANativeWindowBuffer** buffer) {
SurfaceTextureClient* c = getSelf(window);
return c->dequeueBuffer(buffer);
}
-int SurfaceTextureClient::cancelBuffer(ANativeWindow* window,
+int SurfaceTextureClient::hook_cancelBuffer(ANativeWindow* window,
ANativeWindowBuffer* buffer) {
SurfaceTextureClient* c = getSelf(window);
return c->cancelBuffer(buffer);
}
-int SurfaceTextureClient::lockBuffer(ANativeWindow* window,
+int SurfaceTextureClient::hook_lockBuffer(ANativeWindow* window,
ANativeWindowBuffer* buffer) {
SurfaceTextureClient* c = getSelf(window);
return c->lockBuffer(buffer);
}
-int SurfaceTextureClient::queueBuffer(ANativeWindow* window,
+int SurfaceTextureClient::hook_queueBuffer(ANativeWindow* window,
ANativeWindowBuffer* buffer) {
SurfaceTextureClient* c = getSelf(window);
return c->queueBuffer(buffer);
}
-int SurfaceTextureClient::query(const ANativeWindow* window,
+int SurfaceTextureClient::hook_query(const ANativeWindow* window,
int what, int* value) {
const SurfaceTextureClient* c = getSelf(window);
return c->query(what, value);
}
-int SurfaceTextureClient::perform(ANativeWindow* window, int operation, ...) {
+int SurfaceTextureClient::hook_perform(ANativeWindow* window, int operation, ...) {
va_list args;
va_start(args, operation);
SurfaceTextureClient* c = getSelf(window);
@@ -219,7 +240,6 @@ int SurfaceTextureClient::query(int what, int* value) const {
*value = 0;
return NO_ERROR;
case NATIVE_WINDOW_CONCRETE_TYPE:
- // TODO: this is not needed anymore
*value = NATIVE_WINDOW_SURFACE_TEXTURE_CLIENT;
return NO_ERROR;
}
@@ -260,6 +280,12 @@ int SurfaceTextureClient::perform(int operation, va_list args)
case NATIVE_WINDOW_SET_BUFFERS_FORMAT:
res = dispatchSetBuffersFormat(args);
break;
+ case NATIVE_WINDOW_LOCK:
+ res = dispatchLock(args);
+ break;
+ case NATIVE_WINDOW_UNLOCK_AND_POST:
+ res = dispatchUnlockAndPost(args);
+ break;
default:
res = NAME_NOT_FOUND;
break;
@@ -324,28 +350,37 @@ int SurfaceTextureClient::dispatchSetBuffersTimestamp(va_list args) {
return setBuffersTimestamp(timestamp);
}
+int SurfaceTextureClient::dispatchLock(va_list args) {
+ ANativeWindow_Buffer* outBuffer = va_arg(args, ANativeWindow_Buffer*);
+ ARect* inOutDirtyBounds = va_arg(args, ARect*);
+ return lock(outBuffer, inOutDirtyBounds);
+}
+
+int SurfaceTextureClient::dispatchUnlockAndPost(va_list args) {
+ return unlockAndPost();
+}
+
+
int SurfaceTextureClient::connect(int api) {
LOGV("SurfaceTextureClient::connect");
Mutex::Autolock lock(mMutex);
- return mSurfaceTexture->connect(api);
+ int err = mSurfaceTexture->connect(api);
+ if (!err && api == NATIVE_WINDOW_API_CPU) {
+ mConnectedToCpu = true;
+ }
+ return err;
}
int SurfaceTextureClient::disconnect(int api) {
LOGV("SurfaceTextureClient::disconnect");
Mutex::Autolock lock(mMutex);
- return mSurfaceTexture->disconnect(api);
-}
-
-int SurfaceTextureClient::getConnectedApi() const
-{
- // XXX: This method will be going away shortly, and is currently bogus. It
- // always returns "nothing is connected". It will go away once Surface gets
- // updated to actually connect as the 'CPU' API when locking a buffer.
- Mutex::Autolock lock(mMutex);
- return 0;
+ int err = mSurfaceTexture->disconnect(api);
+ if (!err && api == NATIVE_WINDOW_API_CPU) {
+ mConnectedToCpu = false;
+ }
+ return err;
}
-
int SurfaceTextureClient::setUsage(uint32_t reqUsage)
{
LOGV("SurfaceTextureClient::setUsage");
@@ -443,4 +478,160 @@ void SurfaceTextureClient::freeAllBuffers() {
}
}
+// ----------------------------------------------------------------------
+// the lock/unlock APIs must be used from the same thread
+
+static status_t copyBlt(
+ const sp<GraphicBuffer>& dst,
+ const sp<GraphicBuffer>& src,
+ const Region& reg)
+{
+ // src and dst width, height and format must be identical; no verification
+ // is done here.
+ status_t err;
+ uint8_t const * src_bits = NULL;
+ err = src->lock(GRALLOC_USAGE_SW_READ_OFTEN, reg.bounds(), (void**)&src_bits);
+ LOGE_IF(err, "error locking src buffer %s", strerror(-err));
+
+ uint8_t* dst_bits = NULL;
+ err = dst->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, reg.bounds(), (void**)&dst_bits);
+ LOGE_IF(err, "error locking dst buffer %s", strerror(-err));
+
+ Region::const_iterator head(reg.begin());
+ Region::const_iterator tail(reg.end());
+ if (head != tail && src_bits && dst_bits) {
+ const size_t bpp = bytesPerPixel(src->format);
+ const size_t dbpr = dst->stride * bpp;
+ const size_t sbpr = src->stride * bpp;
+
+ while (head != tail) {
+ const Rect& r(*head++);
+ ssize_t h = r.height();
+ if (h <= 0) continue;
+ size_t size = r.width() * bpp;
+ uint8_t const * s = src_bits + (r.left + src->stride * r.top) * bpp;
+ uint8_t * d = dst_bits + (r.left + dst->stride * r.top) * bpp;
+ if (dbpr==sbpr && size==sbpr) {
+ size *= h;
+ h = 1;
+ }
+ do {
+ memcpy(d, s, size);
+ d += dbpr;
+ s += sbpr;
+ } while (--h > 0);
+ }
+ }
+
+ if (src_bits)
+ src->unlock();
+
+ if (dst_bits)
+ dst->unlock();
+
+ return err;
+}
+
+// ----------------------------------------------------------------------------
+
+status_t SurfaceTextureClient::lock(
+ ANativeWindow_Buffer* outBuffer, ARect* inOutDirtyBounds)
+{
+ if (mLockedBuffer != 0) {
+ LOGE("Surface::lock failed, already locked");
+ return INVALID_OPERATION;
+ }
+
+ if (!mConnectedToCpu) {
+ int err = SurfaceTextureClient::connect(NATIVE_WINDOW_API_CPU);
+ if (err) {
+ return err;
+ }
+ // we're intending to do software rendering from this point
+ setUsage(GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
+ }
+
+ ANativeWindowBuffer* out;
+ status_t err = dequeueBuffer(&out);
+ LOGE_IF(err, "dequeueBuffer failed (%s)", strerror(-err));
+ if (err == NO_ERROR) {
+ sp<GraphicBuffer> backBuffer(GraphicBuffer::getSelf(out));
+ err = lockBuffer(backBuffer.get());
+ LOGE_IF(err, "lockBuffer (handle=%p) failed (%s)",
+ backBuffer->handle, strerror(-err));
+ if (err == NO_ERROR) {
+ const Rect bounds(backBuffer->width, backBuffer->height);
+
+ Region newDirtyRegion;
+ if (inOutDirtyBounds) {
+ newDirtyRegion.set(static_cast<Rect const&>(*inOutDirtyBounds));
+ newDirtyRegion.andSelf(bounds);
+ } else {
+ newDirtyRegion.set(bounds);
+ }
+
+ // figure out if we can copy the frontbuffer back
+ const sp<GraphicBuffer>& frontBuffer(mPostedBuffer);
+ const bool canCopyBack = (frontBuffer != 0 &&
+ backBuffer->width == frontBuffer->width &&
+ backBuffer->height == frontBuffer->height &&
+ backBuffer->format == frontBuffer->format);
+
+ if (canCopyBack) {
+ // copy the area that is invalid and not repainted this round
+ const Region copyback(mOldDirtyRegion.subtract(newDirtyRegion));
+ if (!copyback.isEmpty())
+ copyBlt(backBuffer, frontBuffer, copyback);
+ } else {
+ // if we can't copy-back anything, modify the user's dirty
+ // region to make sure they redraw the whole buffer
+ newDirtyRegion.set(bounds);
+ }
+
+ // keep track of the area of the buffer that is "clean"
+ // (i.e., that will be redrawn)
+ mOldDirtyRegion = newDirtyRegion;
+
+ if (inOutDirtyBounds) {
+ *inOutDirtyBounds = newDirtyRegion.getBounds();
+ }
+
+ void* vaddr;
+ status_t res = backBuffer->lock(
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+ newDirtyRegion.bounds(), &vaddr);
+
+ LOGW_IF(res, "failed locking buffer (handle = %p)",
+ backBuffer->handle);
+
+ mLockedBuffer = backBuffer;
+ outBuffer->width = backBuffer->width;
+ outBuffer->height = backBuffer->height;
+ outBuffer->stride = backBuffer->stride;
+ outBuffer->format = backBuffer->format;
+ outBuffer->bits = vaddr;
+ }
+ }
+ return err;
+}
+
+status_t SurfaceTextureClient::unlockAndPost()
+{
+ if (mLockedBuffer == 0) {
+ LOGE("Surface::unlockAndPost failed, no locked buffer");
+ return INVALID_OPERATION;
+ }
+
+ status_t err = mLockedBuffer->unlock();
+ LOGE_IF(err, "failed unlocking buffer (%p)", mLockedBuffer->handle);
+
+ err = queueBuffer(mLockedBuffer.get());
+ LOGE_IF(err, "queueBuffer (handle=%p) failed (%s)",
+ mLockedBuffer->handle, strerror(-err));
+
+ mPostedBuffer = mLockedBuffer;
+ mLockedBuffer = 0;
+ return err;
+}
+
}; // namespace android
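
The lock()/unlockAndPost() pair moved into SurfaceTextureClient above is reachable by any ANativeWindow client through the new NATIVE_WINDOW_LOCK and NATIVE_WINDOW_UNLOCK_AND_POST perform codes. A minimal sketch of one software-rendered frame driven that way; the header names and the ISurfaceTexture parameter are assumptions, and error handling is omitted:

    #include <gui/SurfaceTextureClient.h>   // assumed header name for this sketch
    #include <system/window.h>              // ANativeWindow_Buffer, ARect, perform codes
    #include <ui/PixelFormat.h>             // bytesPerPixel(), as used by copyBlt() above
    #include <string.h>

    using namespace android;

    static void softwareRenderOnce(const sp<ISurfaceTexture>& texture) {
        sp<SurfaceTextureClient> stc(new SurfaceTextureClient(texture));
        ANativeWindow* win = static_cast<ANativeWindow*>(stc.get());

        ANativeWindow_Buffer buffer;
        ARect dirty = { 0, 0, 64, 64 };     // in/out: clipped to the buffer bounds on return
        // The first lock connects the window as NATIVE_WINDOW_API_CPU and requests
        // GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN (see lock() above).
        if (win->perform(win, NATIVE_WINDOW_LOCK, &buffer, &dirty) == NO_ERROR) {
            // buffer.bits is CPU-mapped; rows are buffer.stride pixels apart.
            memset(buffer.bits, 0,
                    buffer.stride * buffer.height * bytesPerPixel(buffer.format));
            win->perform(win, NATIVE_WINDOW_UNLOCK_AND_POST);   // queue back to the consumer
        }
    }
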
diff --git a/libs/gui/tests/SurfaceTextureClient_test.cpp b/libs/gui/tests/SurfaceTextureClient_test.cpp
index 519b40e..2b8f204 100644
--- a/libs/gui/tests/SurfaceTextureClient_test.cpp
+++ b/libs/gui/tests/SurfaceTextureClient_test.cpp
@@ -613,4 +613,90 @@ TEST_F(SurfaceTextureClientTest, QueryFormatAfterSettingWorks) {
}
}
+class MultiSurfaceTextureClientTest : public ::testing::Test {
+
+public:
+ MultiSurfaceTextureClientTest() :
+ mEglDisplay(EGL_NO_DISPLAY),
+ mEglContext(EGL_NO_CONTEXT) {
+ for (int i = 0; i < NUM_SURFACE_TEXTURES; i++) {
+ mEglSurfaces[i] = EGL_NO_SURFACE;
+ }
+ }
+
+protected:
+
+ enum { NUM_SURFACE_TEXTURES = 32 };
+
+ virtual void SetUp() {
+ mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+ ASSERT_EQ(EGL_SUCCESS, eglGetError());
+ ASSERT_NE(EGL_NO_DISPLAY, mEglDisplay);
+
+ EGLint majorVersion, minorVersion;
+ EXPECT_TRUE(eglInitialize(mEglDisplay, &majorVersion, &minorVersion));
+ ASSERT_EQ(EGL_SUCCESS, eglGetError());
+
+ EGLConfig myConfig;
+ EGLint numConfigs = 0;
+ EGLint configAttribs[] = {
+ EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+ EGL_NONE
+ };
+ EXPECT_TRUE(eglChooseConfig(mEglDisplay, configAttribs, &myConfig, 1,
+ &numConfigs));
+ ASSERT_EQ(EGL_SUCCESS, eglGetError());
+
+ mEglContext = eglCreateContext(mEglDisplay, myConfig, EGL_NO_CONTEXT,
+ 0);
+ ASSERT_EQ(EGL_SUCCESS, eglGetError());
+ ASSERT_NE(EGL_NO_CONTEXT, mEglContext);
+
+ for (int i = 0; i < NUM_SURFACE_TEXTURES; i++) {
+ sp<SurfaceTexture> st(new SurfaceTexture(i));
+ sp<SurfaceTextureClient> stc(new SurfaceTextureClient(st));
+ mEglSurfaces[i] = eglCreateWindowSurface(mEglDisplay, myConfig,
+ static_cast<ANativeWindow*>(stc.get()), NULL);
+ ASSERT_EQ(EGL_SUCCESS, eglGetError());
+ ASSERT_NE(EGL_NO_SURFACE, mEglSurfaces[i]);
+ }
+ }
+
+ virtual void TearDown() {
+ eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
+ EGL_NO_CONTEXT);
+
+ for (int i = 0; i < NUM_SURFACE_TEXTURES; i++) {
+ if (mEglSurfaces[i] != EGL_NO_SURFACE) {
+ eglDestroySurface(mEglDisplay, mEglSurfaces[i]);
+ }
+ }
+
+ if (mEglContext != EGL_NO_CONTEXT) {
+ eglDestroyContext(mEglDisplay, mEglContext);
+ }
+
+ if (mEglDisplay != EGL_NO_DISPLAY) {
+ eglTerminate(mEglDisplay);
+ }
+ }
+
+ EGLDisplay mEglDisplay;
+ EGLSurface mEglSurfaces[NUM_SURFACE_TEXTURES];
+ EGLContext mEglContext;
+};
+
+// XXX: This test is disabled because it causes a hang on some devices. See bug
+// 5015672.
+TEST_F(MultiSurfaceTextureClientTest, DISABLED_MakeCurrentBetweenSurfacesWorks) {
+ for (int iter = 0; iter < 8; iter++) {
+ for (int i = 0; i < NUM_SURFACE_TEXTURES; i++) {
+ eglMakeCurrent(mEglDisplay, mEglSurfaces[i], mEglSurfaces[i],
+ mEglContext);
+ glClear(GL_COLOR_BUFFER_BIT);
+ eglSwapBuffers(mEglDisplay, mEglSurfaces[i]);
+ }
+ }
+}
+
} // namespace android
diff --git a/libs/hwui/Caches.cpp b/libs/hwui/Caches.cpp
index e232ddd..7114b6a 100644
--- a/libs/hwui/Caches.cpp
+++ b/libs/hwui/Caches.cpp
@@ -33,6 +33,16 @@ ANDROID_SINGLETON_STATIC_INSTANCE(Caches);
namespace uirenderer {
///////////////////////////////////////////////////////////////////////////////
+// Macros
+///////////////////////////////////////////////////////////////////////////////
+
+#if DEBUG_CACHE_FLUSH
+ #define FLUSH_LOGD(...) LOGD(__VA_ARGS__)
+#else
+ #define FLUSH_LOGD(...)
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
// Constructors/destructor
///////////////////////////////////////////////////////////////////////////////
@@ -150,6 +160,30 @@ void Caches::deleteLayerDeferred(Layer* layer) {
mLayerGarbage.push(layer);
}
+void Caches::flush(FlushMode mode) {
+ FLUSH_LOGD("Flushing caches (mode %d)", mode);
+
+ clearGarbage();
+
+ switch (mode) {
+ case kFlushMode_Full:
+ textureCache.clear();
+ patchCache.clear();
+ dropShadowCache.clear();
+ gradientCache.clear();
+ // fall through
+ case kFlushMode_Moderate:
+ layerCache.clear();
+ pathCache.clear();
+ roundRectShapeCache.clear();
+ circleShapeCache.clear();
+ ovalShapeCache.clear();
+ rectShapeCache.clear();
+ arcShapeCache.clear();
+ break;
+ }
+}
+
///////////////////////////////////////////////////////////////////////////////
// VBO
///////////////////////////////////////////////////////////////////////////////
diff --git a/libs/hwui/Caches.h b/libs/hwui/Caches.h
index e64d8ac..76dff4b 100644
--- a/libs/hwui/Caches.h
+++ b/libs/hwui/Caches.h
@@ -100,6 +100,18 @@ class Caches: public Singleton<Caches> {
Vector<Layer*> mLayerGarbage;
public:
+ enum FlushMode {
+ kFlushMode_Moderate = 0,
+ kFlushMode_Full
+ };
+
+ /**
+ * Flush the cache.
+ *
+ * @param mode Indicates how much of the cache should be flushed
+ */
+ void flush(FlushMode mode);
+
/**
* Indicates whether the renderer is in debug mode.
* This debug mode provides limited information to app developers.
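
A minimal sketch of how a caller might drive the new Caches::flush() entry point; the include path and the Singleton<Caches>::getInstance() accessor are assumptions:

    #include <Caches.h>     // assumed libhwui include path for this sketch

    using namespace android::uirenderer;

    static void trimHwuiCaches(bool aggressive) {
        Caches& caches = Caches::getInstance();
        // Moderate drops layer, path and shape caches; Full additionally clears
        // textures, patches, drop shadows and gradients (see Caches::flush above).
        caches.flush(aggressive ? Caches::kFlushMode_Full
                                : Caches::kFlushMode_Moderate);
    }
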
diff --git a/libs/hwui/Debug.h b/libs/hwui/Debug.h
index 2cdc8c3..5db73db 100644
--- a/libs/hwui/Debug.h
+++ b/libs/hwui/Debug.h
@@ -26,6 +26,9 @@
// Turn on to enable memory usage summary on each frame
#define DEBUG_MEMORY_USAGE 0
+// Turn on to enable debugging of cache flushes
+#define DEBUG_CACHE_FLUSH 1
+
// Turn on to enable layers debugging when rendered as regions
#define DEBUG_LAYERS_AS_REGIONS 0
diff --git a/libs/ui/FramebufferNativeWindow.cpp b/libs/ui/FramebufferNativeWindow.cpp
index 9c10c75..794747d 100644
--- a/libs/ui/FramebufferNativeWindow.cpp
+++ b/libs/ui/FramebufferNativeWindow.cpp
@@ -303,6 +303,10 @@ int FramebufferNativeWindow::perform(ANativeWindow* window,
case NATIVE_WINDOW_CONNECT:
case NATIVE_WINDOW_DISCONNECT:
break;
+ case NATIVE_WINDOW_LOCK:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_UNLOCK_AND_POST:
+ return INVALID_OPERATION;
default:
return NAME_NOT_FOUND;
}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index b20a6e9..4f9eb2b 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -551,7 +551,9 @@ public class AudioTrack
* @see #PLAYSTATE_PLAYING
*/
public int getPlayState() {
- return mPlayState;
+ synchronized (mPlayStateLock) {
+ return mPlayState;
+ }
}
/**
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 7b7ba74..178039c 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -27,6 +27,8 @@
#include <binder/IServiceManager.h>
#include <binder/IPCThreadState.h>
+#include <gui/SurfaceTextureClient.h>
+
#include <media/mediaplayer.h>
#include <media/AudioTrack.h>
@@ -38,6 +40,7 @@
#include <utils/String8.h>
#include <system/audio.h>
+#include <system/window.h>
namespace android {
@@ -194,13 +197,62 @@ status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *m
return mPlayer->getMetadata(update_only, apply_filter, metadata);
}
+void MediaPlayer::disconnectNativeWindow() {
+ if (mConnectedWindow != NULL) {
+ status_t err = native_window_disconnect(mConnectedWindow.get(),
+ NATIVE_WINDOW_API_MEDIA);
+
+ if (err != OK) {
+ LOGW("native_window_disconnect returned an error: %s (%d)",
+ strerror(-err), err);
+ }
+ }
+ mConnectedWindow.clear();
+}
+
status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
{
LOGV("setVideoSurface");
Mutex::Autolock _l(mLock);
if (mPlayer == 0) return NO_INIT;
- return mPlayer->setVideoSurface(surface);
+ sp<IBinder> binder(surface == NULL ? NULL : surface->asBinder());
+ if (mConnectedWindowBinder == binder) {
+ return OK;
+ }
+
+ if (surface != NULL) {
+ status_t err = native_window_connect(surface.get(),
+ NATIVE_WINDOW_API_MEDIA);
+
+ if (err != OK) {
+ // Note that we must do the reset before disconnecting from the ANW.
+ // Otherwise queue/dequeue calls could be made on the disconnected
+ // ANW, which may result in errors.
+ reset_l();
+
+ disconnectNativeWindow();
+
+ return err;
+ }
+ }
+
+ // Note that we must set the player's new surface before disconnecting the
+ // old one. Otherwise queue/dequeue calls could be made on the disconnected
+ // ANW, which may result in errors.
+ status_t err = mPlayer->setVideoSurface(surface);
+
+ disconnectNativeWindow();
+
+ mConnectedWindow = surface;
+
+ if (err == OK) {
+ mConnectedWindowBinder = binder;
+ } else {
+ disconnectNativeWindow();
+ }
+
+ return err;
}
status_t MediaPlayer::setVideoSurfaceTexture(
@@ -210,7 +262,46 @@ status_t MediaPlayer::setVideoSurfaceTexture(
Mutex::Autolock _l(mLock);
if (mPlayer == 0) return NO_INIT;
- return mPlayer->setVideoSurfaceTexture(surfaceTexture);
+ sp<IBinder> binder(surfaceTexture == NULL ? NULL :
+ surfaceTexture->asBinder());
+ if (mConnectedWindowBinder == binder) {
+ return OK;
+ }
+
+ sp<ANativeWindow> anw;
+ if (surfaceTexture != NULL) {
+ anw = new SurfaceTextureClient(surfaceTexture);
+ status_t err = native_window_connect(anw.get(),
+ NATIVE_WINDOW_API_MEDIA);
+
+ if (err != OK) {
+ // Note that we must do the reset before disconnecting from the ANW.
+ // Otherwise queue/dequeue calls could be made on the disconnected
+ // ANW, which may result in errors.
+ reset_l();
+
+ disconnectNativeWindow();
+
+ return err;
+ }
+ }
+
+ // Note that we must set the player's new SurfaceTexture before
+ // disconnecting the old one. Otherwise queue/dequeue calls could be made
+ // on the disconnected ANW, which may result in errors.
+ status_t err = mPlayer->setVideoSurfaceTexture(surfaceTexture);
+
+ disconnectNativeWindow();
+
+ mConnectedWindow = anw;
+
+ if (err == OK) {
+ mConnectedWindowBinder = binder;
+ } else {
+ disconnectNativeWindow();
+ }
+
+ return err;
}
// must call with lock held
@@ -434,10 +525,8 @@ status_t MediaPlayer::seekTo(int msec)
return result;
}
-status_t MediaPlayer::reset()
+status_t MediaPlayer::reset_l()
{
- LOGV("reset");
- Mutex::Autolock _l(mLock);
mLoop = false;
if (mCurrentState == MEDIA_PLAYER_IDLE) return NO_ERROR;
mPrepareSync = false;
@@ -458,6 +547,13 @@ status_t MediaPlayer::reset()
return NO_ERROR;
}
+status_t MediaPlayer::reset()
+{
+ LOGV("reset");
+ Mutex::Autolock _l(mLock);
+ return reset_l();
+}
+
status_t MediaPlayer::setAudioStreamType(int type)
{
LOGV("MediaPlayer::setAudioStreamType");
diff --git a/media/libstagefright/codecs/aacenc/src/bit_cnt.c b/media/libstagefright/codecs/aacenc/src/bit_cnt.c
index dd0b9b4..8853efc 100644
--- a/media/libstagefright/codecs/aacenc/src/bit_cnt.c
+++ b/media/libstagefright/codecs/aacenc/src/bit_cnt.c
@@ -496,7 +496,7 @@ Word16 codeValues(Word16 *values, Word16 width, Word16 codeBook, HANDLE_BIT_BUF
{
Word32 i, t0, t1, t2, t3, t00, t01;
- Word16 codeWord, codeLength;
+ UWord16 codeWord, codeLength;
Word16 sign, signLength;
diff --git a/media/libstagefright/codecs/aacenc/src/memalign.c b/media/libstagefright/codecs/aacenc/src/memalign.c
index 7d20352..44dd4ba 100644
--- a/media/libstagefright/codecs/aacenc/src/memalign.c
+++ b/media/libstagefright/codecs/aacenc/src/memalign.c
@@ -23,6 +23,11 @@
#include "memalign.h"
+#ifdef _MSC_VER
+#include <stddef.h>
+#else
+#include <stdint.h>
+#endif
/*****************************************************************************
*
@@ -66,8 +71,8 @@ mem_malloc(VO_MEM_OPERATOR *pMemop, unsigned int size, unsigned char alignment,
pMemop->Set(CodecID, tmp, 0, size + alignment);
mem_ptr =
- (unsigned char *) ((unsigned int) (tmp + alignment - 1) &
- (~((unsigned int) (alignment - 1))));
+ (unsigned char *) ((intptr_t) (tmp + alignment - 1) &
+ (~((intptr_t) (alignment - 1))));
if (mem_ptr == tmp)
mem_ptr += alignment;
diff --git a/media/libstagefright/codecs/amrwbenc/src/cmnMemory.c b/media/libstagefright/codecs/amrwbenc/src/cmnMemory.c
deleted file mode 100644
index dd7c26d..0000000
--- a/media/libstagefright/codecs/amrwbenc/src/cmnMemory.c
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- ** Copyright 2003-2010, VisualOn, Inc.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- ** http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-/*******************************************************************************
- File: cmnMemory.c
-
- Content: sample code for memory operator implementation
-
-*******************************************************************************/
-#include "cmnMemory.h"
-
-#include <malloc.h>
-#if defined LINUX
-#include <string.h>
-#endif
-
-//VO_MEM_OPERATOR g_memOP;
-
-VO_U32 cmnMemAlloc (VO_S32 uID, VO_MEM_INFO * pMemInfo)
-{
- if (!pMemInfo)
- return VO_ERR_INVALID_ARG;
-
- pMemInfo->VBuffer = malloc (pMemInfo->Size);
- return 0;
-}
-
-VO_U32 cmnMemFree (VO_S32 uID, VO_PTR pMem)
-{
- free (pMem);
- return 0;
-}
-
-VO_U32 cmnMemSet (VO_S32 uID, VO_PTR pBuff, VO_U8 uValue, VO_U32 uSize)
-{
- memset (pBuff, uValue, uSize);
- return 0;
-}
-
-VO_U32 cmnMemCopy (VO_S32 uID, VO_PTR pDest, VO_PTR pSource, VO_U32 uSize)
-{
- memcpy (pDest, pSource, uSize);
- return 0;
-}
-
-VO_U32 cmnMemCheck (VO_S32 uID, VO_PTR pBuffer, VO_U32 uSize)
-{
- return 0;
-}
-
-VO_S32 cmnMemCompare (VO_S32 uID, VO_PTR pBuffer1, VO_PTR pBuffer2, VO_U32 uSize)
-{
- return memcmp(pBuffer1, pBuffer2, uSize);
-}
-
-VO_U32 cmnMemMove (VO_S32 uID, VO_PTR pDest, VO_PTR pSource, VO_U32 uSize)
-{
- memmove (pDest, pSource, uSize);
- return 0;
-}
-
diff --git a/media/libstagefright/codecs/avc/dec/Android.mk b/media/libstagefright/codecs/avc/dec/Android.mk
deleted file mode 100644
index 2949a04..0000000
--- a/media/libstagefright/codecs/avc/dec/Android.mk
+++ /dev/null
@@ -1,55 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- src/avcdec_api.cpp \
- src/avc_bitstream.cpp \
- src/header.cpp \
- src/itrans.cpp \
- src/pred_inter.cpp \
- src/pred_intra.cpp \
- src/residual.cpp \
- src/slice.cpp \
- src/vlc.cpp
-
-LOCAL_MODULE := libstagefright_avcdec
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/src \
- $(LOCAL_PATH)/include \
- $(LOCAL_PATH)/../common/include \
- $(TOP)/frameworks/base/media/libstagefright/include \
- frameworks/base/include/media/stagefright/openmax \
-
-LOCAL_CFLAGS := -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
-
-include $(BUILD_STATIC_LIBRARY)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- SoftAVC.cpp
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/src \
- $(LOCAL_PATH)/include \
- $(LOCAL_PATH)/../common/include \
- frameworks/base/media/libstagefright/include \
- frameworks/base/include/media/stagefright/openmax \
-
-LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
-
-LOCAL_STATIC_LIBRARIES := \
- libstagefright_avcdec
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright_avc_common \
- libstagefright libstagefright_omx libstagefright_foundation libutils
-
-LOCAL_MODULE := libstagefright_soft_avcdec
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.cpp b/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
deleted file mode 100644
index 6a476f6..0000000
--- a/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
+++ /dev/null
@@ -1,720 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAVC"
-#include <utils/Log.h>
-
-#include "SoftAVC.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/IOMX.h>
-
-#include "avcdec_api.h"
-#include "avcdec_int.h"
-
-namespace android {
-
-static const char kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
-
-static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
-};
-
-template<class T>
-static void InitOMXParams(T *params) {
- params->nSize = sizeof(T);
- params->nVersion.s.nVersionMajor = 1;
- params->nVersion.s.nVersionMinor = 0;
- params->nVersion.s.nRevision = 0;
- params->nVersion.s.nStep = 0;
-}
-
-static int32_t Malloc(void *userData, int32_t size, int32_t attrs) {
- return reinterpret_cast<int32_t>(malloc(size));
-}
-
-static void Free(void *userData, int32_t ptr) {
- free(reinterpret_cast<void *>(ptr));
-}
-
-SoftAVC::SoftAVC(
- const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
- mHandle(new tagAVCHandle),
- mInputBufferCount(0),
- mWidth(160),
- mHeight(120),
- mCropLeft(0),
- mCropTop(0),
- mCropRight(mWidth - 1),
- mCropBottom(mHeight - 1),
- mSPSSeen(false),
- mPPSSeen(false),
- mCurrentTimeUs(-1),
- mEOSStatus(INPUT_DATA_AVAILABLE),
- mOutputPortSettingsChange(NONE) {
- initPorts();
- CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftAVC::~SoftAVC() {
- PVAVCCleanUpDecoder(mHandle);
-
- delete mHandle;
- mHandle = NULL;
-}
-
-void SoftAVC::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumInputBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = 8192;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
- def.format.video.pNativeRender = NULL;
- def.format.video.nFrameWidth = mWidth;
- def.format.video.nFrameHeight = mHeight;
- def.format.video.nStride = def.format.video.nFrameWidth;
- def.format.video.nSliceHeight = def.format.video.nFrameHeight;
- def.format.video.nBitrate = 0;
- def.format.video.xFramerate = 0;
- def.format.video.bFlagErrorConcealment = OMX_FALSE;
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.pNativeWindow = NULL;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumOutputBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
- def.format.video.pNativeRender = NULL;
- def.format.video.nFrameWidth = mWidth;
- def.format.video.nFrameHeight = mHeight;
- def.format.video.nStride = def.format.video.nFrameWidth;
- def.format.video.nSliceHeight = def.format.video.nFrameHeight;
- def.format.video.nBitrate = 0;
- def.format.video.xFramerate = 0;
- def.format.video.bFlagErrorConcealment = OMX_FALSE;
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.pNativeWindow = NULL;
-
- def.nBufferSize =
- (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
-
- addPort(def);
-}
-
-status_t SoftAVC::initDecoder() {
- memset(mHandle, 0, sizeof(tagAVCHandle));
- mHandle->AVCObject = NULL;
- mHandle->userData = this;
- mHandle->CBAVC_DPBAlloc = ActivateSPSWrapper;
- mHandle->CBAVC_FrameBind = BindFrameWrapper;
- mHandle->CBAVC_FrameUnbind = UnbindFrame;
- mHandle->CBAVC_Malloc = Malloc;
- mHandle->CBAVC_Free = Free;
-
- return OK;
-}
-
-OMX_ERRORTYPE SoftAVC::internalGetParameter(
- OMX_INDEXTYPE index, OMX_PTR params) {
- switch (index) {
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex != 0) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- formatParams->xFramerate = 0;
- } else {
- CHECK_EQ(formatParams->nPortIndex, 1u);
-
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- formatParams->xFramerate = 0;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params;
-
- if (profileLevel->nPortIndex != 0) { // Input port only
- LOGE("Invalid port index: %ld", profileLevel->nPortIndex);
- return OMX_ErrorUnsupportedIndex;
- }
-
- size_t index = profileLevel->nProfileIndex;
- size_t nProfileLevels =
- sizeof(kProfileLevels) / sizeof(kProfileLevels[0]);
- if (index >= nProfileLevels) {
- return OMX_ErrorNoMore;
- }
-
- profileLevel->eProfile = kProfileLevels[index].mProfile;
- profileLevel->eLevel = kProfileLevels[index].mLevel;
- return OMX_ErrorNone;
- }
-
- default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
- }
-}
-
-OMX_ERRORTYPE SoftAVC::internalSetParameter(
- OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- "video_decoder.avc",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex != 0) {
- return OMX_ErrorNoMore;
- }
-
- return OMX_ErrorNone;
- }
-
- default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
- }
-}
-
-OMX_ERRORTYPE SoftAVC::getConfig(
- OMX_INDEXTYPE index, OMX_PTR params) {
- switch (index) {
- case OMX_IndexConfigCommonOutputCrop:
- {
- OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
-
- if (rectParams->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- rectParams->nLeft = mCropLeft;
- rectParams->nTop = mCropTop;
- rectParams->nWidth = mCropRight - mCropLeft + 1;
- rectParams->nHeight = mCropBottom - mCropTop + 1;
-
- return OMX_ErrorNone;
- }
-
- default:
- return OMX_ErrorUnsupportedIndex;
- }
-}
-
-static void findNALFragment(
- const OMX_BUFFERHEADERTYPE *inHeader,
- const uint8_t **fragPtr, size_t *fragSize) {
- const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
-
- size_t size = inHeader->nFilledLen;
-
- CHECK(size >= 4);
- CHECK(!memcmp(kStartCode, data, 4));
-
- size_t offset = 4;
- while (offset + 3 < size && memcmp(kStartCode, &data[offset], 4)) {
- ++offset;
- }
-
- *fragPtr = &data[4];
- if (offset + 3 >= size) {
- *fragSize = size - 4;
- } else {
- *fragSize = offset - 4;
- }
-}
-
-void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
- if (mOutputPortSettingsChange != NONE) {
- return;
- }
-
- List<BufferInfo *> &inQueue = getPortQueue(0);
- List<BufferInfo *> &outQueue = getPortQueue(1);
-
- if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
- return;
- }
-
- while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
- && outQueue.size() == kNumOutputBuffers) {
- if (mEOSStatus == INPUT_EOS_SEEN) {
- OMX_BUFFERHEADERTYPE *outHeader;
- if (drainOutputBuffer(&outHeader)) {
- List<BufferInfo *>::iterator it = outQueue.begin();
- while ((*it)->mHeader != outHeader) {
- ++it;
- }
-
- BufferInfo *outInfo = *it;
- outInfo->mOwnedByUs = false;
- outQueue.erase(it);
- outInfo = NULL;
-
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
- return;
- }
-
- BufferInfo *outInfo = *outQueue.begin();
- outHeader = outInfo->mHeader;
-
- outHeader->nOffset = 0;
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- outHeader->nTimeStamp = 0;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
-
- mEOSStatus = OUTPUT_FRAMES_FLUSHED;
- return;
- }
-
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- mEOSStatus = INPUT_EOS_SEEN;
- continue;
- }
-
- mCurrentTimeUs = inHeader->nTimeStamp;
-
- const uint8_t *fragPtr;
- size_t fragSize;
- findNALFragment(inHeader, &fragPtr, &fragSize);
-
- bool releaseFragment;
- OMX_BUFFERHEADERTYPE *outHeader;
- status_t err = decodeFragment(
- fragPtr, fragSize,
- &releaseFragment, &outHeader);
-
- if (releaseFragment) {
- CHECK_GE(inHeader->nFilledLen, fragSize + 4);
-
- inHeader->nOffset += fragSize + 4;
- inHeader->nFilledLen -= fragSize + 4;
-
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
- }
- }
-
- if (outHeader != NULL) {
- List<BufferInfo *>::iterator it = outQueue.begin();
- while ((*it)->mHeader != outHeader) {
- ++it;
- }
-
- BufferInfo *outInfo = *it;
- outInfo->mOwnedByUs = false;
- outQueue.erase(it);
- outInfo = NULL;
-
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
- return;
- }
-
- if (err == INFO_FORMAT_CHANGED) {
- return;
- }
-
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
- return;
- }
- }
-}
-
-status_t SoftAVC::decodeFragment(
- const uint8_t *fragPtr, size_t fragSize,
- bool *releaseFragment,
- OMX_BUFFERHEADERTYPE **outHeader) {
- *releaseFragment = true;
- *outHeader = NULL;
-
- int nalType;
- int nalRefIdc;
- AVCDec_Status res =
- PVAVCDecGetNALType(
- const_cast<uint8_t *>(fragPtr), fragSize,
- &nalType, &nalRefIdc);
-
- if (res != AVCDEC_SUCCESS) {
- LOGV("cannot determine nal type");
- return ERROR_MALFORMED;
- }
-
- if (nalType != AVC_NALTYPE_SPS && nalType != AVC_NALTYPE_PPS
- && (!mSPSSeen || !mPPSSeen)) {
- // We haven't seen SPS or PPS yet.
- return OK;
- }
-
- switch (nalType) {
- case AVC_NALTYPE_SPS:
- {
- mSPSSeen = true;
-
- res = PVAVCDecSeqParamSet(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- return ERROR_MALFORMED;
- }
-
- AVCDecObject *pDecVid = (AVCDecObject *)mHandle->AVCObject;
-
- int32_t width =
- (pDecVid->seqParams[0]->pic_width_in_mbs_minus1 + 1) * 16;
-
- int32_t height =
- (pDecVid->seqParams[0]->pic_height_in_map_units_minus1 + 1) * 16;
-
- int32_t crop_left, crop_right, crop_top, crop_bottom;
- if (pDecVid->seqParams[0]->frame_cropping_flag)
- {
- crop_left = 2 * pDecVid->seqParams[0]->frame_crop_left_offset;
- crop_right =
- width - (2 * pDecVid->seqParams[0]->frame_crop_right_offset + 1);
-
- if (pDecVid->seqParams[0]->frame_mbs_only_flag)
- {
- crop_top = 2 * pDecVid->seqParams[0]->frame_crop_top_offset;
- crop_bottom =
- height -
- (2 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
- }
- else
- {
- crop_top = 4 * pDecVid->seqParams[0]->frame_crop_top_offset;
- crop_bottom =
- height -
- (4 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
- }
- } else {
- crop_bottom = height - 1;
- crop_right = width - 1;
- crop_top = crop_left = 0;
- }
-
- status_t err = OK;
-
- if (mWidth != width || mHeight != height) {
- mWidth = width;
- mHeight = height;
-
- updatePortDefinitions();
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
-
- err = INFO_FORMAT_CHANGED;
- }
-
- if (mCropLeft != crop_left
- || mCropTop != crop_top
- || mCropRight != crop_right
- || mCropBottom != crop_bottom) {
- mCropLeft = crop_left;
- mCropTop = crop_top;
- mCropRight = crop_right;
- mCropBottom = crop_bottom;
-
- notify(OMX_EventPortSettingsChanged,
- 1,
- OMX_IndexConfigCommonOutputCrop,
- NULL);
- }
-
- return err;
- }
-
- case AVC_NALTYPE_PPS:
- {
- mPPSSeen = true;
-
- res = PVAVCDecPicParamSet(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- LOGV("PVAVCDecPicParamSet returned error %d", res);
- return ERROR_MALFORMED;
- }
-
- return OK;
- }
-
- case AVC_NALTYPE_SLICE:
- case AVC_NALTYPE_IDR:
- {
- res = PVAVCDecodeSlice(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res == AVCDEC_PICTURE_OUTPUT_READY) {
- *releaseFragment = false;
-
- if (!drainOutputBuffer(outHeader)) {
- return UNKNOWN_ERROR;
- }
-
- return OK;
- }
-
- if (res == AVCDEC_PICTURE_READY || res == AVCDEC_SUCCESS) {
- return OK;
- } else {
- LOGV("PVAVCDecodeSlice returned error %d", res);
- return ERROR_MALFORMED;
- }
- }
-
- case AVC_NALTYPE_SEI:
- {
- res = PVAVCDecSEI(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- return ERROR_MALFORMED;
- }
-
- return OK;
- }
-
- case AVC_NALTYPE_AUD:
- case AVC_NALTYPE_FILL:
- case AVC_NALTYPE_EOSEQ:
- {
- return OK;
- }
-
- default:
- {
- LOGE("Should not be here, unknown nalType %d", nalType);
-
- return ERROR_MALFORMED;
- }
- }
-
- return OK;
-}
-
-bool SoftAVC::drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader) {
- int32_t index;
- int32_t Release;
- AVCFrameIO Output;
- Output.YCbCr[0] = Output.YCbCr[1] = Output.YCbCr[2] = NULL;
- AVCDec_Status status =
- PVAVCDecGetOutput(mHandle, &index, &Release, &Output);
-
- if (status != AVCDEC_SUCCESS) {
- return false;
- }
-
- PortInfo *port = editPortInfo(1);
- CHECK_GE(index, 0);
- CHECK_LT((size_t)index, port->mBuffers.size());
- CHECK(port->mBuffers.editItemAt(index).mOwnedByUs);
-
- *outHeader = port->mBuffers.editItemAt(index).mHeader;
- (*outHeader)->nOffset = 0;
- (*outHeader)->nFilledLen = port->mDef.nBufferSize;
- (*outHeader)->nFlags = 0;
-
- return true;
-}
-
-void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
- if (portIndex == 0) {
- PVAVCDecReset(mHandle);
-
- mEOSStatus = INPUT_DATA_AVAILABLE;
- }
-}
-
-void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
- if (portIndex != 1) {
- return;
- }
-
- switch (mOutputPortSettingsChange) {
- case NONE:
- break;
-
- case AWAITING_DISABLED:
- {
- CHECK(!enabled);
- mOutputPortSettingsChange = AWAITING_ENABLED;
- break;
- }
-
- default:
- {
- CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
- CHECK(enabled);
- mOutputPortSettingsChange = NONE;
- break;
- }
- }
-}
-
-void SoftAVC::updatePortDefinitions() {
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def = &editPortInfo(1)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def->nBufferSize =
- (def->format.video.nFrameWidth
- * def->format.video.nFrameHeight * 3) / 2;
-}
-
-// static
-int32_t SoftAVC::ActivateSPSWrapper(
- void *userData, unsigned int sizeInMbs, unsigned int numBuffers) {
- return static_cast<SoftAVC *>(userData)->activateSPS(sizeInMbs, numBuffers);
-}
-
-// static
-int32_t SoftAVC::BindFrameWrapper(
- void *userData, int32_t index, uint8_t **yuv) {
- return static_cast<SoftAVC *>(userData)->bindFrame(index, yuv);
-}
-
-// static
-void SoftAVC::UnbindFrame(void *userData, int32_t index) {
-}
-
-int32_t SoftAVC::activateSPS(
- unsigned int sizeInMbs, unsigned int numBuffers) {
- PortInfo *port = editPortInfo(1);
- CHECK_GE(port->mBuffers.size(), numBuffers);
- CHECK_GE(port->mDef.nBufferSize, (sizeInMbs << 7) * 3);
-
- return 1;
-}
-
-int32_t SoftAVC::bindFrame(int32_t index, uint8_t **yuv) {
- PortInfo *port = editPortInfo(1);
-
- CHECK_GE(index, 0);
- CHECK_LT((size_t)index, port->mBuffers.size());
-
- BufferInfo *outBuffer =
- &port->mBuffers.editItemAt(index);
-
- CHECK(outBuffer->mOwnedByUs);
-
- outBuffer->mHeader->nTimeStamp = mCurrentTimeUs;
- *yuv = outBuffer->mHeader->pBuffer;
-
- return 1;
-}
-
-} // namespace android
-
-android::SoftOMXComponent *createSoftOMXComponent(
- const char *name, const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData, OMX_COMPONENTTYPE **component) {
- return new android::SoftAVC(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.h b/media/libstagefright/codecs/avc/dec/SoftAVC.h
deleted file mode 100644
index 1594b4d..0000000
--- a/media/libstagefright/codecs/avc/dec/SoftAVC.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AVC_H_
-
-#define SOFT_AVC_H_
-
-#include "SimpleSoftOMXComponent.h"
-
-struct tagAVCHandle;
-
-namespace android {
-
-struct SoftAVC : public SimpleSoftOMXComponent {
- SoftAVC(const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component);
-
-protected:
- virtual ~SoftAVC();
-
- virtual OMX_ERRORTYPE internalGetParameter(
- OMX_INDEXTYPE index, OMX_PTR params);
-
- virtual OMX_ERRORTYPE internalSetParameter(
- OMX_INDEXTYPE index, const OMX_PTR params);
-
- virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params);
-
- virtual void onQueueFilled(OMX_U32 portIndex);
- virtual void onPortFlushCompleted(OMX_U32 portIndex);
- virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-
-private:
- enum {
- kNumInputBuffers = 4,
- kNumOutputBuffers = 18,
- };
-
- enum EOSStatus {
- INPUT_DATA_AVAILABLE,
- INPUT_EOS_SEEN,
- OUTPUT_FRAMES_FLUSHED,
- };
-
- tagAVCHandle *mHandle;
-
- size_t mInputBufferCount;
-
- int32_t mWidth, mHeight;
- int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
-
- bool mSPSSeen, mPPSSeen;
-
- int64_t mCurrentTimeUs;
-
- EOSStatus mEOSStatus;
-
- enum {
- NONE,
- AWAITING_DISABLED,
- AWAITING_ENABLED
- } mOutputPortSettingsChange;
-
- void initPorts();
- status_t initDecoder();
-
- status_t decodeFragment(
- const uint8_t *fragPtr, size_t fragSize,
- bool *releaseFrames,
- OMX_BUFFERHEADERTYPE **outHeader);
-
- void updatePortDefinitions();
- bool drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader);
-
- static int32_t ActivateSPSWrapper(
- void *userData, unsigned int sizeInMbs, unsigned int numBuffers);
-
- static int32_t BindFrameWrapper(
- void *userData, int32_t index, uint8_t **yuv);
-
- static void UnbindFrame(void *userData, int32_t index);
-
- int32_t activateSPS(
- unsigned int sizeInMbs, unsigned int numBuffers);
-
- int32_t bindFrame(int32_t index, uint8_t **yuv);
-
- DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
-};
-
-} // namespace android
-
-#endif // SOFT_AVC_H_
-
diff --git a/media/libstagefright/codecs/avc/dec/include/avcdec_api.h b/media/libstagefright/codecs/avc/dec/include/avcdec_api.h
deleted file mode 100644
index f6a14b7..0000000
--- a/media/libstagefright/codecs/avc/dec/include/avcdec_api.h
+++ /dev/null
@@ -1,200 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library
-and necessary type defitionitions and enumerations.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_API_H_
-#define _AVCDEC_API_H_
-
-#include "avcapi_common.h"
-
-/**
- This enumeration is used for the status returned from the library interface.
-*/
-typedef enum
-{
- /**
- The followings are fail with details. Their values are negative.
- */
- AVCDEC_NO_DATA = -4,
- AVCDEC_PACKET_LOSS = -3,
- /**
- Fail information
- */
- AVCDEC_NO_BUFFER = -2, /* no output picture buffer available */
- AVCDEC_MEMORY_FAIL = -1, /* memory allocation failed */
- AVCDEC_FAIL = 0,
- /**
- Generic success value
- */
- AVCDEC_SUCCESS = 1,
- AVCDEC_PICTURE_OUTPUT_READY = 2,
- AVCDEC_PICTURE_READY = 3,
-
- /**
- The followings are success with warnings. Their values are positive integers.
- */
- AVCDEC_NO_NEXT_SC = 4,
- AVCDEC_REDUNDANT_FRAME = 5,
- AVCDEC_CONCEALED_FRAME = 6 /* detect and conceal the error */
-} AVCDec_Status;
-
-
-/**
-This structure contains sequence parameters information.
-*/
-typedef struct tagAVCDecSPSInfo
-{
- int FrameWidth;
- int FrameHeight;
- uint frame_only_flag;
- int frame_crop_left;
- int frame_crop_right;
- int frame_crop_top;
- int frame_crop_bottom;
-
-} AVCDecSPSInfo;
-
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
- /** THE FOLLOWINGS ARE APIS */
- /**
- This function parses one NAL unit from byte stream format input according to Annex B.
- \param "bitstream" "Pointer to the bitstream buffer."
- \param "nal_unit" "Point to pointer and the location of the start of the first NAL unit
- found in bitstream."
- \param "size" "As input, the pointer to the size of bitstream in bytes. As output,
- the value is changed to be the size of the found NAL unit."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if no first start code is found, AVCDEC_NO_NEX_SC if
- the first start code is found, but the second start code is missing (potential partial NAL)."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, uint8 **nal_unit, int *size);
-
- /**
- This function sniffs the nal_unit_type such that users can call corresponding APIs.
- \param "bitstream" "Pointer to the beginning of a NAL unit (start with forbidden_zero_bit, etc.)."
- \param "size" "size of the bitstream (NumBytesInNALunit + 1)."
- \param "nal_unit_type" "Pointer to the return value of nal unit type."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL otherwise."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int size, int *nal_type, int *nal_ref_idc);
-
- /**
- This function decodes the sequence parameters set, initializes related parameters and
- allocates memory (reference frames list), must also be compliant with Annex A.
- It is equivalent to decode VOL header of MPEG4.
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing single NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "nal_size" "size of the bitstream NumBytesInNALunit."
- \return "AVCDEC_SUCCESS if success,
- AVCDEC_FAIL if profile and level is not supported,
- AVCDEC_MEMORY_FAIL if memory allocations return null."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- /**
- This function returns sequence parameters such as dimension and field flag of the most recently
- decoded SPS. More can be added later or grouped together into a structure. This API can be called
- after PVAVCInitSequence. If no sequence parameter has been decoded yet, it will return AVCDEC_FAIL.
-
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "seqInfo" "Pointer to the AVCDecSeqParamInfo structure."
- \return "AVCDEC_SUCCESS if success and AVCDEC_FAIL if fail."
- \note "This API can be combined with PVAVCInitSequence if wanted to be consistent with m4vdec lib."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, AVCDecSPSInfo *seqInfo);
-
- /**
- This function decodes the picture parameters set and initializes related parameters. Note thate
- the PPS may not be present for every picture.
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing single NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "nal_size" "size of the bitstream NumBytesInNALunit."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if profile and level is not supported."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- /**
- This function decodes one NAL unit of bitstream. The type of nal unit is one of the
- followings, 1, 5. (for now, no data partitioning, type 2,3,4).
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing a single or partial NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "buf_size" "Size of the buffer (less than or equal nal_size)."
- \param "nal_size" "size of the current NAL unit NumBytesInNALunit."
- \return "AVCDEC_PICTURE_READY for success and an output is ready,
- AVCDEC_SUCCESS for success but no output is ready,
-            AVCDEC_PACKET_LOSS if GetData returns AVCDEC_PACKET_LOSS,
-            AVCDEC_FAIL if a syntax error is detected,
-            AVCDEC_MEMORY_FAIL if memory is corrupted,
-            AVCDEC_NO_PICTURE if there is no frame memory to write to (users need to get output and/or return a picture),
- AVCDEC_REDUNDANT_PICTURE if error has been detected in the primary picture and redundant picture is available,
- AVCDEC_CONCEALED_PICTURE if error has been detected and decoder has concealed it."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uint8 *buffer, int buf_size);
-
- /**
- Check the availability of the decoded picture in decoding order (frame_num).
-    The AVCFrameIO structure also provides display-order information so that the application
-    can re-order the frames for display. A picture can be retrieved only once.
- \param "avcHandle" "Handle to the AVC decoder library object."
-    \param "output" "Pointer to the AVCFrameIO structure. Note that the decoder library will
-            not re-use the pixel memory in this structure until it has been returned
-            through the PVAVCReleaseOutput API."
- \return "AVCDEC_SUCCESS for success, AVCDEC_FAIL if no picture is available to be displayed,
- AVCDEC_PICTURE_READY if there is another picture to be displayed."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, int *indx, int *release_flag, AVCFrameIO *output);
-
- /**
- This function resets the decoder and expects to see the next IDR slice.
- \param "avcHandle" "Handle to the AVC decoder library object."
- */
- OSCL_IMPORT_REF void PVAVCDecReset(AVCHandle *avcHandle);
-
- /**
- This function performs clean up operation including memory deallocation.
- \param "avcHandle" "Handle to the AVC decoder library object."
- */
- OSCL_IMPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle);
-//AVCDec_Status EBSPtoRBSP(uint8 *nal_unit,int *size);
-
-
-
- /** CALLBACK FUNCTION TO BE IMPLEMENTED BY APPLICATION */
- /** In AVCHandle structure, userData is a pointer to an object with the following
- member functions.
- */
- AVCDec_Status CBAVCDec_GetData(uint32 *userData, unsigned char **buffer, unsigned int *size);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _AVCDEC_API_H_ */
-
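For orientation while reviewing the removal, the sketch below shows how the entry points documented in the deleted avcdec_api.h were typically driven: Annex B parsing, NAL type sniffing, SPS/PPS decoding, slice decoding, and output retrieval. This is a minimal illustrative sketch, not part of the deleted sources; the AVCHandle callback wiring, the error handling, and the exact way the caller advances through the Annex B buffer are assumptions.

    /* Illustrative sketch only -- not part of the deleted sources. */
    #include "avcdec_api.h"

    static void decode_annexb_buffer(AVCHandle *handle, uint8 *buf, int buf_size)
    {
        uint8 *pos = buf;
        uint8 *nal;
        int size = buf_size;   /* in: bytes available; out: size of the NAL unit found */
        int nal_size, nal_type, nal_ref_idc, indx, release;
        AVCFrameIO frame;

        while (size > 0 && PVAVCAnnexBGetNALUnit(pos, &nal, &size) == AVCDEC_SUCCESS)
        {
            nal_size = size;
            if (PVAVCDecGetNALType(nal, nal_size, &nal_type, &nal_ref_idc) != AVCDEC_SUCCESS)
                break;

            if (nal_type == AVC_NALTYPE_SPS)
                PVAVCDecSeqParamSet(handle, nal, nal_size);
            else if (nal_type == AVC_NALTYPE_PPS)
                PVAVCDecPicParamSet(handle, nal, nal_size);
            else if (nal_type == AVC_NALTYPE_SLICE || nal_type == AVC_NALTYPE_IDR)
            {
                if (PVAVCDecodeSlice(handle, nal, nal_size) == AVCDEC_PICTURE_READY)
                {
                    /* a picture is complete; fetch it in display order */
                    if (PVAVCDecGetOutput(handle, &indx, &release, &frame) == AVCDEC_SUCCESS)
                    {
                        /* consume frame.YCbCr[0..2]; unbind/release the buffer when done */
                    }
                }
            }

            /* advance past this NAL unit (assumed caller-side bookkeeping) */
            pos = nal + nal_size;
            size = buf_size - (int)(pos - buf);
        }
    }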
diff --git a/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h b/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h
deleted file mode 100644
index 6b196de..0000000
--- a/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef PVAVCDECODER_H_INCLUDED
-#define PVAVCDECODER_H_INCLUDED
-
-#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
-#include "pvavcdecoderinterface.h"
-#endif
-
-// AVC video decoder
-class PVAVCDecoder : public PVAVCDecoderInterface
-{
- public:
- virtual ~PVAVCDecoder();
- static PVAVCDecoder* New(void);
- virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
- FunctionType_Malloc, FunctionType_Free, void *);
- virtual void CleanUpAVCDecoder(void);
- virtual void ResetAVCDecoder(void);
- virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size);
- virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size);
- virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size);
- virtual bool GetDecOutput(int *indx, int *release);
- virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right);
- int AVC_Malloc(int32 size, int attribute);
- void AVC_Free(int mem);
-
- private:
- PVAVCDecoder();
- bool Construct(void);
- void *iAVCHandle;
-};
-
-#endif
diff --git a/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h b/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h
deleted file mode 100644
index 027212d..0000000
--- a/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
-#define PVAVCDECODERINTERFACE_H_INCLUDED
-
-typedef void (*FunctionType_Unbind)(void *, int);
-typedef int (*FunctionType_Alloc)(void *, int, uint8 **);
-typedef int (*FunctionType_SPS)(void *, uint, uint);
-typedef int (*FunctionType_Malloc)(void *, int32, int);
-typedef void(*FunctionType_Free)(void *, int);
-
-
-// PVAVCDecoderInterface pure virtual interface class
-class PVAVCDecoderInterface
-{
- public:
- virtual ~PVAVCDecoderInterface() {};
- virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
- FunctionType_Malloc, FunctionType_Free, void *) = 0;
- virtual void CleanUpAVCDecoder(void) = 0;
- virtual void ResetAVCDecoder(void) = 0;
- virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size) = 0;
- virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size) = 0;
- virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size) = 0;
- virtual bool GetDecOutput(int *indx, int *release) = 0;
- virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right) = 0;
-// virtual int AVC_Malloc(int32 size, int attribute);
-// virtual void AVC_Free(int mem);
-};
-
-#endif // PVAVCDECODERINTERFACE_H_INCLUDED
-
-
diff --git a/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp b/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp
deleted file mode 100644
index 270b664..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp
+++ /dev/null
@@ -1,276 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_bitstream.h"
-
-/* Swapping may not be needed anymore since we read one byte at a time and perform
-EBSP to RBSP conversion in bitstream. */
-#ifdef LITTLE_ENDIAN
-#if (WORD_SIZE==32) /* this can be replaced with assembly instructions */
-#define SWAP_BYTES(x) ((((x)&0xFF)<<24) | (((x)&0xFF00)<<8) | (((x)&0xFF0000)>>8) | (((x)&0xFF000000)>>24))
-#else /* for 16-bit */
-#define SWAP_BYTES(x) ((((x)&0xFF)<<8) | (((x)&0xFF00)>>8))
-#endif
-#else
-#define SWAP_BYTES(x) (x)
-#endif
-
-
-/* array for trailing bit pattern as function of number of bits */
-/* the first one is unused. */
-const static uint8 trailing_bits[9] = {0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80};
-
-/* ======================================================================== */
-/* Function : BitstreamInit() */
-/* Date : 11/4/2003 */
-/* Purpose : Populate bitstream structure with bitstream buffer and size */
-/* it also initializes internal data */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* ======================================================================== */
-/* |--------|--------|----~~~~~-----|---------|---------|---------|
- ^ ^read_pos ^data_end_pos
- bitstreamBuffer <--------->
- current_word
-
- |xxxxxxxxxxxxx----| = current_word 32 or 16 bits
- <------------>
- bit_left
- ======================================================================== */
-
-
-/* ======================================================================== */
-/* Function : BitstreamNextWord() */
-/* Date : 12/4/2003 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : Next word with emulation prevention code removed. Everything
-             in the bitstream structure is modified except current_word */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size)
-{
- EBSPtoRBSP(buffer, &size);
-
- stream->incnt = 0;
- stream->incnt_next = 0;
- stream->bitcnt = 0;
- stream->curr_word = stream->next_word = 0;
- stream->read_pos = 0;
-
- stream->bitstreamBuffer = buffer;
-
- stream->data_end_pos = size;
-
- stream->nal_size = size;
-
- return AVCDEC_SUCCESS;
-}
-/* ======================================================================== */
-/* Function : AVC_BitstreamFillCache() */
-/* Date : 1/1/2005 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : Read in 4 bytes of input data */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream)
-{
- uint8 *bitstreamBuffer = stream->bitstreamBuffer;
- uint8 *v;
- int num_bits, i;
-
- stream->curr_word |= (stream->next_word >> stream->incnt); // stream->incnt cannot be 32
- stream->next_word <<= (31 - stream->incnt);
- stream->next_word <<= 1;
- num_bits = stream->incnt_next + stream->incnt;
- if (num_bits >= 32)
- {
- stream->incnt_next -= (32 - stream->incnt);
- stream->incnt = 32;
- return AVCDEC_SUCCESS;
- }
-    /* this check can be removed if there are 4 extra bytes at the end of the bitstream */
- v = bitstreamBuffer + stream->read_pos;
-
- if (stream->read_pos > stream->data_end_pos - 4)
- {
- if (stream->data_end_pos <= stream->read_pos)
- {
- stream->incnt = num_bits;
- stream->incnt_next = 0;
- return AVCDEC_SUCCESS;
- }
-
- stream->next_word = 0;
-
- for (i = 0; i < stream->data_end_pos - stream->read_pos; i++)
- {
- stream->next_word |= (v[i] << ((3 - i) << 3));
- }
-
- stream->read_pos = stream->data_end_pos;
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
-
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- num_bits = i << 3;
- stream->incnt += stream->incnt_next;
- stream->incnt_next = num_bits - (32 - stream->incnt);
- if (stream->incnt_next < 0)
- {
- stream->incnt += num_bits;
- stream->incnt_next = 0;
- }
- else
- {
- stream->incnt = 32;
- }
- return AVCDEC_SUCCESS;
- }
-
- stream->next_word = ((uint32)v[0] << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
- stream->read_pos += 4;
-
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- stream->incnt_next += stream->incnt;
- stream->incnt = 32;
- return AVCDEC_SUCCESS;
-
-}
-/* ======================================================================== */
-/* Function : BitstreamReadBits() */
-/* Date : 11/4/2003 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits         */
-/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
-/* AVCDEC_NO_DATA if callback to get data fails. */
-/* Modified : */
-/* ======================================================================== */
-AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code)
-{
- if (stream->incnt < nBits)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> (32 - nBits);
- BitstreamFlushBits(stream, nBits);
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* ======================================================================== */
-/* Function : BitstreamShowBits() */
-/* Date : 11/4/2003 */
-/* Purpose : Show up to machine word without advancing the pointer. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits         */
-/* is greater than the word-size, AVCDEC_NO_DATA if it needs */
-/* to callback to get data. */
-/* Modified : */
-/* ======================================================================== */
-AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code)
-{
- if (stream->incnt < nBits)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
-
- *code = stream->curr_word >> (32 - nBits);
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : BitstreamRead1Bit() */
-/* Date : 11/4/2003 */
-/* Purpose : Read 1 bit from the bitstream. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if number of bits         */
-/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
-/* AVCDEC_NO_DATA if callback to get data fails. */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code)
-{
- if (stream->incnt < 1)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 31;
- BitstreamFlushBits(stream, 1);
- return AVCDEC_SUCCESS;
-}
-
-
-
-AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream)
-{
- uint n_stuffed;
-
- n_stuffed = (8 - (stream->bitcnt & 0x7)) & 0x7; /* 07/05/01 */
-
- stream->bitcnt += n_stuffed;
- stream->incnt -= n_stuffed;
-
- if (stream->incnt < 0)
- {
- stream->bitcnt += stream->incnt;
- stream->incnt = 0;
- }
- stream->curr_word <<= n_stuffed;
- return AVCDEC_SUCCESS;
-}
-
-/* check whether there are more RBSP data. */
-/* ignore the emulation prevention code, assume it has been taken out. */
-bool more_rbsp_data(AVCDecBitstream *stream)
-{
- int total_bit_left;
- uint code;
-
- if (stream->read_pos >= stream->nal_size)
- {
- total_bit_left = stream->incnt_next + stream->incnt;
- if (total_bit_left <= 0)
- {
- return FALSE;
- }
- else if (total_bit_left <= 8)
- {
- BitstreamShowBits(stream, total_bit_left, &code);
- if (code == trailing_bits[total_bit_left])
- {
- return FALSE;
- }
- }
- }
-
- return TRUE;
-}
-
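To make the removed bit-reader easier to follow, here is a small hedged sketch of how BitstreamInit and BitstreamReadBits compose when parsing the fixed-length fields at the start of an SPS payload. It is illustrative only and not part of the deleted sources; the buffer passed in is assumed to be the NAL payload after the one-byte NAL header, mirroring how PVAVCDecSeqParamSet calls BitstreamInit(bitstream, nal_unit + 1, nal_size - 1).

    /* Illustrative sketch only -- not part of the deleted sources. */
    static void read_sps_prefix(uint8 *payload, int payload_size)
    {
        AVCDecBitstream stream;
        uint profile_idc, constraint_flags, level_idc;

        /* BitstreamInit() above also runs EBSPtoRBSP() on the buffer. */
        BitstreamInit(&stream, payload, payload_size);

        BitstreamReadBits(&stream, 8, &profile_idc);      /* profile_idc, u(8) */
        BitstreamReadBits(&stream, 8, &constraint_flags); /* constraint_set flags + reserved bits, u(8) */
        BitstreamReadBits(&stream, 8, &level_idc);        /* level_idc, u(8) */

        /* The remaining SPS fields are Exp-Golomb coded and were parsed elsewhere
           (DecodeSPS in the deleted library) on top of BitstreamReadBits,
           BitstreamShowBits and BitstreamRead1Bit. */
        (void)profile_idc; (void)constraint_flags; (void)level_idc;
    }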
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp b/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp
deleted file mode 100644
index 0a75f17..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp
+++ /dev/null
@@ -1,1036 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library.
-@publishedAll
-*/
-
-#include <string.h>
-
-#include "avcdec_api.h"
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-/* ======================================================================== */
-/* Function : EBSPtoRBSP() */
-/* Date : 11/4/2003 */
-/* Purpose : Convert EBSP to RBSP and overwrite it. */
-/* Assuming that forbidden_zero, nal_ref_idc and nal_unit_type */
-/* (first byte), has been taken out of the nal_unit. */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-/**
-@pseudocode "
- NumBytesInRBSP = 0;
-    for(i=0; i< *size; i++){
- if(i+2 < *size && next_bits(24)==0x000003){
- rbsp_byte[NumBytesInRBSP++];
- rbsp_byte[NumBytesInRBSP++];
- i+=2;
- emulation_prevention_three_byte (0x03)
- }
- else
- rbsp_byte[NumBytesInRBSP++];
- }"
-*/
-AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size)
-{
- int i, j;
- int count = 0;
-
- /* This code is based on EBSPtoRBSP of JM */
- j = 0;
-
- for (i = 0; i < *size; i++)
- {
- if (count == 2 && nal_unit[i] == 0x03)
- {
- i++;
- count = 0;
- }
- nal_unit[j] = nal_unit[i];
- if (nal_unit[i] == 0x00)
- count++;
- else
- count = 0;
- j++;
- }
-
- *size = j;
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCAnnexBGetNALUnit() */
-/* Date : 11/3/2003 */
-/* Purpose : Parse a NAL from byte stream format. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* ======================================================================== */
-/**
-@pseudocode "
- byte_stream_nal_unit(NumBytesInNalunit){
- while(next_bits(24) != 0x000001)
- zero_byte
- if(more_data_in_byte_stream()){
- start_code_prefix_one_3bytes // equal 0x000001
- nal_unit(NumBytesInNALunit)
- }
- }"
-*/
-OSCL_EXPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, uint8 **nal_unit,
- int *size)
-{
- int i, j, FoundStartCode = 0;
- int end;
-
- i = 0;
- while (bitstream[i] == 0 && i < *size)
- {
- i++;
- }
- if (i >= *size)
- {
- *nal_unit = bitstream;
- return AVCDEC_FAIL; /* cannot find any start_code_prefix. */
- }
- else if (bitstream[i] != 0x1)
- {
- i = -1; /* start_code_prefix is not at the beginning, continue */
- }
-
- i++;
- *nal_unit = bitstream + i; /* point to the beginning of the NAL unit */
-
- j = end = i;
- while (!FoundStartCode)
- {
- while ((j + 1 < *size) && (bitstream[j] != 0 || bitstream[j+1] != 0)) /* see 2 consecutive zero bytes */
- {
- j++;
- }
- end = j; /* stop and check for start code */
- while (j + 2 < *size && bitstream[j+2] == 0) /* keep reading for zero byte */
- {
- j++;
- }
- if (j + 2 >= *size)
- {
- *size -= i;
- return AVCDEC_NO_NEXT_SC; /* cannot find the second start_code_prefix */
- }
- if (bitstream[j+2] == 0x1)
- {
- FoundStartCode = 1;
- }
- else
- {
- /* could be emulation code 0x3 */
- j += 2; /* continue the search */
- }
- }
-
- *size = end - i;
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCGetNALType() */
-/* Date : 11/4/2003 */
-/* Purpose : Sniff NAL type from the bitstream */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int size,
- int *nal_type, int *nal_ref_idc)
-{
- int forbidden_zero_bit;
- if (size > 0)
- {
- forbidden_zero_bit = bitstream[0] >> 7;
- if (forbidden_zero_bit != 0)
- return AVCDEC_FAIL;
- *nal_ref_idc = (bitstream[0] & 0x60) >> 5;
- *nal_type = bitstream[0] & 0x1F;
- return AVCDEC_SUCCESS;
- }
-
- return AVCDEC_FAIL;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCDecSeqParamSet() */
-/* Date : 11/4/2003 */
-/* Purpose : Initialize sequence, memory allocation if necessary. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- AVCDec_Status status;
- AVCDecObject *decvid;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
- void *userData = avcHandle->userData;
- bool first_seq = FALSE;
- int i;
-
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "PVAVCDecSeqParamSet", -1, -1);
-
- if (avcHandle->AVCObject == NULL)
- {
- first_seq = TRUE;
-
- //avcHandle->memory_usage = 0;
- /* allocate AVCDecObject */
- avcHandle->AVCObject = (void*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecObject), 0/*DEFAULT_ATTR*/);
- if (avcHandle->AVCObject == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid = (AVCDecObject*) avcHandle->AVCObject;
-
- memset(decvid, 0, sizeof(AVCDecObject));
-
- decvid->common = (AVCCommonObj*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCCommonObj), 0);
- if (decvid->common == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- video = decvid->common;
- memset(video, 0, sizeof(AVCCommonObj));
-
- video->seq_parameter_set_id = 9999; /* set it to some illegal value */
-
- decvid->bitstream = (AVCDecBitstream *) avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecBitstream), 1/*DEFAULT_ATTR*/);
- if (decvid->bitstream == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid->bitstream->userData = avcHandle->userData; /* callback for more data */
- decvid->avcHandle = avcHandle;
- decvid->debugEnable = avcHandle->debugEnable;
- }
-
- decvid = (AVCDecObject*) avcHandle->AVCObject;
- video = decvid->common;
- bitstream = decvid->bitstream;
-
- /* check if we can reuse the memory without re-allocating it. */
- /* always check if(first_seq==TRUE) */
-
- /* Conversion from EBSP to RBSP */
- video->forbidden_bit = nal_unit[0] >> 7;
- if (video->forbidden_bit) return AVCDEC_FAIL;
- video->nal_ref_idc = (nal_unit[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(nal_unit[0] & 0x1F);
-
- if (video->nal_unit_type != AVC_NALTYPE_SPS) /* not a SPS NAL */
- {
- return AVCDEC_FAIL;
- }
-
- /* Initialize bitstream structure*/
- BitstreamInit(bitstream, nal_unit + 1, nal_size - 1);
-
- /* if first_seq == TRUE, allocate the following memory */
- if (first_seq == TRUE)
- {
- video->currSeqParams = NULL; /* initialize it to NULL */
- video->currPicParams = NULL;
-
-        /* There are 32 pointers to sequence param set, seqParams.
-        There are 256 pointers to picture param set, picParams.*/
- for (i = 0; i < 32; i++)
- decvid->seqParams[i] = NULL;
-
- for (i = 0; i < 256; i++)
- decvid->picParams[i] = NULL;
-
- video->MbToSliceGroupMap = NULL;
-
- video->mem_mgr_ctrl_eq_5 = FALSE;
- video->newPic = TRUE;
- video->newSlice = TRUE;
- video->currPic = NULL;
- video->currFS = NULL;
- video->prevRefPic = NULL;
-
- video->mbNum = 0; // MC_Conceal
- /* Allocate sliceHdr. */
-
- video->sliceHdr = (AVCSliceHeader*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSliceHeader), 5/*DEFAULT_ATTR*/);
- if (video->sliceHdr == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- video->decPicBuf = (AVCDecPicBuffer*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecPicBuffer), 3/*DEFAULT_ATTR*/);
- if (video->decPicBuf == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
- memset(video->decPicBuf, 0, sizeof(AVCDecPicBuffer));
- }
-
- /* Decode SPS, allocate video->seqParams[i] and assign video->currSeqParams */
- status = DecodeSPS(decvid, bitstream);
-
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCDecGetSeqInfo() */
-/* Date : 11/4/2003 */
-/* Purpose : Get sequence parameter info. after SPS NAL is decoded. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* 12/20/03: change input argument, use structure instead. */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, AVCDecSPSInfo *seqInfo)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- int PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
-
- if (decvid == NULL || decvid->seqParams[0] == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
-
- PicWidthInMbs = decvid->seqParams[0]->pic_width_in_mbs_minus1 + 1;
- PicHeightInMapUnits = decvid->seqParams[0]->pic_height_in_map_units_minus1 + 1 ;
- FrameHeightInMbs = (2 - decvid->seqParams[0]->frame_mbs_only_flag) * PicHeightInMapUnits ;
-
- seqInfo->FrameWidth = PicWidthInMbs << 4;
- seqInfo->FrameHeight = FrameHeightInMbs << 4;
-
- seqInfo->frame_only_flag = decvid->seqParams[0]->frame_mbs_only_flag;
-
- if (decvid->seqParams[0]->frame_cropping_flag)
- {
- seqInfo->frame_crop_left = 2 * decvid->seqParams[0]->frame_crop_left_offset;
- seqInfo->frame_crop_right = seqInfo->FrameWidth - (2 * decvid->seqParams[0]->frame_crop_right_offset + 1);
-
- if (seqInfo->frame_only_flag)
- {
- seqInfo->frame_crop_top = 2 * decvid->seqParams[0]->frame_crop_top_offset;
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - (2 * decvid->seqParams[0]->frame_crop_bottom_offset + 1);
-            /* Note in 7.4.2.1, there is a constraint on the value of frame_crop_left and frame_crop_top
- such that they have to be less than or equal to frame_crop_right/2 and frame_crop_bottom/2, respectively. */
- }
- else
- {
- seqInfo->frame_crop_top = 4 * decvid->seqParams[0]->frame_crop_top_offset;
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - (4 * decvid->seqParams[0]->frame_crop_bottom_offset + 1);
-            /* Note in 7.4.2.1, there is a constraint on the value of frame_crop_left and frame_crop_top
- such that they have to be less than or equal to frame_crop_right/2 and frame_crop_bottom/4, respectively. */
- }
- }
- else /* no cropping flag, just give the first and last pixel */
- {
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - 1;
- seqInfo->frame_crop_right = seqInfo->FrameWidth - 1;
- seqInfo->frame_crop_top = seqInfo->frame_crop_left = 0;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCDecPicParamSet() */
-/* Date : 11/4/2003 */
-/* Purpose : Initialize picture */
-/* create reference picture list. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeeded, AVCDEC_FAIL if failed.                */
-/* Modified : */
-/* ======================================================================== */
-/**
-Since PPS doesn't contain much data, most of the picture initialization will
-be done after decoding the slice header in PVAVCDecodeSlice. */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- AVCDec_Status status;
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- bitstream = decvid->bitstream;
- /* 1. Convert EBSP to RBSP. Create bitstream structure */
- video->forbidden_bit = nal_unit[0] >> 7;
- video->nal_ref_idc = (nal_unit[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(nal_unit[0] & 0x1F);
-
- if (video->nal_unit_type != AVC_NALTYPE_PPS) /* not a PPS NAL */
- {
- return AVCDEC_FAIL;
- }
-
-
- /* 2. Initialize bitstream structure*/
- BitstreamInit(bitstream, nal_unit + 1, nal_size - 1);
-
- /* 2. Decode pic_parameter_set_rbsp syntax. Allocate video->picParams[i] and assign to currPicParams */
- status = DecodePPS(decvid, video, bitstream);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
- video->SliceGroupChangeRate = video->currPicParams->slice_group_change_rate_minus1 + 1 ;
-
- return AVCDEC_SUCCESS;
-}
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- OSCL_UNUSED_ARG(avcHandle);
- OSCL_UNUSED_ARG(nal_unit);
- OSCL_UNUSED_ARG(nal_size);
-
- return AVCDEC_SUCCESS;
-}
-/* ======================================================================== */
-/* Function : PVAVCDecodeSlice() */
-/* Date : 11/4/2003 */
-/* Purpose : Decode one NAL unit. */
-/* In/out : */
-/* Return : See enum AVCDec_Status for return values. */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uint8 *buffer,
- int buf_size)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
- AVCDec_Status status;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- bitstream = decvid->bitstream;
-
- if (video->mem_mgr_ctrl_eq_5)
- {
- return AVCDEC_PICTURE_OUTPUT_READY; // to flushout frame buffers
- }
-
- if (video->newSlice)
- {
- /* 2. Check NAL type */
- if (buffer == NULL)
- {
- return AVCDEC_FAIL;
- }
- video->prev_nal_unit_type = video->nal_unit_type;
- video->forbidden_bit = buffer[0] >> 7;
- video->nal_ref_idc = (buffer[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(buffer[0] & 0x1F);
-
-
- if (video->nal_unit_type == AVC_NALTYPE_AUD)
- {
- return AVCDEC_SUCCESS;
- }
-
- if (video->nal_unit_type != AVC_NALTYPE_SLICE &&
- video->nal_unit_type != AVC_NALTYPE_IDR)
- {
- return AVCDEC_FAIL; /* not supported */
- }
-
-
-
- if (video->nal_unit_type >= 2 && video->nal_unit_type <= 4)
- {
- return AVCDEC_FAIL; /* not supported */
- }
- else
- {
- video->slice_data_partitioning = FALSE;
- }
-
- video->newSlice = FALSE;
- /* Initialize bitstream structure*/
- BitstreamInit(bitstream, buffer + 1, buf_size - 1);
-
-
- /* 2.1 Decode Slice Header (separate function)*/
- status = DecodeSliceHeader(decvid, video, bitstream);
- if (status != AVCDEC_SUCCESS)
- {
- video->newSlice = TRUE;
- return status;
- }
-
- if (video->sliceHdr->frame_num != video->prevFrameNum || (video->sliceHdr->first_mb_in_slice < (uint)video->mbNum && video->currSeqParams->constrained_set1_flag == 1))
- {
- video->newPic = TRUE;
- if (video->numMBs > 0)
- {
- // Conceal missing MBs of previously decoded frame
- ConcealSlice(decvid, video->PicSizeInMbs - video->numMBs, video->PicSizeInMbs); // Conceal
- video->numMBs = 0;
-
- // DeblockPicture(video); // No need to deblock
-
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mge_ctrl_eq_5 */
- AVCNalUnitType temp = video->nal_unit_type;
- video->nal_unit_type = video->prev_nal_unit_type;
- StorePictureInDPB(avcHandle, video);
- video->nal_unit_type = temp;
- video->mbNum = 0; // MC_Conceal
- return AVCDEC_PICTURE_OUTPUT_READY;
- }
- }
-
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevFrameNum = 0;
- video->PrevRefFrameNum = 0;
- }
-
- if (!video->currSeqParams->gaps_in_frame_num_value_allowed_flag)
- { /* no gaps allowed, frame_num has to increase by one only */
- /* if(sliceHdr->frame_num != (video->PrevRefFrameNum + 1)%video->MaxFrameNum) */
- if (video->sliceHdr->frame_num != video->PrevRefFrameNum && video->sliceHdr->frame_num != (video->PrevRefFrameNum + 1) % video->MaxFrameNum)
- {
- // Conceal missing MBs of previously decoded frame
- video->numMBs = 0;
- video->newPic = TRUE;
- video->prevFrameNum++; // FIX
- video->PrevRefFrameNum++;
- AVCNalUnitType temp = video->nal_unit_type;
- video->nal_unit_type = AVC_NALTYPE_SLICE; //video->prev_nal_unit_type;
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- video->currFS->IsOutputted = 0x01;
- video->currFS->IsReference = 3;
- video->currFS->IsLongTerm = 0;
-
- DecodePOC(video);
- /* find an empty memory from DPB and assigned to currPic */
- DPBInitPic(video, video->PrevRefFrameNum % video->MaxFrameNum);
- RefListInit(video);
- ConcealSlice(decvid, 0, video->PicSizeInMbs); // Conceal
- video->currFS->IsOutputted |= 0x02;
- //conceal frame
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mge_ctrl_eq_5 */
- video->mbNum = 0; // Conceal
- StorePictureInDPB(avcHandle, video);
- video->nal_unit_type = temp;
-
- return AVCDEC_PICTURE_OUTPUT_READY;
- }
- }
- }
-
- if (video->newPic == TRUE)
- {
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
-
- video->newSlice = TRUE;
-
- /* function pointer setting at slice-level */
- // OPTIMIZE
- decvid->residual_block = &residual_block_cavlc;
-
- /* derive picture order count */
- if (video->newPic == TRUE)
- {
- video->numMBs = video->PicSizeInMbs;
-
- if (video->nal_unit_type != AVC_NALTYPE_IDR && video->currSeqParams->gaps_in_frame_num_value_allowed_flag)
- {
- if (video->sliceHdr->frame_num != (video->PrevRefFrameNum + 1) % video->MaxFrameNum)
- {
- status = fill_frame_num_gap(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- video->numMBs = 0;
- return status;
- }
-
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- video->numMBs = 0;
- return status;
- }
-
-
- }
- }
- /* if there's gap in the frame_num, we have to fill in the gap with
- imaginary frames that won't get used for short-term ref. */
- /* see fill_frame_num_gap() in JM */
-
-
- DecodePOC(video);
- /* find an empty memory from DPB and assigned to currPic */
- DPBInitPic(video, video->CurrPicNum);
-
- video->currPic->isReference = TRUE; // FIX
-
- if (video->nal_ref_idc == 0)
- {
- video->currPic->isReference = FALSE;
- video->currFS->IsOutputted |= 0x02; /* The MASK 0x02 means not needed for reference, or returned */
-            /* no need to check for freeing of this buffer */
- }
-
- FMOInit(video);
-
- if (video->currPic->isReference)
- {
- video->PrevRefFrameNum = video->sliceHdr->frame_num;
- }
-
-
- video->prevFrameNum = video->sliceHdr->frame_num;
- }
-
- video->newPic = FALSE;
-
-
- /* Initialize refListIdx for this picture */
- RefListInit(video);
-
- /* Re-order the reference list according to the ref_pic_list_reordering() */
- status = (AVCDec_Status)ReOrderList(video);
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- /* 2.2 Decode Slice. */
- status = (AVCDec_Status)DecodeSlice(decvid);
-
- video->slice_id++; // slice
-
- if (status == AVCDEC_PICTURE_READY)
- {
- /* 3. Check complete picture */
-#ifndef MB_BASED_DEBLOCK
- /* 3.1 Deblock */
- DeblockPicture(video);
-#endif
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mge_ctrl_eq_5 */
-        status = (AVCDec_Status)StorePictureInDPB(avcHandle, video); // CHECK: check the return status
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- if (video->mem_mgr_ctrl_eq_5)
- {
- video->PrevRefFrameNum = 0;
- video->prevFrameNum = 0;
- video->prevPicOrderCntMsb = 0;
- video->prevPicOrderCntLsb = video->TopFieldOrderCnt;
- video->prevFrameNumOffset = 0;
- }
- else
- {
- video->prevPicOrderCntMsb = video->PicOrderCntMsb;
- video->prevPicOrderCntLsb = video->sliceHdr->pic_order_cnt_lsb;
- video->prevFrameNumOffset = video->FrameNumOffset;
- }
-
- return AVCDEC_PICTURE_READY;
- }
- else if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCDecGetOutput() */
-/* Date : 11/3/2003 */
-/* Purpose : Get the next picture according to PicOrderCnt. */
-/* In/out : */
-/* Return : AVCFrameIO structure */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, int *indx, int *release, AVCFrameIO *output)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecPicBuffer *dpb;
- AVCFrameStore *oldestFrame = NULL;
- int i, first = 1;
- int count_frame = 0;
- int index = 0;
- int min_poc = 0;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- dpb = video->decPicBuf;
-
- if (dpb->num_fs == 0)
- {
- return AVCDEC_FAIL;
- }
-
- /* search for the oldest frame_num in dpb */
- /* extension to field decoding, we have to search for every top_field/bottom_field within
- each frame in the dpb. This code only works for frame based.*/
-
- if (video->mem_mgr_ctrl_eq_5 == FALSE)
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if ((dpb->fs[i]->IsOutputted & 0x01) == 0)
- {
- count_frame++;
- if (first)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- first = 0;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- if (dpb->fs[i]->PicOrderCnt < min_poc)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- }
- }
- }
- else
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if ((dpb->fs[i]->IsOutputted & 0x01) == 0 && dpb->fs[i] != video->currFS)
- {
- count_frame++;
- if (first)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- first = 0;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- if (dpb->fs[i]->PicOrderCnt < min_poc)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- }
- }
-
- if (count_frame < 2 && video->nal_unit_type != AVC_NALTYPE_IDR)
- {
- video->mem_mgr_ctrl_eq_5 = FALSE; // FIX
- }
- else if (count_frame < 1 && video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if (dpb->fs[i] == video->currFS && (dpb->fs[i]->IsOutputted & 0x01) == 0)
- {
- oldestFrame = dpb->fs[i];
- index = i;
- break;
- }
- }
- video->mem_mgr_ctrl_eq_5 = FALSE;
- }
- }
-
- if (oldestFrame == NULL)
- {
-
- /* Check for Mem_mgmt_operation_5 based forced output */
- for (i = 0; i < dpb->num_fs; i++)
- {
-            /* looking for one that is not used for reference and has been outputted */
- if (dpb->fs[i]->IsReference == 0 && dpb->fs[i]->IsOutputted == 3)
- {
- break;
- }
- }
- if (i < dpb->num_fs)
- {
- /* there are frames available for decoding */
- return AVCDEC_FAIL; /* no frame to be outputted */
- }
-
-
- /* no free frame available, we have to release one to continue decoding */
- int MinIdx = 0;
- int32 MinFrameNumWrap = 0x7FFFFFFF;
-
- for (i = 0; i < dpb->num_fs; i++)
- {
- if (dpb->fs[i]->IsReference && !dpb->fs[i]->IsLongTerm)
- {
- if (dpb->fs[i]->FrameNumWrap < MinFrameNumWrap)
- {
- MinFrameNumWrap = dpb->fs[i]->FrameNumWrap;
- MinIdx = i;
- }
- }
- }
-        /* mark the frame with the smallest FrameNumWrap as unused for reference */
- dpb->fs[MinIdx]->IsReference = 0;
- dpb->fs[MinIdx]->IsLongTerm = 0;
- dpb->fs[MinIdx]->frame.isReference = FALSE;
- dpb->fs[MinIdx]->frame.isLongTerm = FALSE;
- dpb->fs[MinIdx]->IsOutputted |= 0x02;
-#ifdef PV_MEMORY_POOL
- if (dpb->fs[MinIdx]->IsOutputted == 3)
- {
- avcHandle->CBAVC_FrameUnbind(avcHandle->userData, MinIdx);
- }
-#endif
- return AVCDEC_FAIL;
- }
- /* MASK 0x01 means the frame is outputted (for display). A frame gets freed when it is
- outputted (0x01) and not needed for reference (0x02) */
- oldestFrame->IsOutputted |= 0x01;
-
- if (oldestFrame->IsOutputted == 3)
- {
- *release = 1; /* flag to release the buffer */
- }
- else
- {
- *release = 0;
- }
- /* do not release buffer here, release it after it is sent to the sink node */
-
- output->YCbCr[0] = oldestFrame->frame.Sl;
- output->YCbCr[1] = oldestFrame->frame.Scb;
- output->YCbCr[2] = oldestFrame->frame.Scr;
- output->height = oldestFrame->frame.height;
- output->pitch = oldestFrame->frame.width;
- output->disp_order = oldestFrame->PicOrderCnt;
- output->coding_order = oldestFrame->FrameNum;
- output->id = (uint32) oldestFrame->base_dpb; /* use the pointer as the id */
- *indx = index;
-
-
-
- return AVCDEC_SUCCESS;
-}
-
-
-/* ======================================================================== */
-/* Function : PVAVCDecReset() */
-/* Date : 03/04/2004 */
-/* Purpose : Reset decoder, prepare it for a new IDR frame. */
-/* In/out : */
-/* Return : void */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF void PVAVCDecReset(AVCHandle *avcHandle)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecPicBuffer *dpb;
- int i;
-
- if (decvid == NULL)
- {
- return;
- }
-
- video = decvid->common;
- dpb = video->decPicBuf;
-
- /* reset the DPB */
-
-
- for (i = 0; i < dpb->num_fs; i++)
- {
- dpb->fs[i]->IsLongTerm = 0;
- dpb->fs[i]->IsReference = 0;
- dpb->fs[i]->IsOutputted = 3;
- dpb->fs[i]->frame.isReference = 0;
- dpb->fs[i]->frame.isLongTerm = 0;
- }
-
- video->mem_mgr_ctrl_eq_5 = FALSE;
- video->newPic = TRUE;
- video->newSlice = TRUE;
- video->currPic = NULL;
- video->currFS = NULL;
- video->prevRefPic = NULL;
- video->prevFrameNum = 0;
- video->PrevRefFrameNum = 0;
- video->prevFrameNumOffset = 0;
- video->FrameNumOffset = 0;
- video->mbNum = 0;
- video->numMBs = 0;
-
- return ;
-}
-
-
-/* ======================================================================== */
-/* Function : PVAVCCleanUpDecoder() */
-/* Date : 11/4/2003 */
-/* Purpose : Clean up the decoder, free all memories allocated. */
-/* In/out : */
-/* Return : void */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- void *userData = avcHandle->userData;
- int i;
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "PVAVCCleanUpDecoder", -1, -1);
-
- if (decvid != NULL)
- {
- video = decvid->common;
- if (video != NULL)
- {
- if (video->MbToSliceGroupMap != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->MbToSliceGroupMap);
- }
-
-#ifdef MB_BASED_DEBLOCK
- if (video->intra_pred_top != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top);
- }
- if (video->intra_pred_top_cb != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top_cb);
- }
- if (video->intra_pred_top_cr != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top_cr);
- }
-#endif
- if (video->mblock != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->mblock);
- }
-
- if (video->decPicBuf != NULL)
- {
- CleanUpDPB(avcHandle, video);
- avcHandle->CBAVC_Free(userData, (int)video->decPicBuf);
- }
-
- if (video->sliceHdr != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->sliceHdr);
- }
-
- avcHandle->CBAVC_Free(userData, (int)video); /* last thing to do */
-
- }
-
- for (i = 0; i < 256; i++)
- {
- if (decvid->picParams[i] != NULL)
- {
- if (decvid->picParams[i]->slice_group_id != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[i]->slice_group_id);
- }
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[i]);
- }
- }
- for (i = 0; i < 32; i++)
- {
- if (decvid->seqParams[i] != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->seqParams[i]);
- }
- }
- if (decvid->bitstream != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->bitstream);
- }
-
-
- avcHandle->CBAVC_Free(userData, (int)decvid);
- }
-
-
- return ;
-}
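As a quick sanity check on the EBSPtoRBSP() routine removed above, the snippet below shows the expected in-place conversion on a short buffer containing an emulation prevention byte. It is an illustrative sketch only, not part of the deleted sources; the byte values are made up for the example.

    /* Illustrative sketch only -- not part of the deleted sources. */
    static void ebsp_to_rbsp_example(void)
    {
        uint8 ebsp[] = { 0x67, 0x00, 0x00, 0x03, 0x01, 0x42 };
        int size = sizeof(ebsp);

        EBSPtoRBSP(ebsp, &size);

        /* The 0x03 emulation prevention byte following the two zero bytes is removed:
           the buffer now begins 0x67 0x00 0x00 0x01 0x42 and size == 5. */
    }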
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h b/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h
deleted file mode 100644
index bd1bc59..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains bitstream related functions.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_BITSTREAM_H_
-#define _AVCDEC_BITSTREAM_H_
-
-#include "avcdec_lib.h"
-
-#define WORD_SIZE 32 /* this can vary, default to 32 bit for now */
-
-#ifndef __cplusplus
-
-#define AVC_GETDATA(x,y) userData->AVC_GetData(x,y)
-
-#endif
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-#define BitstreamFlushBits(A,B) {(A)->bitcnt += (B); (A)->incnt -= (B); (A)->curr_word <<= (B);}
-
- AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream);
- /**
- This function populates bitstream structure.
- \param "stream" "Pointer to bitstream structure."
- \param "buffer" "Pointer to the bitstream buffer."
- \param "size" "Size of the buffer."
- \param "nal_size" "Size of the NAL unit."
- \param "resetall" "Flag for reset everything."
- \return "AVCDEC_SUCCESS for success and AVCDEC_FAIL for fail."
- */
- AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size);
-
- /**
-    This function reads the next aligned word and removes the emulation prevention code
- if necessary.
- \param "stream" "Pointer to bitstream structure."
- \return "Next word."
- */
- uint BitstreamNextWord(AVCDecBitstream *stream);
-
- /**
-    This function reads nBits bits from the current position and advances the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \param "code" "Point to the read value."
-    \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_PACKET_LOSS or
- AVCDEC_NO_DATA if callback to get data fails."
- */
- AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code);
-
- /**
- This function shows nBits bits from the current position without advancing the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \param "code" "Point to the read value."
-    \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_NO_DATA if it needs
- to callback to get data."
- */
- AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code);
-
-
- /**
- This function flushes nBits bits from the current position.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
-    \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
-            is greater than the word-size. It will not call back to get
- more data. Users should call BitstreamShowBits to determine
- how much they want to flush."
- */
-
- /**
-    This function reads 1 bit from the current position and advances the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \param "code" "Point to the read value."
-    \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_PACKET_LOSS or
- AVCDEC_NO_DATA if callback to get data fails."
- */
- AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code);
-
- /**
- This function checks whether the current bit position is byte-aligned or not.
- \param "stream" "Pointer to the bitstream structure."
- \return "TRUE if byte-aligned, FALSE otherwise."
- */
- bool byte_aligned(AVCDecBitstream *stream);
- AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream);
- /**
- This function checks whether there are more RBSP data before the trailing bits.
- \param "stream" "Pointer to the bitstream structure."
- \return "TRUE if yes, FALSE otherwise."
- */
- bool more_rbsp_data(AVCDecBitstream *stream);
-
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-#endif /* _AVCDEC_BITSTREAM_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_int.h b/media/libstagefright/codecs/avc/dec/src/avcdec_int.h
deleted file mode 100644
index 878f9b3..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_int.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library
-and necessary type definitions and enumerations.
-Naming convention for variables:
-lower_case_with_under_line is a syntax element defined in subclauses 7.2 and 7.3;
-noUnderLine or NoUnderLine are derived variables defined elsewhere in the draft
- or introduced by this decoder library.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_INT_H_
-#define _AVCDEC_INT_H_
-
-#include "avcint_common.h"
-#include "avcdec_api.h"
-
-
-/**
-Bitstream structure contains bitstream related parameters such as the pointer
-to the buffer, the current byte position and bit position.
-@publishedAll
-*/
-typedef struct tagDecBitstream
-{
- uint8 *bitstreamBuffer; /* pointer to buffer memory */
- int nal_size; /* size of the current NAL unit */
- int data_end_pos; /* bitstreamBuffer size in bytes */
- int read_pos; /* next position to read from bitstreamBuffer */
- uint curr_word; /* byte-swapped (MSB left) current word read from buffer */
-    int bit_left;       /* number of bits left in current_word */
-    uint next_word;     /* in case old data in the previous buffer hasn't been flushed. */
-    int incnt;          /* bits left in the prev_word */
- int incnt_next;
- int bitcnt;
- void *userData;
-} AVCDecBitstream;
-
-/**
-This structure is the main object for AVC decoder library providing access to all
-global variables. It is allocated at PVAVCInitDecoder and freed at PVAVCCleanUpDecoder.
-@publishedAll
-*/
-typedef struct tagDecObject
-{
-
- AVCCommonObj *common;
-
- AVCDecBitstream *bitstream; /* for current NAL */
-
- /* sequence parameter set */
- AVCSeqParamSet *seqParams[32]; /* Array of pointers, get allocated at arrival of new seq_id */
-
- /* picture parameter set */
- AVCPicParamSet *picParams[256]; /* Array of pointers to picture param set structures */
-
- /* For internal operation, scratch memory for MV, prediction, transform, etc.*/
- uint ref_idx_l0[4]; /* [mbPartIdx], te(v) */
- uint ref_idx_l1[4];
-
- /* function pointers */
- AVCDec_Status(*residual_block)(struct tagDecObject*, int, int,
- int *, int *, int *);
- /* Application control data */
- AVCHandle *avcHandle;
- void (*AVC_DebugLog)(AVCLogType type, char *string1, char *string2);
- /*bool*/
- uint debugEnable;
-
-} AVCDecObject;
-
-#endif /* _AVCDEC_INT_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h b/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h
deleted file mode 100644
index fdead05..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h
+++ /dev/null
@@ -1,555 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains declarations of internal functions for AVC decoder library.
-@publishedAll
-*/
-#ifndef _AVCDEC_LIB_H_
-#define _AVCDEC_LIB_H_
-
-#include "avclib_common.h"
-#include "avcdec_int.h"
-
-/*----------- avcdec_api.c -------------*/
-/**
-This function takes out the emulation prevention bytes from the input to create the RBSP.
-The result is written over the input bitstream.
-\param "nal_unit" "(I/O) Pointer to the input buffer."
-\param "size" "(I/O) Pointer to the size of the input/output buffer."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size);
-
-/*------------- pred_intra.c ---------------*/
-/**
-This function is the main entry point to the intra prediction operation on a
-macroblock.
-\param "video" "Pointer to AVCCommonObj."
-*/
-AVCStatus IntraMBPrediction(AVCCommonObj *video);
-
-void SaveNeighborForIntraPred(AVCCommonObj *video, int offset);
-
-AVCStatus Intra_4x4(AVCCommonObj *video, int component, int SubBlock_indx, uint8 *comp);
-void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset);
-void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_16x16_Vertical(AVCCommonObj *video);
-void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch);
-void Intra_16x16_DC(AVCCommonObj *video, int pitch);
-void Intra_16x16_Plane(AVCCommonObj *video, int pitch);
-void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-
-/*------------ pred_inter.c ---------------*/
-/**
-This function is the main entry point to the inter prediction operation for
-a macroblock. For decoding, this function also calls inverse transform and
-compensation.
-\param "video" "Pointer to AVCCommonObj."
-\return "void"
-*/
-void InterMBPrediction(AVCCommonObj *video);
-
-/**
-This function is called for luma motion compensation.
-\param "ref" "Pointer to the origin of a reference luma."
-\param "picwidth" "Width of the picture."
-\param "picheight" "Height of the picture."
-\param "x_pos" "X-coordinate of the predicted block in quarter pel resolution."
-\param "y_pos" "Y-coordinate of the predicted block in quarter pel resolution."
-\param "pred" "Pointer to the output predicted block."
-\param "pred_pitch" "Width of pred."
-\param "blkwidth" "Width of the current partition."
-\param "blkheight" "Height of the current partition."
-\return "void"
-*/
-void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight);
-
-/**
-Functions below are special cases for luma motion compensation.
-LumaFullPelMC is for full pixel motion compensation.
-LumaBorderMC is for interpolation in only one dimension.
-LumaCrossMC is for interpolation in one dimension and half point in the other dimension.
-LumaDiagonalMC is for interpolation in diagonal direction.
-
-\param "ref" "Pointer to the origin of a reference luma."
-\param "picwidth" "Width of the picture."
-\param "picheight" "Height of the picture."
-\param "x_pos" "X-coordinate of the predicted block in full pel resolution."
-\param "y_pos" "Y-coordinate of the predicted block in full pel resolution."
-\param "dx" "Fraction of x_pos in quarter pel."
-\param "dy" "Fraction of y_pos in quarter pel."
-\param "curr" "Pointer to the current partition in the current picture."
-\param "residue" "Pointer to the current partition for the residue block."
-\param "blkwidth" "Width of the current partition."
-\param "blkheight" "Height of the current partition."
-\return "void"
-*/
-void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y_pos,
- uint8 *out, int blkwidth, int blkheight);
-
-void FullPelMC(uint8 *in, int inwidth, uint8 *out, int outpitch,
- int blkwidth, int blkheight);
-
-void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx);
-
-void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx);
-
-void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight);
-
-void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy);
-
-void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight);
-
-void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy);
-
-void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
- uint8 *out, int outpitch,
- int blkwidth, int blkheight);
-
-
-void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight);
-
-void ChromaFullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight) ;
-void ChromaBorderMC(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
-void ChromaDiagonalMC(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
-
-void ChromaFullPelMCOutside(uint8 *ref, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight, int x_inc,
- int y_inc0, int y_inc1, int x_mid, int y_mid);
-void ChromaBorderMCOutside(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight,
- int x_inc, int z_inc, int y_inc0, int y_inc1, int x_mid, int y_mid);
-void ChromaDiagonalMCOutside(uint8 *ref, int picwidth,
- int dx, int dy, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight, int x_inc, int z_inc,
- int y_inc0, int y_inc1, int x_mid, int y_mid);
-
-void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-
-/*----------- slice.c ---------------*/
-/**
-This function performs the main decoding loop for slice data, including
-INTRA/INTER prediction, inverse quantization and transform, and compensation.
-See decode_frame_slice() in JM.
-\param "video" "Pointer to AVCDecObject."
-\return "AVCDEC_SUCCESS for success, AVCDEC_PICTURE_READY for end-of-picture and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeSlice(AVCDecObject *video);
-AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end);
-/**
-This function performs the decoding of one macroblock.
-\param "video" "Pointer to AVCDecObject."
-\param "prevMbSkipped" "A value derived in 7.3.4."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeMB(AVCDecObject *video);
-
-/**
-This function performs macroblock prediction type decoding as in subclause 7.3.5.1.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to the current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/**
-This function performs sub-macroblock prediction type decoding as in subclause 7.3.5.2.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to the current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/**
-This function interprets the mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_I_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_P_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_B_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_SI_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the sub_mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_P_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "sub_mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type);
-
-/**
-This function interprets the sub_mb_type and sets the necessary information
-in the macroblock structure when the slice type is AVC_B_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "sub_mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type);
-
-/**
-This function decodes the Intra4x4 prediction mode from neighboring information
-and from the decoded syntax.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/*----------- vlc.c -------------------*/
-/**
-This function reads and decodes Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "codeNum" "Pointer to the value of the codeNum."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum);
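For reference, ue(v)/se(v) Exp-Golomb codes have a simple closed form. The sketch below is a standalone illustration of that mapping and is not part of this header; read_bit() is a hypothetical one-bit reader standing in for the bitstream functions used by the decoder.

    /* Illustrative sketch of unsigned Exp-Golomb (ue(v)) decoding.
       read_bit() is a hypothetical primitive returning the next RBSP bit. */
    static unsigned int decode_ue(int (*read_bit)(void *ctx), void *ctx)
    {
        unsigned int leading_zeros = 0, info = 0, i;
        while (read_bit(ctx) == 0)
            leading_zeros++;
        for (i = 0; i < leading_zeros; i++)
            info = (info << 1) | (unsigned int)read_bit(ctx);
        /* codeNum = 2^leadingZeros - 1 + info; 2*leadingZeros + 1 bits total */
        return (1u << leading_zeros) - 1 + info;
    }

    /* se(v): codeNum k maps to (-1)^(k+1) * ceil(k/2), i.e. 0, 1, -1, 2, -2, ... */
    static int ue_to_se(unsigned int k)
    {
        return (k & 1) ? (int)((k + 1) >> 1) : -(int)(k >> 1);
    }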
-
-/**
-This function reads and decodes signed Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value);
-
-/**
-This function reads and decodes signed Exp-Golomb codes for
-a 32-bit codeword.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value);
-
-/**
-This function reads and decodes truncated Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\param "range" "Range of the value as input to determine the algorithm."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range);
-
-/**
-This function parses an Exp-Golomb code from the bitstream.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "leadingZeros" "Pointer to the number of leading zeros."
-\param "infobits" "Pointer to the value after leading zeros and the first one.
- The total number of bits read is 2*leadingZeros + 1."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status GetEGBitstring(AVCDecBitstream *bitstream, int *leadingZeros, int *infobits);
-
-/**
-This function parses an Exp-Golomb code from the bitstream for 32-bit codewords.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "leadingZeros" "Pointer to the number of leading zeros."
-\param "infobits" "Pointer to the value after leading zeros and the first one.
- The total number of bits read is 2*leadingZeros + 1."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits);
-
-/**
-This function performs CAVLC decoding of the CBP (coded block pattern) of a macroblock
-by calling ue_v() and then mapping the codeNum to the corresponding CBP value.
-\param "currMB" "Pointer to the current AVCMacroblock structure."
-\param "stream" "Pointer to the AVCDecBitstream."
-\return "void"
-*/
-AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream);
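DecodeCBP is essentially a ue_v() read followed by the Table 9-4 mapping. A minimal sketch of the shape of that body is shown below; the table names and the is_intra4x4 flag are placeholders for illustration, not identifiers from this decoder.

    /* Sketch only: read codeNum, range-check it, then map it to a CBP value.
       cbp_table_intra[] / cbp_table_inter[] are hypothetical 48-entry tables
       standing in for the Table 9-4 mapping. */
    uint codeNum, cbp;
    if (ue_v(stream, &codeNum) != AVCDEC_SUCCESS || codeNum > 47)
        return AVCDEC_FAIL;
    cbp = is_intra4x4 ? cbp_table_intra[codeNum] : cbp_table_inter[codeNum];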
-
-/**
-This function decodes the syntax for trailing ones and the total coefficient count.
-Subject to optimization.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "TrailingOnes" "Pointer to the trailing one variable output."
-\param "TotalCoeff" "Pointer to the total coefficient variable output."
-\param "nC" "Context for number of nonzero coefficient (prediction context)."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC);
-
-/**
-This function decodes the syntax for trailing ones and the total coefficient count for
-the chroma DC block. Subject to optimization.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "TrailingOnes" "Pointer to the trailing one variable output."
-\param "TotalCoeff" "Pointer to the total coefficient variable output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff);
-
-/**
-This function decodes a VLC table with two outputs.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "lentab" "Table for code length."
-\param "codtab" "Table for code value."
-\param "tabwidth" "Width of the table or alphabet size of the first output."
-\param "tabheight" "Height of the table or alphabet size of the second output."
-\param "code1" "Pointer to the first output."
-\param "code2" "Pointer to the second output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status code_from_bitstream_2d(AVCDecBitstream *stream, int *lentab, int *codtab, int tabwidth,
- int tabheight, int *code1, int *code2);
-
-/**
-This function decodes the level_prefix VLC value as in Table 9-6.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code);
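level_prefix is simply the count of zero bits before the first one bit (Table 9-6). A self-contained sketch, again using a hypothetical read_bit() rather than the decoder's bitstream API:

    /* Illustrative sketch of level_prefix decoding: count zero bits up to and
       including the terminating '1'. */
    static unsigned int decode_level_prefix(int (*read_bit)(void *ctx), void *ctx)
    {
        unsigned int leading_zero_bits = 0;
        while (read_bit(ctx) == 0)
            leading_zero_bits++;
        return leading_zero_bits;   /* level_prefix == leadingZeroBits */
    }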
-
-/**
-This function decodes total_zeros VLC syntax as in Tables 9-7 and 9-8.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "TotalCoeff" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff);
-
-/**
-This function decodes total_zeros VLC syntax for chroma DC as in Table 9-9.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "TotalCoeff" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff);
-
-/**
-This function decodes run_before VLC syntax as in Table 9-10.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "zeroLeft" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zeroLeft);
-
-/*----------- header.c -------------------*/
-/**
-This function parses vui_parameters.
-\param "decvid" "Pointer to AVCDecObject."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCSeqParamSet *currSPS);
-AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream, uint payloadType, uint payloadSize);
-
-AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-
-/**
-This function parses hrd_parameters.
-\param "decvid" "Pointer to AVCDecObject."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCHRDParams *HRDParam);
-
-/**
-This function decodes the sequence parameter set syntax and fills in the AVCSeqParamSet
-structure.
-\param "decvid" "Pointer to AVCDecObject."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-/**
-This function decodes the picture parameter set syntax and fills in the AVCPicParamSet
-structure.
-\param "decvid" "Pointer to AVCDecObject."
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
-AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-/**
-This function decodes the slice header and calls related functions for
-reference picture list reordering, prediction weight tables, and decoded reference picture marking.
-See FirstPartOfSliceHeader() and RestOfSliceHeader() in JM.
-\param "decvid" "Pointer to AVCDecObject."
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
-
-/**
-This function performs the necessary operations to create dummy frames when
-there is a gap in frame_num.
-\param "video" "Pointer to AVCCommonObj."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video);
-
-/**
-This function decodes the ref_pic_list_reordering syntax and fills in the AVCSliceHeader
-structure.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\param "sliceHdr" "Pointer to AVCSliceHdr."
-\param "slice_type" "Value of slice_type - 5 if greater than 5."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr, int slice_type);
-
-/**
-This function decodes the dec_ref_pic_marking syntax and fills in the AVCSliceHeader
-structure.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\param "sliceHdr" "Pointer to AVCSliceHdr."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr);
-
-/**
-This function performs the POC-related operations prior to decoding a picture.
-\param "video" "Pointer to AVCCommonObj."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-See also PostPOC() for initialization of some variables.
-*/
-AVCDec_Status DecodePOC(AVCCommonObj *video);
-
-
-
-/*------------ residual.c ------------------*/
-/**
-This function decodes the intra PCM data and fills it in at the corresponding location
-in the current picture.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-*/
-AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream);
-
-/**
-This function performs residual syntax decoding as well as dequantization and inverse
-transformation of the decoded coefficients. See subclause 7.3.5.3.
-\param "video" "Pointer to AVCDecObject."
-\param "currMB" "Pointer to current macroblock."
-*/
-AVCDec_Status residual(AVCDecObject *video, AVCMacroblock *currMB);
-
-/**
-This function performs CAVLC syntax decoding to get the run and level information of the coefficients.
-\param "video" "Pointer to AVCDecObject."
-\param "type" "One of AVCResidualType for a particular 4x4 block."
-\param "bx" "Horizontal block index."
-\param "by" "Vertical block index."
-\param "level" "Pointer to array of level for output."
-\param "run" "Pointer to array of run for output."
-\param "numcoeff" "Pointer to the total number of nonzero coefficients."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status residual_block_cavlc(AVCDecObject *video, int nC, int maxNumCoeff,
- int *level, int *run, int *numcoeff);
-
-#endif /* _AVCDEC_LIB_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/header.cpp b/media/libstagefright/codecs/avc/dec/src/header.cpp
deleted file mode 100644
index 8681e2b..0000000
--- a/media/libstagefright/codecs/avc/dec/src/header.cpp
+++ /dev/null
@@ -1,1391 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-#include "avcdec_api.h"
-
-/** see subclause 7.4.2.1 */
-AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCDec_Status status = AVCDEC_SUCCESS;
- AVCSeqParamSet *seqParam;
- uint temp;
- int i;
- uint profile_idc, constrained_set0_flag, constrained_set1_flag, constrained_set2_flag;
- uint level_idc, seq_parameter_set_id;
- void *userData = decvid->avcHandle->userData;
- AVCHandle *avcHandle = decvid->avcHandle;
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "DecodeSPS", -1, -1);
-
- BitstreamReadBits(stream, 8, &profile_idc);
- BitstreamRead1Bit(stream, &constrained_set0_flag);
-// if (profile_idc != 66 && constrained_set0_flag != 1)
-// {
-// return AVCDEC_FAIL;
-// }
- BitstreamRead1Bit(stream, &constrained_set1_flag);
- BitstreamRead1Bit(stream, &constrained_set2_flag);
- BitstreamReadBits(stream, 5, &temp);
- BitstreamReadBits(stream, 8, &level_idc);
- if (level_idc > 51)
- {
- return AVCDEC_FAIL;
- }
- if (mapLev2Idx[level_idc] == 255)
- {
- return AVCDEC_FAIL;
- }
- ue_v(stream, &seq_parameter_set_id);
-
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
- /* Allocate sequence param set for seqParams[seq_parameter_set_id]. */
- if (decvid->seqParams[seq_parameter_set_id] == NULL) /* allocate seqParams[id] */
- {
- decvid->seqParams[seq_parameter_set_id] =
- (AVCSeqParamSet*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSeqParamSet), DEFAULT_ATTR);
-
- if (decvid->seqParams[seq_parameter_set_id] == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "done alloc seqParams", -1, -1);
-
- seqParam = decvid->seqParams[seq_parameter_set_id];
-
- seqParam->profile_idc = profile_idc;
- seqParam->constrained_set0_flag = constrained_set0_flag;
- seqParam->constrained_set1_flag = constrained_set1_flag;
- seqParam->constrained_set2_flag = constrained_set2_flag;
- seqParam->level_idc = level_idc;
- seqParam->seq_parameter_set_id = seq_parameter_set_id;
-
- /* continue decoding SPS */
- ue_v(stream, &(seqParam->log2_max_frame_num_minus4));
-
- if (seqParam->log2_max_frame_num_minus4 > 12)
- {
- return AVCDEC_FAIL;
- }
-
- ue_v(stream, &(seqParam->pic_order_cnt_type));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 1", seqParam->log2_max_frame_num_minus4, seqParam->pic_order_cnt_type);
-
- if (seqParam->pic_order_cnt_type == 0)
- {
- ue_v(stream, &(seqParam->log2_max_pic_order_cnt_lsb_minus4));
- }
- else if (seqParam->pic_order_cnt_type == 1)
- { // MC_CHECK
- BitstreamRead1Bit(stream, (uint*)&(seqParam->delta_pic_order_always_zero_flag));
- se_v32bit(stream, &(seqParam->offset_for_non_ref_pic));
- se_v32bit(stream, &(seqParam->offset_for_top_to_bottom_field));
- ue_v(stream, &(seqParam->num_ref_frames_in_pic_order_cnt_cycle));
-
- for (i = 0; i < (int)(seqParam->num_ref_frames_in_pic_order_cnt_cycle); i++)
- {
- se_v32bit(stream, &(seqParam->offset_for_ref_frame[i]));
- }
- }
-
- ue_v(stream, &(seqParam->num_ref_frames));
-
- if (seqParam->num_ref_frames > 16)
- {
- return AVCDEC_FAIL;
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 2", seqParam->num_ref_frames, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->gaps_in_frame_num_value_allowed_flag));
- ue_v(stream, &(seqParam->pic_width_in_mbs_minus1));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "picwidth", seqParam->pic_width_in_mbs_minus1, -1);
-
- ue_v(stream, &(seqParam->pic_height_in_map_units_minus1));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "picwidth", seqParam->pic_height_in_map_units_minus1, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->frame_mbs_only_flag));
-
- seqParam->mb_adaptive_frame_field_flag = 0; /* default value */
- if (!seqParam->frame_mbs_only_flag)
- {
- BitstreamRead1Bit(stream, (uint*)&(seqParam->mb_adaptive_frame_field_flag));
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 3", seqParam->frame_mbs_only_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->direct_8x8_inference_flag));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 4", seqParam->direct_8x8_inference_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->frame_cropping_flag));
- seqParam->frame_crop_left_offset = 0; /* default value */
- seqParam->frame_crop_right_offset = 0;/* default value */
- seqParam->frame_crop_top_offset = 0;/* default value */
- seqParam->frame_crop_bottom_offset = 0;/* default value */
- if (seqParam->frame_cropping_flag)
- {
- ue_v(stream, &(seqParam->frame_crop_left_offset));
- ue_v(stream, &(seqParam->frame_crop_right_offset));
- ue_v(stream, &(seqParam->frame_crop_top_offset));
- ue_v(stream, &(seqParam->frame_crop_bottom_offset));
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 5", seqParam->frame_cropping_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->vui_parameters_present_flag));
- if (seqParam->vui_parameters_present_flag)
- {
- status = vui_parameters(decvid, stream, seqParam);
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
- }
-
- return status;
-}
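For orientation, the SPS fields parsed above fully determine the luma picture size. The sketch below shows that arithmetic under the assumptions this decoder works with (4:2:0 chroma, frame_mbs_only_flag equal to 1); it assumes the codec's AVCSeqParamSet type is in scope and is illustrative only.

    /* Illustrative arithmetic only (4:2:0 with frame_mbs_only_flag == 1 assumed). */
    static void sps_luma_dimensions(const AVCSeqParamSet *sps,
                                    int *luma_width, int *luma_height)
    {
        *luma_width  = (sps->pic_width_in_mbs_minus1 + 1) * 16;
        *luma_height = (sps->pic_height_in_map_units_minus1 + 1) * 16;
        if (sps->frame_cropping_flag)
        {
            /* crop offsets are in units of 2 luma samples under the assumptions above */
            *luma_width  -= 2 * (sps->frame_crop_left_offset + sps->frame_crop_right_offset);
            *luma_height -= 2 * (sps->frame_crop_top_offset + sps->frame_crop_bottom_offset);
        }
    }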
-
-
-AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCSeqParamSet *currSPS)
-{
- uint temp;
- uint temp32;
-    uint aspect_ratio_idc, overscan_appropriate_flag, video_format, video_full_range_flag;
- /* aspect_ratio_info_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- BitstreamReadBits(stream, 8, &aspect_ratio_idc);
- if (aspect_ratio_idc == 255)
- {
- /* sar_width */
- BitstreamReadBits(stream, 16, &temp);
- /* sar_height */
- BitstreamReadBits(stream, 16, &temp);
- }
- }
- /* overscan_info_present */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
-        BitstreamRead1Bit(stream, &overscan_appropriate_flag);
- }
- /* video_signal_type_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- BitstreamReadBits(stream, 3, &video_format);
- BitstreamRead1Bit(stream, &video_full_range_flag);
- /* colour_description_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* colour_primaries */
- BitstreamReadBits(stream, 8, &temp);
- /* transfer_characteristics */
- BitstreamReadBits(stream, 8, &temp);
- /* matrix coefficients */
- BitstreamReadBits(stream, 8, &temp);
- }
- }
- /* chroma_loc_info_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* chroma_sample_loc_type_top_field */
- ue_v(stream, &temp);
- /* chroma_sample_loc_type_bottom_field */
- ue_v(stream, &temp);
- }
-
- /* timing_info_present_flag*/
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* num_unit_in_tick*/
- BitstreamReadBits(stream, 32, &temp32);
- /* time_scale */
- BitstreamReadBits(stream, 32, &temp32);
- /* fixed_frame_rate_flag */
- BitstreamRead1Bit(stream, &temp);
- }
-
- /* nal_hrd_parameters_present_flag */
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.nal_hrd_parameters_present_flag = temp;
- if (temp)
- {
- hrd_parameters(decvid, stream, &(currSPS->vui_parameters.nal_hrd_parameters));
- }
- /* vcl_hrd_parameters_present_flag*/
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.vcl_hrd_parameters_present_flag = temp;
- if (temp)
- {
- hrd_parameters(decvid, stream, &(currSPS->vui_parameters.vcl_hrd_parameters));
- }
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag || currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- /* low_delay_hrd_flag */
- BitstreamRead1Bit(stream, &temp);
- }
- /* pic_struct_present_flag */
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.pic_struct_present_flag = temp;
- /* bitstream_restriction_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* motion_vectors_over_pic_boundaries_flag */
- BitstreamRead1Bit(stream, &temp);
- /* max_bytes_per_pic_denom */
- ue_v(stream, &temp);
- /* max_bits_per_mb_denom */
- ue_v(stream, &temp);
- /* log2_max_mv_length_horizontal */
- ue_v(stream, &temp);
- /* log2_max_mv_length_vertical */
- ue_v(stream, &temp);
- /* num_reorder_frames */
- ue_v(stream, &temp);
- /* max_dec_frame_buffering */
- ue_v(stream, &temp);
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCHRDParams *HRDParam)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp;
- uint cpb_cnt_minus1;
- uint i;
- ue_v(stream, &cpb_cnt_minus1);
- HRDParam->cpb_cnt_minus1 = cpb_cnt_minus1;
- /* bit_rate_scale */
- BitstreamReadBits(stream, 4, &temp);
- /* cpb_size_scale */
- BitstreamReadBits(stream, 4, &temp);
- for (i = 0; i <= cpb_cnt_minus1; i++)
- {
- /* bit_rate_value_minus1[i] */
- ue_v(stream, &temp);
- /* cpb_size_value_minus1[i] */
- ue_v(stream, &temp);
- /* cbr_flag[i] */
- ue_v(stream, &temp);
- }
- /* initial_cpb_removal_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- /* cpb_removal_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->cpb_removal_delay_length_minus1 = temp;
- /* dpb_output_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->dpb_output_delay_length_minus1 = temp;
- /* time_offset_length */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->time_offset_length = temp;
- return AVCDEC_SUCCESS;
-}
-
-
-/** see subclause 7.4.2.2 */
-AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCPicParamSet *picParam;
- AVCDec_Status status;
- int i, iGroup, numBits;
- int PicWidthInMbs, PicHeightInMapUnits, PicSizeInMapUnits;
- uint pic_parameter_set_id, seq_parameter_set_id;
- void *userData = decvid->avcHandle->userData;
- AVCHandle *avcHandle = decvid->avcHandle;
-
- ue_v(stream, &pic_parameter_set_id);
- if (pic_parameter_set_id > 255)
- {
- return AVCDEC_FAIL;
- }
-
- ue_v(stream, &seq_parameter_set_id);
-
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
- /* 2.1 if picParams[pic_param_set_id] is NULL, allocate it. */
- if (decvid->picParams[pic_parameter_set_id] == NULL)
- {
- decvid->picParams[pic_parameter_set_id] =
- (AVCPicParamSet*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCPicParamSet), DEFAULT_ATTR);
- if (decvid->picParams[pic_parameter_set_id] == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid->picParams[pic_parameter_set_id]->slice_group_id = NULL;
- }
-
- video->currPicParams = picParam = decvid->picParams[pic_parameter_set_id];
- picParam->seq_parameter_set_id = seq_parameter_set_id;
- picParam->pic_parameter_set_id = pic_parameter_set_id;
-
- BitstreamRead1Bit(stream, (uint*)&(picParam->entropy_coding_mode_flag));
- if (picParam->entropy_coding_mode_flag)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
- BitstreamRead1Bit(stream, (uint*)&(picParam->pic_order_present_flag));
- ue_v(stream, &(picParam->num_slice_groups_minus1));
-
- if (picParam->num_slice_groups_minus1 > MAX_NUM_SLICE_GROUP - 1)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
- picParam->slice_group_change_rate_minus1 = 0; /* default value */
- if (picParam->num_slice_groups_minus1 > 0)
- {
- ue_v(stream, &(picParam->slice_group_map_type));
- if (picParam->slice_group_map_type == 0)
- {
- for (iGroup = 0; iGroup <= (int)picParam->num_slice_groups_minus1; iGroup++)
- {
- ue_v(stream, &(picParam->run_length_minus1[iGroup]));
- }
- }
- else if (picParam->slice_group_map_type == 2)
- { // MC_CHECK <= or <
- for (iGroup = 0; iGroup < (int)picParam->num_slice_groups_minus1; iGroup++)
- {
- ue_v(stream, &(picParam->top_left[iGroup]));
- ue_v(stream, &(picParam->bottom_right[iGroup]));
- }
- }
- else if (picParam->slice_group_map_type == 3 ||
- picParam->slice_group_map_type == 4 ||
- picParam->slice_group_map_type == 5)
- {
- BitstreamRead1Bit(stream, (uint*)&(picParam->slice_group_change_direction_flag));
- ue_v(stream, &(picParam->slice_group_change_rate_minus1));
- }
- else if (picParam->slice_group_map_type == 6)
- {
- ue_v(stream, &(picParam->pic_size_in_map_units_minus1));
-
- numBits = 0;/* ceil(log2(num_slice_groups_minus1+1)) bits */
- i = picParam->num_slice_groups_minus1;
- while (i > 0)
- {
- numBits++;
- i >>= 1;
- }
-
- i = picParam->seq_parameter_set_id;
- if (decvid->seqParams[i] == NULL)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
-
- PicWidthInMbs = decvid->seqParams[i]->pic_width_in_mbs_minus1 + 1;
- PicHeightInMapUnits = decvid->seqParams[i]->pic_height_in_map_units_minus1 + 1 ;
- PicSizeInMapUnits = PicWidthInMbs * PicHeightInMapUnits ;
-
- /* information has to be consistent with the seq_param */
- if ((int)picParam->pic_size_in_map_units_minus1 != PicSizeInMapUnits - 1)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
- if (picParam->slice_group_id)
- {
- avcHandle->CBAVC_Free(userData, (int)picParam->slice_group_id);
- }
- picParam->slice_group_id = (uint*)avcHandle->CBAVC_Malloc(userData, sizeof(uint) * PicSizeInMapUnits, DEFAULT_ATTR);
- if (picParam->slice_group_id == NULL)
- {
- status = AVCDEC_MEMORY_FAIL;
- goto clean_up;
- }
-
- for (i = 0; i < PicSizeInMapUnits; i++)
- {
- BitstreamReadBits(stream, numBits, &(picParam->slice_group_id[i]));
- }
- }
-
- }
-
- ue_v(stream, &(picParam->num_ref_idx_l0_active_minus1));
- if (picParam->num_ref_idx_l0_active_minus1 > 31)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- ue_v(stream, &(picParam->num_ref_idx_l1_active_minus1));
- if (picParam->num_ref_idx_l1_active_minus1 > 31)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- BitstreamRead1Bit(stream, (uint*)&(picParam->weighted_pred_flag));
- BitstreamReadBits(stream, 2, &(picParam->weighted_bipred_idc));
- if (picParam->weighted_bipred_idc > 2)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->pic_init_qp_minus26));
- if (picParam->pic_init_qp_minus26 < -26 || picParam->pic_init_qp_minus26 > 25)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->pic_init_qs_minus26));
- if (picParam->pic_init_qs_minus26 < -26 || picParam->pic_init_qs_minus26 > 25)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->chroma_qp_index_offset));
- if (picParam->chroma_qp_index_offset < -12 || picParam->chroma_qp_index_offset > 12)
- {
-        status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- BitstreamReadBits(stream, 3, &pic_parameter_set_id);
- picParam->deblocking_filter_control_present_flag = pic_parameter_set_id >> 2;
- picParam->constrained_intra_pred_flag = (pic_parameter_set_id >> 1) & 1;
- picParam->redundant_pic_cnt_present_flag = pic_parameter_set_id & 1;
-
- return AVCDEC_SUCCESS;
-clean_up:
- if (decvid->picParams[pic_parameter_set_id])
- {
- if (picParam->slice_group_id)
- {
- avcHandle->CBAVC_Free(userData, (int)picParam->slice_group_id);
- }
- decvid->picParams[pic_parameter_set_id]->slice_group_id = NULL;
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[pic_parameter_set_id]);
- decvid->picParams[pic_parameter_set_id] = NULL;
- return status;
- }
- return AVCDEC_SUCCESS;
-}
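A note on the slice_group_map_type == 6 branch above: the small while loop computes the bit length of num_slice_groups_minus1, which equals ceil(log2(num_slice_groups_minus1 + 1)), i.e. the number of bits used to code each slice_group_id entry. A few worked values:

    /* Worked examples of the bit-length loop used for slice_group_id:
         num_slice_groups_minus1 = 1  ->  numBits = 1   (ceil(log2(2)) = 1)
         num_slice_groups_minus1 = 5  ->  numBits = 3   (ceil(log2(6)) = 3)
         num_slice_groups_minus1 = 7  ->  numBits = 3   (ceil(log2(8)) = 3) */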
-
-
-/* FirstPartOfSliceHeader();
- RestOfSliceHeader() */
-/** see subclause 7.4.3 */
-AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- AVCPicParamSet *currPPS;
- AVCSeqParamSet *currSPS;
- AVCDec_Status status;
- uint idr_pic_id;
- int slice_type, temp, i;
-
- ue_v(stream, &(sliceHdr->first_mb_in_slice));
- ue_v(stream, (uint*)&slice_type);
-
- if (sliceHdr->first_mb_in_slice != 0)
- {
- if ((int)sliceHdr->slice_type >= 5 && slice_type != (int)sliceHdr->slice_type - 5)
- {
- return AVCDEC_FAIL; /* slice type doesn't follow the first slice in the picture */
- }
- }
- sliceHdr->slice_type = (AVCSliceType) slice_type;
- if (slice_type > 4)
- {
- slice_type -= 5;
- }
-
- if (slice_type == 1 || slice_type > 2)
- {
- return AVCDEC_FAIL;
- }
-
- video->slice_type = (AVCSliceType) slice_type;
-
- ue_v(stream, &(sliceHdr->pic_parameter_set_id));
- /* end FirstPartSliceHeader() */
- /* begin RestOfSliceHeader() */
- /* after getting pic_parameter_set_id, we have to load corresponding SPS and PPS */
- if (sliceHdr->pic_parameter_set_id > 255)
- {
- return AVCDEC_FAIL;
- }
-
- if (decvid->picParams[sliceHdr->pic_parameter_set_id] == NULL)
- return AVCDEC_FAIL; /* PPS doesn't exist */
-
- currPPS = video->currPicParams = decvid->picParams[sliceHdr->pic_parameter_set_id];
-
- if (decvid->seqParams[currPPS->seq_parameter_set_id] == NULL)
- return AVCDEC_FAIL; /* SPS doesn't exist */
-
- currSPS = video->currSeqParams = decvid->seqParams[currPPS->seq_parameter_set_id];
-
- if (currPPS->seq_parameter_set_id != video->seq_parameter_set_id)
- {
- video->seq_parameter_set_id = currPPS->seq_parameter_set_id;
- status = (AVCDec_Status)AVCConfigureSequence(decvid->avcHandle, video, false);
- if (status != AVCDEC_SUCCESS)
- return status;
- video->level_idc = currSPS->level_idc;
- }
-
- /* derived variables from SPS */
- video->MaxFrameNum = 1 << (currSPS->log2_max_frame_num_minus4 + 4);
- // MC_OPTIMIZE
- video->PicWidthInMbs = currSPS->pic_width_in_mbs_minus1 + 1;
- video->PicWidthInSamplesL = video->PicWidthInMbs * 16 ;
- video->PicWidthInSamplesC = video->PicWidthInMbs * 8 ;
- video->PicHeightInMapUnits = currSPS->pic_height_in_map_units_minus1 + 1 ;
- video->PicSizeInMapUnits = video->PicWidthInMbs * video->PicHeightInMapUnits ;
- video->FrameHeightInMbs = (2 - currSPS->frame_mbs_only_flag) * video->PicHeightInMapUnits ;
-
- /* derived from PPS */
- video->SliceGroupChangeRate = currPPS->slice_group_change_rate_minus1 + 1;
-
- /* then we can continue decoding slice header */
-
- BitstreamReadBits(stream, currSPS->log2_max_frame_num_minus4 + 4, &(sliceHdr->frame_num));
-
- if (video->currFS == NULL && sliceHdr->frame_num != 0)
- {
- video->prevFrameNum = video->PrevRefFrameNum = sliceHdr->frame_num - 1;
- }
-
- if (!currSPS->frame_mbs_only_flag)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->field_pic_flag));
- if (sliceHdr->field_pic_flag)
- {
- return AVCDEC_FAIL;
- }
- }
-
- /* derived variables from slice header*/
- video->PicHeightInMbs = video->FrameHeightInMbs;
- video->PicHeightInSamplesL = video->PicHeightInMbs * 16;
- video->PicHeightInSamplesC = video->PicHeightInMbs * 8;
- video->PicSizeInMbs = video->PicWidthInMbs * video->PicHeightInMbs;
-
- if (sliceHdr->first_mb_in_slice >= video->PicSizeInMbs)
- {
- return AVCDEC_FAIL;
- }
- video->MaxPicNum = video->MaxFrameNum;
- video->CurrPicNum = sliceHdr->frame_num;
-
-
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- if (sliceHdr->frame_num != 0)
- {
- return AVCDEC_FAIL;
- }
- ue_v(stream, &idr_pic_id);
- }
-
- sliceHdr->delta_pic_order_cnt_bottom = 0; /* default value */
- sliceHdr->delta_pic_order_cnt[0] = 0; /* default value */
- sliceHdr->delta_pic_order_cnt[1] = 0; /* default value */
- if (currSPS->pic_order_cnt_type == 0)
- {
- BitstreamReadBits(stream, currSPS->log2_max_pic_order_cnt_lsb_minus4 + 4,
- &(sliceHdr->pic_order_cnt_lsb));
- video->MaxPicOrderCntLsb = 1 << (currSPS->log2_max_pic_order_cnt_lsb_minus4 + 4);
- if (sliceHdr->pic_order_cnt_lsb > video->MaxPicOrderCntLsb - 1)
- return AVCDEC_FAIL; /* out of range */
-
- if (currPPS->pic_order_present_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt_bottom));
- }
- }
- if (currSPS->pic_order_cnt_type == 1 && !currSPS->delta_pic_order_always_zero_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt[0]));
- if (currPPS->pic_order_present_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt[1]));
- }
- }
-
- sliceHdr->redundant_pic_cnt = 0; /* default value */
- if (currPPS->redundant_pic_cnt_present_flag)
- {
- // MC_CHECK
- ue_v(stream, &(sliceHdr->redundant_pic_cnt));
- if (sliceHdr->redundant_pic_cnt > 127) /* out of range */
- return AVCDEC_FAIL;
-
- if (sliceHdr->redundant_pic_cnt > 0) /* redundant picture */
- return AVCDEC_FAIL; /* not supported */
- }
- sliceHdr->num_ref_idx_l0_active_minus1 = currPPS->num_ref_idx_l0_active_minus1;
- sliceHdr->num_ref_idx_l1_active_minus1 = currPPS->num_ref_idx_l1_active_minus1;
-
- if (slice_type == AVC_P_SLICE)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->num_ref_idx_active_override_flag));
- if (sliceHdr->num_ref_idx_active_override_flag)
- {
- ue_v(stream, &(sliceHdr->num_ref_idx_l0_active_minus1));
- }
- else /* the following condition is not allowed if the flag is zero */
- {
- if ((slice_type == AVC_P_SLICE) && currPPS->num_ref_idx_l0_active_minus1 > 15)
- {
- return AVCDEC_FAIL; /* not allowed */
- }
- }
- }
-
-
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 15 ||
- sliceHdr->num_ref_idx_l1_active_minus1 > 15)
- {
- return AVCDEC_FAIL; /* not allowed */
- }
- /* if MbaffFrameFlag =1,
- max value of index is num_ref_idx_l0_active_minus1 for frame MBs and
- 2*sliceHdr->num_ref_idx_l0_active_minus1 + 1 for field MBs */
-
- /* ref_pic_list_reordering() */
- status = ref_pic_list_reordering(video, stream, sliceHdr, slice_type);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
-
- if (video->nal_ref_idc != 0)
- {
- dec_ref_pic_marking(video, stream, sliceHdr);
- }
- se_v(stream, &(sliceHdr->slice_qp_delta));
-
- video->QPy = 26 + currPPS->pic_init_qp_minus26 + sliceHdr->slice_qp_delta;
- if (video->QPy > 51 || video->QPy < 0)
- {
- video->QPy = AVC_CLIP3(0, 51, video->QPy);
-// return AVCDEC_FAIL;
- }
- video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
-
- video->QPy_div_6 = (video->QPy * 43) >> 8;
- video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
-
- video->QPc_div_6 = (video->QPc * 43) >> 8;
- video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
-
- sliceHdr->slice_alpha_c0_offset_div2 = 0;
- sliceHdr->slice_beta_offset_div_2 = 0;
- sliceHdr->disable_deblocking_filter_idc = 0;
- video->FilterOffsetA = video->FilterOffsetB = 0;
-
- if (currPPS->deblocking_filter_control_present_flag)
- {
- ue_v(stream, &(sliceHdr->disable_deblocking_filter_idc));
- if (sliceHdr->disable_deblocking_filter_idc > 2)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- if (sliceHdr->disable_deblocking_filter_idc != 1)
- {
- se_v(stream, &(sliceHdr->slice_alpha_c0_offset_div2));
- if (sliceHdr->slice_alpha_c0_offset_div2 < -6 ||
- sliceHdr->slice_alpha_c0_offset_div2 > 6)
- {
- return AVCDEC_FAIL;
- }
- video->FilterOffsetA = sliceHdr->slice_alpha_c0_offset_div2 << 1;
-
- se_v(stream, &(sliceHdr->slice_beta_offset_div_2));
- if (sliceHdr->slice_beta_offset_div_2 < -6 ||
- sliceHdr->slice_beta_offset_div_2 > 6)
- {
- return AVCDEC_FAIL;
- }
- video->FilterOffsetB = sliceHdr->slice_beta_offset_div_2 << 1;
- }
- }
-
- if (currPPS->num_slice_groups_minus1 > 0 && currPPS->slice_group_map_type >= 3
- && currPPS->slice_group_map_type <= 5)
- {
- /* Ceil(Log2(PicSizeInMapUnits/(float)SliceGroupChangeRate + 1)) */
- temp = video->PicSizeInMapUnits / video->SliceGroupChangeRate;
- if (video->PicSizeInMapUnits % video->SliceGroupChangeRate)
- {
- temp++;
- }
- i = 0;
- temp++;
- while (temp)
- {
- temp >>= 1;
- i++;
- }
-
- BitstreamReadBits(stream, i, &(sliceHdr->slice_group_change_cycle));
- video->MapUnitsInSliceGroup0 =
- AVC_MIN(sliceHdr->slice_group_change_cycle * video->SliceGroupChangeRate, video->PicSizeInMapUnits);
- }
-
- return AVCDEC_SUCCESS;
-}
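One detail worth calling out in the QP derivation above: (QPy * 43) >> 8 is used in place of QPy / 6, and the substitution is exact over the whole legal QP range 0..51 (and likewise for QPc). A quick self-check sketch:

    /* Self-check that the multiply-shift matches integer division by 6 over
       the legal QP range, which is why no division is needed above. */
    #include <assert.h>
    static void check_qp_div6(void)
    {
        int q;
        for (q = 0; q <= 51; q++)
            assert(((q * 43) >> 8) == q / 6);
    }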
-
-
-AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video)
-{
- AVCDec_Status status;
- int CurrFrameNum;
- int UnusedShortTermFrameNum;
- int tmp1 = video->sliceHdr->delta_pic_order_cnt[0];
- int tmp2 = video->sliceHdr->delta_pic_order_cnt[1];
- int tmp3 = video->CurrPicNum;
- int tmp4 = video->sliceHdr->adaptive_ref_pic_marking_mode_flag;
- UnusedShortTermFrameNum = (video->prevFrameNum + 1) % video->MaxFrameNum;
- CurrFrameNum = video->sliceHdr->frame_num;
-
- video->sliceHdr->delta_pic_order_cnt[0] = 0;
- video->sliceHdr->delta_pic_order_cnt[1] = 0;
- while (CurrFrameNum != UnusedShortTermFrameNum)
- {
- video->CurrPicNum = UnusedShortTermFrameNum;
- video->sliceHdr->frame_num = UnusedShortTermFrameNum;
-
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS) /* no buffer available */
- {
- return status;
- }
- DecodePOC(video);
- DPBInitPic(video, UnusedShortTermFrameNum);
-
-
- video->currFS->PicOrderCnt = video->PicOrderCnt;
- video->currFS->FrameNum = video->sliceHdr->frame_num;
-
-        /* mark the dummy frame as an already-outputted short-term reference */
- video->currFS->IsOutputted = 0x01;
- video->currFS->IsReference = 3;
- video->currFS->IsLongTerm = 0;
- video->currFS->frame.isReference = TRUE;
- video->currFS->frame.isLongTerm = FALSE;
-
- video->sliceHdr->adaptive_ref_pic_marking_mode_flag = 0;
-
- status = (AVCDec_Status)StorePictureInDPB(avcHandle, video); // MC_CHECK check the return status
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
- video->prevFrameNum = UnusedShortTermFrameNum;
- UnusedShortTermFrameNum = (UnusedShortTermFrameNum + 1) % video->MaxFrameNum;
- }
- video->sliceHdr->frame_num = CurrFrameNum;
- video->CurrPicNum = tmp3;
- video->sliceHdr->delta_pic_order_cnt[0] = tmp1;
- video->sliceHdr->delta_pic_order_cnt[1] = tmp2;
- video->sliceHdr->adaptive_ref_pic_marking_mode_flag = tmp4;
- return AVCDEC_SUCCESS;
-}
-
-/** see subclause 7.4.3.1 */
-AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr, int slice_type)
-{
- int i;
-
- if (slice_type != AVC_I_SLICE)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->ref_pic_list_reordering_flag_l0));
- if (sliceHdr->ref_pic_list_reordering_flag_l0)
- {
- i = 0;
- do
- {
- ue_v(stream, &(sliceHdr->reordering_of_pic_nums_idc_l0[i]));
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 0 ||
- sliceHdr->reordering_of_pic_nums_idc_l0[i] == 1)
- {
- ue_v(stream, &(sliceHdr->abs_diff_pic_num_minus1_l0[i]));
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 0 &&
- sliceHdr->abs_diff_pic_num_minus1_l0[i] > video->MaxPicNum / 2 - 1)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 1 &&
- sliceHdr->abs_diff_pic_num_minus1_l0[i] > video->MaxPicNum / 2 - 2)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- }
- else if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 2)
- {
- ue_v(stream, &(sliceHdr->long_term_pic_num_l0[i]));
- }
- i++;
- }
- while (sliceHdr->reordering_of_pic_nums_idc_l0[i-1] != 3
- && i <= (int)sliceHdr->num_ref_idx_l0_active_minus1 + 1) ;
- }
- }
- return AVCDEC_SUCCESS;
-}
-
-/** see subclause 7.4.3.3 */
-AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr)
-{
- int i;
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->no_output_of_prior_pics_flag));
- BitstreamRead1Bit(stream, &(sliceHdr->long_term_reference_flag));
- if (sliceHdr->long_term_reference_flag == 0) /* used for short-term */
- {
-            video->MaxLongTermFrameIdx = -1; /* no long-term frame index */
- }
- else /* used for long-term */
- {
- video->MaxLongTermFrameIdx = 0;
- video->LongTermFrameIdx = 0;
- }
- }
- else
- {
- BitstreamRead1Bit(stream, &(sliceHdr->adaptive_ref_pic_marking_mode_flag));
- if (sliceHdr->adaptive_ref_pic_marking_mode_flag)
- {
- i = 0;
- do
- {
- ue_v(stream, &(sliceHdr->memory_management_control_operation[i]));
- if (sliceHdr->memory_management_control_operation[i] == 1 ||
- sliceHdr->memory_management_control_operation[i] == 3)
- {
- ue_v(stream, &(sliceHdr->difference_of_pic_nums_minus1[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 2)
- {
- ue_v(stream, &(sliceHdr->long_term_pic_num[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 3 ||
- sliceHdr->memory_management_control_operation[i] == 6)
- {
- ue_v(stream, &(sliceHdr->long_term_frame_idx[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 4)
- {
- ue_v(stream, &(sliceHdr->max_long_term_frame_idx_plus1[i]));
- }
- i++;
- }
- while (sliceHdr->memory_management_control_operation[i-1] != 0 && i < MAX_DEC_REF_PIC_MARKING);
- if (i >= MAX_DEC_REF_PIC_MARKING)
- {
-                return AVCDEC_FAIL; /* exceeded MAX_DEC_REF_PIC_MARKING; not enough room for the marking operations */
- }
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 8.2.1 Decoding process for picture order count. */
-AVCDec_Status DecodePOC(AVCCommonObj *video)
-{
- AVCSeqParamSet *currSPS = video->currSeqParams;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- int i;
-
- switch (currSPS->pic_order_cnt_type)
- {
- case 0: /* POC MODE 0 , subclause 8.2.1.1 */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevPicOrderCntMsb = 0;
- video->prevPicOrderCntLsb = 0;
- }
-
- /* Calculate the MSBs of current picture */
- if (sliceHdr->pic_order_cnt_lsb < video->prevPicOrderCntLsb &&
- (video->prevPicOrderCntLsb - sliceHdr->pic_order_cnt_lsb) >= (video->MaxPicOrderCntLsb / 2))
- video->PicOrderCntMsb = video->prevPicOrderCntMsb + video->MaxPicOrderCntLsb;
- else if (sliceHdr->pic_order_cnt_lsb > video->prevPicOrderCntLsb &&
- (sliceHdr->pic_order_cnt_lsb - video->prevPicOrderCntLsb) > (video->MaxPicOrderCntLsb / 2))
- video->PicOrderCntMsb = video->prevPicOrderCntMsb - video->MaxPicOrderCntLsb;
- else
- video->PicOrderCntMsb = video->prevPicOrderCntMsb;
-
- /* JVT-I010 page 81 is different from JM7.3 */
-
-
- video->PicOrderCnt = video->TopFieldOrderCnt = video->PicOrderCntMsb + sliceHdr->pic_order_cnt_lsb;
- video->BottomFieldOrderCnt = video->TopFieldOrderCnt + sliceHdr->delta_pic_order_cnt_bottom;
-
- break;
-
-
- case 1: /* POC MODE 1, subclause 8.2.1.2 */
- /* calculate FrameNumOffset */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevFrameNumOffset = 0;
- video->FrameNumOffset = 0;
- }
- else if (video->prevFrameNum > sliceHdr->frame_num)
- {
- video->FrameNumOffset = video->prevFrameNumOffset + video->MaxFrameNum;
- }
- else
- {
- video->FrameNumOffset = video->prevFrameNumOffset;
- }
- /* calculate absFrameNum */
- if (currSPS->num_ref_frames_in_pic_order_cnt_cycle)
- {
- video->absFrameNum = video->FrameNumOffset + sliceHdr->frame_num;
- }
- else
- {
- video->absFrameNum = 0;
- }
-
- if (video->absFrameNum > 0 && video->nal_ref_idc == 0)
- {
- video->absFrameNum--;
- }
-
- /* derive picOrderCntCycleCnt and frameNumInPicOrderCntCycle */
- if (video->absFrameNum > 0)
- {
- video->picOrderCntCycleCnt = (video->absFrameNum - 1) / currSPS->num_ref_frames_in_pic_order_cnt_cycle;
- video->frameNumInPicOrderCntCycle = (video->absFrameNum - 1) % currSPS->num_ref_frames_in_pic_order_cnt_cycle;
- }
- /* derive expectedDeltaPerPicOrderCntCycle */
- video->expectedDeltaPerPicOrderCntCycle = 0;
- for (i = 0; i < (int)currSPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
- {
- video->expectedDeltaPerPicOrderCntCycle += currSPS->offset_for_ref_frame[i];
- }
- /* derive expectedPicOrderCnt */
- if (video->absFrameNum)
- {
- video->expectedPicOrderCnt = video->picOrderCntCycleCnt * video->expectedDeltaPerPicOrderCntCycle;
- for (i = 0; i <= video->frameNumInPicOrderCntCycle; i++)
- {
- video->expectedPicOrderCnt += currSPS->offset_for_ref_frame[i];
- }
- }
- else
- {
- video->expectedPicOrderCnt = 0;
- }
-
- if (video->nal_ref_idc == 0)
- {
- video->expectedPicOrderCnt += currSPS->offset_for_non_ref_pic;
- }
- /* derive TopFieldOrderCnt and BottomFieldOrderCnt */
-
- video->TopFieldOrderCnt = video->expectedPicOrderCnt + sliceHdr->delta_pic_order_cnt[0];
- video->BottomFieldOrderCnt = video->TopFieldOrderCnt + currSPS->offset_for_top_to_bottom_field + sliceHdr->delta_pic_order_cnt[1];
-
- video->PicOrderCnt = AVC_MIN(video->TopFieldOrderCnt, video->BottomFieldOrderCnt);
-
-
- break;
-
-
- case 2: /* POC MODE 2, subclause 8.2.1.3 */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->FrameNumOffset = 0;
- }
- else if (video->prevFrameNum > sliceHdr->frame_num)
- {
- video->FrameNumOffset = video->prevFrameNumOffset + video->MaxFrameNum;
- }
- else
- {
- video->FrameNumOffset = video->prevFrameNumOffset;
- }
- /* derive tempPicOrderCnt, we just use PicOrderCnt */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->PicOrderCnt = 0;
- }
- else if (video->nal_ref_idc == 0)
- {
- video->PicOrderCnt = 2 * (video->FrameNumOffset + sliceHdr->frame_num) - 1;
- }
- else
- {
- video->PicOrderCnt = 2 * (video->FrameNumOffset + sliceHdr->frame_num);
- }
- video->TopFieldOrderCnt = video->BottomFieldOrderCnt = video->PicOrderCnt;
- break;
- default:
- return AVCDEC_FAIL;
- }
-
- return AVCDEC_SUCCESS;
-}
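A short worked example of the mode 0 wraparound handling above, with MaxPicOrderCntLsb = 16 (log2_max_pic_order_cnt_lsb_minus4 = 0):

    /* prevLsb = 14, prevMsb = 0,  current lsb = 2:
         lsb < prevLsb and (prevLsb - lsb) = 12 >= 8  ->  Msb = 0 + 16 = 16
         PicOrderCnt = 16 + 2 = 18   (the count keeps increasing across the lsb wrap)
       prevLsb = 2,  prevMsb = 16, current lsb = 14:
         lsb > prevLsb and (lsb - prevLsb) = 12 > 8   ->  Msb = 16 - 16 = 0
         PicOrderCnt = 0 + 14 = 14   (a picture ordered before the wrap) */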
-
-
-AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- OSCL_UNUSED_ARG(stream);
- return AVCDEC_SUCCESS;
-}
-
-AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream, uint payloadType, uint payloadSize)
-{
- AVCDec_Status status = AVCDEC_SUCCESS;
- uint i;
- switch (payloadType)
- {
- case 0:
- /* buffering period SEI */
- status = buffering_period(decvid, stream);
- break;
- case 1:
- /* picture timing SEI */
- status = pic_timing(decvid, stream);
- break;
- case 2:
-
- case 3:
-
- case 4:
-
- case 5:
-
- case 8:
-
- case 9:
-
- case 10:
-
- case 11:
-
- case 12:
-
- case 13:
-
- case 14:
-
- case 15:
-
- case 16:
-
- case 17:
- for (i = 0; i < payloadSize; i++)
- {
- BitstreamFlushBits(stream, 8);
- }
- break;
- case 6:
- /* recovery point SEI */
- status = recovery_point(decvid, stream);
- break;
- case 7:
- /* decoded reference picture marking repetition SEI */
- status = dec_ref_pic_marking_repetition(decvid, stream);
- break;
-
- case 18:
- /* motion-constrained slice group set SEI */
- status = motion_constrained_slice_group_set(decvid, stream);
- break;
- default:
- /* reserved_sei_message */
- for (i = 0; i < payloadSize; i++)
- {
- BitstreamFlushBits(stream, 8);
- }
- break;
- }
- BitstreamByteAlign(stream);
- return status;
-}
-
-AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint seq_parameter_set_id;
- uint temp;
- uint i;
- ue_v(stream, &seq_parameter_set_id);
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
-// decvid->common->seq_parameter_set_id = seq_parameter_set_id;
-
- currSPS = decvid->seqParams[seq_parameter_set_id];
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag)
- {
- for (i = 0; i <= currSPS->vui_parameters.nal_hrd_parameters.cpb_cnt_minus1; i++)
- {
- /* initial_cpb_removal_delay[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
-            /* initial_cpb_removal_delay_offset[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- }
- }
-
- if (currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- for (i = 0; i <= currSPS->vui_parameters.vcl_hrd_parameters.cpb_cnt_minus1; i++)
- {
- /* initial_cpb_removal_delay[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
-            /* initial_cpb_removal_delay_offset[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint temp, NumClockTs = 0, time_offset_length = 24, full_timestamp_flag;
- uint i;
-
- currSPS = decvid->seqParams[decvid->common->seq_parameter_set_id];
-
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag)
- {
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.dpb_output_delay_length_minus1 + 1, &temp);
- time_offset_length = currSPS->vui_parameters.nal_hrd_parameters.time_offset_length;
- }
- else if (currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.dpb_output_delay_length_minus1 + 1, &temp);
- time_offset_length = currSPS->vui_parameters.vcl_hrd_parameters.time_offset_length;
- }
-
- if (currSPS->vui_parameters.pic_struct_present_flag)
- {
- /* pic_struct */
- BitstreamReadBits(stream, 4, &temp);
-
- switch (temp)
- {
- case 0:
- case 1:
- case 2:
- NumClockTs = 1;
- break;
- case 3:
- case 4:
- case 7:
- NumClockTs = 2;
- break;
- case 5:
- case 6:
- case 8:
- NumClockTs = 3;
- break;
- default:
- NumClockTs = 0;
- break;
- }
-
- for (i = 0; i < NumClockTs; i++)
- {
- /* clock_timestamp_flag[i] */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* ct_type */
- BitstreamReadBits(stream, 2, &temp);
- /* nuit_field_based_flag */
- BitstreamRead1Bit(stream, &temp);
- /* counting_type */
- BitstreamReadBits(stream, 5, &temp);
- /* full_timestamp_flag */
- BitstreamRead1Bit(stream, &temp);
- full_timestamp_flag = temp;
- /* discontinuity_flag */
- BitstreamRead1Bit(stream, &temp);
- /* cnt_dropped_flag */
- BitstreamRead1Bit(stream, &temp);
- /* n_frames */
- BitstreamReadBits(stream, 8, &temp);
-
-
- if (full_timestamp_flag)
- {
- /* seconds_value */
- BitstreamReadBits(stream, 6, &temp);
- /* minutes_value */
- BitstreamReadBits(stream, 6, &temp);
- /* hours_value */
- BitstreamReadBits(stream, 5, &temp);
- }
- else
- {
- /* seconds_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* seconds_value */
- BitstreamReadBits(stream, 6, &temp);
- /* minutes_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* minutes_value */
- BitstreamReadBits(stream, 6, &temp);
-
-                                    /* hours_flag */
- BitstreamRead1Bit(stream, &temp);
-
- if (temp)
- {
- /* hours_value */
- BitstreamReadBits(stream, 5, &temp);
- }
-
- }
- }
- }
-
- if (time_offset_length)
- {
- /* time_offset */
- BitstreamReadBits(stream, time_offset_length, &temp);
- }
- else
- {
- /* time_offset */
- temp = 0;
- }
- }
- }
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp;
- /* recovery_frame_cnt */
- ue_v(stream, &temp);
- /* exact_match_flag */
- BitstreamRead1Bit(stream, &temp);
- /* broken_link_flag */
- BitstreamRead1Bit(stream, &temp);
- /* changing_slice_group_idc */
- BitstreamReadBits(stream, 2, &temp);
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint temp;
- currSPS = decvid->seqParams[decvid->common->seq_parameter_set_id];
- /* original_idr_flag */
- BitstreamRead1Bit(stream, &temp);
- /* original_frame_num */
- ue_v(stream, &temp);
- if (currSPS->frame_mbs_only_flag == 0)
- {
- /* original_field_pic_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* original_bottom_field_flag */
- BitstreamRead1Bit(stream, &temp);
- }
- }
-
- /* dec_ref_pic_marking(video,stream,sliceHdr); */
-
-
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp, i, numBits;
- /* num_slice_groups_in_set_minus1 */
- ue_v(stream, &temp);
-
- numBits = 0;/* ceil(log2(num_slice_groups_minus1+1)) bits */
- i = temp;
- while (i > 0)
- {
- numBits++;
- i >>= 1;
- }
- for (i = 0; i <= temp; i++)
- {
- /* slice_group_id */
- BitstreamReadBits(stream, numBits, &temp);
- }
- /* exact_sample_value_match_flag */
- BitstreamRead1Bit(stream, &temp);
- /* pan_scan_rect_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* pan_scan_rect_id */
- ue_v(stream, &temp);
- }
-
- return AVCDEC_SUCCESS;
-}
-
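The SEI parsers above only skip over most syntax elements: fixed-length fields are consumed with BitstreamReadBits/BitstreamRead1Bit, and Exp-Golomb coded fields (e.g. recovery_frame_cnt, pan_scan_rect_id) with ue_v. For reference, unsigned Exp-Golomb decoding amounts to the sketch below; read_bit and read_bits are hypothetical stand-ins, not the decoder's actual bitstream API, and read_bits(0) is assumed to return 0.

    /* minimal sketch of unsigned Exp-Golomb (ue(v)) decoding */
    static unsigned int ue_decode(void)
    {
        int k = 0;
        while (read_bit() == 0)     /* count leading zero bits */
            k++;
        /* codeword value = 2^k - 1 + the next k bits */
        return (1u << k) - 1u + read_bits(k);
    }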
diff --git a/media/libstagefright/codecs/avc/dec/src/itrans.cpp b/media/libstagefright/codecs/avc/dec/src/itrans.cpp
deleted file mode 100644
index 02c550d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/itrans.cpp
+++ /dev/null
@@ -1,307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avclib_common.h"
-
-/* inputs are in the first 16 elements of block,
- outputs must be in the locations specified in Figure 8-6. */
-/* subclause 8.5.6 */
-void Intra16DCTrans(int16 *block, int Qq, int Rq)
-{
- int m0, m1, m2, m3;
- int j, offset;
- int16 *inout;
- int scale = dequant_coefres[Rq][0];
-
- inout = block;
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[4];
- m1 = inout[0] - inout[4];
- m2 = inout[8] + inout[12];
- m3 = inout[8] - inout[12];
-
-
- inout[0] = m0 + m2;
- inout[4] = m0 - m2;
- inout[8] = m1 - m3;
- inout[12] = m1 + m3;
- inout += 64;
- }
-
- inout = block;
-
- if (Qq >= 2) /* this way should be faster than JM */
- { /* they use (((m4*scale)<<(QPy/6))+2)>>2 for both cases. */
- Qq -= 2;
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[64];
- m1 = inout[0] - inout[64];
- m2 = inout[128] + inout[192];
- m3 = inout[128] - inout[192];
-
- inout[0] = ((m0 + m2) * scale) << Qq;
- inout[64] = ((m0 - m2) * scale) << Qq;
- inout[128] = ((m1 - m3) * scale) << Qq;
- inout[192] = ((m1 + m3) * scale) << Qq;
- inout += 4;
- }
- }
- else
- {
- Qq = 2 - Qq;
- offset = 1 << (Qq - 1);
-
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[64];
- m1 = inout[0] - inout[64];
- m2 = inout[128] + inout[192];
- m3 = inout[128] - inout[192];
-
- inout[0] = (((m0 + m2) * scale + offset) >> Qq);
- inout[64] = (((m0 - m2) * scale + offset) >> Qq);
- inout[128] = (((m1 - m3) * scale + offset) >> Qq);
- inout[192] = (((m1 + m3) * scale + offset) >> Qq);
- inout += 4;
- }
- }
-
- return ;
-}
-
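The "faster than JM" note inside Intra16DCTrans refers to the following equivalence, with Qq = QPy/6 and v standing for the butterfly output times the dequant scale: for Qq >= 2 the JM expression's "+2 >> 2" is exact because v << Qq is already a multiple of 4, and for Qq < 2 it reduces to the rounded right shift used in the else branch. A sketch of the two forms (not the shipped code; it mirrors the original's two's-complement shift arithmetic):

    static int jm_scale(int v, int Qq)      /* JM: (((m4*scale) << (QPy/6)) + 2) >> 2 */
    {
        return ((v << Qq) + 2) >> 2;
    }

    static int split_scale(int v, int Qq)   /* the branch structure used above */
    {
        if (Qq >= 2)
            return v << (Qq - 2);                     /* low bits are zero, no rounding needed */
        return (v + (1 << (1 - Qq))) >> (2 - Qq);     /* add the rounding offset first */
    }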
-/* see subclause 8.5.8 */
-void itrans(int16 *block, uint8 *pred, uint8 *cur, int width)
-{
- int e0, e1, e2, e3; /* note: at every step of the calculation, these values */
- /* shall never exceed the signed 16-bit range, but we don't check, */
- int i; /* to save cycles. */
- int16 *inout;
-
- inout = block;
-
- for (i = 4; i > 0; i--)
- {
- e0 = inout[0] + inout[2];
- e1 = inout[0] - inout[2];
- e2 = (inout[1] >> 1) - inout[3];
- e3 = inout[1] + (inout[3] >> 1);
-
- inout[0] = e0 + e3;
- inout[1] = e1 + e2;
- inout[2] = e1 - e2;
- inout[3] = e0 - e3;
-
- inout += 16;
- }
-
- for (i = 4; i > 0; i--)
- {
- e0 = block[0] + block[32];
- e1 = block[0] - block[32];
- e2 = (block[16] >> 1) - block[48];
- e3 = block[16] + (block[48] >> 1);
-
- e0 += e3;
- e3 = (e0 - (e3 << 1)); /* e0-e3 */
- e1 += e2;
- e2 = (e1 - (e2 << 1)); /* e1-e2 */
- e0 += 32;
- e1 += 32;
- e2 += 32;
- e3 += 32;
-#ifdef USE_PRED_BLOCK
- e0 = pred[0] + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- e1 = pred[20] + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- e2 = pred[40] + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- e3 = pred[60] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- *cur = e0;
- *(cur += width) = e1;
- *(cur += width) = e2;
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
- pred++;
-#else
- OSCL_UNUSED_ARG(pred);
-
- e0 = *cur + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- *cur = e0;
- e1 = *(cur += width) + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- *cur = e1;
- e2 = *(cur += width) + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- *cur = e2;
- e3 = cur[width] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
-#endif
- block++;
- }
-
- return ;
-}
-
-/* see subclause 8.5.8 */
-void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width)
-{
- int e0, e1, e2, e3; /* note: at every step of the calculation, these values */
- /* shall never exceed the signed 16-bit range, but we don't check, */
- int i; /* to save cycles. */
- int16 *inout;
-
- inout = block;
-
- for (i = 4; i > 0; i--)
- {
- e0 = inout[0] + inout[2];
- e1 = inout[0] - inout[2];
- e2 = (inout[1] >> 1) - inout[3];
- e3 = inout[1] + (inout[3] >> 1);
-
- inout[0] = e0 + e3;
- inout[1] = e1 + e2;
- inout[2] = e1 - e2;
- inout[3] = e0 - e3;
-
- inout += 16;
- }
-
- for (i = 4; i > 0; i--)
- {
- e0 = block[0] + block[32];
- e1 = block[0] - block[32];
- e2 = (block[16] >> 1) - block[48];
- e3 = block[16] + (block[48] >> 1);
-
- e0 += e3;
- e3 = (e0 - (e3 << 1)); /* e0-e3 */
- e1 += e2;
- e2 = (e1 - (e2 << 1)); /* e1-e2 */
- e0 += 32;
- e1 += 32;
- e2 += 32;
- e3 += 32;
-#ifdef USE_PRED_BLOCK
- e0 = pred[0] + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- e1 = pred[12] + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- e2 = pred[24] + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- e3 = pred[36] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- *cur = e0;
- *(cur += width) = e1;
- *(cur += width) = e2;
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
- pred++;
-#else
- OSCL_UNUSED_ARG(pred);
-
- e0 = *cur + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- *cur = e0;
- e1 = *(cur += width) + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- *cur = e1;
- e2 = *(cur += width) + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- *cur = e2;
- e3 = cur[width] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
-#endif
- block++;
- }
-
- return ;
-}
-
-/* see subclause 8.5.7 */
-void ChromaDCTrans(int16 *block, int Qq, int Rq)
-{
- int c00, c01, c10, c11;
- int f0, f1, f2, f3;
- int scale = dequant_coefres[Rq][0];
-
- c00 = block[0] + block[4];
- c01 = block[0] - block[4];
- c10 = block[64] + block[68];
- c11 = block[64] - block[68];
-
- f0 = c00 + c10;
- f1 = c01 + c11;
- f2 = c00 - c10;
- f3 = c01 - c11;
-
- if (Qq >= 1)
- {
- Qq -= 1;
- block[0] = (f0 * scale) << Qq;
- block[4] = (f1 * scale) << Qq;
- block[64] = (f2 * scale) << Qq;
- block[68] = (f3 * scale) << Qq;
- }
- else
- {
- block[0] = (f0 * scale) >> 1;
- block[4] = (f1 * scale) >> 1;
- block[64] = (f2 * scale) >> 1;
- block[68] = (f3 * scale) >> 1;
- }
-
- return ;
-}
-
-
-void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch)
-{
- uint32 temp;
-
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- *((uint32*)cur) = temp;
-
- return ;
-}
-
-
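copy_block above moves a 4x4 block with 32-bit word loads and stores, which presumes that pred and cur are 4-byte aligned and that pred_pitch and width keep each row aligned. A byte-wise equivalent, shown only to make the data movement explicit:

    static void copy_block_bytes(uint8 *pred, uint8 *cur, int width, int pred_pitch)
    {
        int i, j;
        for (j = 0; j < 4; j++)          /* 4 rows of the 4x4 block */
        {
            for (i = 0; i < 4; i++)      /* 4 bytes per row */
                cur[i] = pred[i];
            pred += pred_pitch;
            cur += width;
        }
    }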
diff --git a/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp b/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp
deleted file mode 100644
index ba36c37..0000000
--- a/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp
+++ /dev/null
@@ -1,2329 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-
-
-#define CLIP_RESULT(x) if((uint)x > 0xFF){ \
- x = 0xFF & (~(x>>31));}
-
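CLIP_RESULT is the branch-light clamp to [0, 255] used throughout this file: for a negative x the arithmetic shift x >> 31 yields -1, so ~(x >> 31) is 0 and x becomes 0; for x > 255 the shift yields 0, so the mask leaves 0xFF. A plain equivalent, for reference only:

    static int clip255(int x)
    {
        if (x < 0)
            return 0;
        if (x > 255)
            return 255;
        return x;
    }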
-/* (blkwidth << 2) + (dy << 1) + dx */
-static void (*const ChromaMC_SIMD[8])(uint8 *, int , int , int , uint8 *, int, int , int) =
-{
- &ChromaFullMC_SIMD,
- &ChromaHorizontalMC_SIMD,
- &ChromaVerticalMC_SIMD,
- &ChromaDiagonalMC_SIMD,
- &ChromaFullMC_SIMD,
- &ChromaHorizontalMC2_SIMD,
- &ChromaVerticalMC2_SIMD,
- &ChromaDiagonalMC2_SIMD
-};
-/* Perform motion prediction and compensation, with residue if it exists. */
-void InterMBPrediction(AVCCommonObj *video)
-{
- AVCMacroblock *currMB = video->currMB;
- AVCPictureData *currPic = video->currPic;
- int mbPartIdx, subMbPartIdx;
- int ref_idx;
- int offset_MbPart_indx = 0;
- int16 *mv;
- uint32 x_pos, y_pos;
- uint8 *curL, *curCb, *curCr;
- uint8 *ref_l, *ref_Cb, *ref_Cr;
- uint8 *predBlock, *predCb, *predCr;
- int block_x, block_y, offset_x, offset_y, offsetP, offset;
- int x_position = (video->mb_x << 4);
- int y_position = (video->mb_y << 4);
- int MbHeight, MbWidth, mbPartIdx_X, mbPartIdx_Y, offset_indx;
- int picWidth = currPic->pitch;
- int picHeight = currPic->height;
- int16 *dataBlock;
- uint32 cbp4x4;
- uint32 tmp_word;
-
- tmp_word = y_position * picWidth;
- curL = currPic->Sl + tmp_word + x_position;
- offset = (tmp_word >> 2) + (x_position >> 1);
- curCb = currPic->Scb + offset;
- curCr = currPic->Scr + offset;
-
-#ifdef USE_PRED_BLOCK
- predBlock = video->pred + 84;
- predCb = video->pred + 452;
- predCr = video->pred + 596;
-#else
- predBlock = curL;
- predCb = curCb;
- predCr = curCr;
-#endif
-
- GetMotionVectorPredictor(video, false);
-
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- MbHeight = currMB->SubMbPartHeight[mbPartIdx];
- MbWidth = currMB->SubMbPartWidth[mbPartIdx];
- mbPartIdx_X = ((mbPartIdx + offset_MbPart_indx) & 1);
- mbPartIdx_Y = (mbPartIdx + offset_MbPart_indx) >> 1;
- ref_idx = currMB->ref_idx_L0[(mbPartIdx_Y << 1) + mbPartIdx_X];
- offset_indx = 0;
-
- ref_l = video->RefPicList0[ref_idx]->Sl;
- ref_Cb = video->RefPicList0[ref_idx]->Scb;
- ref_Cr = video->RefPicList0[ref_idx]->Scr;
-
- for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
- {
- block_x = (mbPartIdx_X << 1) + ((subMbPartIdx + offset_indx) & 1); // check this
- block_y = (mbPartIdx_Y << 1) + (((subMbPartIdx + offset_indx) >> 1) & 1);
- mv = (int16*)(currMB->mvL0 + block_x + (block_y << 2));
- offset_x = x_position + (block_x << 2);
- offset_y = y_position + (block_y << 2);
- x_pos = (offset_x << 2) + *mv++; /*quarter pel */
- y_pos = (offset_y << 2) + *mv; /*quarter pel */
-
- //offset = offset_y * currPic->width;
- //offsetC = (offset >> 2) + (offset_x >> 1);
-#ifdef USE_PRED_BLOCK
- offsetP = (block_y * 80) + (block_x << 2);
- LumaMotionComp(ref_l, picWidth, picHeight, x_pos, y_pos,
- /*comp_Sl + offset + offset_x,*/
- predBlock + offsetP, 20, MbWidth, MbHeight);
-#else
- offsetP = (block_y << 2) * picWidth + (block_x << 2);
- LumaMotionComp(ref_l, picWidth, picHeight, x_pos, y_pos,
- /*comp_Sl + offset + offset_x,*/
- predBlock + offsetP, picWidth, MbWidth, MbHeight);
-#endif
-
-#ifdef USE_PRED_BLOCK
- offsetP = (block_y * 24) + (block_x << 1);
- ChromaMotionComp(ref_Cb, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scb + offsetC,*/
- predCb + offsetP, 12, MbWidth >> 1, MbHeight >> 1);
- ChromaMotionComp(ref_Cr, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scr + offsetC,*/
- predCr + offsetP, 12, MbWidth >> 1, MbHeight >> 1);
-#else
- offsetP = (block_y * picWidth) + (block_x << 1);
- ChromaMotionComp(ref_Cb, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scb + offsetC,*/
- predCb + offsetP, picWidth >> 1, MbWidth >> 1, MbHeight >> 1);
- ChromaMotionComp(ref_Cr, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scr + offsetC,*/
- predCr + offsetP, picWidth >> 1, MbWidth >> 1, MbHeight >> 1);
-#endif
-
- offset_indx = currMB->SubMbPartWidth[mbPartIdx] >> 3;
- }
- offset_MbPart_indx = currMB->MbPartWidth >> 4;
- }
-
- /* used in decoder, used to be if(!encFlag) */
-
- /* transform in raster scan order */
- dataBlock = video->block;
- cbp4x4 = video->cbp4x4;
- /* luma */
- for (block_y = 4; block_y > 0; block_y--)
- {
- for (block_x = 4; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- itrans(dataBlock, predBlock, predBlock, 20);
- }
-#else
- if (cbp4x4&1)
- {
- itrans(dataBlock, curL, curL, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predBlock += 4;
-#else
- curL += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predBlock += 64;
-#else
- curL += ((picWidth << 2) - 16);
-#endif
- }
-
- /* chroma */
- picWidth = (picWidth >> 1);
- for (block_y = 2; block_y > 0; block_y--)
- {
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCb, predCb, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCb, curCb, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCb += 4;
-#else
- curCb += 4;
-#endif
- }
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCr, predCr, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCr, curCr, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCr += 4;
-#else
- curCr += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predCb += 40;
- predCr += 40;
-#else
- curCb += ((picWidth << 2) - 8);
- curCr += ((picWidth << 2) - 8);
-#endif
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset);
-#endif
-
- return ;
-}
-
-
-/* perform the actual motion compensation here */
-void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight)
-{
- int dx, dy;
- uint8 temp[24][24]; /* for padding; make the size a multiple of 4 for packing */
- int temp2[21][21]; /* for intermediate results */
- uint8 *ref2;
-
- dx = x_pos & 3;
- dy = y_pos & 3;
- x_pos = x_pos >> 2; /* round it to full-pel resolution */
- y_pos = y_pos >> 2;
-
- /* perform actual motion compensation */
- if (dx == 0 && dy == 0)
- { /* fullpel position *//* G */
- if (x_pos >= 0 && x_pos + blkwidth <= picwidth && y_pos >= 0 && y_pos + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
- FullPelMC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight);
- }
- else
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos, &temp[0][0], blkwidth, blkheight);
- FullPelMC(&temp[0][0], 24, pred, pred_pitch, blkwidth, blkheight);
- }
-
- } /* other positions */
- else if (dy == 0)
- { /* no vertical interpolation *//* a,b,c*/
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos >= 0 && y_pos + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
-
- HorzInterp1MC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos, &temp[0][0], blkwidth + 5, blkheight);
-
- HorzInterp1MC(&temp[0][2], 24, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- }
- else if (dx == 0)
- { /*no horizontal interpolation *//* d,h,n */
-
- if (x_pos >= 0 && x_pos + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
-
- VertInterp1MC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos - 2, &temp[0][0], blkwidth, blkheight + 5);
-
- VertInterp1MC(&temp[2][0], 24, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- }
- else if (dy == 2)
- { /* horizontal cross *//* i, j, k */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos - 2; /* move to the left 2 pixels */
-
- VertInterp2MC(ref, picwidth, &temp2[0][0], 21, blkwidth + 5, blkheight);
-
- HorzInterp2MC(&temp2[0][2], 21, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5, blkheight + 5);
-
- VertInterp2MC(&temp[2][0], 24, &temp2[0][0], 21, blkwidth + 5, blkheight);
-
- HorzInterp2MC(&temp2[0][2], 21, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- }
- else if (dx == 2)
- { /* vertical cross */ /* f,q */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += (y_pos - 2) * picwidth + x_pos; /* move to up 2 lines */
-
- HorzInterp3MC(ref, picwidth, &temp2[0][0], 21, blkwidth, blkheight + 5);
- VertInterp3MC(&temp2[2][0], 21, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5, blkheight + 5);
- HorzInterp3MC(&temp[0][2], 24, &temp2[0][0], 21, blkwidth, blkheight + 5);
- VertInterp3MC(&temp2[2][0], 21, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- }
- else
- { /* diagonal *//* e,g,p,r */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + (dx / 2) + blkwidth <= picwidth &&
- y_pos - 2 >= 0 && y_pos + 3 + blkheight + (dy / 2) <= picheight)
- {
- ref2 = ref + (y_pos + (dy / 2)) * picwidth + x_pos;
-
- ref += (y_pos * picwidth) + x_pos + (dx / 2);
-
- DiagonalInterpMC(ref2, ref, picwidth, pred, pred_pitch, blkwidth, blkheight);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5 + (dx / 2), blkheight + 5 + (dy / 2));
-
- ref2 = &temp[2 + (dy/2)][2];
-
- ref = &temp[2][2 + (dx/2)];
-
- DiagonalInterpMC(ref2, ref, 24, pred, pred_pitch, blkwidth, blkheight);
- }
- }
-
- return ;
-}
-
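LumaMotionComp dispatches on the fractional part (dx, dy) of the quarter-pel motion vector; the interpolation routines further down all apply the H.264 6-tap half-pel filter with taps (1, -5, 20, 20, -5, 1), and the quarter-pel paths then average the result with the nearest integer or half-pel sample via (a + b + 1) >> 1. A scalar sketch of one half-pel sample, to show what the unrolled code computes:

    static int halfpel_6tap(const uint8 *p)      /* p points at the first of six samples */
    {
        int v = p[0] - 5 * p[1] + 20 * p[2] + 20 * p[3] - 5 * p[4] + p[5];
        v = (v + 16) >> 5;                       /* normalize and round the single pass */
        if (v < 0) v = 0;
        else if (v > 255) v = 255;               /* clip, as CLIP_RESULT does */
        return v;
    }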
-void CreateAlign(uint8 *ref, int picwidth, int y_pos,
- uint8 *out, int blkwidth, int blkheight)
-{
- int i, j;
- int offset, out_offset;
- uint32 prev_pix, result, pix1, pix2, pix4;
-
- out_offset = 24 - blkwidth;
-
- //switch(x_pos&0x3){
- switch (((uint32)ref)&0x3)
- {
- case 1:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 3;
- for (j = 0; j < blkheight; j++)
- {
- pix1 = *ref++;
- pix2 = *((uint16*)ref);
- ref += 2;
- result = (pix2 << 8) | pix1;
-
- for (i = 3; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 24) & 0xFF000000; /* mask out the byte belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 8; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- case 2:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 2;
- for (j = 0; j < blkheight; j++)
- {
- result = *((uint16*)ref);
- ref += 2;
- for (i = 2; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 16) & 0xFFFF0000; /* mask out the bytes belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 16; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- case 3:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 1;
- for (j = 0; j < blkheight; j++)
- {
- result = *ref++;
- for (i = 1; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 8) & 0xFFFFFF00; /* mask out the bytes belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 24; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- }
-}
-
-void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y_pos,
- uint8 *out, int blkwidth, int blkheight)
-{
- int x_inc0, x_mid;
- int y_inc, y_inc0, y_inc1, y_mid;
- int i, j;
- int offset;
-
- if (x_pos < 0)
- {
- x_inc0 = 0; /* increment for the first part */
- x_mid = ((blkwidth + x_pos > 0) ? -x_pos : blkwidth); /* stopping point */
- x_pos = 0;
- }
- else if (x_pos + blkwidth > picwidth)
- {
- x_inc0 = 1; /* increasing */
- x_mid = ((picwidth > x_pos) ? picwidth - x_pos - 1 : 0); /* clip negative to zero; fool-proof against bad encoder input */
- }
- else /* normal case */
- {
- x_inc0 = 1;
- x_mid = blkwidth; /* just one run */
- }
-
-
- /* boundary for y_pos, taking the result from x_pos into account */
- if (y_pos < 0)
- {
- y_inc0 = (x_inc0 ? - x_mid : -blkwidth + x_mid); /* offset depending on x_inc0 and x_mid */
- y_inc1 = picwidth + y_inc0;
- y_mid = ((blkheight + y_pos > 0) ? -y_pos : blkheight); /* clip to prevent memory corruption */
- y_pos = 0;
- }
- else if (y_pos + blkheight > picheight)
- {
- y_inc1 = (x_inc0 ? - x_mid : -blkwidth + x_mid); /* saturate */
- y_inc0 = picwidth + y_inc1; /* increasing */
- y_mid = ((picheight > y_pos) ? picheight - 1 - y_pos : 0);
- }
- else /* normal case */
- {
- y_inc1 = (x_inc0 ? - x_mid : -blkwidth + x_mid);
- y_inc0 = picwidth + y_inc1;
- y_mid = blkheight;
- }
-
- /* clip y_pos and x_pos */
- if (y_pos > picheight - 1) y_pos = picheight - 1;
- if (x_pos > picwidth - 1) x_pos = picwidth - 1;
-
- ref += y_pos * picwidth + x_pos;
-
- y_inc = y_inc0; /* start with top half */
-
- offset = 24 - blkwidth; /* to use in offset out */
- blkwidth -= x_mid; /* to use in the loop limit */
-
- if (x_inc0 == 0)
- {
- for (j = 0; j < blkheight; j++)
- {
- if (j == y_mid) /* put a check here to reduce the code size (for unrolling the loop) */
- {
- y_inc = y_inc1; /* switch to lower half */
- }
- for (i = x_mid; i > 0; i--) /* first or third quarter */
- {
- *out++ = *ref;
- }
- for (i = blkwidth; i > 0; i--) /* second or fourth quarter */
- {
- *out++ = *ref++;
- }
- out += offset;
- ref += y_inc;
- }
- }
- else
- {
- for (j = 0; j < blkheight; j++)
- {
- if (j == y_mid) /* put a check here to reduce the code size (for unrolling the loop) */
- {
- y_inc = y_inc1; /* switch to lower half */
- }
- for (i = x_mid; i > 0; i--) /* first or third quarter */
- {
- *out++ = *ref++;
- }
- for (i = blkwidth; i > 0; i--) /* second or fourth quarter */
- {
- *out++ = *ref;
- }
- out += offset;
- ref += y_inc;
- }
- }
-
- return ;
-}
-
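CreatePad builds an edge-replicated copy of the reference window in a 24-pitch temporary whenever the motion vector reaches outside the picture. Conceptually it is equivalent to clamping each source coordinate, as in this sketch:

    static uint8 pad_sample(uint8 *pic, int picwidth, int picheight, int x, int y)
    {
        if (x < 0) x = 0;
        else if (x > picwidth - 1) x = picwidth - 1;      /* replicate left/right edge */
        if (y < 0) y = 0;
        else if (y > picheight - 1) y = picheight - 1;    /* replicate top/bottom edge */
        return pic[y * picwidth + x];
    }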
-void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx)
-{
- uint8 *p_ref;
- uint32 *p_cur;
- uint32 tmp, pkres;
- int result, curr_offset, ref_offset;
- int j;
- int32 r0, r1, r2, r3, r4, r5;
- int32 r13, r6;
-
- p_cur = (uint32*)out; /* assume it's word aligned */
- curr_offset = (outpitch - blkwidth) >> 2;
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- if (dx&1)
- {
- dx = ((dx >> 1) ? -3 : -4); /* use in 3/4 pel */
- p_ref -= 2;
- r13 = 0;
- for (j = blkheight; j > 0; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
- while ((uint32)p_ref < tmp)
- {
- r2 = *(p_ref += 4); /* move pointer to e */
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
-
- r5 = p_ref[dx+2];
- r6 = p_ref[dx+4];
- r5 |= (r6 << 16);
- r4 += r5;
- r4 += 0x10001;
- r4 = (r4 >> 1) & 0xFF00FF;
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
-
- r0 = p_ref[dx+3];
- r1 = p_ref[dx+5];
- r0 |= (r1 << 16);
- r5 += r0;
- r5 += 0x10001;
- r5 = (r5 >> 1) & 0xFF00FF;
-
- r4 |= (r5 << 8); /* pack them together */
- *p_cur++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_cur -= (outpitch >> 2);
-
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- pkres = (result >> 1) ;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 5; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
- }
- else
- {
- p_ref -= 2;
- r13 = 0;
- for (j = blkheight; j > 0; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
- while ((uint32)p_ref < tmp)
- {
- r2 = *(p_ref += 4); /* move pointer to e */
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
- r4 &= 0xFF00FF; /* mask */
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
- r5 &= 0xFF00FF; /* mask */
-
- r4 |= (r5 << 8); /* pack them together */
- *p_cur++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_cur -= (outpitch >> 2);
-
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 5;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset;
- }
- }
- }
-
- return ;
-}
-
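HorzInterp1MC above (and VertInterp1MC and DiagonalInterpMC below) process two pixels per 32-bit register by keeping them in the two 0xFF00FF lanes: the 20x and 5x multiplies and the +16 bias are applied to both lanes at once, and r13 ORs every intermediate result so that a row is recomputed on the slower per-pixel clipped path only if some bit outside the 8-bit lanes (mask 0xFF000700) was ever set. A minimal illustration of the lane trick, assuming both inputs are in 0..255 so neither lane carries into its neighbour:

    static uint32 scale_two_lanes(uint32 a, uint32 c)
    {
        uint32 two = a | (c << 16);       /* pack: lanes hold 0,c,0,a */
        return two * 20 + 0x100010;       /* 20*a + 16 and 20*c + 16 computed in parallel */
    }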
-void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx)
-{
- int *p_ref;
- uint32 *p_cur;
- uint32 tmp, pkres;
- int result, result2, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = (uint32*)out; /* assume it's word aligned */
- curr_offset = (outpitch - blkwidth) >> 2;
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- if (dx&1)
- {
- dx = ((dx >> 1) ? -3 : -4); /* use in 3/4 pel */
-
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- pkres = (result >> 1);
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 3; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
- else
- {
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 3; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
-
- return ;
-}
-
-void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight)
-{
- uint8 *p_ref;
- int *p_cur;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = (outpitch - blkwidth);
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- *p_cur++ = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- *p_cur++ = result;
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- *p_cur++ = result;
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- *p_cur++ = result;
- p_ref -= 3; /* move back to the middle of the filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset;
- }
-
- return ;
-}
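For the center and cross positions the filtering runs in two passes: HorzInterp3MC (above) or VertInterp2MC (below) writes the raw 6-tap sums into an int buffer without shifting, and HorzInterp2MC (above) or VertInterp3MC (below) filters those sums and normalizes both passes at once with (v + 512) >> 10, since each pass contributes a factor of 32. A scalar sketch of one second-pass sample:

    static int center_halfpel(const int *mid)    /* mid: six first-pass sums */
    {
        int v = mid[0] - 5 * mid[1] + 20 * mid[2] + 20 * mid[3] - 5 * mid[4] + mid[5];
        v = (v + 512) >> 10;                     /* 32 * 32 with a single rounding step */
        if (v < 0) v = 0;
        else if (v > 255) v = 255;
        return v;
    }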
-void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy)
-{
- uint8 *p_cur, *p_ref;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, i;
- int32 r0, r1, r2, r3, r4, r5, r6, r7, r8, r13;
- uint8 tmp_in[24][24];
-
- /* not word-aligned */
- if (((uint32)in)&0x3)
- {
- CreateAlign(in, inpitch, -2, &tmp_in[0][0], blkwidth, blkheight + 5);
- in = &tmp_in[2][0];
- inpitch = 24;
- }
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- curr_offset += 3;
-
- if (dy&1)
- {
- dy = (dy >> 1) ? 0 : -inpitch;
-
- for (j = 0; j < blkwidth; j += 4, in += 4)
- {
- r13 = 0;
- p_ref = in;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
-
- r1 = *((uint32*)(p_ref + dy));
- r2 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r0 += r1;
- r6 += r2;
- r0 += 0x10001;
- r6 += 0x10001;
- r0 = (r0 >> 1) & 0xFF00FF;
- r6 = (r6 >> 1) & 0xFF00FF;
-
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column needs clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in + i;
- p_cur -= outpitch; /* compensate for the first offset */
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
- }
- else
- {
- for (j = 0; j < blkwidth; j += 4, in += 4)
- {
- r13 = 0;
- p_ref = in;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
- r0 &= 0xFF00FF;
- r6 &= 0xFF00FF;
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column needs clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in + i;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
- }
-
- return ;
-}
-
-void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int *p_cur;
- uint8 *p_ref;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += curr_offset;
- }
-
- return ;
-}
-
-void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy)
-{
- uint8 *p_cur;
- int *p_ref;
- uint32 tmp;
- int result, result2, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- if (dy&1)
- {
- dy = (dy >> 1) ? -(inpitch << 1) : -(inpitch << 1) - inpitch;
-
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += curr_offset;
- }
- }
- else
- {
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += curr_offset;
- }
- }
-
- return ;
-}
-
-void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
- uint8 *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int j, i;
- int result;
- uint8 *p_cur, *p_ref, *p_tmp8;
- int curr_offset, ref_offset;
- uint8 tmp_res[24][24], tmp_in[24][24];
- uint32 *p_tmp;
- uint32 tmp, pkres, tmp_result;
- int32 r0, r1, r2, r3, r4, r5;
- int32 r6, r7, r8, r9, r10, r13;
-
- ref_offset = inpitch - blkwidth;
- p_ref = in1 - 2;
- /* perform horizontal interpolation */
- /* not word-aligned */
- /* It is faster to read 1 byte at a time to avoid calling CreateAlign */
- /* if(((uint32)p_ref)&0x3)
- {
- CreateAlign(p_ref,inpitch,0,&tmp_in[0][0],blkwidth+8,blkheight);
- p_ref = &tmp_in[0][0];
- ref_offset = 24-blkwidth;
- }*/
-
- p_tmp = (uint32*) & (tmp_res[0][0]);
- for (j = blkheight; j > 0; j--)
- {
- r13 = 0;
- tmp = (uint32)(p_ref + blkwidth);
-
- //r0 = *((uint32*)p_ref); /* d,c,b,a */
- //r1 = (r0>>8)&0xFF00FF; /* 0,d,0,b */
- //r0 &= 0xFF00FF; /* 0,c,0,a */
- /* It is faster to read 1 byte at a time, */
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
-
- while ((uint32)p_ref < tmp)
- {
- //r2 = *((uint32*)(p_ref+=4));/* h,g,f,e */
- //r3 = (r2>>8)&0xFF00FF; /* 0,h,0,f */
- //r2 &= 0xFF00FF; /* 0,g,0,e */
- /* It is faster to read 1 byte at a time, */
- r2 = *(p_ref += 4);
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
- r4 &= 0xFF00FF; /* mask */
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
- r5 &= 0xFF00FF; /* mask */
-
- r4 |= (r5 << 8); /* pack them together */
- *p_tmp++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_tmp += ((24 - blkwidth) >> 2); /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_tmp -= 6; /* intermediate output */
- tmp = (uint32)(p_ref + blkwidth);
- while ((uint32)p_ref < tmp)
- {
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 24);
-
- *p_tmp++ = pkres; /* write 4 pixel */
- p_ref -= 5;
- }
- p_tmp += ((24 - blkwidth) >> 2); /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
- }
- }
-
- /* perform vertical interpolation */
- /* not word-aligned */
- if (((uint32)in2)&0x3)
- {
- CreateAlign(in2, inpitch, -2, &tmp_in[0][0], blkwidth, blkheight + 5);
- in2 = &tmp_in[2][0];
- inpitch = 24;
- }
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically up and one pixel right */
- pkres = blkheight * inpitch; /* reuse it for limit */
-
- curr_offset += 3;
-
- for (j = 0; j < blkwidth; j += 4, in2 += 4)
- {
- r13 = 0;
- p_ref = in2;
- p_tmp8 = &(tmp_res[0][j]); /* intermediate result */
- p_tmp8 -= 24; /* compensate for the first offset */
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + pkres); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
-            /* Reading 1 byte at a time is too slow here (too many read and pack ops); CreateAlign is called above so 4 bytes can be loaded at once. */
- /*p_ref8 = p_ref-(inpitch<<1); r0 = p_ref8[0]; r1 = p_ref8[2];
- r0 |= (r1<<16); r6 = p_ref8[1]; r1 = p_ref8[3];
- r6 |= (r1<<16); p_ref+=inpitch; */
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- /*p_ref8 = p_ref+(inpitch<<1);
- r1 = p_ref8[0]; r7 = p_ref8[2]; r1 |= (r7<<16);
- r7 = p_ref8[1]; r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- /*r2 = p_ref[0]; r8 = p_ref[2]; r2 |= (r8<<16);
- r8 = p_ref[1]; r1 = p_ref[3]; r8 |= (r1<<16);*/
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- /*p_ref8 = p_ref-inpitch; r1 = p_ref8[0]; r7 = p_ref8[2];
- r1 |= (r7<<16); r1 += r2; r7 = p_ref8[1];
- r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- /*p_ref8 = p_ref-(inpitch<<1); r2 = p_ref8[0]; r8 = p_ref8[2];
- r2 |= (r8<<16); r8 = p_ref8[1]; r1 = p_ref8[3]; r8 |= (r1<<16);*/
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- /*p_ref8 = p_ref+inpitch; r1 = p_ref8[0]; r7 = p_ref8[2];
- r1 |= (r7<<16); r1 += r2; r7 = p_ref8[1];
- r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
- /* add with horizontal results */
- r10 = *((uint32*)(p_tmp8 += 24));
- r9 = (r10 >> 8) & 0xFF00FF;
- r10 &= 0xFF00FF;
-
- r0 += r10;
- r0 += 0x10001;
-                r0 = (r0 >> 1) & 0xFF00FF; /* mask each halfword down to 8 bits */
-
- r6 += r9;
- r6 += 0x10001;
-                r6 = (r6 >> 1) & 0xFF00FF; /* mask each halfword down to 8 bits */
-
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column need clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in2 + i;
- p_tmp8 = &(tmp_res[0][j+i]); /* intermediate result */
- p_tmp8 -= 24; /* compensate for the first offset */
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + pkres); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* modify pointer before loading */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to center of the filter of the next one */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
-
- return ;
-}
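
For reference, the packed halfword arithmetic in the loops above is the standard H.264 six-tap half-sample filter with weights (1, -5, 20, 20, -5, 1), a rounding offset of 16 and a right shift of 5, evaluated two samples per 32-bit register. A minimal scalar sketch of that filter is shown below; the function name and the plain pointer interface are illustrative, not part of the deleted source.

#include <stdint.h>

/* Scalar reference for the half-sample filter computed above:
 * out = clip((E - 5F + 20G + 20H - 5I + J + 16) >> 5), where G and H are
 * the two integer samples straddling the half-sample position. */
static uint8_t halfpel_6tap(const uint8_t *p)   /* p points at sample E */
{
    int v = p[0] - 5 * p[1] + 20 * p[2] + 20 * p[3] - 5 * p[4] + p[5];
    v = (v + 16) >> 5;
    if (v < 0)   v = 0;                         /* same range as CLIP_RESULT */
    if (v > 255) v = 255;
    return (uint8_t)v;
}
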
-
-/* position G */
-void FullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int i, j;
- int offset_in = inpitch - blkwidth;
- int offset_out = outpitch - blkwidth;
- uint32 temp;
- uint8 byte;
-
- if (((uint32)in)&3)
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 4)
- {
- temp = *in++;
- byte = *in++;
- temp |= (byte << 8);
- byte = *in++;
- temp |= (byte << 16);
- byte = *in++;
- temp |= (byte << 24);
-
- *((uint32*)out) = temp; /* write 4 bytes */
- out += 4;
- }
- out += offset_out;
- in += offset_in;
- }
- }
- else
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 4)
- {
- temp = *((uint32*)in);
- *((uint32*)out) = temp;
- in += 4;
- out += 4;
- }
- out += offset_out;
- in += offset_in;
- }
- }
- return ;
-}
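
FullPelMC above is a plain block copy; the first branch assembles four bytes per word only because the input may not be word aligned while the output is written with 32-bit stores. A simpler equivalent (illustrative, relying on memcpy to handle alignment) would be:

#include <stdint.h>
#include <string.h>

/* Row-by-row copy equivalent to FullPelMC: each of blkheight rows copies
 * blkwidth bytes, then both pointers step by their respective pitches. */
static void fullpel_copy(const uint8_t *in, int inpitch,
                         uint8_t *out, int outpitch,
                         int blkwidth, int blkheight)
{
    for (int j = 0; j < blkheight; j++, in += inpitch, out += outpitch)
        memcpy(out, in, (size_t)blkwidth);
}
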
-
-void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight)
-{
- int dx, dy;
- int offset_dx, offset_dy;
- int index;
- uint8 temp[24][24];
-
- dx = x_pos & 7;
- dy = y_pos & 7;
- offset_dx = (dx + 7) >> 3;
- offset_dy = (dy + 7) >> 3;
- x_pos = x_pos >> 3; /* round it to full-pel resolution */
- y_pos = y_pos >> 3;
-
- if ((x_pos >= 0 && x_pos + blkwidth + offset_dx <= picwidth) && (y_pos >= 0 && y_pos + blkheight + offset_dy <= picheight))
- {
- ref += y_pos * picwidth + x_pos;
- }
- else
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos, &temp[0][0], blkwidth + offset_dx, blkheight + offset_dy);
- ref = &temp[0][0];
- picwidth = 24;
- }
-
- index = offset_dx + (offset_dy << 1) + ((blkwidth << 1) & 0x7);
-
- (*(ChromaMC_SIMD[index]))(ref, picwidth , dx, dy, pred, pred_pitch, blkwidth, blkheight);
- return ;
-}
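
The dispatch index above encodes three facts: offset_dx and offset_dy are 1 exactly when dx and dy are non-zero, and ((blkwidth << 1) & 0x7) is 4 only for blkwidth == 2, which presumably selects the width-2 (*_MC2) variants further down. A small stand-alone check of the index arithmetic follows; the helper name and the printed interpretation are illustrative, and the actual ChromaMC_SIMD table is defined elsewhere in the codec.

#include <stdio.h>

/* Reproduces the index computed in ChromaMotionComp for dx, dy in 0..7
 * and blkwidth in {2, 4, 8}. */
static int chroma_mc_index(int dx, int dy, int blkwidth)
{
    int offset_dx = (dx + 7) >> 3;                 /* 1 iff dx != 0 */
    int offset_dy = (dy + 7) >> 3;                 /* 1 iff dy != 0 */
    return offset_dx + (offset_dy << 1) + ((blkwidth << 1) & 0x7);
}

int main(void)
{
    /* width 8, dx=3, dy=0 -> 1 (horizontal-only motion);
       width 2, dx=3, dy=5 -> 7 (diagonal motion, narrow block) */
    printf("%d %d\n", chroma_mc_index(3, 0, 8), chroma_mc_index(3, 5, 2));
    return 0;
}
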
-
-
-/* SIMD-style routines: the loops are unrolled in the vertical direction to reduce the number of loop iterations */
-void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- int32 r0, r1, r2, r3, result0, result1;
- uint8 temp[288];
- uint8 *ref, *out;
- int i, j;
- int dx_8 = 8 - dx;
- int dy_8 = 8 - dy;
-
- /* horizontal first */
- out = temp;
- for (i = 0; i < blkheight + 1; i++)
- {
- ref = pRef;
- r0 = ref[0];
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 |= (ref[2] << 16);
- result0 = dx_8 * r0;
-
- r1 = ref[1] | (ref[3] << 16);
- result0 += dx * r1;
- *(int32 *)out = result0;
-
- result0 = dx_8 * r1;
-
- r2 = ref[4];
- r0 = r0 >> 16;
- r1 = r0 | (r2 << 16);
- result0 += dx * r1;
- *(int32 *)(out + 16) = result0;
-
- ref += 4;
- out += 4;
- r0 = r2;
- }
- pRef += srcPitch;
- out += (32 - blkwidth);
- }
-
-// pRef -= srcPitch*(blkheight+1);
- ref = temp;
-
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 = *(int32 *)ref;
- r1 = *(int32 *)(ref + 16);
- ref += 32;
- out = pOut;
- for (i = 0; i < (blkheight >> 1); i++)
- {
- result0 = dy_8 * r0 + 0x00200020;
- r2 = *(int32 *)ref;
- result0 += dy * r2;
- result0 >>= 6;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00200020;
- r3 = *(int32 *)(ref + 16);
- result1 += dy * r3;
- result1 >>= 6;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- out += predPitch;
- ref += 32;
-
- result0 = dy_8 * r0 + 0x00200020;
- r2 = *(int32 *)ref;
- result0 += dy * r2;
- result0 >>= 6;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00200020;
- r3 = *(int32 *)(ref + 16);
- result1 += dy * r3;
- result1 >>= 6;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- out += predPitch;
- ref += 32;
- }
- pOut += 4;
- ref = temp + 4; /* since it can only iterate twice max */
- }
- return;
-}
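
The two passes above are the separable form of the H.264 chroma eighth-sample interpolation, pred = ((8 - dx) * (8 - dy) * A + dx * (8 - dy) * B + (8 - dx) * dy * C + dx * dy * D + 32) >> 6, with the horizontal weights applied first and the +32 rounding (0x00200020 per halfword pair) folded into the vertical pass. A direct, unseparated scalar sketch is given for comparison; the function name is illustrative.

#include <stdint.h>

/* A and B are the two top reference samples, C and D the pair one row
 * below; dx, dy are the eighth-sample offsets in 0..7. No clipping is
 * needed because the weights sum to 64. */
static uint8_t chroma_bilinear(const uint8_t *ref, int pitch, int dx, int dy)
{
    int A = ref[0], B = ref[1], C = ref[pitch], D = ref[pitch + 1];
    return (uint8_t)(((8 - dx) * (8 - dy) * A + dx * (8 - dy) * B +
                      (8 - dx) * dy * C + dx * dy * D + 32) >> 6);
}
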
-
-void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dy);
- int32 r0, r1, r2, result0, result1;
- uint8 *ref, *out;
- int i, j;
- int dx_8 = 8 - dx;
-
- /* horizontal first */
- for (i = 0; i < blkheight; i++)
- {
- ref = pRef;
- out = pOut;
-
- r0 = ref[0];
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 |= (ref[2] << 16);
- result0 = dx_8 * r0 + 0x00040004;
-
- r1 = ref[1] | (ref[3] << 16);
- result0 += dx * r1;
- result0 >>= 3;
- result0 &= 0x00FF00FF;
-
- result1 = dx_8 * r1 + 0x00040004;
-
- r2 = ref[4];
- r0 = r0 >> 16;
- r1 = r0 | (r2 << 16);
- result1 += dx * r1;
- result1 >>= 3;
- result1 &= 0x00FF00FF;
-
- *(int32 *)out = result0 | (result1 << 8);
-
- ref += 4;
- out += 4;
- r0 = r2;
- }
-
- pRef += srcPitch;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- int32 r0, r1, r2, r3, result0, result1;
- int i, j;
- uint8 *ref, *out;
- int dy_8 = 8 - dy;
- /* vertical first */
- for (i = 0; i < blkwidth; i += 4)
- {
- ref = pRef;
- out = pOut;
-
- r0 = ref[0] | (ref[2] << 16);
- r1 = ref[1] | (ref[3] << 16);
- ref += srcPitch;
- for (j = 0; j < blkheight; j++)
- {
- result0 = dy_8 * r0 + 0x00040004;
- r2 = ref[0] | (ref[2] << 16);
- result0 += dy * r2;
- result0 >>= 3;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00040004;
- r3 = ref[1] | (ref[3] << 16);
- result1 += dy * r3;
- result1 >>= 3;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- ref += srcPitch;
- out += predPitch;
- }
- pOut += 4;
- pRef += 4;
- }
- return;
-}
-
-void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(blkwidth);
- int32 r0, r1, temp0, temp1, result;
- int32 temp[9];
- int32 *out;
- int i, r_temp;
- int dy_8 = 8 - dy;
-
- /* horizontal first */
- out = temp;
- for (i = 0; i < blkheight + 1; i++)
- {
- r_temp = pRef[1];
- temp0 = (pRef[0] << 3) + dx * (r_temp - pRef[0]);
- temp1 = (r_temp << 3) + dx * (pRef[2] - r_temp);
- r0 = temp0 | (temp1 << 16);
- *out++ = r0;
- pRef += srcPitch;
- }
-
- pRef -= srcPitch * (blkheight + 1);
-
- out = temp;
-
- r0 = *out++;
-
- for (i = 0; i < blkheight; i++)
- {
- result = dy_8 * r0 + 0x00200020;
- r1 = *out++;
- result += dy * r1;
- result >>= 6;
- result &= 0x00FF00FF;
- *(int16 *)pOut = (result >> 8) | (result & 0xFF);
- r0 = r1;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dy);
- OSCL_UNUSED_ARG(blkwidth);
- int i, temp, temp0, temp1;
-
- /* horizontal first */
- for (i = 0; i < blkheight; i++)
- {
- temp = pRef[1];
- temp0 = ((pRef[0] << 3) + dx * (temp - pRef[0]) + 4) >> 3;
- temp1 = ((temp << 3) + dx * (pRef[2] - temp) + 4) >> 3;
-
- *(int16 *)pOut = temp0 | (temp1 << 8);
- pRef += srcPitch;
- pOut += predPitch;
-
- }
- return;
-}
-void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- OSCL_UNUSED_ARG(blkwidth);
- int32 r0, r1, result;
- int i;
- int dy_8 = 8 - dy;
- r0 = pRef[0] | (pRef[1] << 16);
- pRef += srcPitch;
- for (i = 0; i < blkheight; i++)
- {
- result = dy_8 * r0 + 0x00040004;
- r1 = pRef[0] | (pRef[1] << 16);
- result += dy * r1;
- result >>= 3;
- result &= 0x00FF00FF;
- *(int16 *)pOut = (result >> 8) | (result & 0xFF);
- r0 = r1;
- pRef += srcPitch;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- OSCL_UNUSED_ARG(dy);
- int i, j;
- int offset_in = srcPitch - blkwidth;
- int offset_out = predPitch - blkwidth;
- uint16 temp;
- uint8 byte;
-
- if (((uint32)pRef)&1)
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 2)
- {
- temp = *pRef++;
- byte = *pRef++;
- temp |= (byte << 8);
- *((uint16*)pOut) = temp; /* write 2 bytes */
- pOut += 2;
- }
- pOut += offset_out;
- pRef += offset_in;
- }
- }
- else
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 2)
- {
- temp = *((uint16*)pRef);
- *((uint16*)pOut) = temp;
- pRef += 2;
- pOut += 2;
- }
- pOut += offset_out;
- pRef += offset_in;
- }
- }
- return ;
-}
diff --git a/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp b/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp
deleted file mode 100644
index 0b613a4..0000000
--- a/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp
+++ /dev/null
@@ -1,1786 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-
-#define CLIP_COMP *comp++ = (uint8)(((uint)temp>0xFF)? 0xFF&(~(temp>>31)): temp)
-#define CLIP_RESULT(x) if((uint)x > 0xFF){ \
- x = 0xFF & (~(x>>31));}
-
-
-/* We should combine the Intra4x4 functions with residual decoding and compensation */
-AVCStatus IntraMBPrediction(AVCCommonObj *video)
-{
- int component, SubBlock_indx, temp;
- AVCStatus status;
- AVCMacroblock *currMB = video->currMB;
- AVCPictureData *currPic = video->currPic;
- uint8 *curL, *curCb, *curCr;
- uint8 *comp;
- int block_x, block_y, offset;
- int16 *dataBlock = video->block;
- uint8 *predCb, *predCr;
-#ifdef USE_PRED_BLOCK
- uint8 *pred;
-#endif
- int pitch = currPic->pitch;
- uint32 cbp4x4 = video->cbp4x4;
-
- offset = (video->mb_y << 4) * pitch + (video->mb_x << 4);
- curL = currPic->Sl + offset;
-
-#ifdef USE_PRED_BLOCK
- video->pred_block = video->pred + 84; /* point to separate prediction memory */
- pred = video->pred_block;
- video->pred_pitch = 20;
-#else
- video->pred_block = curL; /* point directly to the frame buffer */
- video->pred_pitch = pitch;
-#endif
-
- if (currMB->mbMode == AVC_I4)
- {
- /* luminance first */
- block_x = block_y = 0;
- for (component = 0; component < 4; component++)
- {
- block_x = ((component & 1) << 1);
- block_y = ((component >> 1) << 1);
- comp = curL;// + (block_x<<2) + (block_y<<2)*currPic->pitch;
-
- for (SubBlock_indx = 0; SubBlock_indx < 4; SubBlock_indx++)
- {
- status = Intra_4x4(video, block_x, block_y, comp);
- if (status != AVC_SUCCESS)
- {
- return status;
- }
-                /* transform follows the 4x4 prediction; it cannot be done in
-                   parallel (SIMD) with the other blocks. */
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&(1 << ((block_y << 2) + block_x)))
- {
- itrans(dataBlock, pred, pred, 20);
- }
-#else
- if (cbp4x4&(1 << ((block_y << 2) + block_x)))
- {
- itrans(dataBlock, comp, comp, pitch);
- }
-#endif
- temp = SubBlock_indx & 1;
- if (temp)
- {
- block_y++;
- block_x--;
- dataBlock += 60;
-#ifdef USE_PRED_BLOCK
- pred += 76;
-#else
- comp += ((pitch << 2) - 4);
-#endif
- }
- else
- {
- block_x++;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- pred += 4;
-#else
- comp += 4;
-#endif
- }
- }
- if (component&1)
- {
-#ifdef USE_PRED_BLOCK
- pred -= 8;
-#else
- curL += (pitch << 3) - 8;
-#endif
- dataBlock -= 8;
- }
- else
- {
-#ifdef USE_PRED_BLOCK
- pred -= 152;
-#else
- curL += 8;
-#endif
- dataBlock -= 120;
- }
- }
- cbp4x4 >>= 16;
- }
- else /* AVC_I16 */
- {
-#ifdef MB_BASED_DEBLOCK
- video->pintra_pred_top = video->intra_pred_top + (video->mb_x << 4);
- video->pintra_pred_left = video->intra_pred_left + 1;
- video->intra_pred_topleft = video->intra_pred_left[0];
- pitch = 1;
-#else
- video->pintra_pred_top = curL - pitch;
- video->pintra_pred_left = curL - 1;
- if (video->mb_y)
- {
- video->intra_pred_topleft = *(curL - pitch - 1);
- }
-#endif
- switch (currMB->i16Mode)
- {
- case AVC_I16_Vertical: /* Intra_16x16_Vertical */
- /* check availability of top */
- if (video->intraAvailB)
- {
- Intra_16x16_Vertical(video);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_I16_Horizontal: /* Intra_16x16_Horizontal */
- /* check availability of left */
- if (video->intraAvailA)
- {
- Intra_16x16_Horizontal(video, pitch);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_I16_DC: /* Intra_16x16_DC */
- Intra_16x16_DC(video, pitch);
- break;
- case AVC_I16_Plane: /* Intra_16x16_Plane */
- if (video->intraAvailA && video->intraAvailB && video->intraAvailD)
- {
- Intra_16x16_Plane(video, pitch);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- default:
- break;
- }
-
- pitch = currPic->pitch;
-
- /* transform */
- /* can go in raster scan order now */
- /* can be done in SIMD, */
- for (block_y = 4; block_y > 0; block_y--)
- {
- for (block_x = 4; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- itrans(dataBlock, pred, pred, 20);
- }
-#else
- if (cbp4x4&1)
- {
- itrans(dataBlock, curL, curL, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- pred += 4;
-#else
- curL += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- pred += 64;
-#else
- curL += ((pitch << 2) - 16);
-#endif
- }
- }
-
- offset = (offset >> 2) + (video->mb_x << 2); //((video->mb_y << 3)* pitch + (video->mb_x << 3));
- curCb = currPic->Scb + offset;
- curCr = currPic->Scr + offset;
-
-#ifdef MB_BASED_DEBLOCK
- video->pintra_pred_top_cb = video->intra_pred_top_cb + (video->mb_x << 3);
- video->pintra_pred_left_cb = video->intra_pred_left_cb + 1;
- video->intra_pred_topleft_cb = video->intra_pred_left_cb[0];
- video->pintra_pred_top_cr = video->intra_pred_top_cr + (video->mb_x << 3);
- video->pintra_pred_left_cr = video->intra_pred_left_cr + 1;
- video->intra_pred_topleft_cr = video->intra_pred_left_cr[0];
- pitch = 1;
-#else
- pitch >>= 1;
- video->pintra_pred_top_cb = curCb - pitch;
- video->pintra_pred_left_cb = curCb - 1;
- video->pintra_pred_top_cr = curCr - pitch;
- video->pintra_pred_left_cr = curCr - 1;
-
- if (video->mb_y)
- {
- video->intra_pred_topleft_cb = *(curCb - pitch - 1);
- video->intra_pred_topleft_cr = *(curCr - pitch - 1);
- }
-#endif
-
-#ifdef USE_PRED_BLOCK
- predCb = video->pred + 452;
- predCr = predCb + 144;
- video->pred_pitch = 12;
-#else
- predCb = curCb;
- predCr = curCr;
- video->pred_pitch = currPic->pitch >> 1;
-#endif
- /* chrominance */
- switch (currMB->intra_chroma_pred_mode)
- {
- case AVC_IC_DC: /* Intra_Chroma_DC */
- Intra_Chroma_DC(video, pitch, predCb, predCr);
- break;
- case AVC_IC_Horizontal: /* Intra_Chroma_Horizontal */
- if (video->intraAvailA)
- {
- /* check availability of left */
- Intra_Chroma_Horizontal(video, pitch, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_IC_Vertical: /* Intra_Chroma_Vertical */
- if (video->intraAvailB)
- {
- /* check availability of top */
- Intra_Chroma_Vertical(video, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_IC_Plane: /* Intra_Chroma_Plane */
- if (video->intraAvailA && video->intraAvailB && video->intraAvailD)
- {
- /* check availability of top and left */
- Intra_Chroma_Plane(video, pitch, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- default:
- break;
- }
-
- /* transform, done in raster scan manner */
- pitch = currPic->pitch >> 1;
-
- for (block_y = 2; block_y > 0; block_y--)
- {
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCb, predCb, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCb, curCb, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCb += 4;
-#else
- curCb += 4;
-#endif
- }
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCr, predCr, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCr, curCr, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCr += 4;
-#else
- curCr += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predCb += 40;
- predCr += 40;
-#else
- curCb += ((pitch << 2) - 8);
- curCr += ((pitch << 2) - 8);
-#endif
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset);
-#endif
- return AVC_SUCCESS;
-}
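
As consumed above, cbp4x4 carries one coded-block flag per 4x4 transform block: bits 0..15 cover the sixteen luma blocks with bit index (block_y << 2) + block_x, and bits 16..23 cover the chroma blocks, two Cb followed by two Cr for each of the two chroma rows. The helper below only illustrates that layout; it is not part of the decoder.

#include <stdint.h>
#include <stdio.h>

static void dump_coded_blocks(uint32_t cbp4x4)
{
    for (int i = 0; i < 16; i++)                    /* luma, raster order */
        if (cbp4x4 & (1u << i))
            printf("luma block (x=%d, y=%d) has residual\n", i & 3, i >> 2);

    for (int i = 16; i < 24; i++) {                 /* chroma: Cb,Cb,Cr,Cr per row */
        if (cbp4x4 & (1u << i)) {
            int k = i - 16;
            printf("%s block %d, chroma row %d has residual\n",
                   ((k >> 1) & 1) ? "Cr" : "Cb", k & 1, k >> 2);
        }
    }
}

int main(void)
{
    dump_coded_blocks(0x00030001u);  /* luma (0,0) plus both Cb blocks of row 0 */
    return 0;
}
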
-
-#ifdef MB_BASED_DEBLOCK
-void SaveNeighborForIntraPred(AVCCommonObj *video, int offset)
-{
- AVCPictureData *currPic = video->currPic;
- int pitch;
- uint8 *pred, *predCb, *predCr;
- uint8 *tmp_ptr, tmp_byte;
- uint32 tmp_word;
- int mb_x = video->mb_x;
-
- /* save the value for intra prediction */
-#ifdef USE_PRED_BLOCK
- pitch = 20;
- pred = video->pred + 384; /* bottom line for Y */
- predCb = pred + 152; /* bottom line for Cb */
- predCr = predCb + 144; /* bottom line for Cr */
-#else
- pitch = currPic->pitch;
- tmp_word = offset + (pitch << 2) - (pitch >> 1);
- predCb = currPic->Scb + tmp_word;/* bottom line for Cb */
- predCr = currPic->Scr + tmp_word;/* bottom line for Cr */
-
- offset = (offset << 2) - (mb_x << 4);
- pred = currPic->Sl + offset + (pitch << 4) - pitch;/* bottom line for Y */
-
-#endif
-
- video->intra_pred_topleft = video->intra_pred_top[(mb_x<<4)+15];
- video->intra_pred_topleft_cb = video->intra_pred_top_cb[(mb_x<<3)+7];
- video->intra_pred_topleft_cr = video->intra_pred_top_cr[(mb_x<<3)+7];
-
- /* then copy to video->intra_pred_top, intra_pred_top_cb, intra_pred_top_cr */
- /*memcpy(video->intra_pred_top + (mb_x<<4), pred, 16);
- memcpy(video->intra_pred_top_cb + (mb_x<<3), predCb, 8);
- memcpy(video->intra_pred_top_cr + (mb_x<<3), predCr, 8);*/
- tmp_ptr = video->intra_pred_top + (mb_x << 4);
- *((uint32*)tmp_ptr) = *((uint32*)pred);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(pred + 4));
- *((uint32*)(tmp_ptr + 8)) = *((uint32*)(pred + 8));
- *((uint32*)(tmp_ptr + 12)) = *((uint32*)(pred + 12));
- tmp_ptr = video->intra_pred_top_cb + (mb_x << 3);
- *((uint32*)tmp_ptr) = *((uint32*)predCb);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(predCb + 4));
- tmp_ptr = video->intra_pred_top_cr + (mb_x << 3);
- *((uint32*)tmp_ptr) = *((uint32*)predCr);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(predCr + 4));
-
-
- /* now save last column */
-#ifdef USE_PRED_BLOCK
- pred = video->pred + 99; /* last column*/
-#else
- pred -= ((pitch << 4) - pitch - 15); /* last column */
-#endif
- tmp_ptr = video->intra_pred_left;
- tmp_word = video->intra_pred_topleft;
- tmp_byte = *(pred);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(pred += pitch);
-
- /* now for Cb */
-#ifdef USE_PRED_BLOCK
- predCb = video->pred + 459;
- pitch = 12;
-#else
- pitch >>= 1;
- predCb -= (7 * pitch - 7);
-#endif
- tmp_ptr = video->intra_pred_left_cb;
- tmp_word = video->intra_pred_topleft_cb;
- tmp_byte = *(predCb);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(predCb += pitch);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(predCb += pitch);
-
- /* now for Cr */
-#ifdef USE_PRED_BLOCK
- predCr = video->pred + 603;
-#else
- predCr -= (7 * pitch - 7);
-#endif
- tmp_ptr = video->intra_pred_left_cr;
- tmp_word = video->intra_pred_topleft_cr;
- tmp_byte = *(predCr);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(predCr += pitch);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(predCr += pitch);
-
- return ;
-}
-#endif /* MB_BASED_DEBLOCK */
-
-AVCStatus Intra_4x4(AVCCommonObj *video, int block_x, int block_y, uint8 *comp)
-{
- AVCMacroblock *currMB = video->currMB;
- int block_offset;
- AVCNeighborAvailability availability;
- int pitch = video->currPic->pitch;
-
-#ifdef USE_PRED_BLOCK
- block_offset = (block_y * 80) + (block_x << 2);
-#else
- block_offset = (block_y << 2) * pitch + (block_x << 2);
-#endif
-
-#ifdef MB_BASED_DEBLOCK
- /* boundary blocks use video->pred_intra_top, pred_intra_left, pred_intra_topleft */
- if (!block_x)
- {
- video->pintra_pred_left = video->intra_pred_left + 1 + (block_y << 2);
- pitch = 1;
- }
- else
- {
- video->pintra_pred_left = video->pred_block + block_offset - 1;
- pitch = video->pred_pitch;
- }
-
- if (!block_y)
- {
- video->pintra_pred_top = video->intra_pred_top + (block_x << 2) + (video->mb_x << 4);
- }
- else
- {
- video->pintra_pred_top = video->pred_block + block_offset - video->pred_pitch;
- }
-
- if (!block_x)
- {
- video->intra_pred_topleft = video->intra_pred_left[block_y<<2];
- }
- else if (!block_y)
- {
- video->intra_pred_topleft = video->intra_pred_top[(video->mb_x<<4)+(block_x<<2)-1];
- }
- else
- {
- video->intra_pred_topleft = video->pred_block[block_offset - video->pred_pitch - 1];
- }
-
-#else
- /* normal case */
- video->pintra_pred_top = comp - pitch;
- video->pintra_pred_left = comp - 1;
- if (video->mb_y || block_y)
- {
- video->intra_pred_topleft = *(comp - pitch - 1);
- }
-#endif
-
- switch (currMB->i4Mode[(block_y << 2) + block_x])
- {
- case AVC_I4_Vertical: /* Intra_4x4_Vertical */
- if (block_y > 0 || video->intraAvailB)/* to prevent out-of-bound access*/
- {
- Intra_4x4_Vertical(video, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Horizontal: /* Intra_4x4_Horizontal */
- if (block_x || video->intraAvailA) /* to prevent out-of-bound access */
- {
- Intra_4x4_Horizontal(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_DC: /* Intra_4x4_DC */
- availability.left = TRUE;
- availability.top = TRUE;
- if (!block_y)
- { /* check availability up */
- availability.top = video->intraAvailB ;
- }
- if (!block_x)
- { /* check availability left */
- availability.left = video->intraAvailA ;
- }
- Intra_4x4_DC(video, pitch, block_offset, &availability);
- break;
-
- case AVC_I4_Diagonal_Down_Left: /* Intra_4x4_Diagonal_Down_Left */
- /* lookup table will be more appropriate for this case */
- if (block_y == 0 && !video->intraAvailB)
- {
- return AVC_FAIL;
- }
-
- availability.top_right = BlkTopRight[(block_y<<2) + block_x];
-
- if (availability.top_right == 2)
- {
- availability.top_right = video->intraAvailB;
- }
- else if (availability.top_right == 3)
- {
- availability.top_right = video->intraAvailC;
- }
-
- Intra_4x4_Down_Left(video, block_offset, &availability);
- break;
-
- case AVC_I4_Diagonal_Down_Right: /* Intra_4x4_Diagonal_Down_Right */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Down_Right(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Vertical_Right: /* Intra_4x4_Vertical_Right */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Vertical_Right(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Horizontal_Down: /* Intra_4x4_Horizontal_Down */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Horizontal_Down(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Vertical_Left: /* Intra_4x4_Vertical_Left */
- /* lookup table may be more appropriate for this case */
- if (block_y == 0 && !video->intraAvailB)
- {
- return AVC_FAIL;
- }
-
- availability.top_right = BlkTopRight[(block_y<<2) + block_x];
-
- if (availability.top_right == 2)
- {
- availability.top_right = video->intraAvailB;
- }
- else if (availability.top_right == 3)
- {
- availability.top_right = video->intraAvailC;
- }
-
- Intra_4x4_Vertical_Left(video, block_offset, &availability);
- break;
-
- case AVC_I4_Horizontal_Up: /* Intra_4x4_Horizontal_Up */
- if (block_x || video->intraAvailA)
- {
- Intra_4x4_Horizontal_Up(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
-
- default:
-
- break;
- }
-
- return AVC_SUCCESS;
-}
-
-
-/* =============================== BEGIN 4x4
-MODES======================================*/
-void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset)
-{
- uint8 *comp_ref = video->pintra_pred_top;
- uint32 temp;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- /*P = (int) *comp_ref++;
- Q = (int) *comp_ref++;
- R = (int) *comp_ref++;
- S = (int) *comp_ref++;
- temp = S|(R<<8)|(Q<<16)|(P<<24);*/
- temp = *((uint32*)comp_ref);
-
- *((uint32*)pred) = temp; /* write 4 at a time */
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_ref = video->pintra_pred_left;
- uint32 temp;
- int P;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset,
- AVCNeighborAvailability *availability)
-{
- uint8 *comp_ref = video->pintra_pred_left;
- uint32 temp;
- int DC;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- if (availability->left)
- {
- DC = *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref = video->pintra_pred_top;
-
- if (availability->top)
- {
- DC = (comp_ref[0] + comp_ref[1] + comp_ref[2] + comp_ref[3] + DC + 4) >> 3;
- }
- else
- {
- DC = (DC + 2) >> 2;
-
- }
- }
- else if (availability->top)
- {
- comp_ref = video->pintra_pred_top;
- DC = (comp_ref[0] + comp_ref[1] + comp_ref[2] + comp_ref[3] + 2) >> 2;
-
- }
- else
- {
- DC = 128;
- }
-
- temp = DC | (DC << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
-
- return ;
-}
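
A quick numeric check of the DC rule above, assuming both neighbours are available: with left samples {10, 12, 14, 16} and top samples {8, 8, 8, 8}, DC = (52 + 32 + 4) >> 3 = 11, and all sixteen predicted samples take that value. The sample values are made up purely for illustration.

#include <stdio.h>

int main(void)
{
    int left[4] = {10, 12, 14, 16}, top[4] = {8, 8, 8, 8};
    int dc = 0;
    for (int i = 0; i < 4; i++)
        dc += left[i] + top[i];
    dc = (dc + 4) >> 3;            /* same rounding as Intra_4x4_DC */
    printf("DC = %d\n", dc);       /* prints DC = 11 */
    return 0;
}
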
-
-void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset,
- AVCNeighborAvailability *availability)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint32 temp;
- int r0, r1, r2, r3, r4, r5, r6, r7;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- r0 = *comp_refx++;
- r1 = *comp_refx++;
- r2 = *comp_refx++;
- r3 = *comp_refx++;
- if (availability->top_right)
- {
- r4 = *comp_refx++;
- r5 = *comp_refx++;
- r6 = *comp_refx++;
- r7 = *comp_refx++;
- }
- else
- {
- r4 = r3;
- r5 = r3;
- r6 = r3;
- r7 = r3;
- }
-
- r0 += (r1 << 1);
- r0 += r2;
- r0 += 2;
- r0 >>= 2;
- r1 += (r2 << 1);
- r1 += r3;
- r1 += 2;
- r1 >>= 2;
- r2 += (r3 << 1);
- r2 += r4;
- r2 += 2;
- r2 >>= 2;
- r3 += (r4 << 1);
- r3 += r5;
- r3 += 2;
- r3 >>= 2;
- r4 += (r5 << 1);
- r4 += r6;
- r4 += 2;
- r4 >>= 2;
- r5 += (r6 << 1);
- r5 += r7;
- r5 += 2;
- r5 >>= 2;
- r6 += (3 * r7);
- r6 += 2;
- r6 >>= 2;
-
- temp = r0 | (r1 << 8);
- temp |= (r2 << 16);
- temp |= (r3 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r4 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r5 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r6 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
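
The register juggling above is the usual three-tap [1 2 1]/4 smoothing applied along the top and top-right neighbours, with the final output using (r6 + 3*r7 + 2) >> 2 because the rightmost neighbour is replicated past the block edge. An equivalent helper (name illustrative):

/* (a + 2b + c + 2) >> 2 smoothing shared by the diagonal 4x4 modes above. */
static inline int smooth121(int a, int b, int c)
{
    return (a + 2 * b + c + 2) >> 2;
}
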
-
-void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int
- block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P_x, Q_x, R_x, P_y, Q_y, R_y, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- temp = *((uint32*)comp_refx); /* read 4 bytes */
- x0 = temp & 0xFF;
- x1 = (temp >> 8) & 0xFF;
- x2 = (temp >> 16) & 0xFF;
-
- Q_x = (x0 + 2 * x1 + x2 + 2) >> 2;
- R_x = (x1 + 2 * x2 + (temp >> 24) + 2) >> 2;
-
- x2 = video->intra_pred_topleft; /* re-use x2 instead of y0 */
- P_x = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* re-use x1 instead of y1 */
- D = (x0 + 2 * x2 + x1 + 2) >> 2;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* re-use x0 instead of y2 */
- P_y = (x2 + 2 * x1 + x0 + 2) >> 2;
-
- x2 = *comp_refy;
- comp_refy += pitch; /* re-use x2 instead of y3 */
- Q_y = (x1 + 2 * x0 + x2 + 2) >> 2;
-
- x1 = *comp_refy; /* re-use x1 instead of y4 */
- R_y = (x0 + 2 * x2 + x1 + 2) >> 2;
-
- /* we can pack these */
- temp = D | (P_x << 8); //[D P_x Q_x R_x]
- //[P_y D P_x Q_x]
- temp |= (Q_x << 16); //[Q_y P_y D P_x]
- temp |= (R_x << 24); //[R_y Q_y P_y D ]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = P_y | (D << 8);
- temp |= (P_x << 16);
- temp |= (Q_x << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q_y | (P_y << 8);
- temp |= (D << 16);
- temp |= (P_x << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = R_y | (Q_y << 8);
- temp |= (P_y << 16);
- temp |= (D << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P0, Q0, R0, S0, P1, Q1, R1, P2, Q2, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- Q0 = x0 + x1 + 1;
-
- x2 = *comp_refx++;
- R0 = x1 + x2 + 1;
-
- x1 = *comp_refx++; /* reuse x1 instead of x3 */
- S0 = x2 + x1 + 1;
-
- x1 = video->intra_pred_topleft; /* reuse x1 instead of y0 */
- P0 = x1 + x0 + 1;
-
- x2 = *comp_refy;
- comp_refy += pitch; /* reuse x2 instead of y1 */
- D = (x2 + 2 * x1 + x0 + 2) >> 2;
-
- P1 = (P0 + Q0) >> 2;
- Q1 = (Q0 + R0) >> 2;
- R1 = (R0 + S0) >> 2;
-
- P0 >>= 1;
- Q0 >>= 1;
- R0 >>= 1;
- S0 >>= 1;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* reuse x0 instead of y2 */
- P2 = (x1 + 2 * x2 + x0 + 2) >> 2;
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y3 */
- Q2 = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- temp = P0 | (Q0 << 8); //[P0 Q0 R0 S0]
- //[D P1 Q1 R1]
- temp |= (R0 << 16); //[P2 P0 Q0 R0]
- temp |= (S0 << 24); //[Q2 D P1 Q1]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = D | (P1 << 8);
- temp |= (Q1 << 16);
- temp |= (R1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = P2 | (P0 << 8);
- temp |= (Q0 << 16);
- temp |= (R0 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q2 | (D << 8);
- temp |= (P1 << 16);
- temp |= (Q1 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch,
- int block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P0, Q0, R0, S0, P1, Q1, R1, P2, Q2, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- x2 = *comp_refx++;
- Q2 = (x0 + 2 * x1 + x2 + 2) >> 2;
-
- x2 = video->intra_pred_topleft; /* reuse x2 instead of y0 */
- P2 = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y1 */
- D = (x1 + 2 * x2 + x0 + 2) >> 2;
- P0 = x2 + x1 + 1;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* reuse x0 instead of y2 */
- Q0 = x1 + x0 + 1;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y3 */
- R0 = x0 + x1 + 1;
-
- x2 = *comp_refy; /* reuse x2 instead of y4 */
- S0 = x1 + x2 + 1;
-
- P1 = (P0 + Q0) >> 2;
- Q1 = (Q0 + R0) >> 2;
- R1 = (R0 + S0) >> 2;
-
- P0 >>= 1;
- Q0 >>= 1;
- R0 >>= 1;
- S0 >>= 1;
-
-
- /* we can pack these */
- temp = P0 | (D << 8); //[P0 D P2 Q2]
- //[Q0 P1 P0 D ]
- temp |= (P2 << 16); //[R0 Q1 Q0 P1]
- temp |= (Q2 << 24); //[S0 R1 R0 Q1]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q0 | (P1 << 8);
- temp |= (P0 << 16);
- temp |= (D << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = R0 | (Q1 << 8);
- temp |= (Q0 << 16);
- temp |= (P1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = S0 | (R1 << 8);
- temp |= (R0 << 16);
- temp |= (Q1 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint32 temp1, temp2;
- int x0, x1, x2, x3, x4, x5, x6;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- x2 = *comp_refx++;
- x3 = *comp_refx++;
- if (availability->top_right)
- {
- x4 = *comp_refx++;
- x5 = *comp_refx++;
- x6 = *comp_refx++;
- }
- else
- {
- x4 = x3;
- x5 = x3;
- x6 = x3;
- }
-
- x0 += x1 + 1;
- x1 += x2 + 1;
- x2 += x3 + 1;
- x3 += x4 + 1;
- x4 += x5 + 1;
- x5 += x6 + 1;
-
- temp1 = (x0 >> 1);
- temp1 |= ((x1 >> 1) << 8);
- temp1 |= ((x2 >> 1) << 16);
- temp1 |= ((x3 >> 1) << 24);
-
- *((uint32*)pred) = temp1;
- pred += pred_pitch;
-
- temp2 = ((x0 + x1) >> 2);
- temp2 |= (((x1 + x2) >> 2) << 8);
- temp2 |= (((x2 + x3) >> 2) << 16);
- temp2 |= (((x3 + x4) >> 2) << 24);
-
- *((uint32*)pred) = temp2;
- pred += pred_pitch;
-
- temp1 = (temp1 >> 8) | ((x4 >> 1) << 24); /* rotate out old value */
- *((uint32*)pred) = temp1;
- pred += pred_pitch;
-
- temp2 = (temp2 >> 8) | (((x4 + x5) >> 2) << 24); /* rotate out old value */
- *((uint32*)pred) = temp2;
- pred += pred_pitch;
-
- return ;
-}
-
-void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int Q0, R0, Q1, D0, D1, P0, P1;
- int y0, y1, y2, y3;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- y0 = *comp_refy;
- comp_refy += pitch;
- y1 = *comp_refy;
- comp_refy += pitch;
- y2 = *comp_refy;
- comp_refy += pitch;
- y3 = *comp_refy;
-
- Q0 = (y1 + y2 + 1) >> 1;
- Q1 = (y1 + (y2 << 1) + y3 + 2) >> 2;
- P0 = ((y0 + y1 + 1) >> 1);
- P1 = ((y0 + (y1 << 1) + y2 + 2) >> 2);
-
- temp = P0 | (P1 << 8); // [P0 P1 Q0 Q1]
-    temp |= (Q0 << 16);      // [Q0 Q1 R0 D0]
- temp |= (Q1 << 24); // [R0 D0 D1 D1]
- *((uint32*)pred) = temp; // [D1 D1 D1 D1]
- pred += pred_pitch;
-
- D0 = (y2 + 3 * y3 + 2) >> 2;
- R0 = (y2 + y3 + 1) >> 1;
-
- temp = Q0 | (Q1 << 8);
- temp |= (R0 << 16);
- temp |= (D0 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- D1 = y3;
-
- temp = R0 | (D0 << 8);
- temp |= (D1 << 16);
- temp |= (D1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = D1 | (D1 << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
-
- return ;
-}
-/* =============================== END 4x4 MODES======================================*/
-void Intra_16x16_Vertical(AVCCommonObj *video)
-{
- int i;
- uint32 temp1, temp2, temp3, temp4;
- uint8 *comp_ref = video->pintra_pred_top;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- temp1 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp2 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp3 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp4 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- i = 16;
- while (i > 0)
- {
- *((uint32*)pred) = temp1;
- *((uint32*)(pred + 4)) = temp2;
- *((uint32*)(pred + 8)) = temp3;
- *((uint32*)(pred + 12)) = temp4;
- pred += pred_pitch;
- i--;
- }
-
- return ;
-}
-
-void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch)
-{
- int i;
- uint32 temp;
- uint8 *comp_ref = video->pintra_pred_left;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- for (i = 0; i < 16; i++)
- {
- temp = *comp_ref;
- temp |= (temp << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- *((uint32*)(pred + 8)) = temp;
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- }
-}
-
-
-void Intra_16x16_DC(AVCCommonObj *video, int pitch)
-{
- int i;
- uint32 temp, temp2;
- uint8 *comp_ref_x = video->pintra_pred_top;
- uint8 *comp_ref_y = video->pintra_pred_left;
- int sum = 0;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- if (video->intraAvailB)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum = temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- sum &= 0xFFFF;
-
- if (video->intraAvailA)
- {
- for (i = 0; i < 16; i++)
- {
- sum += (*comp_ref_y);
- comp_ref_y += pitch;
- }
- sum = (sum + 16) >> 5;
- }
- else
- {
- sum = (sum + 8) >> 4;
- }
- }
- else if (video->intraAvailA)
- {
- for (i = 0; i < 16; i++)
- {
- sum += *comp_ref_y;
- comp_ref_y += pitch;
- }
- sum = (sum + 8) >> 4;
- }
- else
- {
- sum = 128;
- }
-
- temp = sum | (sum << 8);
- temp |= (temp << 16);
-
- for (i = 0; i < 16; i++)
- {
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- *((uint32*)(pred + 8)) = temp;
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- }
-
-}
-
-void Intra_16x16_Plane(AVCCommonObj *video, int pitch)
-{
- int i, a_16, b, c, factor_c;
- uint8 *comp_ref_x = video->pintra_pred_top;
- uint8 *comp_ref_y = video->pintra_pred_left;
- uint8 *comp_ref_x0, *comp_ref_x1, *comp_ref_y0, *comp_ref_y1;
- int H = 0, V = 0 , tmp;
- uint8 *pred = video->pred_block;
- uint32 temp;
- uint8 byte1, byte2, byte3;
- int value;
- int pred_pitch = video->pred_pitch;
-
- comp_ref_x0 = comp_ref_x + 8;
- comp_ref_x1 = comp_ref_x + 6;
- comp_ref_y0 = comp_ref_y + (pitch << 3);
- comp_ref_y1 = comp_ref_y + 6 * pitch;
-
- for (i = 1; i < 8; i++)
- {
- H += i * (*comp_ref_x0++ - *comp_ref_x1--);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
- comp_ref_y0 += pitch;
- comp_ref_y1 -= pitch;
- }
-
- H += i * (*comp_ref_x0++ - video->intra_pred_topleft);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
-
-
-    a_16 = ((*(comp_ref_x + 15) + *(comp_ref_y + 15 * pitch)) << 4) + 16;
- b = (5 * H + 32) >> 6;
- c = (5 * V + 32) >> 6;
-
- tmp = 0;
-
- for (i = 0; i < 16; i++)
- {
- factor_c = a_16 + c * (tmp++ - 7);
-
- factor_c -= 7 * b;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)pred) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 4)) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 8)) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- }
-}
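
The unrolled loop above evaluates the Intra_16x16 plane predictor pred(x, y) = Clip1((a + b*(x - 7) + c*(y - 7) + 16) >> 5), with a = 16*(top[15] + left[15]), b = (5*H + 32) >> 6 and c = (5*V + 32) >> 6, stepping factor_c by b across each row instead of multiplying per sample. A plain scalar sketch of the same predictor follows; the interface is illustrative, and a16 carries the +16 rounding exactly as a_16 does above.

#include <stdint.h>

static void intra16x16_plane_ref(uint8_t *pred, int pitch,
                                 int a16, int b, int c)
{
    for (int y = 0; y < 16; y++) {
        for (int x = 0; x < 16; x++) {
            int v = (a16 + b * (x - 7) + c * (y - 7)) >> 5;
            if (v < 0)   v = 0;              /* equivalent to CLIP_RESULT */
            if (v > 255) v = 255;
            pred[y * pitch + x] = (uint8_t)v;
        }
    }
}
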
-
-/************** Chroma intra prediction *********************/
-
-void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- uint32 temp, temp2, pred_a, pred_b;
- uint8 *comp_ref_x, *comp_ref_y;
- uint8 *comp_ref_cb_x = video->pintra_pred_top_cb;
- uint8 *comp_ref_cb_y = video->pintra_pred_left_cb;
- uint8 *comp_ref_cr_x = video->pintra_pred_top_cr;
- uint8 *comp_ref_cr_y = video->pintra_pred_left_cr;
- int component, j;
- int sum_x0, sum_x1, sum_y0, sum_y1;
- int pred_0[2], pred_1[2], pred_2[2], pred_3[2];
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- if (video->intraAvailB & video->intraAvailA)
- {
- comp_ref_x = comp_ref_cb_x;
- comp_ref_y = comp_ref_cb_y;
- for (i = 0; i < 2; i++)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x0 = temp & 0xFFFF;
-
- temp = *((uint32*)comp_ref_x);
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x1 = temp & 0xFFFF;
-
- pred_1[i] = (sum_x1 + 2) >> 2;
-
- sum_y0 = *comp_ref_y;
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
-
- sum_y1 = *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
-
- pred_2[i] = (sum_y1 + 2) >> 2;
-
- pred_0[i] = (sum_y0 + sum_x0 + 4) >> 3;
- pred_3[i] = (sum_y1 + sum_x1 + 4) >> 3;
-
- comp_ref_x = comp_ref_cr_x;
- comp_ref_y = comp_ref_cr_y;
- }
- }
-
- else if (video->intraAvailA)
- {
- comp_ref_y = comp_ref_cb_y;
- for (i = 0; i < 2; i++)
- {
- sum_y0 = *comp_ref_y;
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
-
- sum_y1 = *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
-
- pred_0[i] = pred_1[i] = (sum_y0 + 2) >> 2;
- pred_2[i] = pred_3[i] = (sum_y1 + 2) >> 2;
- comp_ref_y = comp_ref_cr_y;
- }
- }
- else if (video->intraAvailB)
- {
- comp_ref_x = comp_ref_cb_x;
- for (i = 0; i < 2; i++)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x0 = temp & 0xFFFF;
-
- temp = *((uint32*)comp_ref_x);
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x1 = temp & 0xFFFF;
-
- pred_0[i] = pred_2[i] = (sum_x0 + 2) >> 2;
- pred_1[i] = pred_3[i] = (sum_x1 + 2) >> 2;
- comp_ref_x = comp_ref_cr_x;
- }
- }
- else
- {
- pred_0[0] = pred_0[1] = pred_1[0] = pred_1[1] =
- pred_2[0] = pred_2[1] = pred_3[0] = pred_3[1] = 128;
- }
-
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- pred_a = pred_0[component];
- pred_b = pred_1[component];
- pred_a |= (pred_a << 8);
- pred_a |= (pred_a << 16);
- pred_b |= (pred_b << 8);
- pred_b |= (pred_b << 16);
-
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++) /* 4 lines */
- {
- *((uint32*)pred) = pred_a;
- *((uint32*)(pred + 4)) = pred_b;
- pred += pred_pitch; /* move to the next line */
- }
- pred_a = pred_2[component];
- pred_b = pred_3[component];
- pred_a |= (pred_a << 8);
- pred_a |= (pred_a << 16);
- pred_b |= (pred_b << 8);
- pred_b |= (pred_b << 16);
- }
- pred = predCr; /* point to cr */
- }
-}
-
-void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- uint32 temp;
- uint8 *comp_ref_cb_y = video->pintra_pred_left_cb;
- uint8 *comp_ref_cr_y = video->pintra_pred_left_cr;
- uint8 *comp;
- int component, j;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp = comp_ref_cb_y;
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++)
- {
- temp = *comp;
- comp += pitch;
- temp |= (temp << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- pred += pred_pitch;
- }
- }
- comp = comp_ref_cr_y;
- pred = predCr; /* point to cr */
- }
-
-}
-
-void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *predCr)
-{
- uint32 temp1, temp2;
- uint8 *comp_ref_cb_x = video->pintra_pred_top_cb;
- uint8 *comp_ref_cr_x = video->pintra_pred_top_cr;
- uint8 *comp_ref;
- int component, j;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp_ref = comp_ref_cb_x;
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- temp1 = *((uint32*)comp_ref);
- temp2 = *((uint32*)(comp_ref + 4));
- for (j = 0; j < 8; j++)
- {
- *((uint32*)pred) = temp1;
- *((uint32*)(pred + 4)) = temp2;
- pred += pred_pitch;
- }
- comp_ref = comp_ref_cr_x;
- pred = predCr; /* point to cr */
- }
-
-}
-
-void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- int a_16_C[2], b_C[2], c_C[2], a_16, b, c, factor_c;
- uint8 *comp_ref_x, *comp_ref_y, *comp_ref_x0, *comp_ref_x1, *comp_ref_y0, *comp_ref_y1;
- int component, j;
- int H, V, tmp;
- uint32 temp;
- uint8 byte1, byte2, byte3;
- int value;
- uint8 topleft;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp_ref_x = video->pintra_pred_top_cb;
- comp_ref_y = video->pintra_pred_left_cb;
- topleft = video->intra_pred_topleft_cb;
-
- for (component = 0; component < 2; component++)
- {
- H = V = 0;
- comp_ref_x0 = comp_ref_x + 4;
- comp_ref_x1 = comp_ref_x + 2;
- comp_ref_y0 = comp_ref_y + (pitch << 2);
- comp_ref_y1 = comp_ref_y + (pitch << 1);
- for (i = 1; i < 4; i++)
- {
- H += i * (*comp_ref_x0++ - *comp_ref_x1--);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
- comp_ref_y0 += pitch;
- comp_ref_y1 -= pitch;
- }
- H += i * (*comp_ref_x0++ - topleft);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
-
- a_16_C[component] = ((*(comp_ref_x + 7) + *(comp_ref_y + 7 * pitch)) << 4) + 16;
- b_C[component] = (17 * H + 16) >> 5;
- c_C[component] = (17 * V + 16) >> 5;
-
- comp_ref_x = video->pintra_pred_top_cr;
- comp_ref_y = video->pintra_pred_left_cr;
- topleft = video->intra_pred_topleft_cr;
- }
-
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- a_16 = a_16_C[component];
- b = b_C[component];
- c = c_C[component];
- tmp = 0;
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++)
- {
- factor_c = a_16 + c * (tmp++ - 3);
-
- factor_c -= 3 * b;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)pred) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 4)) = temp;
- pred += pred_pitch;
- }
- }
- pred = predCr; /* point to cr */
- }
-}
-
diff --git a/media/libstagefright/codecs/avc/dec/src/residual.cpp b/media/libstagefright/codecs/avc/dec/src/residual.cpp
deleted file mode 100644
index c68550d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/residual.cpp
+++ /dev/null
@@ -1,523 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#include <string.h>
-
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCDec_Status status;
- int j;
- int mb_x, mb_y, offset1;
- uint8 *pDst;
- uint32 byte0, byte1;
- int pitch;
-
- mb_x = video->mb_x;
- mb_y = video->mb_y;
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 84;
- pitch = 20;
-#else
- offset1 = (mb_x << 4) + (mb_y << 4) * video->PicWidthInSamplesL;
- pDst = video->currPic->Sl + offset1;
- pitch = video->currPic->pitch;
-#endif
-
- /* at this point bitstream is byte-aligned */
- j = 16;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 8)) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 12)) = byte0;
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 452;
- pitch = 12;
-#else
- offset1 = (offset1 >> 2) + (mb_x << 2);
- pDst = video->currPic->Scb + offset1;
- pitch >>= 1;
-#endif
-
- j = 8;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 596;
- pitch = 12;
-#else
- pDst = video->currPic->Scr + offset1;
-#endif
- j = 8;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset1);
-#endif
-
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* see subclause 7.3.5.3 and readCBPandCoeffsFromNAL() in JM*/
-AVCDec_Status residual(AVCDecObject *decvid, AVCMacroblock *currMB)
-{
- AVCCommonObj *video = decvid->common;
- int16 *block;
- int level[16], run[16], numcoeff; /* output from residual_block_cavlc */
- int block_x, i, j, k, idx, iCbCr;
- int mbPartIdx, subMbPartIdx, mbPartIdx_X, mbPartIdx_Y;
- int nC, maxNumCoeff = 16;
- int coeffNum, start_scan = 0;
- uint8 *zz_scan;
- int Rq, Qq;
- uint32 cbp4x4 = 0;
-
- /* in 8.5.4, it only says if it's field macroblock. */
-
- zz_scan = (uint8*) ZZ_SCAN_BLOCK;
-
-
- /* see 8.5.8 for the initialization of these values */
- Qq = video->QPy_div_6;
- Rq = video->QPy_mod_6;
-
- memset(video->block, 0, sizeof(int16)*NUM_PIXELS_IN_MB);
-
- if (currMB->mbMode == AVC_I16)
- {
- nC = predict_nnz(video, 0, 0);
- decvid->residual_block(decvid, nC, 16, level, run, &numcoeff);
- /* then performs zigzag and transform */
- block = video->block;
- coeffNum = -1;
- for (i = numcoeff - 1; i >= 0; i--)
- {
- coeffNum += run[i] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum] << 2;
- /* idx = ((idx>>2)<<6) + ((idx&3)<<2); */
- block[idx] = level[i];
- }
-
- /* inverse transform on Intra16x16DCLevel */
- if (numcoeff)
- {
- Intra16DCTrans(block, Qq, Rq);
- cbp4x4 = 0xFFFF;
- }
- maxNumCoeff = 15;
- start_scan = 1;
- }
-
- memset(currMB->nz_coeff, 0, sizeof(uint8)*24);
-
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- mbPartIdx_X = (mbPartIdx & 1) << 1;
- mbPartIdx_Y = mbPartIdx & -2;
-
- if (currMB->CBP&(1 << mbPartIdx))
- {
- for (subMbPartIdx = 0; subMbPartIdx < 4; subMbPartIdx++)
- {
- i = mbPartIdx_X + (subMbPartIdx & 1); // check this
- j = mbPartIdx_Y + (subMbPartIdx >> 1);
- block = video->block + (j << 6) + (i << 2); //
- nC = predict_nnz(video, i, j);
- decvid->residual_block(decvid, nC, maxNumCoeff, level, run, &numcoeff);
-
- /* convert to raster scan and quantize*/
- /* Note: for P mb in SP slice and SI mb in SI slice,
- the quantization cannot be done here.
- block[idx] should be assigned with level[k].
- itrans will be done after the prediction.
- There will be transformation on the predicted value,
- then addition with block[idx], then this quantization
- and transform.*/
-
- coeffNum = -1 + start_scan;
- for (k = numcoeff - 1; k >= 0; k--)
- {
- coeffNum += run[k] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum];
- block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq ;
- }
-
- currMB->nz_coeff[(j<<2)+i] = numcoeff;
- if (numcoeff)
- {
- cbp4x4 |= (1 << ((j << 2) + i));
- }
- }
- }
- }
-
- Qq = video->QPc_div_6;
- Rq = video->QPc_mod_6;
-
- if (currMB->CBP & (3 << 4)) /* chroma DC residual present */
- {
- for (iCbCr = 0; iCbCr < 2; iCbCr++)
- {
- decvid->residual_block(decvid, -1, 4, level, run, &numcoeff);
- block = video->block + 256 + (iCbCr << 3);
- coeffNum = -1;
- for (i = numcoeff - 1; i >= 0; i--)
- {
- coeffNum += run[i] + 1;
- if (coeffNum > 3)
- {
- return AVCDEC_FAIL;
- }
- block[(coeffNum>>1)*64 + (coeffNum&1)*4] = level[i];
- }
- /* inverse transform on chroma DC */
- /* for P in SP and SI in SI, this function can't be done here,
- must do prediction transform/quant first. */
- if (numcoeff)
- {
- ChromaDCTrans(block, Qq, Rq);
- cbp4x4 |= (iCbCr ? 0xcc0000 : 0x330000);
- }
- }
- }
-
- if (currMB->CBP & (2 << 4))
- {
- for (block_x = 0; block_x < 4; block_x += 2) /* for iCbCr */
- {
- for (j = 4; j < 6; j++) /* for each block inside Cb or Cr */
- {
- for (i = block_x; i < block_x + 2; i++)
- {
-
- block = video->block + (j << 6) + (i << 2);
-
- nC = predict_nnz_chroma(video, i, j);
- decvid->residual_block(decvid, nC, 15, level, run, &numcoeff);
-
- /* convert to raster scan and quantize */
- /* for P MB in SP slice and SI MB in SI slice,
- the dequant and transform cannot be done here.
- It needs the prediction values. */
- coeffNum = 0;
- for (k = numcoeff - 1; k >= 0; k--)
- {
- coeffNum += run[k] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum];
- block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq;
- }
-
-
- /* then transform */
- // itrans(block); /* transform */
- currMB->nz_coeff[(j<<2)+i] = numcoeff; //
- if (numcoeff)
- {
- cbp4x4 |= (1 << ((j << 2) + i));
- }
- }
-
- }
- }
- }
-
- video->cbp4x4 = cbp4x4;
-
- return AVCDEC_SUCCESS;
-}
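/* A self-contained sketch of the inverse-scan/dequant step performed in the luma
   and chroma AC loops of residual() above: each (level, run) pair from CAVLC is
   placed back-to-front along the 4x4 zig-zag scan and scaled by the table value
   for its position, shifted left by QP/6.  The flat 16-entry block and the helper
   name are assumptions of this sketch, not the decoder's actual layout. */
#include <stdint.h>

static const uint8_t kZigZag4x4[16] =
    { 0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15 };

static int ExpandAndDequant4x4(const int *level, const int *run, int numcoeff,
                               const int *scale, /* dequant row for QP%6, 16 entries */
                               int Qq,           /* QP/6 */
                               int16_t block[16])
{
    int coeffNum = -1;
    for (int k = numcoeff - 1; k >= 0; k--)
    {
        coeffNum += run[k] + 1;   /* skip run[k] zeros, then place level[k] */
        if (coeffNum > 15)
        {
            return 0;             /* corrupted stream, mirrors the AVCDEC_FAIL path */
        }
        block[kZigZag4x4[coeffNum]] = (int16_t)((level[k] * scale[coeffNum]) << Qq);
    }
    return 1;
}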
-
-/* see subclause 7.3.5.3.1 and 9.2 and readCoeff4x4_CAVLC() in JM */
-AVCDec_Status residual_block_cavlc(AVCDecObject *decvid, int nC, int maxNumCoeff,
- int *level, int *run, int *numcoeff)
-{
- int i, j;
- int TrailingOnes, TotalCoeff;
- AVCDecBitstream *stream = decvid->bitstream;
- int suffixLength;
- uint trailing_ones_sign_flag, level_prefix, level_suffix;
- int levelCode, levelSuffixSize, zerosLeft;
- int run_before;
-
-
- if (nC >= 0)
- {
- ce_TotalCoeffTrailingOnes(stream, &TrailingOnes, &TotalCoeff, nC);
- }
- else
- {
- ce_TotalCoeffTrailingOnesChromaDC(stream, &TrailingOnes, &TotalCoeff);
- }
-
- *numcoeff = TotalCoeff;
-
- /* This part is done quite differently in ReadCoef4x4_CAVLC() */
- if (TotalCoeff == 0)
- {
- return AVCDEC_SUCCESS;
- }
-
- if (TrailingOnes) /* keep reading the sign of those trailing ones */
- {
- /* instead of reading one bit at a time, read the whole thing at once */
- BitstreamReadBits(stream, TrailingOnes, &trailing_ones_sign_flag);
- trailing_ones_sign_flag <<= 1;
- for (i = 0; i < TrailingOnes; i++)
- {
- level[i] = 1 - ((trailing_ones_sign_flag >> (TrailingOnes - i - 1)) & 2);
- }
- }
-
- i = TrailingOnes;
- suffixLength = 1;
- if (TotalCoeff > TrailingOnes)
- {
- ce_LevelPrefix(stream, &level_prefix);
- if (TotalCoeff < 11 || TrailingOnes == 3)
- {
- if (level_prefix < 14)
- {
-// levelSuffixSize = 0;
- levelCode = level_prefix;
- }
- else if (level_prefix == 14)
- {
-// levelSuffixSize = 4;
- BitstreamReadBits(stream, 4, &level_suffix);
- levelCode = 14 + level_suffix;
- }
- else /* if (level_prefix == 15) */
- {
-// levelSuffixSize = 12;
- BitstreamReadBits(stream, 12, &level_suffix);
- levelCode = 30 + level_suffix;
- }
- }
- else
- {
- /* suffixLength = 1; */
- if (level_prefix < 15)
- {
- levelSuffixSize = suffixLength;
- }
- else
- {
- levelSuffixSize = 12;
- }
- BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
-
- levelCode = (level_prefix << 1) + level_suffix;
- }
-
- if (TrailingOnes < 3)
- {
- levelCode += 2;
- }
-
- level[i] = (levelCode + 2) >> 1;
- if (level[i] > 3)
- {
- suffixLength = 2;
- }
-
- if (levelCode & 1)
- {
- level[i] = -level[i];
- }
- i++;
-
- }
-
- for (j = TotalCoeff - i; j > 0 ; j--)
- {
- ce_LevelPrefix(stream, &level_prefix);
- if (level_prefix < 15)
- {
- levelSuffixSize = suffixLength;
- }
- else
- {
- levelSuffixSize = 12;
- }
- BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
-
- levelCode = (level_prefix << suffixLength) + level_suffix;
- level[i] = (levelCode >> 1) + 1;
- if (level[i] > (3 << (suffixLength - 1)) && suffixLength < 6)
- {
- suffixLength++;
- }
- if (levelCode & 1)
- {
- level[i] = -level[i];
- }
- i++;
- }
-
-
- if (TotalCoeff < maxNumCoeff)
- {
- if (nC >= 0)
- {
- ce_TotalZeros(stream, &zerosLeft, TotalCoeff);
- }
- else
- {
- ce_TotalZerosChromaDC(stream, &zerosLeft, TotalCoeff);
- }
- }
- else
- {
- zerosLeft = 0;
- }
-
- for (i = 0; i < TotalCoeff - 1; i++)
- {
- if (zerosLeft > 0)
- {
- ce_RunBefore(stream, &run_before, zerosLeft);
- run[i] = run_before;
- }
- else
- {
- run[i] = 0;
- zerosLeft = 0; // could be negative under error conditions
- }
-
- zerosLeft = zerosLeft - run[i];
- }
-
- if (zerosLeft < 0)
- {
- zerosLeft = 0;
-// return AVCDEC_FAIL;
- }
-
- run[TotalCoeff-1] = zerosLeft;
-
- /* leave the inverse zigzag scan part for the caller */
-
-
- return AVCDEC_SUCCESS;
-}
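In residual_block_cavlc() above, the sign bits of the trailing one coefficients are fetched with a single BitstreamReadBits() call, and 1 - ((flag >> (TrailingOnes - i - 1)) & 2) (after the one-bit left shift) converts each bit to +1 or -1 without branching. The unrolled equivalent, written out as a hypothetical helper:

#include <stdint.h>

/* signBits holds TrailingOnes sign bits exactly as read, most-significant first:
   a 1 bit marks a coefficient of -1, a 0 bit marks +1. */
static void DecodeTrailingOneSigns(uint32_t signBits, int trailingOnes, int *level)
{
    for (int i = 0; i < trailingOnes; i++)
    {
        level[i] = ((signBits >> (trailingOnes - 1 - i)) & 1) ? -1 : 1;
    }
}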
diff --git a/media/libstagefright/codecs/avc/dec/src/slice.cpp b/media/libstagefright/codecs/avc/dec/src/slice.cpp
deleted file mode 100644
index 7a2ef3d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/slice.cpp
+++ /dev/null
@@ -1,772 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/* Note for optimization: syntax decoding or operations related to B_SLICE should be
-commented out by macro definition or function pointers. */
-
-#include <string.h>
-
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-const static int mbPart2raster[3][4] = {{0, 0, 0, 0}, {1, 1, 0, 0}, {1, 0, 1, 0}};
-/* decode_frame_slice() */
-/* decode_one_slice() */
-AVCDec_Status DecodeSlice(AVCDecObject *decvid)
-{
- AVCDec_Status status;
- AVCCommonObj *video = decvid->common;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- AVCMacroblock *currMB ;
- AVCDecBitstream *stream = decvid->bitstream;
- uint slice_group_id;
- uint CurrMbAddr, moreDataFlag;
-
- /* set the first mb in slice */
- CurrMbAddr = sliceHdr->first_mb_in_slice;
- slice_group_id = video->MbToSliceGroupMap[CurrMbAddr];
-
- if ((CurrMbAddr && (CurrMbAddr != (uint)(video->mbNum + 1))) && video->currSeqParams->constrained_set1_flag == 1)
- {
- ConcealSlice(decvid, video->mbNum, CurrMbAddr);
- }
-
- moreDataFlag = 1;
- video->mb_skip_run = -1;
-
-
- /* while loop , see subclause 7.3.4 */
- do
- {
- if (CurrMbAddr >= video->PicSizeInMbs)
- {
- return AVCDEC_FAIL;
- }
-
- currMB = video->currMB = &(video->mblock[CurrMbAddr]);
- video->mbNum = CurrMbAddr;
- currMB->slice_id = video->slice_id; // slice
-
- /* we can remove this check if we don't support Mbaff. */
- /* we can wrap below into an initMB() function which will also
- do necessary reset of macroblock related parameters. */
-
- video->mb_x = CurrMbAddr % video->PicWidthInMbs;
- video->mb_y = CurrMbAddr / video->PicWidthInMbs;
-
- /* check the availability of neighboring macroblocks */
- InitNeighborAvailability(video, CurrMbAddr);
-
- /* read_macroblock and decode_one_macroblock() */
- status = DecodeMB(decvid);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-#ifdef MB_BASED_DEBLOCK
- if (video->currPicParams->num_slice_groups_minus1 == 0)
- {
- MBInLoopDeblock(video); /* MB-based deblocking */
- }
- else /* this mode cannot be used if the number of slice group is not one. */
- {
- return AVCDEC_FAIL;
- }
-#endif
- video->numMBs--;
-
- moreDataFlag = more_rbsp_data(stream);
-
-
- /* go to next MB */
- while (++CurrMbAddr < video->PicSizeInMbs && video->MbToSliceGroupMap[CurrMbAddr] != (int)slice_group_id)
- {
- }
-
- }
- while ((moreDataFlag && video->numMBs > 0) || video->mb_skip_run > 0); /* even if no more data, but last few MBs are skipped */
-
- if (video->numMBs == 0)
- {
- video->newPic = TRUE;
- video->mbNum = 0; // _Conceal
- return AVCDEC_PICTURE_READY;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* read MB mode and motion vectors */
-/* perform Intra/Inter prediction and residue */
-/* update video->mb_skip_run */
-AVCDec_Status DecodeMB(AVCDecObject *decvid)
-{
- AVCDec_Status status;
- AVCCommonObj *video = decvid->common;
- AVCDecBitstream *stream = decvid->bitstream;
- AVCMacroblock *currMB = video->currMB;
- uint mb_type;
- int slice_type = video->slice_type;
- int temp;
-
- currMB->QPy = video->QPy;
- currMB->QPc = video->QPc;
-
- if (slice_type == AVC_P_SLICE)
- {
- if (video->mb_skip_run < 0)
- {
- ue_v(stream, (uint *)&(video->mb_skip_run));
- }
-
- if (video->mb_skip_run == 0)
- {
- /* this will not handle the case where the slice ends with a mb_skip_run == 0 and no following MB data */
- ue_v(stream, &mb_type);
- if (mb_type > 30)
- {
- return AVCDEC_FAIL;
- }
- InterpretMBModeP(currMB, mb_type);
- video->mb_skip_run = -1;
- }
- else
- {
- /* see subclause 7.4.4 for more details on how
- mb_field_decoding_flag is derived in case of skipped MB */
-
- currMB->mb_intra = FALSE;
-
- currMB->mbMode = AVC_SKIP;
- currMB->MbPartWidth = currMB->MbPartHeight = 16;
- currMB->NumMbPart = 1;
- currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
- currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1; //
- currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
- currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
- currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
- currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
-
- memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
-
- currMB->CBP = 0;
- video->cbp4x4 = 0;
- /* for skipped MB, always look at the first entry in RefPicList */
- currMB->RefIdx[0] = currMB->RefIdx[1] =
- currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
- InterMBPrediction(video);
- video->mb_skip_run--;
- return AVCDEC_SUCCESS;
- }
-
- }
- else
- {
- /* Then decode mode and MV */
- ue_v(stream, &mb_type);
- if (mb_type > 25)
- {
- return AVCDEC_FAIL;
- }
- InterpretMBModeI(currMB, mb_type);
- }
-
-
- if (currMB->mbMode != AVC_I_PCM)
- {
-
- if (currMB->mbMode == AVC_P8 || currMB->mbMode == AVC_P8ref0)
- {
- status = sub_mb_pred(video, currMB, stream);
- }
- else
- {
- status = mb_pred(video, currMB, stream) ;
- }
-
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
- if (currMB->mbMode != AVC_I16)
- {
- /* decode coded_block_pattern */
- status = DecodeCBP(currMB, stream);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
-
- if (currMB->CBP > 0 || currMB->mbMode == AVC_I16)
- {
- se_v(stream, &temp);
- if (temp)
- {
- temp += (video->QPy + 52);
- currMB->QPy = video->QPy = temp - 52 * (temp * 79 >> 12);
- if (currMB->QPy > 51 || currMB->QPy < 0)
- {
- video->QPy = AVC_CLIP3(0, 51, video->QPy);
-// return AVCDEC_FAIL;
- }
- video->QPy_div_6 = (video->QPy * 43) >> 8;
- video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
- currMB->QPc = video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
- video->QPc_div_6 = (video->QPc * 43) >> 8;
- video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
- }
- }
- /* decode residue and inverse transform */
- status = residual(decvid, currMB);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
- else
- {
- if (stream->bitcnt & 7)
- {
- BitstreamByteAlign(stream);
- }
- /* decode pcm_byte[i] */
- DecodeIntraPCM(video, stream);
-
- currMB->QPy = 0; /* necessary for deblocking */ // _OPTIMIZE
- currMB->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->currPicParams->chroma_qp_index_offset)];
-
- /* default values, don't know if really needed */
- currMB->CBP = 0x3F;
- video->cbp4x4 = 0xFFFF;
- currMB->mb_intra = TRUE;
- memset(currMB->nz_coeff, 16, sizeof(uint8)*NUM_BLKS_IN_MB);
- return AVCDEC_SUCCESS;
- }
-
-
- /* do Intra/Inter prediction, together with the residue compensation */
- /* This part should be common between the skip and no-skip */
- if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
- {
- IntraMBPrediction(video);
- }
- else
- {
- InterMBPrediction(video);
- }
-
-
-
- return AVCDEC_SUCCESS;
-}
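/* The mb_qp_delta handling in DecodeMB() above wraps the luma QP into [0, 51]
   without a division: temp - 52 * (temp * 79 >> 12) equals temp % 52 for the
   small values that can occur there (temp = QPy + 52 + delta).  A plain
   equivalent, kept only to illustrate that identity (helper name is assumed): */
static int UpdateQPy(int prevQPy, int mb_qp_delta)
{
    return (prevQPy + mb_qp_delta + 52) % 52;   /* what the shift/multiply computes */
}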
-
-/* see subclause 7.3.5.1 */
-AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int mbPartIdx;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- uint max_ref_idx;
- const int *temp_0;
- int16 *temp_1;
- uint code;
-
- if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
- {
-
- video->intraAvailA = video->intraAvailB = video->intraAvailC = video->intraAvailD = 0;
-
- if (!video->currPicParams->constrained_intra_pred_flag)
- {
- video->intraAvailA = video->mbAvailA;
- video->intraAvailB = video->mbAvailB;
- video->intraAvailC = video->mbAvailC;
- video->intraAvailD = video->mbAvailD;
- }
- else
- {
- if (video->mbAvailA)
- {
- video->intraAvailA = video->mblock[video->mbAddrA].mb_intra;
- }
- if (video->mbAvailB)
- {
- video->intraAvailB = video->mblock[video->mbAddrB].mb_intra ;
- }
- if (video->mbAvailC)
- {
- video->intraAvailC = video->mblock[video->mbAddrC].mb_intra;
- }
- if (video->mbAvailD)
- {
- video->intraAvailD = video->mblock[video->mbAddrD].mb_intra;
- }
- }
-
-
- if (currMB->mbMode == AVC_I4)
- {
- /* perform prediction to get the actual intra 4x4 pred mode */
- DecodeIntra4x4Mode(video, currMB, stream);
- /* output will be in currMB->i4Mode[4][4] */
- }
-
- ue_v(stream, &code);
-
- if (code > 3)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- currMB->intra_chroma_pred_mode = (AVCIntraChromaPredMode)code;
- }
- else
- {
-
- memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-// max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
- max_ref_idx = video->refList0Size - 1;
-
- /* decode ref index for L0 */
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 0)
- {
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- te_v(stream, &code, max_ref_idx);
- if (code > (uint)max_ref_idx)
- {
- return AVCDEC_FAIL;
- }
- currMB->ref_idx_L0[mbPartIdx] = code;
- }
- }
-
- /* populate ref_idx_L0 */
- temp_0 = &mbPart2raster[currMB->mbMode-AVC_P16][0];
- temp_1 = &currMB->ref_idx_L0[3];
-
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
-
- /* Global reference index, these values are used in deblock */
- currMB->RefIdx[0] = video->RefPicList0[currMB->ref_idx_L0[0]]->RefIdx;
- currMB->RefIdx[1] = video->RefPicList0[currMB->ref_idx_L0[1]]->RefIdx;
- currMB->RefIdx[2] = video->RefPicList0[currMB->ref_idx_L0[2]]->RefIdx;
- currMB->RefIdx[3] = video->RefPicList0[currMB->ref_idx_L0[3]]->RefIdx;
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
- max_ref_idx = sliceHdr->num_ref_idx_l1_active_minus1;
- /* decode mvd_l0 */
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- se_v(stream, &(video->mvd_l0[mbPartIdx][0][0]));
- se_v(stream, &(video->mvd_l0[mbPartIdx][0][1]));
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 7.3.5.2 */
-AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int mbPartIdx, subMbPartIdx;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- uint max_ref_idx;
- uint sub_mb_type[4];
- uint code;
-
- memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
-
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- ue_v(stream, &(sub_mb_type[mbPartIdx]));
- if (sub_mb_type[mbPartIdx] > 3)
- {
- return AVCDEC_FAIL;
- }
-
- }
- /* we have to check the values to make sure they are valid */
- /* assign values to currMB->sub_mb_type[], currMB->MBPartPredMode[][x] */
-
- InterpretSubMBModeP(currMB, sub_mb_type);
-
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-// max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
- max_ref_idx = video->refList0Size - 1;
-
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 0 && currMB->mbMode != AVC_P8ref0)
- {
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- te_v(stream, (uint*)&code, max_ref_idx);
- if (code > max_ref_idx)
- {
- return AVCDEC_FAIL;
- }
- currMB->ref_idx_L0[mbPartIdx] = code;
- }
- }
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-
- max_ref_idx = sliceHdr->num_ref_idx_l1_active_minus1;
- /* if(video->MbaffFrameFlag && currMB->mb_field_decoding_flag)
- max_ref_idx = 2*sliceHdr->num_ref_idx_l1_active_minus1 + 1;*/
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
- {
- se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][0]));
- se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][1]));
- }
- /* used in deblocking */
- currMB->RefIdx[mbPartIdx] = video->RefPicList0[currMB->ref_idx_L0[mbPartIdx]]->RefIdx;
- }
- return AVCDEC_SUCCESS;
-}
-
-void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type)
-{
- mblock->NumMbPart = 1;
-
- mblock->mb_intra = TRUE;
-
- if (mb_type == 0) /* I_4x4 */
- {
- mblock->mbMode = AVC_I4;
- }
- else if (mb_type < 25) /* I_PCM */
- {
- mblock->mbMode = AVC_I16;
- mblock->i16Mode = (AVCIntra16x16PredMode)((mb_type - 1) & 0x3);
- if (mb_type > 12)
- {
- mblock->CBP = (((mb_type - 13) >> 2) << 4) + 0x0F;
- }
- else
- {
- mblock->CBP = ((mb_type - 1) >> 2) << 4;
- }
- }
- else
- {
- mblock->mbMode = AVC_I_PCM;
- }
-
- return ;
-}
-
-void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type)
-{
- const static int map2PartWidth[5] = {16, 16, 8, 8, 8};
- const static int map2PartHeight[5] = {16, 8, 16, 8, 8};
- const static int map2NumPart[5] = {1, 2, 2, 4, 4};
- const static AVCMBMode map2mbMode[5] = {AVC_P16, AVC_P16x8, AVC_P8x16, AVC_P8, AVC_P8ref0};
-
- mblock->mb_intra = FALSE;
- if (mb_type < 5)
- {
- mblock->mbMode = map2mbMode[mb_type];
- mblock->MbPartWidth = map2PartWidth[mb_type];
- mblock->MbPartHeight = map2PartHeight[mb_type];
- mblock->NumMbPart = map2NumPart[mb_type];
- mblock->NumSubMbPart[0] = mblock->NumSubMbPart[1] =
- mblock->NumSubMbPart[2] = mblock->NumSubMbPart[3] = 1;
- mblock->SubMbPartWidth[0] = mblock->SubMbPartWidth[1] =
- mblock->SubMbPartWidth[2] = mblock->SubMbPartWidth[3] = mblock->MbPartWidth;
- mblock->SubMbPartHeight[0] = mblock->SubMbPartHeight[1] =
- mblock->SubMbPartHeight[2] = mblock->SubMbPartHeight[3] = mblock->MbPartHeight;
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 5);
- /* set MV and Ref_Idx codes of Intra blocks in P-slices */
- memset(mblock->mvL0, 0, sizeof(int32)*16);
- mblock->ref_idx_L0[0] = mblock->ref_idx_L0[1] = mblock->ref_idx_L0[2] = mblock->ref_idx_L0[3] = -1;
- }
- return ;
-}
-
-void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type)
-{
- const static int map2PartWidth[23] = {8, 16, 16, 16, 16, 8, 16, 8, 16, 8,
- 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 8
- };
- const static int map2PartHeight[23] = {8, 16, 16, 16, 8, 16, 8, 16, 8,
- 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8
- };
- /* see enum AVCMBType declaration */
- const static AVCMBMode map2mbMode[23] = {AVC_BDirect16, AVC_P16, AVC_P16, AVC_P16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P8
- };
- const static int map2PredMode1[23] = {3, 0, 1, 2, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, -1};
- const static int map2PredMode2[23] = { -1, -1, -1, -1, 0, 0, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, -1};
- const static int map2NumPart[23] = { -1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4};
-
- mblock->mb_intra = FALSE;
-
- if (mb_type < 23)
- {
- mblock->mbMode = map2mbMode[mb_type];
- mblock->NumMbPart = map2NumPart[mb_type];
- mblock->MBPartPredMode[0][0] = (AVCPredMode)map2PredMode1[mb_type];
- if (mblock->NumMbPart > 1)
- {
- mblock->MBPartPredMode[1][0] = (AVCPredMode)map2PredMode2[mb_type];
- }
- mblock->MbPartWidth = map2PartWidth[mb_type];
- mblock->MbPartHeight = map2PartHeight[mb_type];
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 23);
- }
-
- return ;
-}
-
-void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type)
-{
- mblock->mb_intra = TRUE;
-
- if (mb_type == 0)
- {
- mblock->mbMode = AVC_SI4;
- /* other values are N/A */
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 1);
- }
- return ;
-}
-
-/* input is mblock->sub_mb_type[] */
-void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type)
-{
- int i, sub_type;
- /* see enum AVCMBType declaration */
-// const static AVCSubMBMode map2subMbMode[4] = {AVC_8x8,AVC_8x4,AVC_4x8,AVC_4x4};
- const static int map2subPartWidth[4] = {8, 8, 4, 4};
- const static int map2subPartHeight[4] = {8, 4, 8, 4};
- const static int map2numSubPart[4] = {1, 2, 2, 4};
-
- for (i = 0; i < 4 ; i++)
- {
- sub_type = (int) sub_mb_type[i];
- // mblock->subMbMode[i] = map2subMbMode[sub_type];
- mblock->NumSubMbPart[i] = map2numSubPart[sub_type];
- mblock->SubMbPartWidth[i] = map2subPartWidth[sub_type];
- mblock->SubMbPartHeight[i] = map2subPartHeight[sub_type];
- }
-
- return ;
-}
-
-void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type)
-{
- int i, j, sub_type;
- /* see enum AVCMBType declaration */
- const static AVCSubMBMode map2subMbMode[13] = {AVC_BDirect8, AVC_8x8, AVC_8x8,
- AVC_8x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_4x4, AVC_4x4, AVC_4x4
- };
- const static int map2subPartWidth[13] = {4, 8, 8, 8, 8, 4, 8, 4, 8, 4, 4, 4, 4};
- const static int map2subPartHeight[13] = {4, 8, 8, 8, 4, 8, 4, 8, 4, 8, 4, 4, 4};
- const static int map2numSubPart[13] = {1, 1, 1, 2, 2, 2, 2, 2, 2, 4, 4, 4};
- const static int map2predMode[13] = {3, 0, 1, 2, 0, 0, 1, 1, 2, 2, 0, 1, 2};
-
- for (i = 0; i < 4 ; i++)
- {
- sub_type = (int) sub_mb_type[i];
- mblock->subMbMode[i] = map2subMbMode[sub_type];
- mblock->NumSubMbPart[i] = map2numSubPart[sub_type];
- mblock->SubMbPartWidth[i] = map2subPartWidth[sub_type];
- mblock->SubMbPartHeight[i] = map2subPartHeight[sub_type];
- for (j = 0; j < 4; j++)
- {
- mblock->MBPartPredMode[i][j] = (AVCPredMode)map2predMode[sub_type];
- }
- }
-
- return ;
-}
-
-/* see subclause 8.3.1 */
-AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int intra4x4PredModeA = 0, intra4x4PredModeB = 0, predIntra4x4PredMode = 0;
- int component, SubBlock_indx, block_x, block_y;
- int dcOnlyPredictionFlag;
- uint prev_intra4x4_pred_mode_flag[16];
- int rem_intra4x4_pred_mode[16];
- int bindx = 0;
-
- for (component = 0; component < 4; component++) /* partition index */
- {
- block_x = ((component & 1) << 1);
- block_y = ((component >> 1) << 1);
-
- for (SubBlock_indx = 0; SubBlock_indx < 4; SubBlock_indx++) /* sub-partition index */
- {
- BitstreamRead1Bit(stream, &(prev_intra4x4_pred_mode_flag[bindx]));
-
- if (!prev_intra4x4_pred_mode_flag[bindx])
- {
- BitstreamReadBits(stream, 3, (uint*)&(rem_intra4x4_pred_mode[bindx]));
- }
-
- dcOnlyPredictionFlag = 0;
- if (block_x > 0)
- {
- intra4x4PredModeA = currMB->i4Mode[(block_y << 2) + block_x - 1 ];
- }
- else
- {
- if (video->intraAvailA)
- {
- if (video->mblock[video->mbAddrA].mbMode == AVC_I4)
- {
- intra4x4PredModeA = video->mblock[video->mbAddrA].i4Mode[(block_y << 2) + 3];
- }
- else
- {
- intra4x4PredModeA = AVC_I4_DC;
- }
- }
- else
- {
- dcOnlyPredictionFlag = 1;
- }
- }
-
- if (block_y > 0)
- {
- intra4x4PredModeB = currMB->i4Mode[((block_y-1) << 2) + block_x];
- }
- else
- {
- if (video->intraAvailB)
- {
- if (video->mblock[video->mbAddrB].mbMode == AVC_I4)
- {
- intra4x4PredModeB = video->mblock[video->mbAddrB].i4Mode[(3 << 2) + block_x];
- }
- else
- {
- intra4x4PredModeB = AVC_I4_DC;
- }
- }
- else
- {
- dcOnlyPredictionFlag = 1;
- }
- }
-
- if (dcOnlyPredictionFlag)
- {
- intra4x4PredModeA = intra4x4PredModeB = AVC_I4_DC;
- }
-
- predIntra4x4PredMode = AVC_MIN(intra4x4PredModeA, intra4x4PredModeB);
- if (prev_intra4x4_pred_mode_flag[bindx])
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)predIntra4x4PredMode;
- }
- else
- {
- if (rem_intra4x4_pred_mode[bindx] < predIntra4x4PredMode)
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)rem_intra4x4_pred_mode[bindx];
- }
- else
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)(rem_intra4x4_pred_mode[bindx] + 1);
- }
- }
- bindx++;
- block_y += (SubBlock_indx & 1) ;
- block_x += (1 - 2 * (SubBlock_indx & 1)) ;
- }
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end)
-{
- AVCCommonObj *video = decvid->common;
- AVCMacroblock *currMB ;
-
- int CurrMbAddr;
-
- if (video->RefPicList0[0] == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- for (CurrMbAddr = mbnum_start; CurrMbAddr < mbnum_end; CurrMbAddr++)
- {
- currMB = video->currMB = &(video->mblock[CurrMbAddr]);
- video->mbNum = CurrMbAddr;
- currMB->slice_id = video->slice_id++; // slice
-
- /* we can remove this check if we don't support Mbaff. */
- /* we can wrap below into an initMB() function which will also
- do necessary reset of macroblock related parameters. */
-
- video->mb_x = CurrMbAddr % video->PicWidthInMbs;
- video->mb_y = CurrMbAddr / video->PicWidthInMbs;
-
- /* check the availability of neighboring macroblocks */
- InitNeighborAvailability(video, CurrMbAddr);
-
- currMB->mb_intra = FALSE;
-
- currMB->mbMode = AVC_SKIP;
- currMB->MbPartWidth = currMB->MbPartHeight = 16;
-
- currMB->NumMbPart = 1;
- currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
- currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1;
- currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
- currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
- currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
- currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
- currMB->QPy = 26;
- currMB->QPc = 26;
- memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
-
- currMB->CBP = 0;
- video->cbp4x4 = 0;
- /* for skipped MB, always look at the first entry in RefPicList */
- currMB->RefIdx[0] = currMB->RefIdx[1] =
- currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
- InterMBPrediction(video);
-
- video->numMBs--;
-
- }
-
- return AVCDEC_SUCCESS;
-}
-
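DecodeIntra4x4Mode() above implements the most probable mode rule of subclause 8.3.1.1: the predicted mode is the smaller of the left and upper neighbours' modes (DC when a neighbour is missing or not coded as Intra_4x4), and the signalled 3-bit remainder skips over that predicted value. A minimal sketch of the final mapping, with hypothetical names:

#include <algorithm>

// Returns the final Intra_4x4 prediction mode (0..8) for one 4x4 block.
// modeA/modeB are the neighbours' modes, already defaulted to 2 (DC) when a
// neighbour is unavailable; remMode is rem_intra4x4_pred_mode (0..7) and
// useMostProbable reflects prev_intra4x4_pred_mode_flag.
static int Intra4x4Mode(int modeA, int modeB, bool useMostProbable, int remMode)
{
    int predMode = std::min(modeA, modeB);                // most probable mode
    if (useMostProbable)
        return predMode;
    return (remMode < predMode) ? remMode : remMode + 1;  // remainder skips predMode
}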
diff --git a/media/libstagefright/codecs/avc/dec/src/vlc.cpp b/media/libstagefright/codecs/avc/dec/src/vlc.cpp
deleted file mode 100644
index f531249..0000000
--- a/media/libstagefright/codecs/avc/dec/src/vlc.cpp
+++ /dev/null
@@ -1,815 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-//#define PV_ARM_V5
-#ifdef PV_ARM_V5
-#define PV_CLZ(A,B) __asm{CLZ (A),(B)} \
- A -= 16;
-#else
-#define PV_CLZ(A,B) while (((B) & 0x8000) == 0) {(B) <<=1; A++;}
-#endif
-
-
-#define PV_NO_CLZ
-
-#ifndef PV_NO_CLZ
-typedef struct tagVLCNumCoeffTrail
-{
- int trailing;
- int total_coeff;
- int length;
-} VLCNumCoeffTrail;
-
-typedef struct tagShiftOffset
-{
- int shift;
- int offset;
-} ShiftOffset;
-
-const VLCNumCoeffTrail NumCoeffTrailOnes[3][67] =
-{
- {{0, 0, 1}, {1, 1, 2}, {2, 2, 3}, {1, 2, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 5, 7},
- {2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {3, 6, 8}, {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 7, 9},
- {2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 8, 10}, {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 9, 11},
- {2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13},
- {2, 8, 13}, {1, 7, 13}, {0, 6, 13}, {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14},
- {2, 10, 14}, {1, 9, 14}, {0, 9, 14}, {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15},
- {2, 12, 15}, {1, 11, 15}, {0, 11, 15}, {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16},
- {2, 14, 16}, {1, 14, 16}, {0, 13, 16}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16}, {1, 13, 15},
- { -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}},
-
- {{1, 1, 2}, {0, 0, 2}, {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {3, 6, 6}, {2, 3, 6},
- {1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5}, {3, 7, 6}, {2, 4, 6},
- {1, 4, 6}, {0, 2, 6}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7}, {0, 5, 8}, {2, 6, 8},
- {1, 6, 8}, {0, 4, 8}, {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {3, 11, 11}, {2, 9, 11},
- {1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11}, {0, 11, 12}, {2, 11, 12},
- {1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12}, {3, 14, 13}, {2, 13, 13},
- {1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13}, {1, 15, 14}, {0, 15, 14},
- {2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13}, {3, 16, 14}, {2, 16, 14},
- {1, 16, 14}, {0, 16, 14}, {3, 15, 13}},
-
- {{3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4}, {0, 0, 4},
- {1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5}, {1, 2, 5},
- {0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6}, {0, 1, 6},
- {0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7}, {0, 4, 7},
- {3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8}, {0, 8, 8},
- {0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9}, {0, 10, 9},
- {1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9}, {1, 13, 9},
- {1, 16, 10}, {0, 15, 10}, {3, 15, 10}, {2, 15, 10}, {3, 16, 10}, {2, 16, 10}, {0, 16, 10}, { -1, -1, -1},
- { -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}}
-};
-
-
-const ShiftOffset NumCoeffTrailOnes_indx[3][15] =
-{
- {{15, -1}, {14, 0}, {13, 1}, {10, -1}, {9, 3}, {8, 7}, {7, 11}, {6, 15},
- {5, 19}, {3, 19}, {2, 27}, {1, 35}, {0, 43}, {0, 55}, {1, 62}},
-
- {{14, -2}, {12, -2}, {10, -2}, {10, 10}, {9, 14}, {8, 18}, {7, 22}, {5, 22},
- {4, 30}, {3, 38}, {2, 46}, {2, 58}, {3, 65}, {16, 0}, {16, 0}},
-
- {{12, -8}, {11, 0}, {10, 8}, {9, 16}, {8, 24}, {7, 32}, {6, 40}, {6, 52},
- {6, 58}, {6, 61}, {16, 0}, {16, 0}, {16, 0}, {16, 0}, {16, 0}}
-};
-
-const static int nC_table[8] = {0, 0, 1, 1, 2, 2, 2, 2};
-
-#endif
-/**
-See algorithm in subclause 9.1, Table 9-1, Table 9-2. */
-AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum)
-{
- uint temp, tmp_cnt;
- int leading_zeros = 0;
- BitstreamShowBits(bitstream, 16, &temp);
- tmp_cnt = temp | 0x1;
-
- PV_CLZ(leading_zeros, tmp_cnt)
-
- if (leading_zeros < 8)
- {
- *codeNum = (temp >> (15 - (leading_zeros << 1))) - 1;
- BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
- }
- else
- {
- BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
- *codeNum = temp - 1;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/**
-See subclause 9.1.1, Table 9-3 */
-AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value)
-{
- uint temp, tmp_cnt;
- int leading_zeros = 0;
- BitstreamShowBits(bitstream, 16, &temp);
- tmp_cnt = temp | 0x1;
-
- PV_CLZ(leading_zeros, tmp_cnt)
-
- if (leading_zeros < 8)
- {
- temp >>= (15 - (leading_zeros << 1));
- BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
- }
- else
- {
- BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
- }
-
- *value = temp >> 1;
-
- if (temp & 0x01) // lsb is signed bit
- *value = -(*value);
-
-// leading_zeros = temp >> 1;
-// *value = leading_zeros - (leading_zeros*2*(temp&1));
-
- return AVCDEC_SUCCESS;
-}
-
-AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value)
-{
- int leadingZeros;
- uint32 infobits;
- uint32 codeNum;
-
- if (AVCDEC_SUCCESS != GetEGBitstring32bit(bitstream, &leadingZeros, &infobits))
- return AVCDEC_FAIL;
-
- codeNum = (1 << leadingZeros) - 1 + infobits;
-
- *value = (codeNum + 1) / 2;
-
- if ((codeNum & 0x01) == 0) // lsb is signed bit
- *value = -(*value);
-
- return AVCDEC_SUCCESS;
-}
-
-
-AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range)
-{
- if (range > 1)
- {
- ue_v(bitstream, value);
- }
- else
- {
- BitstreamRead1Bit(bitstream, value);
- *value = 1 - (*value);
- }
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* This function is only used for syntax with range from -2^31 to 2^31-1 */
-/* only a few of them in the SPS and PPS */
-AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits)
-{
- int bit_value;
- uint info_temp;
-
- *leadingZeros = 0;
-
- BitstreamRead1Bit(bitstream, (uint*)&bit_value);
-
- while (!bit_value)
- {
- (*leadingZeros)++;
- BitstreamRead1Bit(bitstream, (uint*)&bit_value);
- }
-
- if (*leadingZeros > 0)
- {
- if (sizeof(uint) == 4) /* 32 bit machine */
- {
- BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
- *infobits = (uint32)info_temp;
- }
- else if (sizeof(uint) == 2) /* 16 bit machine */
- {
- *infobits = 0;
- if (*leadingZeros > 16)
- {
- BitstreamReadBits(bitstream, 16, (uint*)&info_temp);
- (*leadingZeros) -= 16;
- *infobits = ((uint32)info_temp) << (*leadingZeros);
- }
-
- BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
- *infobits |= (uint32)info_temp ;
- }
- }
- else
- *infobits = 0;
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-4 assignment of codeNum to values of coded_block_pattern. */
-const static uint8 MapCBP[48][2] =
-{
- {47, 0}, {31, 16}, {15, 1}, { 0, 2}, {23, 4}, {27, 8}, {29, 32}, {30, 3}, { 7, 5}, {11, 10}, {13, 12}, {14, 15},
- {39, 47}, {43, 7}, {45, 11}, {46, 13}, {16, 14}, { 3, 6}, { 5, 9}, {10, 31}, {12, 35}, {19, 37}, {21, 42}, {26, 44},
- {28, 33}, {35, 34}, {37, 36}, {42, 40}, {44, 39}, { 1, 43}, { 2, 45}, { 4, 46}, { 8, 17}, {17, 18}, {18, 20}, {20, 24},
- {24, 19}, { 6, 21}, { 9, 26}, {22, 28}, {25, 23}, {32, 27}, {33, 29}, {34, 30}, {36, 22}, {40, 25}, {38, 38}, {41, 41},
-};
-
-AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- uint codeNum;
- uint coded_block_pattern;
-
- ue_v(stream, &codeNum);
-
- if (codeNum > 47)
- {
- return AVCDEC_FAIL;
- }
-
- /* can get rid of the if _OPTIMIZE */
- if (currMB->mbMode == AVC_I4)
- {
- coded_block_pattern = MapCBP[codeNum][0];
- }
- else
- {
- coded_block_pattern = MapCBP[codeNum][1];
- }
-
-// currMB->cbpL = coded_block_pattern&0xF; /* modulo 16 */
-// currMB->cbpC = coded_block_pattern>>4; /* divide 16 */
- currMB->CBP = coded_block_pattern;
-
- return AVCDEC_SUCCESS;
-}
-
-
-/* TO BE OPTIMIZED !!!!! */
-AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC)
-{
-#ifdef PV_NO_CLZ
- const static uint8 TotCofNTrail1[75][3] = {{0, 0, 16}/*error */, {0, 0, 16}/*error */, {1, 13, 15}, {1, 13, 15}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16},
- {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16}, {2, 14, 16}, {1, 14, 16}, {0, 13, 16},
- {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15}, {2, 12, 15}, {1, 11, 15}, {0, 11, 15},
- {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14}, {2, 10, 14}, {1, 9, 14}, {0, 9, 14},
- {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13}, {2, 8, 13}, {1, 7, 13}, {0, 6, 13},
- {3, 9, 11}, {2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {3, 8, 10},
- {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 7, 9}, {2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 6, 8},
- {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 5, 7}, {2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {1, 2, 6},
- {1, 2, 6}, {0, 1, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {2, 2, 3},
- {1, 1, 2}, {1, 1, 2}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}
- };
-
- const static uint8 TotCofNTrail2[84][3] = {{0, 0, 14 /* error */}, {0, 0, 14/*error */}, {3, 15, 13}, {3, 15, 13}, {3, 16, 14}, {2, 16, 14}, {1, 16, 14}, {0, 16, 14},
- {1, 15, 14}, {0, 15, 14}, {2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13},
- {3, 14, 13}, {2, 13, 13}, {1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13},
- {0, 11, 12}, {2, 11, 12}, {1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12},
- {3, 11, 11}, {2, 9, 11}, {1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11},
- {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {0, 5, 8}, {0, 5, 8}, {2, 6, 8}, {2, 6, 8},
- {1, 6, 8}, {1, 6, 8}, {0, 4, 8}, {0, 4, 8}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7},
- {3, 7, 6}, {3, 7, 6}, {2, 4, 6}, {2, 4, 6}, {1, 4, 6}, {1, 4, 6}, {0, 2, 6}, {0, 2, 6},
- {3, 6, 6}, {2, 3, 6}, {1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5},
- {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2},
- {0, 0, 2}, {0, 0, 2}, {0, 0, 2}, {0, 0, 2}
- };
-
- const static uint8 TotCofNTrail3[64][3] = {{0, 0, 10/*error*/}, {0, 16, 10}, {3, 16, 10}, {2, 16, 10}, {1, 16, 10}, {0, 15, 10}, {3, 15, 10},
- {2, 15, 10}, {1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9},
- {1, 13, 9}, {0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9},
- {0, 10, 9}, {3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8},
- {0, 8, 8}, {0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7},
- {0, 4, 7}, {0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6},
- {0, 1, 6}, {1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5},
- {1, 2, 5}, {3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4},
- {0, 0, 4}
- };
-#endif
- uint code;
-
-#ifdef PV_NO_CLZ
- uint8 *pcode;
- if (nC < 2)
- {
- BitstreamShowBits(stream, 16, &code);
-
- if (code >= 8192)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>13)+65+2][0]);
- }
- else if (code >= 2048)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>9)+50+2][0]);
- }
- else if (code >= 1024)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>8)+46+2][0]);
- }
- else if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>7)+42+2][0]);
- }
- else if (code >= 256)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>6)+38+2][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>5)+34+2][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>3)+22+2][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>2)+14+2][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>1)+6+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail1[(code-2)+2][0]);
- }
-
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else if (nC < 4)
- {
- BitstreamShowBits(stream, 14, &code);
-
- if (code >= 4096)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>10)+66+2][0]);
- }
- else if (code >= 2048)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>8)+54+2][0]);
- }
- else if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>7)+46+2][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>5)+34+2][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>3)+22+2][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>2)+14+2][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>1)+6+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail2[code-2+2][0]);
- }
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else if (nC < 8)
- {
- BitstreamShowBits(stream, 10, &code);
-
- if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>6)+47+1][0]);
- }
- else if (code >= 256)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>5)+39+1][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>4)+31+1][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>3)+23+1][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>2)+15+1][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>1)+7+1][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail3[code-1+1][0]);
- }
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else
- {
- /* read 6 bit FLC */
- BitstreamReadBits(stream, 6, &code);
-
-
- *TrailingOnes = code & 3;
- *TotalCoeff = (code >> 2) + 1;
-
- if (*TotalCoeff > 16)
- {
- *TotalCoeff = 16; // _ERROR
- }
-
- if (code == 3)
- {
- *TrailingOnes = 0;
- (*TotalCoeff)--;
- }
- }
-#else
- const VLCNumCoeffTrail *ptr;
- const ShiftOffset *ptr_indx;
- uint temp, leading_zeros = 0;
-
- if (nC < 8)
- {
-
- BitstreamShowBits(stream, 16, &code);
- temp = code | 1;
-
- PV_CLZ(leading_zeros, temp)
-
- temp = nC_table[nC];
- ptr_indx = &NumCoeffTrailOnes_indx[temp][leading_zeros];
- ptr = &NumCoeffTrailOnes[temp][(code >> ptr_indx->shift) + ptr_indx->offset];
- *TrailingOnes = ptr->trailing;
- *TotalCoeff = ptr->total_coeff;
- BitstreamFlushBits(stream, ptr->length);
- }
- else
- {
- /* read 6 bit FLC */
- BitstreamReadBits(stream, 6, &code);
-
-
- *TrailingOnes = code & 3;
- *TotalCoeff = (code >> 2) + 1;
-
- if (*TotalCoeff > 16)
- {
- *TotalCoeff = 16; // _ERROR
- }
-
- if (code == 3)
- {
- *TrailingOnes = 0;
- (*TotalCoeff)--;
- }
- }
-#endif
- return AVCDEC_SUCCESS;
-}
-
-/* TO BE OPTIMIZED !!!!! */
-AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff)
-{
- AVCDec_Status status;
-
- const static uint8 TotCofNTrail5[21][3] =
- {
- {3, 4, 7}, {3, 4, 7}, {2, 4, 8}, {1, 4, 8}, {2, 3, 7}, {2, 3, 7}, {1, 3, 7},
- {1, 3, 7}, {0, 4, 6}, {0, 3, 6}, {0, 2, 6}, {3, 3, 6}, {1, 2, 6}, {0, 1, 6},
- {2, 2, 3}, {0, 0, 2}, {0, 0, 2}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}
- };
-
- uint code;
- uint8 *pcode;
-
- status = BitstreamShowBits(stream, 8, &code);
-
- if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail5[(code>>5)+13][0]);
- }
- else if (code >= 8)
- {
- pcode = (uint8*) & (TotCofNTrail5[(code>>2)+6][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail5[code][0]);
- }
-
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
-
- return status;
-}
-
-/* see Table 9-6 */
-AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code)
-{
- uint temp;
- uint leading_zeros = 0;
- BitstreamShowBits(stream, 16, &temp);
- temp |= 1 ;
-
- PV_CLZ(leading_zeros, temp)
-
- BitstreamFlushBits(stream, leading_zeros + 1);
- *code = leading_zeros;
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-7 and 9-8 */
-AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff)
-{
- const static uint8 TotZero1[28][2] = {{15, 9}, {14, 9}, {13, 9}, {12, 8},
- {12, 8}, {11, 8}, {11, 8}, {10, 7}, {9, 7}, {8, 6}, {8, 6}, {7, 6}, {7, 6}, {6, 5}, {6, 5},
- {6, 5}, {6, 5}, {5, 5}, {5, 5}, {5, 5}, {5, 5}, {4, 4}, {3, 4},
- {2, 3}, {2, 3}, {1, 3}, {1, 3}, {0, 1}
- };
-
- const static uint8 TotZero2n3[2][18][2] = {{{14, 6}, {13, 6}, {12, 6}, {11, 6},
- {10, 5}, {10, 5}, {9, 5}, {9, 5}, {8, 4}, {7, 4}, {6, 4}, {5, 4}, {4, 3}, {4, 3},
- {3, 3}, {2, 3}, {1, 3}, {0, 3}},
-
- /*const static uint8 TotZero3[18][2]=*/{{13, 6}, {11, 6}, {12, 5}, {12, 5}, {10, 5},
- {10, 5}, {9, 5}, {9, 5}, {8, 4}, {5, 4}, {4, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {3, 3},
- {2, 3}, {1, 3}}
- };
-
- const static uint8 TotZero4[17][2] = {{12, 5}, {11, 5}, {10, 5}, {0, 5}, {9, 4},
- {9, 4}, {7, 4}, {7, 4}, {3, 4}, {3, 4}, {2, 4}, {2, 4}, {8, 3}, {6, 3}, {5, 3}, {4, 3}, {1, 3}
- };
-
- const static uint8 TotZero5[13][2] = {{11, 5}, {9, 5}, {10, 4}, {8, 4}, {2, 4},
- {1, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}
- };
-
- const static uint8 TotZero6to10[5][15][2] = {{{10, 6}, {0, 6}, {1, 5}, {1, 5}, {8, 4},
- {8, 4}, {8, 4}, {8, 4}, {9, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}, {2, 3}},
-
- /*const static uint8 TotZero7[15][2]=*/{{9, 6}, {0, 6}, {1, 5}, {1, 5}, {7, 4},
- {7, 4}, {7, 4}, {7, 4}, {8, 3}, {6, 3}, {4, 3}, {3, 3}, {2, 3}, {5, 2}, {5, 2}},
-
- /*const static uint8 TotZero8[15][2]=*/{{8, 6}, {0, 6}, {2, 5}, {2, 5}, {1, 4},
- {1, 4}, {1, 4}, {1, 4}, {7, 3}, {6, 3}, {3, 3}, {5, 2}, {5, 2}, {4, 2}, {4, 2}},
-
- /*const static uint8 TotZero9[15][2]=*/{{1, 6}, {0, 6}, {7, 5}, {7, 5}, {2, 4},
- {2, 4}, {2, 4}, {2, 4}, {5, 3}, {6, 2}, {6, 2}, {4, 2}, {4, 2}, {3, 2}, {3, 2}},
-
- /*const static uint8 TotZero10[11][2]=*/{{1, 5}, {0, 5}, {6, 4}, {6, 4}, {2, 3},
- {2, 3}, {2, 3}, {2, 3}, {5, 2}, {4, 2}, {3, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}}
- };
-
- const static uint8 TotZero11[7][2] = {{0, 4}, {1, 4}, {2, 3}, {2, 3}, {3, 3}, {5, 3}, {4, 1}};
-
- const static uint8 TotZero12to15[4][5][2] =
- {
- {{3, 1}, {2, 2}, {4, 3}, {1, 4}, {0, 4}},
- {{2, 1}, {3, 2}, {1, 3}, {0, 3}, {0, 0}},
- {{2, 1}, {1, 2}, {0, 2}, {0, 0}, {0, 0}},
- {{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}}
- };
-
- uint temp, mask;
- int indx;
- uint8 *pcode;
-
- if (TotalCoeff == 1)
- {
- BitstreamShowBits(stream, 9, &temp);
-
- if (temp >= 256)
- {
- pcode = (uint8*) & (TotZero1[27][0]);
- }
- else if (temp >= 64)
- {
- pcode = (uint8*) & (TotZero1[(temp>>5)+19][0]);
- }
- else if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero1[(temp>>2)+5][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero1[temp-1][0]);
- }
-
- }
- else if (TotalCoeff == 2 || TotalCoeff == 3)
- {
- BitstreamShowBits(stream, 6, &temp);
-
- if (temp >= 32)
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>3)+10][0]);
- }
- else if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>2)+6][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][temp][0]);
- }
- }
- else if (TotalCoeff == 4)
- {
- BitstreamShowBits(stream, 5, &temp);
-
- if (temp >= 12)
- {
- pcode = (uint8*) & (TotZero4[(temp>>2)+9][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero4[temp][0]);
- }
- }
- else if (TotalCoeff == 5)
- {
- BitstreamShowBits(stream, 5, &temp);
-
- if (temp >= 16)
- {
- pcode = (uint8*) & (TotZero5[(temp>>2)+5][0]);
- }
- else if (temp >= 2)
- {
- pcode = (uint8*) & (TotZero5[(temp>>1)+1][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero5[temp][0]);
- }
- }
- else if (TotalCoeff >= 6 && TotalCoeff <= 10)
- {
- if (TotalCoeff == 10)
- {
- BitstreamShowBits(stream, 5, &temp);
- }
- else
- {
- BitstreamShowBits(stream, 6, &temp);
- }
-
-
- if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][(temp>>3)+7][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][temp][0]);
- }
- }
- else if (TotalCoeff == 11)
- {
- BitstreamShowBits(stream, 4, &temp);
-
-
- if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero11[6][0]);
- }
- else if (temp >= 4)
- {
- pcode = (uint8*) & (TotZero11[(temp>>1)+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero11[temp][0]);
- }
- }
- else
- {
- BitstreamShowBits(stream, (16 - TotalCoeff), &temp);
- mask = 1 << (15 - TotalCoeff);
- indx = 0;
- while ((temp&mask) == 0 && indx < (16 - TotalCoeff)) /* search location of 1 bit */
- {
- mask >>= 1;
- indx++;
- }
-
- pcode = (uint8*) & (TotZero12to15[TotalCoeff-12][indx]);
- }
-
- *code = pcode[0];
- BitstreamFlushBits(stream, pcode[1]);
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-9 */
-AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff)
-{
- const static uint8 TotZeroChrom1to3[3][8][2] =
- {
- {{3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- {{2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- {{1, 1}, {1, 1}, {1, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- };
-
-
- uint temp;
- uint8 *pcode;
-
- BitstreamShowBits(stream, 3, &temp);
- pcode = (uint8*) & (TotZeroChrom1to3[TotalCoeff-1][temp]);
-
- *code = pcode[0];
-
- BitstreamFlushBits(stream, pcode[1]);
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-10 */
-AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zerosLeft)
-{
- const static int codlen[6] = {1, 2, 2, 3, 3, 3}; /* num bits to read */
- const static uint8 RunBeforeTab[6][8][2] = {{{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore2[4][2]=*/{{2, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore3[4][2]=*/{{3, 2}, {2, 2}, {1, 2}, {0, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore4[7][2]=*/{{4, 3}, {3, 3}, {2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
- /*const static int RunBefore5[7][2]=*/{{5, 3}, {4, 3}, {3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
- /*const static int RunBefore6[7][2]=*/{{1, 3}, {2, 3}, {4, 3}, {3, 3}, {6, 3}, {5, 3}, {0, 2}, {0, 2}}
- };
-
- uint temp;
- uint8 *pcode;
- int indx;
-
- if (zerosLeft <= 6)
- {
- BitstreamShowBits(stream, codlen[zerosLeft-1], &temp);
-
- pcode = (uint8*) & (RunBeforeTab[zerosLeft-1][temp][0]);
-
- *code = pcode[0];
-
- BitstreamFlushBits(stream, pcode[1]);
- }
- else
- {
- BitstreamReadBits(stream, 3, &temp);
- if (temp)
- {
- *code = 7 - temp;
- }
- else
- {
- BitstreamShowBits(stream, 9, &temp);
- temp <<= 7;
- temp |= 1;
- indx = 0;
- PV_CLZ(indx, temp)
- *code = 7 + indx;
- BitstreamFlushBits(stream, indx + 1);
- }
- }
-
-
- return AVCDEC_SUCCESS;
-}
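ue_v() and se_v() above decode Exp-Golomb codes using a 16-bit peek plus a count-leading-zeros step, and the large coeff_token tables are flattened forms of the spec's VLC tables. For reference, the straightforward bit-at-a-time form of the ue(v)/se(v) mappings looks like the sketch below; the tiny MSB-first reader is an assumption of the sketch, not the decoder's bitstream API.

#include <cstdint>
#include <cstddef>

struct BitReader {
    const uint8_t* data;
    size_t bitPos = 0;
    uint32_t ReadBit() {
        uint32_t b = (data[bitPos >> 3] >> (7 - (bitPos & 7))) & 1;
        bitPos++;
        return b;
    }
    uint32_t ReadBits(int n) {
        uint32_t v = 0;
        while (n-- > 0) v = (v << 1) | ReadBit();
        return v;
    }
};

static uint32_t DecodeUE(BitReader& br)            // Table 9-1: codeNum
{
    int leadingZeros = 0;
    while (br.ReadBit() == 0) leadingZeros++;
    return (1u << leadingZeros) - 1 + br.ReadBits(leadingZeros);
}

static int32_t DecodeSE(BitReader& br)             // Table 9-3: signed mapping
{
    uint32_t codeNum = DecodeUE(br);
    int32_t magnitude = (int32_t)((codeNum + 1) >> 1);
    return (codeNum & 1) ? magnitude : -magnitude; // odd codeNum -> +, even -> -
}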
diff --git a/media/libstagefright/codecs/common/cmnMemory.c b/media/libstagefright/codecs/common/cmnMemory.c
index dd7c26d..aa52bd9 100644
--- a/media/libstagefright/codecs/common/cmnMemory.c
+++ b/media/libstagefright/codecs/common/cmnMemory.c
@@ -21,10 +21,8 @@
*******************************************************************************/
#include "cmnMemory.h"
-#include <malloc.h>
-#if defined LINUX
+#include <stdlib.h>
#include <string.h>
-#endif
//VO_MEM_OPERATOR g_memOP;
diff --git a/media/libstagefright/codecs/common/include/voType.h b/media/libstagefright/codecs/common/include/voType.h
index 70b2e83..5f659ab 100644
--- a/media/libstagefright/codecs/common/include/voType.h
+++ b/media/libstagefright/codecs/common/include/voType.h
@@ -101,7 +101,7 @@ typedef signed long VO_S32;
since the compiler does not support the way the component was written.
*/
#ifndef VO_SKIP64BIT
-#ifdef _WIN32
+#ifdef _MSC_VER
/** VO_U64 is a 64 bit unsigned quantity that is 64 bit word aligned */
typedef unsigned __int64 VO_U64;
/** VO_S64 is a 64 bit signed quantity that is 64 bit word aligned */
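
The guard above now keys on the compiler rather than the platform: __int64 is an MSVC keyword that other toolchains defining _WIN32 (MinGW, for example) may not accept in this form, since they provide the standard fixed-width types instead. A minimal sketch of the same idea, using hypothetical names my_u64/my_s64 rather than the VO_* types:

#if defined(_MSC_VER)
typedef unsigned __int64 my_u64;   /* MSVC 64-bit integer keyword */
typedef signed   __int64 my_s64;
#else
#include <stdint.h>                /* C99 fixed-width types */
typedef uint64_t my_u64;
typedef int64_t  my_s64;
#endif
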
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 02b1c8e..1e33f05 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -38,7 +38,6 @@ static const struct {
{ "OMX.google.amrnb.decoder", "amrdec", "audio_decoder.amrnb" },
{ "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
{ "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
- { "OMX.google.avc.decoder", "avcdec", "video_decoder.avc" },
{ "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
{ "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
{ "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
diff --git a/native/android/native_window.cpp b/native/android/native_window.cpp
index 2c0e88e..5c016c4 100644
--- a/native/android/native_window.cpp
+++ b/native/android/native_window.cpp
@@ -81,39 +81,9 @@ int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window, int32_t width,
int32_t ANativeWindow_lock(ANativeWindow* window, ANativeWindow_Buffer* outBuffer,
ARect* inOutDirtyBounds) {
- int type = -1;
- if (window->query(window, NATIVE_WINDOW_CONCRETE_TYPE, &type) != 0 ||
- type != NATIVE_WINDOW_SURFACE) {
- return BAD_VALUE;
- }
-
- Region dirtyRegion;
- Region* dirtyParam = NULL;
- if (inOutDirtyBounds != NULL) {
- dirtyRegion.set(*(Rect*)inOutDirtyBounds);
- dirtyParam = &dirtyRegion;
- }
-
- Surface::SurfaceInfo info;
- status_t res = static_cast<Surface*>(window)->lock(&info, dirtyParam);
- if (res != OK) {
- return -1;
- }
-
- outBuffer->width = (int32_t)info.w;
- outBuffer->height = (int32_t)info.h;
- outBuffer->stride = (int32_t)info.s;
- outBuffer->format = (int32_t)info.format;
- outBuffer->bits = info.bits;
-
- if (inOutDirtyBounds != NULL) {
- *inOutDirtyBounds = dirtyRegion.getBounds();
- }
-
- return 0;
+ return window->perform(window, NATIVE_WINDOW_LOCK, outBuffer, inOutDirtyBounds);
}
int32_t ANativeWindow_unlockAndPost(ANativeWindow* window) {
- status_t res = static_cast<Surface*>(window)->unlockAndPost();
- return res == android::OK ? 0 : -1;
+ return window->perform(window, NATIVE_WINDOW_UNLOCK_AND_POST);
}
diff --git a/native/include/android/native_window.h b/native/include/android/native_window.h
index 337fa96..2f4f2d3 100644
--- a/native/include/android/native_window.h
+++ b/native/include/android/native_window.h
@@ -99,10 +99,16 @@ int32_t ANativeWindow_getFormat(ANativeWindow* window);
* width and height must be either both zero or both non-zero.
*
*/
-int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window, int32_t width, int32_t height, int32_t format);
+int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window,
+ int32_t width, int32_t height, int32_t format);
/**
* Lock the window's next drawing surface for writing.
+ * inOutDirtyBounds is used as an in/out parameter: on entry it contains
+ * the dirty region, that is, the region the caller intends to redraw.
+ * When the function returns, inOutDirtyBounds is updated with the actual
+ * area the caller needs to redraw -- this region is often extended by
+ * ANativeWindow_lock.
*/
int32_t ANativeWindow_lock(ANativeWindow* window, ANativeWindow_Buffer* outBuffer,
ARect* inOutDirtyBounds);
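
The comment above spells out the in/out contract of inOutDirtyBounds. A minimal usage sketch against the public NDK API (draw_frame and the commented-out fill_pixels are hypothetical; window is assumed to be a valid ANativeWindow*):

#include <android/native_window.h>

static void draw_frame(ANativeWindow* window, int32_t w, int32_t h)
{
    ANativeWindow_setBuffersGeometry(window, w, h, WINDOW_FORMAT_RGBA_8888);

    ARect dirty = { 0, 0, w, h };            /* region we intend to redraw */
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, &dirty) != 0) {
        return;                              /* could not lock the surface */
    }

    /* 'dirty' may come back larger than requested; repaint all of it.
       buffer.bits points at the first pixel, buffer.stride is in pixels. */
    /* fill_pixels(buffer.bits, buffer.stride, buffer.format, &dirty); */

    ANativeWindow_unlockAndPost(window);     /* submit the frame */
}
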
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBarPolicy.java b/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBarPolicy.java
index dedbe5d..af5c72d 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBarPolicy.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/phone/PhoneStatusBarPolicy.java
@@ -218,9 +218,165 @@ public class PhoneStatusBarPolicy {
R.drawable.stat_sys_roaming_cdma_0,
R.drawable.stat_sys_roaming_cdma_0,
R.drawable.stat_sys_roaming_cdma_0,
- R.drawable.stat_sys_roaming_cdma_0 //83
+ R.drawable.stat_sys_roaming_cdma_0, //83
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0,
+ R.drawable.stat_sys_roaming_cdma_0 //239
- // 128-255 Reserved
+ // 240-255 Reserved
};
//***** Data connection icons
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 9b09983..0eff776 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -502,77 +502,82 @@ void CameraService::Client::disconnect() {
// ----------------------------------------------------------------------------
-// set the Surface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
- LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
+static void disconnectWindow(const sp<ANativeWindow>& window) {
+ if (window != 0) {
+ status_t result = native_window_disconnect(window.get(),
+ NATIVE_WINDOW_API_CAMERA);
+ if (result != NO_ERROR) {
+ LOGW("native_window_disconnect failed: %s (%d)", strerror(-result),
+ result);
+ }
+ }
+}
+
+status_t CameraService::Client::setPreviewWindow(const sp<IBinder>& binder,
+ const sp<ANativeWindow>& window) {
Mutex::Autolock lock(mLock);
status_t result = checkPidAndHardware();
if (result != NO_ERROR) return result;
- result = NO_ERROR;
-
// return if no change in surface.
- sp<IBinder> binder(surface != 0 ? surface->asBinder() : 0);
if (binder == mSurface) {
- return result;
+ return NO_ERROR;
}
- if (mSurface != 0) {
- LOG1("clearing old preview surface %p", mSurface.get());
+ if (window != 0) {
+ result = native_window_connect(window.get(), NATIVE_WINDOW_API_CAMERA);
+ if (result != NO_ERROR) {
+ LOGE("native_window_connect failed: %s (%d)", strerror(-result),
+ result);
+ return result;
+ }
}
- mSurface = binder;
- mPreviewWindow = surface;
- // If preview has been already started, register preview
- // buffers now.
+ // If preview has been already started, register preview buffers now.
if (mHardware->previewEnabled()) {
- if (mPreviewWindow != 0) {
- native_window_set_buffers_transform(mPreviewWindow.get(),
- mOrientation);
- result = mHardware->setPreviewWindow(mPreviewWindow);
+ if (window != 0) {
+ native_window_set_buffers_transform(window.get(), mOrientation);
+ result = mHardware->setPreviewWindow(window);
}
}
+ if (result == NO_ERROR) {
+ // Everything has succeeded. Disconnect the old window and remember the
+ // new window.
+ disconnectWindow(mPreviewWindow);
+ mSurface = binder;
+ mPreviewWindow = window;
+ } else {
+ // Something went wrong after we connected to the new window, so
+ // disconnect here.
+ disconnectWindow(window);
+ }
+
return result;
}
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
+ LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
+
+ sp<IBinder> binder(surface != 0 ? surface->asBinder() : 0);
+ sp<ANativeWindow> window(surface);
+ return setPreviewWindow(binder, window);
+}
+
// set the SurfaceTexture that the preview will use
status_t CameraService::Client::setPreviewTexture(
const sp<ISurfaceTexture>& surfaceTexture) {
LOG1("setPreviewTexture(%p) (pid %d)", surfaceTexture.get(),
getCallingPid());
- Mutex::Autolock lock(mLock);
- status_t result = checkPidAndHardware();
- if (result != NO_ERROR) return result;
-
- // return if no change in surface.
- // asBinder() is safe on NULL (returns NULL)
- if (surfaceTexture->asBinder() == mSurface) {
- return result;
- }
- if (mSurface != 0) {
- LOG1("clearing old preview surface %p", mSurface.get());
- }
- mSurface = surfaceTexture->asBinder();
+ sp<IBinder> binder;
+ sp<ANativeWindow> window;
if (surfaceTexture != 0) {
- mPreviewWindow = new SurfaceTextureClient(surfaceTexture);
- } else {
- mPreviewWindow = 0;
- }
-
- // If preview has been already started, set overlay or register preview
- // buffers now.
- if (mHardware->previewEnabled()) {
- // XXX: What if the new preview window is 0?
- if (mPreviewWindow != 0) {
- native_window_set_buffers_transform(mPreviewWindow.get(),
- mOrientation);
- result = mHardware->setPreviewWindow(mPreviewWindow);
- }
+ binder = surfaceTexture->asBinder();
+ window = new SurfaceTextureClient(surfaceTexture);
}
-
- return result;
+ return setPreviewWindow(binder, window);
}
// set the preview callback flag to affect how the received frames from
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 5e2d571..c5fefb8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -161,6 +161,10 @@ private:
int getOrientation(int orientation, bool mirror);
+ status_t setPreviewWindow(
+ const sp<IBinder>& binder,
+ const sp<ANativeWindow>& window);
+
// these are initialized in the constructor.
sp<CameraService> mCameraService; // immutable after constructor
sp<ICameraClient> mCameraClient;
diff --git a/services/java/com/android/server/InputMethodManagerService.java b/services/java/com/android/server/InputMethodManagerService.java
index 18d393f..2597978 100644
--- a/services/java/com/android/server/InputMethodManagerService.java
+++ b/services/java/com/android/server/InputMethodManagerService.java
@@ -35,6 +35,7 @@ import org.xmlpull.v1.XmlSerializer;
import android.app.ActivityManagerNative;
import android.app.AlertDialog;
+import android.app.KeyguardManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
@@ -47,6 +48,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.pm.ServiceInfo;
@@ -160,6 +162,7 @@ public class InputMethodManagerService extends IInputMethodManager.Stub
// Ongoing notification
private final NotificationManager mNotificationManager;
+ private final KeyguardManager mKeyguardManager;
private final Notification mImeSwitcherNotification;
private final PendingIntent mImeSwitchPendingIntent;
private final boolean mShowOngoingImeSwitcherForPhones;
@@ -520,6 +523,8 @@ public class InputMethodManagerService extends IInputMethodManager.Stub
}
});
+ mKeyguardManager = (KeyguardManager)
+ mContext.getSystemService(Context.KEYGUARD_SERVICE);
mNotificationManager = (NotificationManager)
mContext.getSystemService(Context.NOTIFICATION_SERVICE);
mImeSwitcherNotification = new Notification();
@@ -1632,19 +1637,27 @@ public class InputMethodManagerService extends IInputMethodManager.Stub
}
@Override
- public boolean setAdditionalInputMethodSubtypes(IBinder token, InputMethodSubtype[] subtypes) {
- if (token == null || mCurToken != token) {
- return false;
- }
- if (subtypes == null || subtypes.length == 0) return false;
+ public boolean setAdditionalInputMethodSubtypes(String imiId, InputMethodSubtype[] subtypes) {
+ // Only a process that shares the same uid as the IME can use this IPC call to add
+ // additional input method subtypes to that IME.
+ if (TextUtils.isEmpty(imiId) || subtypes == null || subtypes.length == 0) return false;
synchronized (mMethodMap) {
- final InputMethodInfo imi = mMethodMap.get(mCurMethodId);
+ final InputMethodInfo imi = mMethodMap.get(imiId);
if (imi == null) return false;
- final int N = subtypes.length;
- mFileManager.addInputMethodSubtypes(imi, subtypes);
- buildInputMethodListLocked(mMethodList, mMethodMap);
- return true;
+ final PackageManager pm = mContext.getPackageManager();
+ final String[] packageInfos = pm.getPackagesForUid(Binder.getCallingUid());
+ if (packageInfos != null) {
+ final int packageNum = packageInfos.length;
+ for (int i = 0; i < packageNum; ++i) {
+ if (packageInfos[i].equals(imi.getPackageName())) {
+ mFileManager.addInputMethodSubtypes(imi, subtypes);
+ buildInputMethodListLocked(mMethodList, mMethodMap);
+ return true;
+ }
+ }
+ }
}
+ return false;
}
private void setInputMethodWithSubtypeId(IBinder token, String id, int subtypeId) {
@@ -2118,7 +2131,8 @@ public class InputMethodManagerService extends IInputMethodManager.Stub
}
});
- if (showSubtypes) {
+ if (showSubtypes && !(mKeyguardManager.isKeyguardLocked()
+ && mKeyguardManager.isKeyguardSecure())) {
mDialogBuilder.setPositiveButton(
com.android.internal.R.string.configure_input_methods,
new DialogInterface.OnClickListener() {
diff --git a/services/java/com/android/server/usb/UsbDeviceManager.java b/services/java/com/android/server/usb/UsbDeviceManager.java
index 3139798..c80cd0a 100644
--- a/services/java/com/android/server/usb/UsbDeviceManager.java
+++ b/services/java/com/android/server/usb/UsbDeviceManager.java
@@ -473,10 +473,7 @@ public class UsbDeviceManager {
case MSG_SET_CURRENT_FUNCTION:
String function = (String)msg.obj;
boolean makeDefault = (msg.arg1 == 1);
- if (makeDefault) {
- if (function == null) {
- throw new NullPointerException();
- }
+ if (function != null && makeDefault) {
if (mAdbEnabled) {
function = addFunction(function, UsbManager.USB_FUNCTION_ADB);
}
diff --git a/services/java/com/android/server/wm/DragState.java b/services/java/com/android/server/wm/DragState.java
index 118cd55..8146fca 100644
--- a/services/java/com/android/server/wm/DragState.java
+++ b/services/java/com/android/server/wm/DragState.java
@@ -51,6 +51,8 @@ class DragState {
float mCurrentX, mCurrentY;
float mThumbOffsetX, mThumbOffsetY;
InputChannel mServerChannel, mClientChannel;
+ InputApplicationHandle mDragApplicationHandle;
+ InputWindowHandle mDragWindowHandle;
WindowState mTargetWindow;
ArrayList<WindowState> mNotifiedWindows;
boolean mDragInProgress;
@@ -91,6 +93,38 @@ class DragState {
mService.mInputManager.registerInputChannel(mServerChannel, null);
InputQueue.registerInputChannel(mClientChannel, mService.mDragInputHandler,
mService.mH.getLooper().getQueue());
+
+ mDragApplicationHandle = new InputApplicationHandle(null);
+ mDragApplicationHandle.name = "drag";
+ mDragApplicationHandle.dispatchingTimeoutNanos =
+ WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
+
+ mDragWindowHandle = new InputWindowHandle(mDragApplicationHandle, null);
+ mDragWindowHandle.name = "drag";
+ mDragWindowHandle.inputChannel = mServerChannel;
+ mDragWindowHandle.layer = getDragLayerLw();
+ mDragWindowHandle.layoutParamsFlags = 0;
+ mDragWindowHandle.layoutParamsType = WindowManager.LayoutParams.TYPE_DRAG;
+ mDragWindowHandle.dispatchingTimeoutNanos =
+ WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
+ mDragWindowHandle.visible = true;
+ mDragWindowHandle.canReceiveKeys = false;
+ mDragWindowHandle.hasFocus = true;
+ mDragWindowHandle.hasWallpaper = false;
+ mDragWindowHandle.paused = false;
+ mDragWindowHandle.ownerPid = Process.myPid();
+ mDragWindowHandle.ownerUid = Process.myUid();
+ mDragWindowHandle.inputFeatures = 0;
+ mDragWindowHandle.scaleFactor = 1.0f;
+
+ // The drag window cannot receive new touches.
+ mDragWindowHandle.touchableRegion.setEmpty();
+
+ // The drag window covers the entire display
+ mDragWindowHandle.frameLeft = 0;
+ mDragWindowHandle.frameTop = 0;
+ mDragWindowHandle.frameRight = mService.mDisplay.getRealWidth();
+ mDragWindowHandle.frameBottom = mService.mDisplay.getRealHeight();
}
}
diff --git a/services/java/com/android/server/wm/InputMonitor.java b/services/java/com/android/server/wm/InputMonitor.java
index 08a3560..12ef238 100644
--- a/services/java/com/android/server/wm/InputMonitor.java
+++ b/services/java/com/android/server/wm/InputMonitor.java
@@ -42,10 +42,6 @@ final class InputMonitor {
// When true, need to call updateInputWindowsLw().
private boolean mUpdateInputWindowsNeeded = true;
- // Fake handles for the drag surface, lazily initialized.
- private InputApplicationHandle mDragApplicationHandle;
- private InputWindowHandle mDragWindowHandle;
-
// Array of window handles to provide to the input dispatcher.
private InputWindowHandle[] mInputWindowHandles;
private int mInputWindowHandleCount;
@@ -121,44 +117,6 @@ final class InputMonitor {
return 0; // abort dispatching
}
- private void addDragInputWindowLw() {
- if (mDragWindowHandle == null) {
- mDragApplicationHandle = new InputApplicationHandle(null);
- mDragApplicationHandle.name = "drag";
- mDragApplicationHandle.dispatchingTimeoutNanos =
- WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
-
- mDragWindowHandle = new InputWindowHandle(mDragApplicationHandle, null);
- mDragWindowHandle.name = "drag";
- mDragWindowHandle.layoutParamsFlags = 0;
- mDragWindowHandle.layoutParamsType = WindowManager.LayoutParams.TYPE_DRAG;
- mDragWindowHandle.dispatchingTimeoutNanos =
- WindowManagerService.DEFAULT_INPUT_DISPATCHING_TIMEOUT_NANOS;
- mDragWindowHandle.visible = true;
- mDragWindowHandle.canReceiveKeys = false;
- mDragWindowHandle.hasFocus = true;
- mDragWindowHandle.hasWallpaper = false;
- mDragWindowHandle.paused = false;
- mDragWindowHandle.ownerPid = Process.myPid();
- mDragWindowHandle.ownerUid = Process.myUid();
- mDragWindowHandle.inputFeatures = 0;
- mDragWindowHandle.scaleFactor = 1.0f;
-
- // The drag window cannot receive new touches.
- mDragWindowHandle.touchableRegion.setEmpty();
- }
-
- mDragWindowHandle.layer = mService.mDragState.getDragLayerLw();
-
- // The drag window covers the entire display
- mDragWindowHandle.frameLeft = 0;
- mDragWindowHandle.frameTop = 0;
- mDragWindowHandle.frameRight = mService.mDisplay.getRealWidth();
- mDragWindowHandle.frameBottom = mService.mDisplay.getRealHeight();
-
- addInputWindowHandleLw(mDragWindowHandle);
- }
-
private void addInputWindowHandleLw(InputWindowHandle windowHandle) {
if (mInputWindowHandles == null) {
mInputWindowHandles = new InputWindowHandle[16];
@@ -202,7 +160,7 @@ final class InputMonitor {
if (WindowManagerService.DEBUG_DRAG) {
Log.d(WindowManagerService.TAG, "Inserting drag window");
}
- addDragInputWindowLw();
+ addInputWindowHandleLw(mService.mDragState.mDragWindowHandle);
}
final int N = windows.size();
@@ -429,4 +387,4 @@ final class InputMonitor {
private void updateInputDispatchModeLw() {
mService.mInputManager.setInputDispatchMode(mInputDispatchEnabled, mInputDispatchFrozen);
}
-} \ No newline at end of file
+}
diff --git a/telephony/java/com/android/internal/telephony/BaseCommands.java b/telephony/java/com/android/internal/telephony/BaseCommands.java
index 8427d14..f0d2fba 100644
--- a/telephony/java/com/android/internal/telephony/BaseCommands.java
+++ b/telephony/java/com/android/internal/telephony/BaseCommands.java
@@ -857,22 +857,28 @@ public abstract class BaseCommands implements CommandsInterface {
*/
public static int getLteOnCdmaModeStatic() {
int retVal;
- String productType;
-
- Matcher matcher = sProductTypePattern.matcher(sKernelCmdLine);
- if (matcher.find()) {
- productType = matcher.group(1);
- if (sLteOnCdmaProductType.equals(productType)) {
- retVal = Phone.LTE_ON_CDMA_TRUE;
+ int curVal;
+ String productType = "";
+
+ curVal = SystemProperties.getInt(TelephonyProperties.PROPERTY_LTE_ON_CDMA_DEVICE,
+ Phone.LTE_ON_CDMA_UNKNOWN);
+ retVal = curVal;
+ if (retVal == Phone.LTE_ON_CDMA_UNKNOWN) {
+ Matcher matcher = sProductTypePattern.matcher(sKernelCmdLine);
+ if (matcher.find()) {
+ productType = matcher.group(1);
+ if (sLteOnCdmaProductType.equals(productType)) {
+ retVal = Phone.LTE_ON_CDMA_TRUE;
+ } else {
+ retVal = Phone.LTE_ON_CDMA_FALSE;
+ }
} else {
retVal = Phone.LTE_ON_CDMA_FALSE;
}
- } else {
- retVal = Phone.LTE_ON_CDMA_FALSE;
- productType = "";
}
- Log.d(LOG_TAG, "getLteOnCdmaMode=" + retVal + " product_type='" + productType +
+ Log.d(LOG_TAG, "getLteOnCdmaMode=" + retVal + " curVal=" + curVal +
+ " product_type='" + productType +
"' lteOnCdmaProductType='" + sLteOnCdmaProductType + "'");
return retVal;
}
diff --git a/telephony/java/com/android/internal/telephony/TelephonyProperties.java b/telephony/java/com/android/internal/telephony/TelephonyProperties.java
index 60cf9b7..abb4523 100644
--- a/telephony/java/com/android/internal/telephony/TelephonyProperties.java
+++ b/telephony/java/com/android/internal/telephony/TelephonyProperties.java
@@ -79,6 +79,15 @@ public interface TelephonyProperties
*/
static final String PROPERTY_LTE_ON_CDMA_PRODUCT_TYPE = "telephony.lteOnCdmaProductType";
+ /**
+ * The contents of this property is one of {@link Phone#LTE_ON_CDMA_TRUE} or
+ * {@link Phone#LTE_ON_CDMA_FALSE}. If absent, the value will be assumed to be false,
+ * and {@see #PROPERTY_LTE_ON_CDMA_PRODUCT_TYPE} will be used to determine the
+ * final value, which could also be {@link Phone#LTE_ON_CDMA_FALSE}.
+ * {@see BaseCommands#getLteOnCdmaMode()}
+ */
+ static final String PROPERTY_LTE_ON_CDMA_DEVICE = "telephony.lteOnCdmaDevice";
+
static final String CURRENT_ACTIVE_PHONE = "gsm.current.phone-type";
//****** SIM Card
diff --git a/tests/BiDiTests/res/drawable/alphabet_a.png b/tests/BiDiTests/res/drawable/alphabet_a.png
new file mode 100644
index 0000000..2a80ec1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_a.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_b.png b/tests/BiDiTests/res/drawable/alphabet_b.png
new file mode 100644
index 0000000..ac887ad
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_b.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_c.png b/tests/BiDiTests/res/drawable/alphabet_c.png
new file mode 100644
index 0000000..f8cc5c6
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_c.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_d.png b/tests/BiDiTests/res/drawable/alphabet_d.png
new file mode 100644
index 0000000..764dfe5
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_d.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_e.png b/tests/BiDiTests/res/drawable/alphabet_e.png
new file mode 100644
index 0000000..dbd00e1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_e.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_f.png b/tests/BiDiTests/res/drawable/alphabet_f.png
new file mode 100644
index 0000000..f6a1bbe
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_f.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_g.png b/tests/BiDiTests/res/drawable/alphabet_g.png
new file mode 100644
index 0000000..e9d360c
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_g.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_h.png b/tests/BiDiTests/res/drawable/alphabet_h.png
new file mode 100644
index 0000000..cbc4eb1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_h.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_i.png b/tests/BiDiTests/res/drawable/alphabet_i.png
new file mode 100644
index 0000000..bae2103
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_i.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_j.png b/tests/BiDiTests/res/drawable/alphabet_j.png
new file mode 100644
index 0000000..264c6a7
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_j.png
Binary files differ
diff --git a/tests/BiDiTests/res/layout/canvas2.xml b/tests/BiDiTests/res/layout/canvas2.xml
new file mode 100644
index 0000000..b3e038f
--- /dev/null
+++ b/tests/BiDiTests/res/layout/canvas2.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/canvas2"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent">
+
+ <LinearLayout
+ xmlns:local="http://schemas.android.com/apk/res/com.android.bidi"
+ android:orientation="vertical"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent">
+
+ <TextView
+ android:text="@string/ltr"
+ android:textSize="40dip"
+ android:gravity="center"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content" />
+
+ <com.android.bidi.BiDiTestViewDrawText
+ local:text="@string/ltr"
+ android:layout_width="fill_parent"
+ android:layout_height="64dp" />
+
+ <TextView
+ android:text="@string/rtl"
+ android:textSize="40dip"
+ android:gravity="center"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"/>
+
+ <com.android.bidi.BiDiTestViewDrawText
+ local:text="@string/rtl"
+ android:layout_width="fill_parent"
+ android:layout_height="64dp" />
+
+ <TextView
+ android:text="@string/composing"
+ android:textSize="40dip"
+ android:gravity="center"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"/>
+
+ <com.android.bidi.BiDiTestViewDrawText
+ local:text="@string/composing"
+ android:layout_width="fill_parent"
+ android:layout_height="64dp" />
+
+ </LinearLayout>
+
+</FrameLayout> \ No newline at end of file
diff --git a/tests/BiDiTests/res/layout/gallery_ltr.xml b/tests/BiDiTests/res/layout/gallery_ltr.xml
new file mode 100644
index 0000000..d0e4168
--- /dev/null
+++ b/tests/BiDiTests/res/layout/gallery_ltr.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/gallery_ltr"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layoutDirection="ltr">
+
+ <Gallery
+ android:id="@+id/galleryview"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:spacing="10dip"
+ />
+
+</FrameLayout>
diff --git a/tests/BiDiTests/res/layout/gallery_rtl.xml b/tests/BiDiTests/res/layout/gallery_rtl.xml
new file mode 100644
index 0000000..c5c2f5c
--- /dev/null
+++ b/tests/BiDiTests/res/layout/gallery_rtl.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/gallery_rtl"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layoutDirection="rtl">
+
+ <Gallery
+ android:id="@+id/galleryview"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:spacing="10dip"
+ />
+
+</FrameLayout>
diff --git a/tests/BiDiTests/res/values/attrs.xml b/tests/BiDiTests/res/values/attrs.xml
new file mode 100644
index 0000000..7f8a1d8
--- /dev/null
+++ b/tests/BiDiTests/res/values/attrs.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <declare-styleable name="DrawTextTestView">
+ <attr name="size" format="dimension" />
+ <attr name="color" format="color" />
+ <attr name="text" format="string" />
+ </declare-styleable>
+</resources> \ No newline at end of file
diff --git a/tests/BiDiTests/res/values/strings.xml b/tests/BiDiTests/res/values/strings.xml
index bc99e79..1f6be7f 100644
--- a/tests/BiDiTests/res/values/strings.xml
+++ b/tests/BiDiTests/res/values/strings.xml
@@ -42,5 +42,8 @@
<string name="textview_hebrew_text">&#x05DD;&#x05DE;ab?!</string>
<string name="textview_latin_text">ab&#x05DD;&#x05DE;?!</string>
<string name="textview_multiline_text">&#x05DD;&#x05DE;?!\nab?!\n?!</string>
+ <string name="ltr">Left to right text"</string>
+ <string name="rtl">"والحق أن تترك ونص"</string>
+ <string name="composing">"\u0644\u0627"</string>
</resources>
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
index 7002c41..b45b98f 100644
--- a/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
@@ -102,6 +102,7 @@ public class BiDiTestActivity extends Activity {
addItem(result, "Basic", BiDiTestBasic.class, R.id.basic);
addItem(result, "Canvas", BiDiTestCanvas.class, R.id.canvas);
+ addItem(result, "Canvas2", BiDiTestCanvas2.class, R.id.canvas2);
addItem(result, "Linear LTR", BiDiTestLinearLayoutLtr.class, R.id.linear_layout_ltr);
addItem(result, "Linear RTL", BiDiTestLinearLayoutRtl.class, R.id.linear_layout_rtl);
@@ -136,6 +137,8 @@ public class BiDiTestActivity extends Activity {
addItem(result, "TextView Drawables LTR", BiDiTestTextViewDrawablesLtr.class, R.id.textview_drawables_ltr);
addItem(result, "TextView Drawables RTL", BiDiTestTextViewDrawablesRtl.class, R.id.textview_drawables_rtl);
+ addItem(result, "Gallery LTR", BiDiTestGalleryLtr.class, R.id.gallery_ltr);
+ addItem(result, "Gallery RTL", BiDiTestGalleryRtl.class, R.id.gallery_rtl);
return result;
}
@@ -146,4 +149,4 @@ public class BiDiTestActivity extends Activity {
inflater.inflate(R.menu.main_menu, menu);
return true;
}
-} \ No newline at end of file
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestCanvas2.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestCanvas2.java
new file mode 100644
index 0000000..b801f0e
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestCanvas2.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bidi;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.SeekBar;
+
+import static com.android.bidi.BiDiTestConstants.FONT_MAX_SIZE;
+import static com.android.bidi.BiDiTestConstants.FONT_MIN_SIZE;
+
+public class BiDiTestCanvas2 extends Fragment {
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ return inflater.inflate(R.layout.canvas2, container, false);
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java
new file mode 100644
index 0000000..adc17e1
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.content.Context;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.BaseAdapter;
+import android.widget.ImageView;
+import android.widget.ImageView.ScaleType;
+
+public class BiDiTestGalleryImages extends BaseAdapter {
+ int mGalleryItemBackground;
+ private Context mContext;
+
+ private Integer[] mImageIds = {
+ R.drawable.alphabet_a,
+ R.drawable.alphabet_b,
+ R.drawable.alphabet_c,
+ R.drawable.alphabet_d,
+ R.drawable.alphabet_e,
+ R.drawable.alphabet_f,
+ R.drawable.alphabet_g,
+ R.drawable.alphabet_h,
+ R.drawable.alphabet_i,
+ R.drawable.alphabet_j,
+ };
+
+ public BiDiTestGalleryImages(Context c) {
+ mContext = c;
+ }
+
+ @Override
+ public int getCount() {
+ return mImageIds.length;
+ }
+
+ @Override
+ public Object getItem(int position) {
+ return position;
+ }
+
+ @Override
+ public long getItemId(int position) {
+ return position;
+ }
+
+ @Override
+ public View getView(int position, View convertView, ViewGroup parent) {
+ ImageView i = new ImageView(mContext);
+ i.setImageResource(mImageIds[position]);
+ i.setScaleType(ScaleType.CENTER_INSIDE);
+ return i;
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java
new file mode 100644
index 0000000..fa86b1a
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Gallery;
+
+public class BiDiTestGalleryLtr extends Fragment {
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.gallery_ltr, container, false);
+ Gallery g = (Gallery) v.findViewById(R.id.galleryview);
+ g.setAdapter(new BiDiTestGalleryImages(this.getActivity().getBaseContext()));
+ return v;
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java
new file mode 100644
index 0000000..4cef658
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Gallery;
+
+public class BiDiTestGalleryRtl extends Fragment {
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.gallery_rtl, container, false);
+ Gallery g = (Gallery) v.findViewById(R.id.galleryview);
+ g.setAdapter(new BiDiTestGalleryImages(this.getActivity().getBaseContext()));
+ return v;
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestView.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestView.java
index 27e1887..0126dea 100644
--- a/tests/BiDiTests/src/com/android/bidi/BiDiTestView.java
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestView.java
@@ -21,7 +21,7 @@ import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
-import android.graphics.Typeface;
+import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
@@ -37,7 +37,6 @@ public class BiDiTestView extends View {
private static final float DEFAULT_ITALIC_SKEW_X = -0.25f;
- private Paint paint = new Paint();
private Rect rect = new Rect();
private String NORMAL_TEXT;
@@ -51,8 +50,7 @@ public class BiDiTestView extends View {
private String CHINESE_TEXT;
private String MIXED_TEXT_1;
private String HEBREW_TEXT;
-
- private Typeface typeface;
+ private String RTL_TEXT;
private int currentTextSize;
@@ -83,9 +81,7 @@ public class BiDiTestView extends View {
CHINESE_TEXT = context.getString(R.string.chinese_text);
MIXED_TEXT_1 = context.getString(R.string.mixed_text_1);
HEBREW_TEXT = context.getString(R.string.hebrew_text);
-
- typeface = paint.getTypeface();
- paint.setAntiAlias(true);
+ RTL_TEXT = context.getString(R.string.rtl);
}
public void setCurrentTextSize(int size) {
@@ -95,54 +91,56 @@ public class BiDiTestView extends View {
@Override
public void onDraw(Canvas canvas) {
- drawInsideRect(canvas, Color.BLACK);
+ drawInsideRect(canvas, new Paint(), Color.BLACK);
int deltaX = 0;
deltaX = testString(canvas, NORMAL_TEXT, ORIGIN, ORIGIN,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
deltaX += testString(canvas, ITALIC_TEXT, ORIGIN + deltaX, ORIGIN,
- paint, typeface, true, false, Paint.DIRECTION_LTR, currentTextSize);
+ true, false, Paint.DIRECTION_LTR, currentTextSize);
deltaX += testString(canvas, BOLD_TEXT, ORIGIN + deltaX, ORIGIN,
- paint, typeface, false, true, Paint.DIRECTION_LTR, currentTextSize);
+ false, true, Paint.DIRECTION_LTR, currentTextSize);
deltaX += testString(canvas, BOLD_ITALIC_TEXT, ORIGIN + deltaX, ORIGIN,
- paint, typeface, true, true, Paint.DIRECTION_LTR, currentTextSize);
+ true, true, Paint.DIRECTION_LTR, currentTextSize);
// Test with a long string
deltaX = testString(canvas, NORMAL_LONG_TEXT, ORIGIN, ORIGIN + 2 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
// Test with a long string
deltaX = testString(canvas, NORMAL_LONG_TEXT_2, ORIGIN, ORIGIN + 4 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
// Test with a long string
deltaX = testString(canvas, NORMAL_LONG_TEXT_3, ORIGIN, ORIGIN + 6 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
// Test Arabic ligature
deltaX = testString(canvas, ARABIC_TEXT, ORIGIN, ORIGIN + 8 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_RTL, currentTextSize);
+ false, false, Paint.DIRECTION_RTL, currentTextSize);
// Test Chinese
deltaX = testString(canvas, CHINESE_TEXT, ORIGIN, ORIGIN + 10 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
// Test Mixed (English and Arabic)
deltaX = testString(canvas, MIXED_TEXT_1, ORIGIN, ORIGIN + 12 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_LTR, currentTextSize);
+ false, false, Paint.DIRECTION_LTR, currentTextSize);
// Test Hebrew
- deltaX = testString(canvas, HEBREW_TEXT, ORIGIN, ORIGIN + 14 * currentTextSize,
- paint, typeface, false, false, Paint.DIRECTION_RTL, currentTextSize);
+ deltaX = testString(canvas, RTL_TEXT, ORIGIN, ORIGIN + 14 * currentTextSize,
+ false, false, Paint.DIRECTION_RTL, currentTextSize);
}
- private int testString(Canvas canvas, String text, int x, int y, Paint paint, Typeface typeface,
+ private int testString(Canvas canvas, String text, int x, int y,
boolean isItalic, boolean isBold, int dir, int textSize) {
- paint.setTypeface(typeface);
+
+ TextPaint paint = new TextPaint();
+ paint.setAntiAlias(true);
// Set paint properties
boolean oldFakeBold = paint.isFakeBoldText();
@@ -153,9 +151,9 @@ public class BiDiTestView extends View {
paint.setTextSkewX(DEFAULT_ITALIC_SKEW_X);
}
- Log.v(TAG, "START -- drawTextWithCanvasDrawText");
- drawTextWithCanvasDrawText(text, canvas, x, y, textSize, Color.WHITE, dir);
- Log.v(TAG, "END -- drawTextWithCanvasDrawText");
+ paint.setTextSize(textSize);
+ paint.setColor(Color.WHITE);
+ canvas.drawText(text, x, y, paint);
int length = text.length();
float[] advances = new float[length];
@@ -167,13 +165,6 @@ public class BiDiTestView extends View {
logAdvances(text, textWidthHB, textWidthICU, advances);
drawMetricsAroundText(canvas, x, y, textWidthHB, textWidthICU, textSize, Color.RED, Color.GREEN);
- paint.setColor(Color.WHITE);
-
- Log.v(TAG, "START -- drawText");
- setPaintDir(paint, dir);
- canvas.drawText(text, x, y + currentTextSize, this.paint);
- Log.v(TAG, "END -- drawText");
-
// Restore old paint properties
paint.setFakeBoldText(oldFakeBold);
paint.setTextSkewX(oldTextSkewX);
@@ -186,7 +177,7 @@ public class BiDiTestView extends View {
paint.setBidiFlags(dir);
}
- private void drawInsideRect(Canvas canvas, int color) {
+ private void drawInsideRect(Canvas canvas, Paint paint, int color) {
paint.setColor(color);
int width = getWidth();
int height = getHeight();
@@ -194,16 +185,9 @@ public class BiDiTestView extends View {
canvas.drawRect(rect, paint);
}
- private void drawTextWithCanvasDrawText(String text, Canvas canvas,
- float x, float y, float textSize, int color, int dir) {
- setPaintDir(paint, dir);
- paint.setColor(color);
- paint.setTextSize(textSize);
- canvas.drawText(text, x, y, paint);
- }
-
private void drawMetricsAroundText(Canvas canvas, int x, int y, float textWidthHB,
float textWidthICU, int textSize, int color, int colorICU) {
+ Paint paint = new Paint();
paint.setColor(color);
canvas.drawLine(x, y - textSize, x, y + 8, paint);
canvas.drawLine(x, y + 8, x + textWidthHB, y + 8, paint);
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestViewDrawText.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestViewDrawText.java
new file mode 100644
index 0000000..dfdb807
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestViewDrawText.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.bidi;
+
+import android.content.Context;
+import android.content.res.TypedArray;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint.Align;
+import android.text.TextPaint;
+import android.util.AttributeSet;
+import android.view.View;
+
+public class BiDiTestViewDrawText extends View {
+ private float mSize;
+ private int mColor;
+ private String mText;
+
+ public BiDiTestViewDrawText(Context context) {
+ this(context, null);
+ }
+
+ public BiDiTestViewDrawText(Context context, AttributeSet attrs) {
+ this(context, attrs, 0);
+ }
+
+ public BiDiTestViewDrawText(Context context, AttributeSet attrs, int defStyle) {
+ super(context, attrs, defStyle);
+
+ final TypedArray a = context.obtainStyledAttributes(attrs,
+ R.styleable.DrawTextTestView, defStyle, 0);
+ mSize = a.getDimension(R.styleable.DrawTextTestView_size, 40.0f);
+ mColor = a.getColor(R.styleable.DrawTextTestView_color, Color.YELLOW);
+ final CharSequence text = a.getText(R.styleable.DrawTextTestView_text);
+ mText = (text != null) ? text.toString() : "(empty)";
+ a.recycle();
+ }
+
+ @Override
+ protected void onDraw(Canvas canvas) {
+ super.onDraw(canvas);
+ final int width = getWidth();
+ final int height = getHeight();
+
+ final TextPaint paint = new TextPaint();
+ paint.setTextSize(mSize);
+ paint.setColor(mColor);
+ paint.setTextAlign(Align.CENTER);
+
+ canvas.drawText(mText, width / 2, height * 2 / 3, paint);
+ }
+} \ No newline at end of file
diff --git a/tests/GridLayoutTest/src/com/android/test/layout/Activity2.java b/tests/GridLayoutTest/src/com/android/test/layout/Activity2.java
index af5006f..38a85a3 100644
--- a/tests/GridLayoutTest/src/com/android/test/layout/Activity2.java
+++ b/tests/GridLayoutTest/src/com/android/test/layout/Activity2.java
@@ -95,9 +95,7 @@ public class Activity2 extends Activity {
}
{
Space v = new Space(context);
- {
- vg.addView(v, new LayoutParams(row5, col3));
- }
+ vg.addView(v, new LayoutParams(row5, col3));
}
{
Button v = new Button(context);
diff --git a/tests/HwAccelerationTest/AndroidManifest.xml b/tests/HwAccelerationTest/AndroidManifest.xml
index 32a6a65..9fcd05a 100644
--- a/tests/HwAccelerationTest/AndroidManifest.xml
+++ b/tests/HwAccelerationTest/AndroidManifest.xml
@@ -94,6 +94,15 @@
</activity>
<activity
+ android:name="CanvasTextureViewActivity"
+ android:label="_CanvasTextureView">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+
+ <activity
android:name="GLTextureViewActivity"
android:label="_TextureViewGL">
<intent-filter>
diff --git a/tests/HwAccelerationTest/src/com/android/test/hwui/CanvasTextureViewActivity.java b/tests/HwAccelerationTest/src/com/android/test/hwui/CanvasTextureViewActivity.java
new file mode 100644
index 0000000..81c22b8
--- /dev/null
+++ b/tests/HwAccelerationTest/src/com/android/test/hwui/CanvasTextureViewActivity.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.test.hwui;
+
+import android.app.Activity;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.SurfaceTexture;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.TextureView;
+import android.widget.FrameLayout;
+
+@SuppressWarnings({"UnusedDeclaration"})
+public class CanvasTextureViewActivity extends Activity
+ implements TextureView.SurfaceTextureListener {
+ private TextureView mTextureView;
+ private CanvasTextureViewActivity.RenderingThread mThread;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ FrameLayout content = new FrameLayout(this);
+
+ mTextureView = new TextureView(this);
+ mTextureView.setSurfaceTextureListener(this);
+ mTextureView.setOpaque(false);
+
+ content.addView(mTextureView, new FrameLayout.LayoutParams(500, 500, Gravity.CENTER));
+ setContentView(content);
+ }
+
+ @Override
+ public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+ mThread = new RenderingThread(mTextureView);
+ mThread.start();
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+ // Ignored
+ }
+
+ @Override
+ public void onSurfaceTextureDestroyed(SurfaceTexture surface) {
+ if (mThread != null) mThread.stopRendering();
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+ // Ignored
+ }
+
+ private static class RenderingThread extends Thread {
+ private final TextureView mSurface;
+ private volatile boolean mRunning = true;
+
+ public RenderingThread(TextureView surface) {
+ mSurface = surface;
+ }
+
+ @Override
+ public void run() {
+ float x = 0.0f;
+ float y = 0.0f;
+ float speedX = 5.0f;
+ float speedY = 3.0f;
+
+ Paint paint = new Paint();
+ paint.setColor(0xff00ff00);
+
+ while (mRunning && !Thread.interrupted()) {
+ final Canvas canvas = mSurface.lockCanvas(null);
+ try {
+ canvas.drawColor(0x00000000, PorterDuff.Mode.CLEAR);
+ canvas.drawRect(x, y, x + 20.0f, y + 20.0f, paint);
+ } finally {
+ mSurface.unlockCanvasAndPost(canvas);
+ }
+
+ if (x + 20.0f + speedX >= mSurface.getWidth() || x + speedX <= 0.0f) {
+ speedX = -speedX;
+ }
+ if (y + 20.0f + speedY >= mSurface.getHeight() || y + speedY <= 0.0f) {
+ speedY = -speedY;
+ }
+
+ x += speedX;
+ y += speedY;
+
+ try {
+ Thread.sleep(15);
+ } catch (InterruptedException e) {
+ // Interrupted
+ }
+ }
+ }
+
+ void stopRendering() {
+ interrupt();
+ mRunning = false;
+ }
+ }
+}
diff --git a/tools/aapt/AaptAssets.cpp b/tools/aapt/AaptAssets.cpp
index 29d2b87..b35878a 100644
--- a/tools/aapt/AaptAssets.cpp
+++ b/tools/aapt/AaptAssets.cpp
@@ -1560,10 +1560,10 @@ status_t AaptDir::addLeafFile(const String8& leafName, const sp<AaptFile>& file)
}
ssize_t AaptDir::slurpFullTree(Bundle* bundle, const String8& srcDir,
- const AaptGroupEntry& kind, const String8& resType)
+ const AaptGroupEntry& kind, const String8& resType,
+ sp<FilePathStore>& fullResPaths)
{
Vector<String8> fileNames;
-
{
DIR* dir = NULL;
@@ -1586,9 +1586,14 @@ ssize_t AaptDir::slurpFullTree(Bundle* bundle, const String8& srcDir,
if (isHidden(srcDir.string(), entry->d_name))
continue;
- fileNames.add(String8(entry->d_name));
+ String8 name(entry->d_name);
+ fileNames.add(name);
+ // Add fully qualified path for dependency purposes
+ // if we're collecting them
+ if (fullResPaths != NULL) {
+ fullResPaths->add(srcDir.appendPathCopy(name));
+ }
}
-
closedir(dir);
}
@@ -1615,7 +1620,7 @@ ssize_t AaptDir::slurpFullTree(Bundle* bundle, const String8& srcDir,
notAdded = true;
}
ssize_t res = subdir->slurpFullTree(bundle, pathName, kind,
- resType);
+ resType, fullResPaths);
if (res < NO_ERROR) {
return res;
}
@@ -1847,7 +1852,7 @@ ssize_t AaptAssets::slurpFromArgs(Bundle* bundle)
sp<AaptDir> assetAaptDir = makeDir(String8(kAssetDir));
AaptGroupEntry group;
count = assetAaptDir->slurpFullTree(bundle, assetRoot, group,
- String8());
+ String8(), mFullResPaths);
if (count < 0) {
totalCount = count;
goto bail;
@@ -1878,6 +1883,7 @@ ssize_t AaptAssets::slurpFromArgs(Bundle* bundle)
sp<AaptAssets> nextOverlay = new AaptAssets();
current->setOverlay(nextOverlay);
current = nextOverlay;
+ current->setFullResPaths(mFullResPaths);
}
count = current->slurpResourceTree(bundle, String8(res));
@@ -1920,7 +1926,7 @@ ssize_t AaptAssets::slurpFromArgs(Bundle* bundle)
* guarantees about ordering, so we're okay with an inorder search
* using whatever order the OS happens to hand back to us.
*/
- count = slurpFullTree(bundle, assetRoot, AaptGroupEntry(), String8());
+ count = slurpFullTree(bundle, assetRoot, AaptGroupEntry(), String8(), mFullResPaths);
if (count < 0) {
/* failure; report error and remove archive */
totalCount = count;
@@ -1946,9 +1952,10 @@ bail:
ssize_t AaptAssets::slurpFullTree(Bundle* bundle, const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType)
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths)
{
- ssize_t res = AaptDir::slurpFullTree(bundle, srcDir, kind, resType);
+ ssize_t res = AaptDir::slurpFullTree(bundle, srcDir, kind, resType, fullResPaths);
if (res > 0) {
mGroupEntries.add(kind);
}
@@ -2010,7 +2017,7 @@ ssize_t AaptAssets::slurpResourceTree(Bundle* bundle, const String8& srcDir)
if (type == kFileTypeDirectory) {
sp<AaptDir> dir = makeDir(String8(entry->d_name));
ssize_t res = dir->slurpFullTree(bundle, subdirName, group,
- resType);
+ resType, mFullResPaths);
if (res < 0) {
count = res;
goto bail;
diff --git a/tools/aapt/AaptAssets.h b/tools/aapt/AaptAssets.h
index 65743d8..a1c7c40 100644
--- a/tools/aapt/AaptAssets.h
+++ b/tools/aapt/AaptAssets.h
@@ -140,6 +140,7 @@ inline int strictly_order_type(const AaptGroupEntry& lhs, const AaptGroupEntry&
}
class AaptGroup;
+class FilePathStore;
/**
* A single asset file we know about.
@@ -269,7 +270,8 @@ public:
virtual ssize_t slurpFullTree(Bundle* bundle,
const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType);
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths);
/*
* Perform some sanity checks on the names of files and directories here.
@@ -484,6 +486,14 @@ public:
ResourceTypeSet();
};
+// Storage for lists of fully qualified paths for
+// resources encountered during slurping.
+class FilePathStore : public RefBase,
+ public Vector<String8>
+{
+public:
+ FilePathStore();
+};
/**
* Asset hierarchy being operated on.
@@ -517,7 +527,8 @@ public:
virtual ssize_t slurpFullTree(Bundle* bundle,
const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType);
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths);
ssize_t slurpResourceTree(Bundle* bundle, const String8& srcDir);
ssize_t slurpResourceZip(Bundle* bundle, const char* filename);
@@ -545,6 +556,10 @@ public:
inline void
setResources(KeyedVector<String8, sp<ResourceTypeSet> >* res) { delete mRes; mRes = res; }
+ inline sp<FilePathStore>& getFullResPaths() { return mFullResPaths; }
+ inline void
+ setFullResPaths(sp<FilePathStore>& res) { mFullResPaths = res; }
+
private:
String8 mPackage;
SortedVector<AaptGroupEntry> mGroupEntries;
@@ -558,6 +573,8 @@ private:
sp<AaptAssets> mOverlay;
KeyedVector<String8, sp<ResourceTypeSet> >* mRes;
+
+ sp<FilePathStore> mFullResPaths;
};
#endif // __AAPT_ASSETS_H
diff --git a/tools/aapt/Bundle.h b/tools/aapt/Bundle.h
index fa84e93..56fe524 100644
--- a/tools/aapt/Bundle.h
+++ b/tools/aapt/Bundle.h
@@ -41,11 +41,12 @@ public:
mCompressionMethod(0), mOutputAPKFile(NULL),
mManifestPackageNameOverride(NULL), mInstrumentationPackageNameOverride(NULL),
mIsOverlayPackage(false),
- mAutoAddOverlay(false), mAssetSourceDir(NULL), mProguardFile(NULL),
+ mAutoAddOverlay(false), mGenDependencies(false),
+ mAssetSourceDir(NULL), mProguardFile(NULL),
mAndroidManifestFile(NULL), mPublicOutputFile(NULL),
mRClassDir(NULL), mResourceIntermediatesDir(NULL), mManifestMinSdkVersion(NULL),
mMinSdkVersion(NULL), mTargetSdkVersion(NULL), mMaxSdkVersion(NULL),
- mVersionCode(NULL), mVersionName(NULL), mCustomPackage(NULL),
+ mVersionCode(NULL), mVersionName(NULL), mCustomPackage(NULL), mExtraPackages(NULL),
mMaxResVersion(NULL), mDebugMode(false), mNonConstantId(false), mProduct(NULL),
mArgc(0), mArgv(NULL)
{}
@@ -97,6 +98,8 @@ public:
void setIsOverlayPackage(bool val) { mIsOverlayPackage = val; }
bool getAutoAddOverlay() { return mAutoAddOverlay; }
void setAutoAddOverlay(bool val) { mAutoAddOverlay = val; }
+ bool getGenDependencies() { return mGenDependencies; }
+ void setGenDependencies(bool val) { mGenDependencies = val; }
/*
* Input options.
@@ -138,6 +141,8 @@ public:
void setVersionName(const char* val) { mVersionName = val; }
const char* getCustomPackage() const { return mCustomPackage; }
void setCustomPackage(const char* val) { mCustomPackage = val; }
+ const char* getExtraPackages() const { return mExtraPackages; }
+ void setExtraPackages(const char* val) { mExtraPackages = val; }
const char* getMaxResVersion() const { return mMaxResVersion; }
void setMaxResVersion(const char * val) { mMaxResVersion = val; }
bool getDebugMode() { return mDebugMode; }
@@ -224,6 +229,7 @@ private:
const char* mInstrumentationPackageNameOverride;
bool mIsOverlayPackage;
bool mAutoAddOverlay;
+ bool mGenDependencies;
const char* mAssetSourceDir;
const char* mProguardFile;
const char* mAndroidManifestFile;
@@ -243,6 +249,7 @@ private:
const char* mVersionCode;
const char* mVersionName;
const char* mCustomPackage;
+ const char* mExtraPackages;
const char* mMaxResVersion;
bool mDebugMode;
bool mNonConstantId;
diff --git a/tools/aapt/Command.cpp b/tools/aapt/Command.cpp
index 7852197..903c62c 100644
--- a/tools/aapt/Command.cpp
+++ b/tools/aapt/Command.cpp
@@ -1508,6 +1508,8 @@ int doPackage(Bundle* bundle)
status_t err;
sp<AaptAssets> assets;
int N;
+ FILE* fp;
+ String8 dependencyFile;
// -c zz_ZZ means do pseudolocalization
ResourceFilter filter;
@@ -1542,6 +1544,13 @@ int doPackage(Bundle* bundle)
// Load the assets.
assets = new AaptAssets();
+
+ // Set up collection of fully qualified resource paths in assets when we are generating a dependency file
+ if (bundle->getGenDependencies()) {
+ sp<FilePathStore> pathStore = new FilePathStore;
+ assets->setFullResPaths(pathStore);
+ }
+
err = assets->slurpFromArgs(bundle);
if (err < 0) {
goto bail;
@@ -1551,7 +1560,7 @@ int doPackage(Bundle* bundle)
assets->print();
}
// If they asked for any files that need to be compiled, do so.
if (bundle->getResourceSourceDirs().size() || bundle->getAndroidManifestFile()) {
err = buildResources(bundle, assets);
if (err != 0) {
@@ -1565,10 +1574,29 @@ int doPackage(Bundle* bundle)
goto bail;
}
+ if (bundle->getGenDependencies()) {
+ dependencyFile = String8(bundle->getRClassDir());
+ dependencyFile.appendPath("R.d");
+ // Make sure we have a clean dependency file to start with
+ fp = fopen(dependencyFile, "w");
+ fclose(fp);
+ }
+
// Write out R.java constants
if (assets->getPackage() == assets->getSymbolsPrivatePackage()) {
if (bundle->getCustomPackage() == NULL) {
err = writeResourceSymbols(bundle, assets, assets->getPackage(), true);
+ // Also write an R.java for each library package requested with --extra-packages
+ if (bundle->getExtraPackages() != NULL) {
+ // Split on colon
+ String8 libs(bundle->getExtraPackages());
+ char* packageString = strtok(libs.lockBuffer(libs.length()), ":");
+ while (packageString != NULL) {
+ err = writeResourceSymbols(bundle, assets, String8(packageString), true);
+ packageString = strtok(NULL, ":");
+ }
+ libs.unlockBuffer();
+ }
} else {
const String8 customPkg(bundle->getCustomPackage());
err = writeResourceSymbols(bundle, assets, customPkg, true);
@@ -1587,6 +1615,19 @@ int doPackage(Bundle* bundle)
}
}
+ if (bundle->getGenDependencies()) {
+ // Now that writeResourceSymbols has taken care of writing the
+ // dependency targets to the dependencyFile, we'll write the
+ // prerequisites.
+ fp = fopen(dependencyFile, "a+");
+ fprintf(fp, " : ");
+ err = writeDependencyPreReqs(bundle, assets, fp);
+
+ // Also manually add the AndroidManifest.xml since it's a non-asset
+ fprintf(fp, "%s \\\n", bundle->getAndroidManifestFile());
+ fclose(fp);
+ }
+
// Write out the ProGuard file
err = writeProguardFile(bundle, assets);
if (err < 0) {
diff --git a/tools/aapt/Main.cpp b/tools/aapt/Main.cpp
index 1e63131..8edb5b5 100644
--- a/tools/aapt/Main.cpp
+++ b/tools/aapt/Main.cpp
@@ -145,6 +145,10 @@ void usage(void)
" inserts android:versionName in to manifest.\n"
" --custom-package\n"
" generates R.java into a different package.\n"
+ " --extra-packages\n"
+ " generate R.java for libraries. Separate libraries with ':'.\n"
+ " --generate-dependencies\n"
+ " generate a dependency file for R.java.\n"
" --auto-add-overlay\n"
" Automatically add resources that are only in overlays.\n"
" --rename-manifest-package\n"
@@ -475,6 +479,17 @@ int main(int argc, char* const argv[])
goto bail;
}
bundle.setCustomPackage(argv[0]);
+ } else if (strcmp(cp, "-extra-packages") == 0) {
+ argc--;
+ argv++;
+ if (!argc) {
+ fprintf(stderr, "ERROR: No argument supplied for '--extra-packages' option\n");
+ wantUsage = true;
+ goto bail;
+ }
+ bundle.setExtraPackages(argv[0]);
+ } else if (strcmp(cp, "-generate-dependencies") == 0) {
+ bundle.setGenDependencies(true);
} else if (strcmp(cp, "-utf16") == 0) {
bundle.setWantUTF16(true);
} else if (strcmp(cp, "-rename-manifest-package") == 0) {
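As a rough sketch of how the two new flags might be driven from a build step: the aapt binary being on PATH, the android.jar location, the res/ layout, the package names, and the gen/ output directory below are all assumptions for illustration, not part of this change.

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    // Hypothetical host-side driver: runs aapt with --generate-dependencies and
    // --extra-packages, then prints the resulting gen/R.d dependency file.
    public class AaptDependencyDemo {
        public static void main(String[] args) throws IOException, InterruptedException {
            Process aapt = new ProcessBuilder(
                    "aapt", "package", "-m",
                    "-M", "AndroidManifest.xml",
                    "-S", "res",
                    "-I", "android.jar",
                    "-J", "gen",                  // R.java output dir; R.d is written here too
                    "--generate-dependencies",
                    "--extra-packages", "com.example.lib1:com.example.lib2")
                    .inheritIO()
                    .start();
            if (aapt.waitFor() != 0) {
                throw new IOException("aapt failed");
            }

            // R.d lists each generated R.java, then " : ", then every resource file
            // and the manifest as make-style, backslash-continued prerequisites.
            BufferedReader in = new BufferedReader(new FileReader("gen/R.d"));
            try {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line);
                }
            } finally {
                in.close();
            }
        }
    }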
diff --git a/tools/aapt/Main.h b/tools/aapt/Main.h
index 3ba4f39..1df1144 100644
--- a/tools/aapt/Main.h
+++ b/tools/aapt/Main.h
@@ -46,4 +46,5 @@ int dumpResources(Bundle* bundle);
String8 getAttribute(const ResXMLTree& tree, const char* ns,
const char* attr, String8* outError);
+status_t writeDependencyPreReqs(Bundle* bundle, const sp<AaptAssets>& assets, FILE* fp);
#endif // __MAIN_H
diff --git a/tools/aapt/Resource.cpp b/tools/aapt/Resource.cpp
index b4ac929..99e781d 100644
--- a/tools/aapt/Resource.cpp
+++ b/tools/aapt/Resource.cpp
@@ -51,6 +51,12 @@ ResourceTypeSet::ResourceTypeSet()
{
}
+FilePathStore::FilePathStore()
+ :RefBase(),
+ Vector<String8>()
+{
+}
+
class ResourceDirIterator
{
public:
@@ -1917,6 +1923,16 @@ status_t writeResourceSymbols(Bundle* bundle, const sp<AaptAssets>& assets,
return err;
}
fclose(fp);
+
+ if (bundle->getGenDependencies()) {
+ // Add this R.java to the dependency file
+ String8 dependencyFile(bundle->getRClassDir());
+ dependencyFile.appendPath("R.d");
+
+ fp = fopen(dependencyFile.string(), "a");
+ fprintf(fp,"%s \\\n", dest.string());
+ fclose(fp);
+ }
}
return NO_ERROR;
@@ -2244,3 +2260,16 @@ writeProguardFile(Bundle* bundle, const sp<AaptAssets>& assets)
return err;
}
+
+status_t
+writeDependencyPreReqs(Bundle* bundle, const sp<AaptAssets>& assets, FILE* fp)
+{
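+ // deps starts at -1 so that an empty path list reads as an error; each prerequisite written bumps it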
+ status_t deps = -1;
+ sp<FilePathStore> files = assets->getFullResPaths();
+ for (size_t file_i = 0; file_i < files->size(); ++file_i) {
+ // Add the full file path to the dependency file
+ fprintf(fp, "%s \\\n", files->itemAt(file_i).string());
+ deps++;
+ }
+ return deps;
+}
diff --git a/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeIInputMethodManager.java b/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeIInputMethodManager.java
index 1394c32..2519ebc 100644
--- a/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeIInputMethodManager.java
+++ b/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeIInputMethodManager.java
@@ -107,7 +107,7 @@ public class BridgeIInputMethodManager implements IInputMethodManager {
}
- public boolean setAdditionalInputMethodSubtypes(IBinder arg0, InputMethodSubtype[] arg1)
+ public boolean setAdditionalInputMethodSubtypes(String arg0, InputMethodSubtype[] arg1)
throws RemoteException {
// TODO Auto-generated method stub
return false;