aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--Makefile.android4
-rw-r--r--Makefile.common17
-rw-r--r--Makefile.target2
-rwxr-xr-xandroid-configure.sh114
-rw-r--r--android/avd/hardware-properties.ini135
-rw-r--r--android/boot-properties.c6
-rw-r--r--android/camera/camera-capture-linux.c1096
-rw-r--r--android/camera/camera-capture-mac.m549
-rwxr-xr-xandroid/camera/camera-capture-windows.c598
-rw-r--r--android/camera/camera-capture.h114
-rwxr-xr-xandroid/camera/camera-common.h206
-rwxr-xr-xandroid/camera/camera-format-converters.c1558
-rwxr-xr-xandroid/camera/camera-format-converters.h67
-rw-r--r--android/camera/camera-service.c1468
-rw-r--r--android/camera/camera-service.h30
-rw-r--r--android/camera/camera-win.h78
-rw-r--r--android/cmdline-options.h5
-rw-r--r--android/help.c59
-rw-r--r--android/hw-control.c7
-rw-r--r--android/hw-pipe-net.c80
-rw-r--r--android/hw-qemud.c602
-rw-r--r--android/hw-qemud.h8
-rw-r--r--android/hw-sensors.c7
-rw-r--r--android/main-emulator.c128
-rw-r--r--android/main.c170
-rw-r--r--android/opengles.c201
-rw-r--r--android/opengles.h48
-rw-r--r--android/skin/scaler.c22
-rw-r--r--android/skin/scaler.h6
-rw-r--r--android/skin/window.c54
-rw-r--r--android/utils/debug.h2
-rw-r--r--android/utils/dll.c194
-rw-r--r--android/utils/dll.h44
-rw-r--r--android/utils/path.c55
-rw-r--r--android/utils/path.h6
-rw-r--r--arch_init.c10
-rw-r--r--docs/ANDROID-KERNEL.TXT (renamed from docs/KERNEL.TXT)11
-rw-r--r--docs/ANDROID-QEMUD.TXT17
-rw-r--r--hw/android_arm.c1
-rw-r--r--hw/goldfish_audio.c2
-rw-r--r--hw/goldfish_device.c13
-rw-r--r--hw/goldfish_device.h14
-rw-r--r--hw/goldfish_interrupt.c2
-rw-r--r--hw/goldfish_nand.c46
-rw-r--r--hw/goldfish_nand_reg.h19
-rw-r--r--hw/goldfish_pipe.c33
-rw-r--r--hw/goldfish_pipe.h2
-rw-r--r--hw/i8259.c3
-rw-r--r--hw/pc.c6
-rw-r--r--proxy/proxy_http_rewriter.c103
-rw-r--r--qemu-options.hx3
-rw-r--r--savevm.c6
-rw-r--r--slirp-android/tcp_subr.c19
-rw-r--r--target-i386/helper.c9
-rw-r--r--vl-android.c45
55 files changed, 7928 insertions, 176 deletions
diff --git a/Makefile.android b/Makefile.android
index 01cb35b..e58f984 100644
--- a/Makefile.android
+++ b/Makefile.android
@@ -58,6 +58,7 @@ ifeq ($(HOST_OS),windows)
MY_CFLAGS += -D_WIN32
# we need Win32 features that are available since Windows 2000 Professional/Server (NT 5.0)
MY_CFLAGS += -DWINVER=0x501
+ MY_LDLIBS += -lvfw32
endif
ifeq ($(HOST_ARCH),ppc)
@@ -189,7 +190,7 @@ else
endif
ifeq ($(HOST_OS),darwin)
- QEMU_SYSTEM_LDLIBS += -Wl,-framework,Cocoa
+ QEMU_SYSTEM_LDLIBS += -Wl,-framework,Cocoa,-framework,QTKit,-framework,CoreVideo
endif
include $(LOCAL_PATH)/Makefile.common
@@ -242,6 +243,7 @@ LOCAL_SRC_FILES := \
android/snapshot.c \
android/main-common.c \
android/main.c \
+ android/opengles.c \
android/utils/setenv.c \
vl-android-ui.c \
android/protocol/core-connection.c \
diff --git a/Makefile.common b/Makefile.common
index 93f15be..71100f3 100644
--- a/Makefile.common
+++ b/Makefile.common
@@ -110,6 +110,7 @@ LOCAL_SRC_FILES += \
android/utils/assert.c \
android/utils/bufprint.c \
android/utils/debug.c \
+ android/utils/dll.c \
android/utils/dirscanner.c \
android/utils/filelock.c \
android/utils/ini.c \
@@ -191,8 +192,9 @@ endif
# Except if we used android-configure.sh --sdl-config=<script>
#
-ifneq ($(SDL_CONFIG),)
+ifneq ($(QEMU_SDL_CONFIG),)
BUILD_SDL_FROM_SOURCES := false
+ SDL_CONFIG := $(QEMU_SDL_CONFIG)
endif
ifneq ($(BUILD_SDL_FROM_SOURCES),true)
@@ -422,6 +424,8 @@ CORE_MISC_SOURCES = \
android/qemu-setup.c \
android/snapshot.c \
android/utils/timezone.c \
+ android/camera/camera-format-converters.c \
+ android/camera/camera-service.c
$(call gen-hw-config-defs)
@@ -438,17 +442,24 @@ endif
ifeq ($(HOST_OS),linux)
CORE_MISC_SOURCES += usb-linux.c \
- qemu-thread.c
+ qemu-thread.c \
+ android/camera/camera-capture-linux.c
else
CORE_MISC_SOURCES += usb-dummy-android.c
endif
ifeq ($(HOST_OS),windows)
- CORE_MISC_SOURCES += tap-win32.c
+ CORE_MISC_SOURCES += tap-win32.c \
+ android/camera/camera-capture-windows.c
+
else
CORE_MISC_SOURCES += posix-aio-compat.c
endif
+ifeq ($(HOST_OS),darwin)
+ CORE_MISC_SOURCES += android/camera/camera-capture-mac.m
+endif
+
LOCAL_SRC_FILES += $(CORE_MISC_SOURCES)
# Required
diff --git a/Makefile.target b/Makefile.target
index ccc86b4..07940bb 100644
--- a/Makefile.target
+++ b/Makefile.target
@@ -269,6 +269,7 @@ LOCAL_SRC_FILES := \
user-events-qemu.c \
vl-android.c \
android/console.c \
+ android/opengles.c \
android/display-core.c \
android/protocol/attach-ui-proxy.c \
android/protocol/fb-updates-proxy.c \
@@ -353,6 +354,7 @@ LOCAL_SRC_FILES := \
android/help.c \
android/main-common.c \
android/main.c \
+ android/opengles.c \
android/protocol/core-commands-qemu.c \
android/protocol/ui-commands-qemu.c \
android/
diff --git a/android-configure.sh b/android-configure.sh
index 4939323..6d25d20 100755
--- a/android-configure.sh
+++ b/android-configure.sh
@@ -26,6 +26,11 @@ OPTION_DEBUG=no
OPTION_STATIC=no
OPTION_MINGW=no
+GLES_INCLUDE=
+GLES_LIBS=
+GLES_SUPPORT=no
+GLES_PROBE=yes
+
HOST_CC=${CC:-gcc}
OPTION_CC=
@@ -67,6 +72,14 @@ for opt do
;;
--arch=*) TARGET_ARCH=$optarg
;;
+ --gles-include=*) GLES_INCLUDE=$optarg
+ GLES_SUPPORT=yes
+ ;;
+ --gles-libs=*) GLES_LIBS=$optarg
+ GLES_SUPPORT=yes
+ ;;
+ --no-gles) GLES_PROBE=no
+ ;;
*)
echo "unknown option '$opt', use --help"
exit 1
@@ -96,6 +109,9 @@ EOF
echo " --static build a completely static executable"
echo " --verbose verbose configuration"
echo " --debug build debug version of the emulator"
+ echo " --gles-include=PATH specify path to GLES emulation headers"
+ echo " --gles-libs=PATH specify path to GLES emulation host libraries"
+ echo " --no-gles disable GLES emulation support"
echo ""
exit 1
fi
@@ -124,10 +140,22 @@ if [ "$OPTION_TRY_64" != "yes" ] ; then
force_32bit_binaries
fi
+case $OS in
+ linux-*)
+ TARGET_DLL_SUFFIX=.so
+ ;;
+ darwin-*)
+ TARGET_DLL_SUFFIX=.dylib
+ ;;
+ windows*)
+ TARGET_DLL_SUFFIX=.dll
+esac
+
TARGET_OS=$OS
-if [ "$OPTION_MINGW" == "yes" ] ; then
+if [ "$OPTION_MINGW" = "yes" ] ; then
enable_linux_mingw
TARGET_OS=windows
+ TARGET_DLL_SUFFIX=.dll
else
enable_cygwin
fi
@@ -146,6 +174,14 @@ if [ "$OPTION_NO_PREBUILTS" = "yes" ] ; then
IN_ANDROID_BUILD=no
fi
+# This is the list of static and shared host libraries we need to link
+# against in order to support OpenGLES emulation properly. Note that in
+# the case of a standalone build, we will find these libraries inside the
+# platform build tree and copy them into objs/lib/ automatically, unless
+# you use --gles-libs to point explicitly to a different directory.
+#
+GLES_SHARED_LIBRARIES="libOpenglRender libGLES_CM_translator libGLES_V2_translator libEGL_translator"
+
if [ "$IN_ANDROID_BUILD" = "yes" ] ; then
locate_android_prebuilt
@@ -169,6 +205,9 @@ if [ "$IN_ANDROID_BUILD" = "yes" ] ; then
# finally ensure that our new binary is copied to the 'out'
# subdirectory as 'emulator'
HOST_BIN=$(get_android_abs_build_var HOST_OUT_EXECUTABLES)
+ if [ "$TARGET_OS" = "windows" ]; then
+ HOST_BIN=$(echo $HOST_BIN | sed "s%$OS/bin%windows/bin%")
+ fi
if [ -n "$HOST_BIN" ] ; then
OPTION_TARGETS="$OPTION_TARGETS $HOST_BIN/emulator$EXE"
log "Targets : TARGETS=$OPTION_TARGETS"
@@ -182,8 +221,70 @@ if [ "$IN_ANDROID_BUILD" = "yes" ] ; then
else
log "Tools : Could not locate $TOOLS_PROPS !?"
fi
+
+ # Try to find the GLES emulation headers and libraries automatically
+ if [ "$GLES_PROBE" = "yes" ]; then
+ GLES_SUPPORT=yes
+ if [ -z "$GLES_INCLUDE" ]; then
+ log "GLES : Probing for headers"
+ GLES_INCLUDE=$ANDROID_TOP/development/tools/emulator/opengl/host/include
+ if [ -d "$GLES_INCLUDE" ]; then
+ log "GLES : Headers in $GLES_INCLUDE"
+ else
+ echo "Warning: Could not find OpenGLES emulation include dir: $GLES_INCLUDE"
+ echo "Disabling GLES emulation from this build!"
+ GLES_SUPPORT=no
+ fi
+ fi
+ if [ -z "$GLES_LIBS" ]; then
+ log "GLES : Probing for host libraries"
+ GLES_LIBS=$(dirname "$HOST_BIN")/lib
+ if [ -d "$GLES_LIBS" ]; then
+ echo "GLES : Libs in $GLES_LIBS"
+ else
+            echo "Warning: Could not find OpenGLES emulation libraries in: $GLES_LIBS"
+ echo "Disabling GLES emulation from this build!"
+ GLES_SUPPORT=no
+ fi
+ fi
+ fi
fi # IN_ANDROID_BUILD = no
+if [ "$GLES_SUPPORT" = "yes" ]; then
+ if [ -z "$GLES_INCLUDE" -o -z "$GLES_LIBS" ]; then
+ echo "ERROR: You must use both --gles-include and --gles-libs at the same time!"
+ echo " Or use --no-gles to disable its support from this build."
+ exit 1
+ fi
+
+ GLES_HEADER=$GLES_INCLUDE/libOpenglRender/render_api.h
+ if [ ! -f "$GLES_HEADER" ]; then
+ echo "ERROR: Missing OpenGLES emulation header file: $GLES_HEADER"
+ echo "Please fix this by using --gles-include to point to the right directory!"
+ exit 1
+ fi
+
+ mkdir -p objs/lib
+
+ for lib in $GLES_SHARED_LIBRARIES; do
+ GLES_LIB=$GLES_LIBS/${lib}$TARGET_DLL_SUFFIX
+ if [ ! -f "$GLES_LIB" ]; then
+ echo "ERROR: Missing OpenGLES emulation host library: $GLES_LIB"
+ echo "Please fix this by using --gles-libs to point to the right directory!"
+ if [ "$IN_ANDROID_BUILD" = "true" ]; then
+ echo "You might also be missing the library because you forgot to rebuild the whole platform!"
+ fi
+ exit 1
+ fi
+ cp $GLES_LIB objs/lib
+ if [ $? != 0 ]; then
+ echo "ERROR: Could not find required OpenGLES emulation library: $GLES_LIB"
+ exit 1
+ else
+ log "GLES : Copying $GLES_LIB"
+ fi
+ done
+fi
# we can build the emulator with Cygwin, so enable it
enable_cygwin
@@ -414,7 +515,7 @@ echo "PREBUILT := $ANDROID_PREBUILT" >> $config_mk
PWD=`pwd`
echo "SRC_PATH := $PWD" >> $config_mk
if [ -n "$SDL_CONFIG" ] ; then
-echo "SDL_CONFIG := $SDL_CONFIG" >> $config_mk
+echo "QEMU_SDL_CONFIG := $SDL_CONFIG" >> $config_mk
fi
echo "CONFIG_COREAUDIO := $PROBE_COREAUDIO" >> $config_mk
echo "CONFIG_WINAUDIO := $PROBE_WINAUDIO" >> $config_mk
@@ -440,6 +541,11 @@ if [ "$OPTION_MINGW" = "yes" ] ; then
echo "HOST_OS := windows" >> $config_mk
fi
+if [ "$GLES_INCLUDE" -a "$GLES_LIBS" ]; then
+ echo "QEMU_OPENGLES_INCLUDE := $GLES_INCLUDE" >> $config_mk
+ echo "QEMU_OPENGLES_LIBS := $GLES_LIBS" >> $config_mk
+fi
+
# Build the config-host.h file
#
config_h=objs/config-host.h
@@ -543,6 +649,10 @@ fi
echo "#define CONFIG_ANDROID 1" >> $config_h
+if [ "$GLES_INCLUDE" -a "$GLES_LIBS" ]; then
+ echo "#define CONFIG_ANDROID_OPENGLES 1" >> $config_h
+fi
+
log "Generate : $config_h"
echo "Ready to go. Type 'make' to build emulator"
diff --git a/android/avd/hardware-properties.ini b/android/avd/hardware-properties.ini
index 0e650bb..a2b05df 100644
--- a/android/avd/hardware-properties.ini
+++ b/android/avd/hardware-properties.ini
@@ -14,10 +14,8 @@
# - once to implement the hardware configuration loader
# (see android/avd/hw-config.h)
#
-# Hopefully, this file should also be read by a virtual device creation
-# tool/wizard to provide a nice user interface (hence the presence of
-# the 'abstract' and 'description' keys which are not currently used)
-#
+# It is also packaged by the SDK and parsed by tools to let the developers
+# create AVDs.
#
# NOTE: if you remove items from this file, be sure that you do not break
# the emulator build.
@@ -55,6 +53,13 @@ default = yes
abstract = Touch-screen support
description = Whether there is a touch screen or not on the device.
+# Hardware main keys (back/home)
+name = hw.mainKeys
+type = boolean
+default = yes
+abstract = Hardware Back/Home keys
+description = Whether there are hardware back/home keys on the device.
+
# Trackball support
name = hw.trackBall
type = boolean
@@ -224,6 +229,128 @@ default = yes
abstract = LCD backlight
description = Enable/Disable LCD backlight simulation,yes-enabled,no-disabled.
+# Hardware OpenGLES emulation support
+#
+name = hw.gpu.enabled
+type = boolean
+default = no
+abstract = GPU emulation
+description = Enable/Disable emulated OpenGLES GPU
+
+# Fake camera support
+#
+name = hw.fakeCamera
+type = string
+default = back
+abstract = Fake camera control
+description = Must be 'back', if fake camera is facing back, 'front', if fake camera is facing front, or 'off' if fake camera is disabled.
+
+# Number of emulated web cameras
+#
+name = hw.webcam.count
+type = integer
+default = 6
+abstract = Number of emulated web cameras
+description = Defines number of web cameras to emulate. 0 disables webcam emulation.
+
+# Defines name of the emulated webcam with index 0
+#
+name = hw.webcam.0.name
+type = string
+default = webcam0
+abstract = Name of the 1-st emulated web camera
+description = Emulator-generated platform-independent name identifying a camera in the list of enumerated web cameras.
+
+# Defines name of the emulated webcam with index 1
+#
+name = hw.webcam.1.name
+type = string
+default = webcam1
+abstract = Name of the 2-nd emulated web camera
+description = Emulator-generated platform-independent camera name.
+
+# Defines name of the emulated webcam with index 2
+#
+name = hw.webcam.2.name
+type = string
+default = webcam2
+abstract = Name of the 3-rd emulated web camera
+description = Emulator-generated platform-independent camera name.
+
+# Defines name of the emulated webcam with index 3
+#
+name = hw.webcam.3.name
+type = string
+default = webcam3
+abstract = Name of the 4-th emulated web camera
+description = Emulator-generated platform-independent camera name.
+
+# Defines name of the emulated webcam with index 4
+#
+name = hw.webcam.4.name
+type = string
+default = webcam4
+abstract = Name of the 5-th emulated web camera
+description = Emulator-generated platform-independent camera name.
+
+# Defines name of the emulated webcam with index 5
+#
+name = hw.webcam.5.name
+type = string
+default = webcam5
+abstract = Name of the 6-th emulated web camera
+description = Emulator-generated platform-independent camera name.
+
+# Defines direction of the emulated webcam with index 0
+#
+name = hw.webcam.0.direction
+type = string
+default = front
+abstract = 1-st emulated web camera direction
+description = Direction of the 1-st emulated web camera
+
+# Defines direction of the emulated webcam with index 1
+# Note that first two cameras must face in opposite directions in order to enable
+# camera switch in the camera application.
+#
+name = hw.webcam.1.direction
+type = string
+default = back
+abstract = 2-nd emulated web camera direction
+description = Direction of the 2-nd emulated web camera
+
+# Defines direction of the emulated webcam with index 2
+#
+name = hw.webcam.2.direction
+type = string
+default = front
+abstract = 3-rd emulated web camera direction
+description = Direction of the 3-rd emulated web camera
+
+# Defines direction of the emulated webcam with index 3
+#
+name = hw.webcam.3.direction
+type = string
+default = front
+abstract = 4-th emulated web camera direction
+description = Direction of the 4-th emulated web camera
+
+# Defines direction of the emulated webcam with index 4
+#
+name = hw.webcam.4.direction
+type = string
+default = front
+abstract = 5-th emulated web camera direction
+description = Direction of the 5-th emulated web camera
+
+# Defines direction of the emulated webcam with index 5
+#
+name = hw.webcam.5.direction
+type = string
+default = front
+abstract = 6-th emulated web camera direction
+description = Direction of the 6-th emulated web camera
+
# Maximum VM heap size
# Higher values are required for high-dpi devices
# Default will depend on RAM size.
diff --git a/android/boot-properties.c b/android/boot-properties.c
index 1a86010..acdeed9 100644
--- a/android/boot-properties.c
+++ b/android/boot-properties.c
@@ -308,7 +308,6 @@ boot_property_client_recv( void* opaque,
/* Send a NUL to signal the end of the list. */
qemud_client_send(client, (uint8_t*)"", 1);
- qemud_client_close(client);
return;
}
@@ -319,11 +318,12 @@ boot_property_client_recv( void* opaque,
static QemudClient*
boot_property_service_connect( void* opaque,
QemudService* serv,
- int channel )
+ int channel,
+ const char* client_param )
{
QemudClient* client;
- client = qemud_client_new( serv, channel, NULL,
+ client = qemud_client_new( serv, channel, client_param, NULL,
boot_property_client_recv,
NULL, NULL, NULL );
diff --git a/android/camera/camera-capture-linux.c b/android/camera/camera-capture-linux.c
new file mode 100644
index 0000000..5243fb6
--- /dev/null
+++ b/android/camera/camera-capture-linux.c
@@ -0,0 +1,1096 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains code that is used to capture video frames from a camera device
+ * on Linux. This code uses V4L2 API to work with camera devices, and requires
+ * Linux kernel version at least 2.5
+ */
+
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include "android/camera/camera-capture.h"
+#include "android/camera/camera-format-converters.h"
+
+#define E(...) derror(__VA_ARGS__)
+#define W(...) dwarning(__VA_ARGS__)
+#define D(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#define D_ACTIVE VERBOSE_CHECK(camera)
+
+/* the T(...) macro is used to dump traffic */
+#define T_ACTIVE 0
+
+#if T_ACTIVE
+#define T(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#else
+#define T(...) ((void)0)
+#endif
+
+#define CLEAR(x) memset (&(x), 0, sizeof(x))
+
+/* Pixel format descriptor.
+ * Instances of this descriptor are created during camera device enumeration, and
+ * an instance of this structure describing pixel format chosen for the camera
+ * emulation is saved by the camera factory service to represent an emulating
+ * camera properties.
+ */
+typedef struct QemuPixelFormat {
+ /* Pixel format in V4L2_PIX_FMT_XXX form. */
+ uint32_t format;
+ /* Frame dimensions supported by this format. */
+ CameraFrameDim* dims;
+ /* Number of frame dimensions supported by this format. */
+ int dim_num;
+} QemuPixelFormat;
+
+/* Describes a framebuffer. */
+typedef struct CameraFrameBuffer {
+ /* Framebuffer data. */
+ uint8_t* data;
+ /* Framebuffer data size. */
+ size_t size;
+} CameraFrameBuffer;
+
+/* Defines type of the I/O used to obtain frames from the device. */
+typedef enum CameraIoType {
+ /* Framebuffers are shared via memory mapping. */
+ CAMERA_IO_MEMMAP,
+ /* Framebuffers are available via user pointers. */
+ CAMERA_IO_USERPTR,
+ /* Framebuffers are to be read from the device. */
+ CAMERA_IO_DIRECT
+} CameraIoType;
+
+typedef struct LinuxCameraDevice LinuxCameraDevice;
+/*
+ * Describes a connection to an actual camera device.
+ */
+struct LinuxCameraDevice {
+ /* Common header. */
+ CameraDevice header;
+
+ /* Camera device name. (default is /dev/video0) */
+ char* device_name;
+ /* Input channel. (default is 0) */
+ int input_channel;
+
+ /*
+ * Set by the framework after initializing camera connection.
+ */
+
+ /* Handle to the opened camera device. */
+ int handle;
+ /* Device capabilities. */
+ struct v4l2_capability caps;
+ /* Actual pixel format reported by the device when capturing is started. */
+ struct v4l2_pix_format actual_pixel_format;
+ /* Defines type of the I/O to use to retrieve frames from the device. */
+ CameraIoType io_type;
+ /* Allocated framebuffers. */
+ struct CameraFrameBuffer* framebuffers;
+ /* Actual number of allocated framebuffers. */
+ int framebuffer_num;
+};
+
+/* Preferred pixel formats arranged from the most to the least desired.
+ *
+ * More than anything else this array is defined by an existence of format
+ * conversion between the camera supported formats, and formats that are
+ * supported by camera framework in the guest system. Currently, guest supports
+ * only YV12 pixel format for data, and RGB32 for preview. So, this array should
+ * contain only those formats, for which converters are implemented. Generally
+ * speaking, the order in which entries should be arranged in this array matters
+ * only as far as conversion speed is concerned. So, formats with the fastest
+ * converters should be put closer to the top of the array, while slower ones
+ * should be put closer to the bottom. But as far as functionality is concerned,
+ * the order doesn't matter, and any format can be placed anywhere in this array,
+ * as long as conversion for it exists.
+ */
+static const uint32_t _preferred_formats[] =
+{
+ /* Native format for the emulated camera: no conversion at all. */
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420,
+ /* Continue with YCbCr: less math than with RGB */
+ V4L2_PIX_FMT_NV12,
+ V4L2_PIX_FMT_NV21,
+ V4L2_PIX_FMT_YUYV,
+ /* End with RGB. */
+ V4L2_PIX_FMT_RGB32,
+ V4L2_PIX_FMT_RGB24,
+ V4L2_PIX_FMT_RGB565,
+};
+/* Number of entries in _preferred_formats array. */
+static const int _preferred_format_num =
+ sizeof(_preferred_formats)/sizeof(*_preferred_formats);
+
+/*******************************************************************************
+ * Helper routines
+ ******************************************************************************/
+
+/* IOCTL wrapper. */
+static int
+_xioctl(int fd, int request, void *arg) {
+ int r;
+ do {
+ r = ioctl(fd, request, arg);
+ } while (-1 == r && EINTR == errno);
+ return r;
+}
+
+/* Frees resource allocated for QemuPixelFormat instance, excluding the instance
+ * itself.
+ */
+static void _qemu_pixel_format_free(QemuPixelFormat* fmt)
+{
+ if (fmt != NULL) {
+ if (fmt->dims != NULL)
+ free(fmt->dims);
+ }
+}
+
+/* Returns an index of the given pixel format in an array containing pixel
+ * format descriptors.
+ * This routine is used to choose a pixel format for a camera device. The idea
+ * is that when the camera service enumerates all pixel formats for all cameras
+ * connected to the host, we need to choose just one, which would be most
+ * appropriate for camera emulation. To do that, the camera service will run
+ * formats, contained in _preferred_formats array against enumerated pixel
+ * formats to pick the first format that matches.
+ * Param:
+ * fmt - Pixel format, for which to obtain the index.
+ * formats - Array containing list of pixel formats, supported by the camera
+ * device.
+ * size - Number of elements in the 'formats' array.
+ * Return:
+ * Index of the matched entry in the array, or -1 if no entry has been found.
+ */
+static int
+_get_format_index(uint32_t fmt, QemuPixelFormat* formats, int size)
+{
+ int f;
+ for (f = 0; f < size && formats[f].format != fmt; f++);
+ return f < size ? f : -1;
+}
+
+/*******************************************************************************
+ * CameraFrameBuffer routines
+ ******************************************************************************/
+
+/* Frees array of framebuffers, depending on the I/O method the array has been
+ * initialized for.
+ * Note that this routine doesn't free the array itself.
+ * Param:
+ * fb, num - Array data, and its size.
+ * io_type - Type of the I/O the array has been initialized for.
+ */
+static void
+_free_framebuffers(CameraFrameBuffer* fb, int num, CameraIoType io_type)
+{
+ if (fb != NULL) {
+ int n;
+
+ switch (io_type) {
+ case CAMERA_IO_MEMMAP:
+ /* Unmap framebuffers. */
+ for (n = 0; n < num; n++) {
+ if (fb[n].data != NULL) {
+ munmap(fb[n].data, fb[n].size);
+ fb[n].data = NULL;
+ fb[n].size = 0;
+ }
+ }
+ break;
+
+ case CAMERA_IO_USERPTR:
+ case CAMERA_IO_DIRECT:
+ /* Free framebuffers. */
+ for (n = 0; n < num; n++) {
+ if (fb[n].data != NULL) {
+ free(fb[n].data);
+ fb[n].data = NULL;
+ fb[n].size = 0;
+ }
+ }
+ break;
+
+ default:
+ E("%s: Invalid I/O type %d", __FUNCTION__, io_type);
+ break;
+ }
+ }
+}
+
+/*******************************************************************************
+ * CameraDevice routines
+ ******************************************************************************/
+
+/* Allocates an instance of LinuxCameraDevice structure.
+ * Return:
+ * Allocated instance of LinuxCameraDevice structure. Note that this routine
+ * also sets 'opaque' field in the 'header' structure to point back to the
+ * containing LinuxCameraDevice instance.
+ */
+static LinuxCameraDevice*
+_camera_device_alloc(void)
+{
+ LinuxCameraDevice* cd;
+
+ ANEW0(cd);
+ memset(cd, 0, sizeof(*cd));
+ cd->header.opaque = cd;
+ cd->handle = -1;
+
+ return cd;
+}
+
+/* Uninitializes and frees CameraDevice structure.
+ */
+static void
+_camera_device_free(LinuxCameraDevice* lcd)
+{
+ if (lcd != NULL) {
+ /* Closing handle will also disconnect from the driver. */
+ if (lcd->handle >= 0) {
+ close(lcd->handle);
+ }
+ if (lcd->device_name != NULL) {
+ free(lcd->device_name);
+ }
+ if (lcd->framebuffers != NULL) {
+ _free_framebuffers(lcd->framebuffers, lcd->framebuffer_num,
+ lcd->io_type);
+ free(lcd->framebuffers);
+ }
+ AFREE(lcd);
+ } else {
+ E("%s: No descriptor", __FUNCTION__);
+ }
+}
+
+/* Resets camera device after capturing.
+ * Since new capture request may require different frame dimensions we must
+ * reset camera device by reopening its handle. Otherwise attempts to set up new
+ * frame properties (different from the previous one) may fail. */
+static void
+_camera_device_reset(LinuxCameraDevice* cd)
+{
+ struct v4l2_cropcap cropcap;
+ struct v4l2_crop crop;
+
+ /* Free capturing framebuffers first. */
+ if (cd->framebuffers != NULL) {
+ _free_framebuffers(cd->framebuffers, cd->framebuffer_num, cd->io_type);
+ free(cd->framebuffers);
+ cd->framebuffers = NULL;
+ cd->framebuffer_num = 0;
+ }
+
+ /* Reset device handle. */
+ close(cd->handle);
+ cd->handle = open(cd->device_name, O_RDWR | O_NONBLOCK, 0);
+
+ if (cd->handle >= 0) {
+ /* Select video input, video standard and tune here. */
+ cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ _xioctl(cd->handle, VIDIOC_CROPCAP, &cropcap);
+ crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ crop.c = cropcap.defrect; /* reset to default */
+ _xioctl (cd->handle, VIDIOC_S_CROP, &crop);
+ }
+}
+
+/* Memory maps buffers and shares mapped memory with the device.
+ * Return:
+ * 0 Framebuffers have been mapped.
+ *  -1 A critical error has occurred.
+ * 1 Memory mapping is not available.
+ */
+static int
+_camera_device_mmap_framebuffer(LinuxCameraDevice* cd)
+{
+ struct v4l2_requestbuffers req;
+ CLEAR(req);
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+
+ /* Request memory mapped buffers. Note that device can return less buffers
+ * than requested. */
+ if(_xioctl(cd->handle, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ D("%s: Device '%s' does not support memory mapping",
+ __FUNCTION__, cd->device_name);
+ return 1;
+ } else {
+ E("%s: VIDIOC_REQBUFS has failed: %s",
+ __FUNCTION__, strerror(errno));
+ return -1;
+ }
+ }
+
+ /* Allocate framebuffer array. */
+ cd->framebuffers = calloc(req.count, sizeof(CameraFrameBuffer));
+ if (cd->framebuffers == NULL) {
+ E("%s: Not enough memory to allocate framebuffer array", __FUNCTION__);
+ return -1;
+ }
+
+ /* Map every framebuffer to the shared memory, and queue it
+ * with the device. */
+ for(cd->framebuffer_num = 0; cd->framebuffer_num < req.count;
+ cd->framebuffer_num++) {
+ /* Map framebuffer. */
+ struct v4l2_buffer buf;
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = cd->framebuffer_num;
+ if(_xioctl(cd->handle, VIDIOC_QUERYBUF, &buf) < 0) {
+ E("%s: VIDIOC_QUERYBUF has failed: %s",
+ __FUNCTION__, strerror(errno));
+ return -1;
+ }
+ cd->framebuffers[cd->framebuffer_num].size = buf.length;
+ cd->framebuffers[cd->framebuffer_num].data =
+ mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+ cd->handle, buf.m.offset);
+ if (MAP_FAILED == cd->framebuffers[cd->framebuffer_num].data) {
+ E("%s: Memory mapping has failed: %s",
+ __FUNCTION__, strerror(errno));
+ return -1;
+ }
+
+ /* Queue the mapped buffer. */
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = cd->framebuffer_num;
+ if (_xioctl(cd->handle, VIDIOC_QBUF, &buf) < 0) {
+ E("%s: VIDIOC_QBUF has failed: %s", __FUNCTION__, strerror(errno));
+ return -1;
+ }
+ }
+
+ cd->io_type = CAMERA_IO_MEMMAP;
+
+ return 0;
+}
+
+/* Allocates frame buffers and registers them with the device.
+ * Return:
+ * 0 Framebuffers have been mapped.
+ *  -1 A critical error has occurred.
+ * 1 Device doesn't support user pointers.
+ */
+static int
+_camera_device_user_framebuffer(LinuxCameraDevice* cd)
+{
+ struct v4l2_requestbuffers req;
+ CLEAR (req);
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ /* Request user buffers. Note that device can return less buffers
+ * than requested. */
+ if(_xioctl(cd->handle, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ D("%s: Device '%s' does not support user pointers",
+ __FUNCTION__, cd->device_name);
+ return 1;
+ } else {
+ E("%s: VIDIOC_REQBUFS has failed: %s",
+ __FUNCTION__, strerror(errno));
+ return -1;
+ }
+ }
+
+ /* Allocate framebuffer array. */
+ cd->framebuffers = calloc(req.count, sizeof(CameraFrameBuffer));
+ if (cd->framebuffers == NULL) {
+ E("%s: Not enough memory to allocate framebuffer array", __FUNCTION__);
+ return -1;
+ }
+
+    /* Allocate buffers, queueing them with the device at the same time */
+ for(cd->framebuffer_num = 0; cd->framebuffer_num < req.count;
+ cd->framebuffer_num++) {
+ cd->framebuffers[cd->framebuffer_num].size =
+ cd->actual_pixel_format.sizeimage;
+ cd->framebuffers[cd->framebuffer_num].data =
+ malloc(cd->framebuffers[cd->framebuffer_num].size);
+ if (cd->framebuffers[cd->framebuffer_num].data == NULL) {
+ E("%s: Not enough memory to allocate framebuffer", __FUNCTION__);
+ return -1;
+ }
+
+ /* Queue the user buffer. */
+ struct v4l2_buffer buf;
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+ buf.m.userptr = (unsigned long)cd->framebuffers[cd->framebuffer_num].data;
+ buf.length = cd->framebuffers[cd->framebuffer_num].size;
+ if (_xioctl(cd->handle, VIDIOC_QBUF, &buf) < 0) {
+ E("%s: VIDIOC_QBUF has failed: %s", __FUNCTION__, strerror(errno));
+ return -1;
+ }
+ }
+
+ cd->io_type = CAMERA_IO_USERPTR;
+
+ return 0;
+}
+
+/* Allocate frame buffer for direct read from the device.
+ * Return:
+ * 0 Framebuffers have been mapped.
+ *  -1 A critical error has occurred.
+ * 1 Memory mapping is not available.
+ */
+static int
+_camera_device_direct_framebuffer(LinuxCameraDevice* cd)
+{
+ /* Allocate framebuffer array. */
+ cd->framebuffer_num = 1;
+ cd->framebuffers = malloc(sizeof(CameraFrameBuffer));
+ if (cd->framebuffers == NULL) {
+ E("%s: Not enough memory to allocate framebuffer array", __FUNCTION__);
+ return -1;
+ }
+
+ cd->framebuffers[0].size = cd->actual_pixel_format.sizeimage;
+ cd->framebuffers[0].data = malloc(cd->framebuffers[0].size);
+ if (cd->framebuffers[0].data == NULL) {
+ E("%s: Not enough memory to allocate framebuffer", __FUNCTION__);
+ return -1;
+ }
+
+ cd->io_type = CAMERA_IO_DIRECT;
+
+ return 0;
+}
+
+/* Opens camera device.
+ * Param:
+ *  cd - Camera device descriptor to open the camera for.
+ * Return:
+ *  0 on success, != 0 on failure.
+ */
+static int
+_camera_device_open(LinuxCameraDevice* cd)
+{
+    struct stat st;
+
+    /* Only character device nodes can be V4L2 cameras. */
+    if (stat(cd->device_name, &st)) {
+        return -1;
+    }
+    if (!S_ISCHR(st.st_mode)) {
+        E("%s: '%s' is not a device", __FUNCTION__, cd->device_name);
+        return -1;
+    }
+
+    /* Open handle to the device, and query device capabilities. */
+    cd->handle = open(cd->device_name, O_RDWR | O_NONBLOCK, 0);
+    if (cd->handle < 0) {
+        E("%s: Cannot open camera device '%s': %s",
+          __FUNCTION__, cd->device_name, strerror(errno));
+        return -1;
+    }
+
+    /* EINVAL from VIDIOC_QUERYCAP means the node is not a V4L2 device at
+     * all; anything else is a generic query failure. Cleanup is shared. */
+    if (_xioctl(cd->handle, VIDIOC_QUERYCAP, &cd->caps) < 0) {
+        if (EINVAL == errno) {
+            E("%s: Camera '%s' is not a V4L2 device",
+              __FUNCTION__, cd->device_name);
+        } else {
+            E("%s: Unable to query capabilities for camera device '%s'",
+              __FUNCTION__, cd->device_name);
+        }
+        close(cd->handle);
+        cd->handle = -1;
+        return -1;
+    }
+
+    /* Make sure that camera supports minimal requirements. */
+    if (!(cd->caps.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+        E("%s: Camera '%s' is not a video capture device",
+          __FUNCTION__, cd->device_name);
+        close(cd->handle);
+        cd->handle = -1;
+        return -1;
+    }
+
+    return 0;
+}
+
+/* Enumerates frame sizes for the given pixel format.
+ * Param:
+ *  cd - Opened camera device descriptor.
+ *  fmt - Pixel format to enum frame sizes for.
+ *  sizes - Upon success contains an array of supported frame sizes. The size of
+ *      the array is defined by the value, returned from this routine. The caller
+ *      is responsible for freeing memory allocated for this array.
+ * Return:
+ *  On success returns number of entries in the 'sizes' array. On failure returns
+ *  a negative value.
+ */
+static int
+_camera_device_enum_format_sizes(LinuxCameraDevice* cd,
+                                 uint32_t fmt,
+                                 CameraFrameDim** sizes)
+{
+    int n;
+    int sizes_num = 0;
+    int out_num = 0;
+    struct v4l2_frmsizeenum size_enum;
+    CameraFrameDim* arr;
+
+    /* First pass: calculate number of supported sizes for the given format. */
+    for (n = 0; ; n++) {
+        /* The V4L2 spec requires the reserved fields of the enumeration
+         * structure to be zeroed before the ioctl (as is already done for
+         * VIDIOC_ENUM_FMT elsewhere in this file). */
+        memset(&size_enum, 0, sizeof(size_enum));
+        size_enum.index = n;
+        size_enum.pixel_format = fmt;
+        if(_xioctl(cd->handle, VIDIOC_ENUM_FRAMESIZES, &size_enum)) {
+            break;
+        }
+        if (size_enum.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+            /* Size is in the simple width, height form. */
+            sizes_num++;
+        } else if (size_enum.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+            /* Sizes are represented as min/max width and height with a step for
+             * each dimension. Since at the end we want to list each supported
+             * size in the array (that's the only format supported by the guest
+             * camera framework), we need to calculate how many array entries
+             * this will generate. */
+            const uint32_t dif_widths =
+                (size_enum.stepwise.max_width - size_enum.stepwise.min_width) /
+                size_enum.stepwise.step_width + 1;
+            const uint32_t dif_heights =
+                (size_enum.stepwise.max_height - size_enum.stepwise.min_height) /
+                size_enum.stepwise.step_height + 1;
+            sizes_num += dif_widths * dif_heights;
+        } else if (size_enum.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+            /* Special stepwise case, when steps are set to 1. We still need to
+             * flatten this for the guest, but the array may be too big.
+             * Fortunately, we don't need to be fancy, so three sizes would be
+             * sufficient here: min, max, and one in the middle. */
+            sizes_num += 3;
+        }
+    }
+    if (sizes_num == 0) {
+        return 0;
+    }
+
+    /* Allocate, and initialize the array of supported entries. */
+    *sizes = (CameraFrameDim*)malloc(sizes_num * sizeof(CameraFrameDim));
+    if (*sizes == NULL) {
+        E("%s: Memory allocation failure", __FUNCTION__);
+        return -1;
+    }
+    arr = *sizes;
+    /* Second pass: fill in the entries counted above. */
+    for (n = 0; out_num < sizes_num; n++) {
+        memset(&size_enum, 0, sizeof(size_enum));
+        size_enum.index = n;
+        size_enum.pixel_format = fmt;
+        if(_xioctl(cd->handle, VIDIOC_ENUM_FRAMESIZES, &size_enum)) {
+            /* Errors are not welcome here anymore: this index succeeded in
+             * the counting pass above. */
+            E("%s: Unexpected failure while getting pixel dimensions: %s",
+              __FUNCTION__, strerror(errno));
+            free(arr);
+            return -1;
+        }
+
+        if (size_enum.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+            arr[out_num].width = size_enum.discrete.width;
+            arr[out_num].height = size_enum.discrete.height;
+            out_num++;
+        } else if (size_enum.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+            /* Flatten the stepwise description into individual entries. */
+            uint32_t w;
+            for (w = size_enum.stepwise.min_width;
+                 w <= size_enum.stepwise.max_width;
+                 w += size_enum.stepwise.step_width) {
+                uint32_t h;
+                for (h = size_enum.stepwise.min_height;
+                     h <= size_enum.stepwise.max_height;
+                     h += size_enum.stepwise.step_height) {
+                    arr[out_num].width = w;
+                    arr[out_num].height = h;
+                    out_num++;
+                }
+            }
+        } else if (size_enum.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+            /* min */
+            arr[out_num].width = size_enum.stepwise.min_width;
+            arr[out_num].height = size_enum.stepwise.min_height;
+            out_num++;
+            /* one in the middle */
+            arr[out_num].width =
+                (size_enum.stepwise.min_width + size_enum.stepwise.max_width) / 2;
+            arr[out_num].height =
+                (size_enum.stepwise.min_height + size_enum.stepwise.max_height) / 2;
+            out_num++;
+            /* max */
+            arr[out_num].width = size_enum.stepwise.max_width;
+            arr[out_num].height = size_enum.stepwise.max_height;
+            out_num++;
+        }
+    }
+
+    return out_num;
+}
+
+/* Enumerates pixel formats, supported by the device.
+ * Note that this routine will enumerate only raw (uncompressed) formats.
+ * Param:
+ *  cd - Opened camera device descriptor.
+ *  fmts - Upon success contains an array of supported pixel formats. The size of
+ *      the array is defined by the value, returned from this routine. The caller
+ *      is responsible for freeing memory allocated for this array.
+ * Return:
+ *  On success returns number of entries in the 'fmts' array. On failure returns
+ *  a negative value.
+ */
+static int
+_camera_device_enum_pixel_formats(LinuxCameraDevice* cd, QemuPixelFormat** fmts)
+{
+    int n, max_fmt;
+    int fmt_num = 0;
+    int out_num = 0;
+    struct v4l2_fmtdesc fmt_enum;
+    QemuPixelFormat* arr;
+
+    /* First pass: count supported (uncompressed) formats so the output
+     * array can be sized. */
+    for (max_fmt = 0; ; max_fmt++) {
+        memset(&fmt_enum, 0, sizeof(fmt_enum));
+        fmt_enum.index = max_fmt;
+        fmt_enum.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if(_xioctl(cd->handle, VIDIOC_ENUM_FMT, &fmt_enum)) {
+            break;
+        }
+        /* Skip the compressed ones. */
+        if ((fmt_enum.flags & V4L2_FMT_FLAG_COMPRESSED) == 0) {
+            fmt_num++;
+        }
+    }
+    if (fmt_num == 0) {
+        return 0;
+    }
+
+    /* Allocate, and initialize array for enumerated formats. */
+    *fmts = (QemuPixelFormat*)malloc(fmt_num * sizeof(QemuPixelFormat));
+    if (*fmts == NULL) {
+        E("%s: Memory allocation failure", __FUNCTION__);
+        return -1;
+    }
+    arr = *fmts;
+    memset(arr, 0, fmt_num * sizeof(QemuPixelFormat));
+    /* Second pass: fill in format descriptors and enumerate the frame
+     * dimensions supported for each. */
+    for (n = 0; n < max_fmt && out_num < fmt_num; n++) {
+        memset(&fmt_enum, 0, sizeof(fmt_enum));
+        fmt_enum.index = n;
+        fmt_enum.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if(_xioctl(cd->handle, VIDIOC_ENUM_FMT, &fmt_enum)) {
+            int nn;
+            /* Errors are not welcome here anymore: this index succeeded in
+             * the counting pass above. */
+            E("%s: Unexpected failure while getting pixel format: %s",
+              __FUNCTION__, strerror(errno));
+            /* Unwind every format enumerated so far before bailing out. */
+            for (nn = 0; nn < out_num; nn++) {
+                _qemu_pixel_format_free(arr + nn);
+            }
+            free(arr);
+            return -1;
+        }
+        /* Skip the compressed ones. */
+        if ((fmt_enum.flags & V4L2_FMT_FLAG_COMPRESSED) == 0) {
+            arr[out_num].format = fmt_enum.pixelformat;
+            /* Enumerate frame dimensions supported for this format. */
+            arr[out_num].dim_num =
+                _camera_device_enum_format_sizes(cd, fmt_enum.pixelformat,
+                                                 &arr[out_num].dims);
+            if (arr[out_num].dim_num > 0) {
+                out_num++;
+            } else if (arr[out_num].dim_num < 0) {
+                int nn;
+                E("Unable to enumerate supported dimensions for pixel format %d",
+                  fmt_enum.pixelformat);
+                for (nn = 0; nn < out_num; nn++) {
+                    _qemu_pixel_format_free(arr + nn);
+                }
+                free(arr);
+                return -1;
+            }
+            /* dim_num == 0 (no sizes reported) silently drops the format:
+             * the array slot is simply reused for the next candidate. */
+        }
+    }
+
+    return out_num;
+}
+
+/* Collects information about an opened camera device.
+ * The information collected in this routine contains list of pixel formats,
+ * supported by the device, and list of frame dimensions supported by the camera
+ * for each pixel format.
+ * Param:
+ *  cd - Opened camera device descriptor.
+ *  cis - Upon success contains information collected from the camera device.
+ * Return:
+ *  0 on success, != 0 on failure.
+ */
+static int
+_camera_device_get_info(LinuxCameraDevice* cd, CameraInfo* cis)
+{
+    int f;
+    int chosen = -1;
+    QemuPixelFormat* formats = NULL;
+    /* A camera exposing no uncompressed formats (num_pix_fmts == 0) is
+     * treated the same as an enumeration failure. */
+    int num_pix_fmts = _camera_device_enum_pixel_formats(cd, &formats);
+    if (num_pix_fmts <= 0) {
+        return -1;
+    }
+
+    /* Lets see if camera supports preferred formats */
+    for (f = 0; f < _preferred_format_num; f++) {
+        chosen = _get_format_index(_preferred_formats[f], formats, num_pix_fmts);
+        if (chosen >= 0) {
+            break;
+        }
+    }
+    if (chosen < 0) {
+        /* Camera doesn't support any of the chosen formats. Then it doesn't
+         * matter which one we choose. Lets choose the first one. */
+        chosen = 0;
+    }
+
+    cis->device_name = ASTRDUP(cd->device_name);
+    cis->inp_channel = cd->input_channel;
+    cis->pixel_format = formats[chosen].format;
+    cis->frame_sizes_num = formats[chosen].dim_num;
+    /* Swap instead of copy: ownership of the dimensions array moves to
+     * 'cis', and NULL-ing the source keeps the cleanup loop below from
+     * freeing it. */
+    cis->frame_sizes = formats[chosen].dims;
+    formats[chosen].dims = NULL;
+    cis->in_use = 0;
+
+    /* Release everything else that was enumerated. */
+    for (f = 0; f < num_pix_fmts; f++) {
+        _qemu_pixel_format_free(formats + f);
+    }
+    free(formats);
+
+    return 0;
+}
+
+/*******************************************************************************
+ * CameraDevice API
+ ******************************************************************************/
+
+/* Opens a camera device (see CameraDevice API in camera-capture.h).
+ * Param:
+ *  name - Device node to open; NULL selects the default "/dev/video0".
+ *  inp_channel - Input channel (video driver index) to use.
+ * Return:
+ *  Initialized CameraDevice header on success, NULL on failure.
+ */
+CameraDevice*
+camera_device_open(const char* name, int inp_channel)
+{
+    struct v4l2_cropcap cropcap;
+    struct v4l2_crop crop;
+    LinuxCameraDevice* cd;
+
+    /* Allocate and initialize the descriptor. */
+    cd = _camera_device_alloc();
+    if (cd == NULL) {
+        /* Don't dereference a failed allocation. */
+        E("%s: Unable to allocate LinuxCameraDevice instance", __FUNCTION__);
+        return NULL;
+    }
+    cd->device_name = name != NULL ? ASTRDUP(name) : ASTRDUP("/dev/video0");
+    cd->input_channel = inp_channel;
+
+    /* Open the device. */
+    if (_camera_device_open(cd)) {
+        _camera_device_free(cd);
+        return NULL;
+    }
+
+    /* Select video input, video standard and tune here. Results of these
+     * ioctls are deliberately ignored: not every driver supports cropping.
+     * Structures are zeroed first, as required by the V4L2 spec. */
+    CLEAR(cropcap);
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    _xioctl(cd->handle, VIDIOC_CROPCAP, &cropcap);
+    CLEAR(crop);
+    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    crop.c = cropcap.defrect; /* reset to default */
+    _xioctl (cd->handle, VIDIOC_S_CROP, &crop);
+
+    return &cd->header;
+}
+
+int
+camera_device_start_capturing(CameraDevice* ccd,
+                              uint32_t pixel_format,
+                              int frame_width,
+                              int frame_height)
+{
+    struct v4l2_format fmt;
+    LinuxCameraDevice* cd;
+    /* Scratch buffer for printing a FOURCC code as text in error messages. */
+    char fmt_str[5];
+    int r;
+
+    /* Sanity checks. */
+    if (ccd == NULL || ccd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    cd = (LinuxCameraDevice*)ccd->opaque;
+    if (cd->handle < 0) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    /* Try to set pixel format with the given dimensions. */
+    CLEAR(fmt);
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = frame_width;
+    fmt.fmt.pix.height = frame_height;
+    fmt.fmt.pix.pixelformat = pixel_format;
+    if (_xioctl(cd->handle, VIDIOC_S_FMT, &fmt) < 0) {
+        /* NOTE(review): printing the FOURCC by copying the raw uint32_t
+         * bytes assumes a little-endian host — confirm for all targets. */
+        memcpy(fmt_str, &pixel_format, 4);
+        fmt_str[4] = '\0';
+        E("%s: Camera '%s' does not support pixel format '%s' with dimensions %dx%d",
+          __FUNCTION__, cd->device_name, fmt_str, frame_width, frame_height);
+        _camera_device_reset(cd);
+        return -1;
+    }
+    /* VIDIOC_S_FMT may have changed some properties of the structure. Make sure
+     * that dimensions didn't change. */
+    if (fmt.fmt.pix.width != frame_width || fmt.fmt.pix.height != frame_height) {
+        memcpy(fmt_str, &pixel_format, 4);
+        fmt_str[4] = '\0';
+        E("%s: Dimensions %dx%d are wrong for pixel format '%s'",
+          __FUNCTION__, frame_width, frame_height, fmt_str);
+        _camera_device_reset(cd);
+        return -1;
+    }
+    memcpy(&cd->actual_pixel_format, &fmt.fmt.pix, sizeof(struct v4l2_pix_format));
+
+    /*
+     * Lets initialize frame buffers, and see what kind of I/O we're going to
+     * use to retrieve frames.
+     */
+
+    /* First, lets see if we can do mapped I/O (as most performant one). */
+    r = _camera_device_mmap_framebuffer(cd);
+    if (r < 0) {
+        /* Some critical error has occurred. Bail out. */
+        _camera_device_reset(cd);
+        return -1;
+    } else if (r > 0) {
+        /* Device doesn't support memory mapping. Fall back to the next most
+         * performant option: preallocated user buffers. */
+        r = _camera_device_user_framebuffer(cd);
+        if (r < 0) {
+            /* Some critical error has occurred. Bail out. */
+            _camera_device_reset(cd);
+            return -1;
+        } else if (r > 0) {
+            /* The only thing left for us is direct reading from the device. */
+            if (!(cd->caps.capabilities & V4L2_CAP_READWRITE)) {
+                E("%s: Don't know how to access frames on device '%s'",
+                  __FUNCTION__, cd->device_name);
+                _camera_device_reset(cd);
+                return -1;
+            }
+            r = _camera_device_direct_framebuffer(cd);
+            if (r != 0) {
+                /* Any error at this point is a critical one. */
+                _camera_device_reset(cd);
+                return -1;
+            }
+        }
+    }
+
+    /* Start capturing from the device. Only the streaming I/O modes (mmap /
+     * user pointers) need an explicit VIDIOC_STREAMON; direct read does not. */
+    if (cd->io_type != CAMERA_IO_DIRECT) {
+        enum v4l2_buf_type type;
+        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (_xioctl (cd->handle, VIDIOC_STREAMON, &type) < 0) {
+            E("%s: VIDIOC_STREAMON on camera '%s' has failed: %s",
+              __FUNCTION__, cd->device_name, strerror(errno));
+            _camera_device_reset(cd);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+int
+camera_device_stop_capturing(CameraDevice* ccd)
+{
+    enum v4l2_buf_type type;
+    LinuxCameraDevice* cd;
+
+    /* Sanity checks. */
+    if (ccd == NULL || ccd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    cd = (LinuxCameraDevice*)ccd->opaque;
+    if (cd->handle < 0) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    /* Streaming I/O modes (mmap / user pointers) must be stopped explicitly;
+     * direct-read mode has no streaming state to tear down. */
+    if (cd->io_type == CAMERA_IO_MEMMAP || cd->io_type == CAMERA_IO_USERPTR) {
+        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (_xioctl(cd->handle, VIDIOC_STREAMOFF, &type) < 0) {
+            E("%s: VIDIOC_STREAMOFF on camera '%s' has failed: %s",
+              __FUNCTION__, cd->device_name, strerror(errno));
+            return -1;
+        }
+    } else if (cd->io_type != CAMERA_IO_DIRECT) {
+        E("%s: Unknown I/O method: %d", __FUNCTION__, cd->io_type);
+        return -1;
+    }
+
+    /* Reopen the device to reset its internal state. It seems that if we don't
+     * do that, an attempt to reinit the device with different frame dimensions
+     * would fail. */
+    _camera_device_reset(cd);
+
+    return 0;
+}
+
+int
+camera_device_read_frame(CameraDevice* ccd,
+                         ClientFrameBuffer* framebuffers,
+                         int fbs_num)
+{
+    LinuxCameraDevice* cd;
+
+    /* Sanity checks. */
+    if (ccd == NULL || ccd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    cd = (LinuxCameraDevice*)ccd->opaque;
+    if (cd->handle < 0) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    if (cd->io_type == CAMERA_IO_DIRECT) {
+        /* Read directly from the device. */
+        size_t total_read_bytes = 0;
+        /* There is one framebuffer allocated for direct read. */
+        void* buff = cd->framebuffers[0].data;
+        do {
+            /* NOTE(review): EIO/EAGAIN cause an immediate retry, so this
+             * loop busy-waits on the non-blocking handle until a complete
+             * frame has been read — confirm that is acceptable to callers. */
+            int read_bytes =
+                read(cd->handle, buff + total_read_bytes,
+                     cd->actual_pixel_format.sizeimage - total_read_bytes);
+            if (read_bytes < 0) {
+                switch (errno) {
+                    case EIO:
+                    case EAGAIN:
+                        continue;
+                    default:
+                        E("%s: Unable to read from the camera device '%s': %s",
+                          __FUNCTION__, cd->device_name, strerror(errno));
+                        return -1;
+                }
+            }
+            total_read_bytes += read_bytes;
+        } while (total_read_bytes < cd->actual_pixel_format.sizeimage);
+        /* Convert the read frame into the caller's framebuffers. */
+        return convert_frame(buff, cd->actual_pixel_format.pixelformat,
+                             cd->actual_pixel_format.sizeimage,
+                             cd->actual_pixel_format.width,
+                             cd->actual_pixel_format.height,
+                             framebuffers, fbs_num);
+    } else {
+        /* Dequeue next buffer from the device. */
+        struct v4l2_buffer buf;
+        int res;
+        CLEAR(buf);
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = cd->io_type == CAMERA_IO_MEMMAP ? V4L2_MEMORY_MMAP :
+                                                       V4L2_MEMORY_USERPTR;
+        for (;;) {
+            /* Note: this inner 'res' deliberately shadows the outer one;
+             * only the dequeue status is examined here. EINTR and EIO are
+             * retried, EAGAIN is reported to the caller as "try again". */
+            const int res = _xioctl(cd->handle, VIDIOC_DQBUF, &buf);
+            if (res >= 0) {
+                break;
+            } else if (errno == EAGAIN) {
+                return 1;  // Tells the caller to repeat.
+            } else if (errno != EINTR && errno != EIO) {
+                E("%s: VIDIOC_DQBUF on camera '%s' has failed: %s",
+                  __FUNCTION__, cd->device_name, strerror(errno));
+                return -1;
+            }
+        }
+
+        /* Convert frame to the receiving buffers. */
+        res = convert_frame(cd->framebuffers[buf.index].data,
+                            cd->actual_pixel_format.pixelformat,
+                            cd->actual_pixel_format.sizeimage,
+                            cd->actual_pixel_format.width,
+                            cd->actual_pixel_format.height,
+                            framebuffers, fbs_num);
+
+        /* Requeue the buffer back to the device. A failure here is only a
+         * warning: the converted frame is still returned to the caller. */
+        if (_xioctl(cd->handle, VIDIOC_QBUF, &buf) < 0) {
+            W("%s: VIDIOC_QBUF on camera '%s' has failed: %s",
+              __FUNCTION__, cd->device_name, strerror(errno));
+        }
+
+        return res;
+    }
+}
+
+void
+camera_device_close(CameraDevice* ccd)
+{
+    /* Reject bogus descriptors up front; otherwise hand the platform
+     * descriptor over to the common cleanup routine. */
+    if (ccd == NULL || ccd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return;
+    }
+    _camera_device_free((LinuxCameraDevice*)ccd->opaque);
+}
+
+/* Enumerates camera devices connected to the host.
+ * Param:
+ *  cis - Array to fill with camera information; must hold 'max' entries.
+ *  max - Maximum number of cameras to enumerate.
+ * Return:
+ *  Number of entries filled in 'cis'.
+ */
+int
+enumerate_camera_devices(CameraInfo* cis, int max)
+{
+    char dev_name[24];
+    int found = 0;
+    int n;
+
+    /* Probe /dev/video0 .. /dev/video<max-1>, stopping at the first node
+     * that cannot be opened as a camera device. */
+    for (n = 0; n < max; n++) {
+        CameraDevice* cd;
+
+        /* snprintf instead of sprintf: guards against overrun should the
+         * device index ever outgrow the fixed buffer. */
+        snprintf(dev_name, sizeof(dev_name), "/dev/video%d", n);
+        cd = camera_device_open(dev_name, 0);
+        if (cd != NULL) {
+            LinuxCameraDevice* lcd = (LinuxCameraDevice*)cd->opaque;
+            if (!_camera_device_get_info(lcd, cis + found)) {
+                /* Expose the device to the guest under a stable alias. */
+                char user_name[24];
+                snprintf(user_name, sizeof(user_name), "webcam%d", found);
+                cis[found].display_name = ASTRDUP(user_name);
+                cis[found].in_use = 0;
+                found++;
+            }
+            camera_device_close(cd);
+        } else {
+            /* No more camera devices. */
+            break;
+        }
+    }
+
+    return found;
+}
diff --git a/android/camera/camera-capture-mac.m b/android/camera/camera-capture-mac.m
new file mode 100644
index 0000000..1c1c3d5
--- /dev/null
+++ b/android/camera/camera-capture-mac.m
@@ -0,0 +1,549 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains code that is used to capture video frames from a camera device
+ * on Mac. This code uses QTKit API to work with camera devices, and requires
+ * Mac OS at least 10.5
+ */
+
+#import <Cocoa/Cocoa.h>
+#import <QTKit/QTKit.h>
+#import <CoreAudio/CoreAudio.h>
+#include "android/camera/camera-capture.h"
+#include "android/camera/camera-format-converters.h"
+
+#define E(...) derror(__VA_ARGS__)
+#define W(...) dwarning(__VA_ARGS__)
+#define D(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+
+/*******************************************************************************
+ * Helper routines
+ ******************************************************************************/
+
+/* Converts internal QT pixel format to a FOURCC value.
+ * Returns 0 (and logs an error) for formats this code cannot map. */
+static uint32_t
+_QTtoFOURCC(uint32_t qt_pix_format)
+{
+    switch (qt_pix_format) {
+        case kCVPixelFormatType_24RGB:
+            return V4L2_PIX_FMT_RGB24;
+
+        /* NOTE(review): 24-bit BGR is mapped to the 32-bit V4L2 BGR32
+         * format here — confirm the format converters expect this. */
+        case kCVPixelFormatType_24BGR:
+            return V4L2_PIX_FMT_BGR32;
+
+        case kCVPixelFormatType_32ARGB:
+        case kCVPixelFormatType_32RGBA:
+            return V4L2_PIX_FMT_RGB32;
+
+        case kCVPixelFormatType_32BGRA:
+        case kCVPixelFormatType_32ABGR:
+            return V4L2_PIX_FMT_BGR32;
+
+        case kCVPixelFormatType_422YpCbCr8:
+            return V4L2_PIX_FMT_UYVY;
+
+        case kCVPixelFormatType_420YpCbCr8Planar:
+            return V4L2_PIX_FMT_YVU420;
+
+        case 'yuvs':  // kCVPixelFormatType_422YpCbCr8_yuvs - undeclared?
+            return V4L2_PIX_FMT_YUYV;
+
+        default:
+            E("Unrecognized pixel format '%.4s'", (const char*)&qt_pix_format);
+            return 0;
+    }
+}
+
+/*******************************************************************************
+ * MacCamera implementation
+ ******************************************************************************/
+
+/* Encapsulates a camera device on MacOS */
+@interface MacCamera : NSObject {
+    /* Capture session. */
+    QTCaptureSession* capture_session;
+    /* Camera capture device. */
+    QTCaptureDevice* capture_device;
+    /* Input device registered with the capture session. */
+    QTCaptureDeviceInput* input_device;
+    /* Output device registered with the capture session. */
+    QTCaptureVideoPreviewOutput* output_device;
+    /* Current framebuffer: most recent frame pushed by the capture thread,
+     * guarded by @synchronized(self) in the implementation. */
+    CVImageBufferRef current_frame;
+    /* Desired frame width */
+    int desired_width;
+    /* Desired frame height */
+    int desired_height;
+}
+
+/* Initializes MacCamera instance.
+ * Return:
+ *  Pointer to initialized instance on success, or nil on failure.
+ */
+- (MacCamera*)init;
+
+/* Undoes 'init' */
+- (void)free;
+
+/* Starts capturing video frames.
+ * Param:
+ *  width, height - Requested dimensions for the captured video frames.
+ * Return:
+ *  0 on success, or !=0 on failure.
+ */
+- (int)start_capturing:(int)width:(int)height;
+
+/* Captures a frame from the camera device.
+ * Param:
+ *  framebuffers - Array of framebuffers where to read the frame. Size of this
+ *      array is defined by the 'fbs_num' parameter. Note that the caller must
+ *      make sure that buffers are large enough to contain entire frame captured
+ *      from the device.
+ *  fbs_num - Number of entries in the 'framebuffers' array.
+ * Return:
+ *  0 on success, or non-zero value on failure. There is a special value 1
+ *  returned from this routine which indicates that frames are not yet available
+ *  in the device. The client should respond to this value by repeating the
+ *  read, rather than reporting an error.
+ */
+- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num;
+
+@end
+
+@implementation MacCamera
+
+/* Designated initializer: obtains the default video capture device, opens
+ * it, and wires up a QTKit capture session with an input device and a
+ * preview output whose delegate receives the frames. Returns nil (after
+ * releasing self) on any failure. */
+- (MacCamera*)init
+{
+    NSError *error;
+    BOOL success;
+
+    /* Obtain the capture device, make sure it's not used by another
+     * application, and open it. */
+    capture_device =
+        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
+    if (capture_device == nil) {
+        E("There are no available video devices found.");
+        [self release];
+        return nil;
+    }
+    if ([capture_device isInUseByAnotherApplication]) {
+        E("Default camera device is in use by another application.");
+        /* NOTE(review): defaultInputDeviceWithMediaType: does not transfer
+         * ownership by Cocoa naming conventions, so releasing the device
+         * here may over-release it — confirm. */
+        [capture_device release];
+        capture_device = nil;
+        [self release];
+        return nil;
+    }
+    success = [capture_device open:&error];
+    if (!success) {
+        E("Unable to open camera device: '%s'",
+          [[error localizedDescription] UTF8String]);
+        [self free];
+        [self release];
+        return nil;
+    }
+
+    /* Create capture session. */
+    capture_session = [[QTCaptureSession alloc] init];
+    if (capture_session == nil) {
+        E("Unable to create capture session.");
+        [self free];
+        [self release];
+        return nil;
+    }
+
+    /* Create an input device and register it with the capture session. */
+    input_device = [[QTCaptureDeviceInput alloc] initWithDevice:capture_device];
+    success = [capture_session addInput:input_device error:&error];
+    if (!success) {
+        E("Unable to initialize input device: '%s'",
+          [[error localizedDescription] UTF8String]);
+        [input_device release];
+        input_device = nil;
+        [self free];
+        [self release];
+        return nil;
+    }
+
+    /* Create an output device and register it with the capture session. */
+    output_device = [[QTCaptureVideoPreviewOutput alloc] init];
+    success = [capture_session addOutput:output_device error:&error];
+    if (!success) {
+        E("Unable to initialize output device: '%s'",
+          [[error localizedDescription] UTF8String]);
+        [output_device release];
+        output_device = nil;
+        [self free];
+        [self release];
+        return nil;
+    }
+    /* Frames will be delivered to our captureOutput:... delegate method. */
+    [output_device setDelegate:self];
+
+    return self;
+}
+
+/* Tears down the session, devices, and any cached frame. Safe to call on a
+ * partially-initialized instance: every member is nil-checked. */
+- (void)free
+{
+    /* Uninitialize capture session. */
+    if (capture_session != nil) {
+        /* Make sure that capturing is stopped. */
+        if ([capture_session isRunning]) {
+            [capture_session stopRunning];
+        }
+        /* Detach input and output devices from the session. */
+        if (input_device != nil) {
+            [capture_session removeInput:input_device];
+            [input_device release];
+            input_device = nil;
+        }
+        if (output_device != nil) {
+            [capture_session removeOutput:output_device];
+            [output_device release];
+            output_device = nil;
+        }
+        /* Destroy capture session. */
+        [capture_session release];
+        capture_session = nil;
+    }
+
+    /* Uninitialize capture device. */
+    if (capture_device != nil) {
+        /* Make sure device is not opened. */
+        if ([capture_device isOpen]) {
+            [capture_device close];
+        }
+        [capture_device release];
+        capture_device = nil;
+    }
+
+    /* Release current framebuffer. */
+    if (current_frame != nil) {
+        CVBufferRelease(current_frame);
+        current_frame = nil;
+    }
+}
+
+- (int)start_capturing:(int)width:(int)height
+{
+    if (![capture_session isRunning]) {
+        /* Set desired frame dimensions. QTKit scales the frames it delivers
+         * to the pixel buffer attributes requested on the output device. */
+        desired_width = width;
+        desired_height = height;
+        [output_device setPixelBufferAttributes:
+            [NSDictionary dictionaryWithObjectsAndKeys:
+                [NSNumber numberWithInt: width], kCVPixelBufferWidthKey,
+                [NSNumber numberWithInt: height], kCVPixelBufferHeightKey,
+                nil]];
+        [capture_session startRunning];
+        return 0;
+    } else if (width == desired_width && height == desired_height) {
+        /* Capture is already running with the requested dimensions. */
+        W("%s: Already capturing %dx%d frames",
+          __FUNCTION__, desired_width, desired_height);
+        return -1;
+    } else {
+        E("%s: Already capturing %dx%d frames. Requested frame dimensions are %dx%d",
+          __FUNCTION__, desired_width, desired_height, width, height);
+        return -1;
+    }
+}
+
+- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num
+{
+    int res = -1;
+
+    /* Frames are pushed by QT in another thread.
+     * So we need a protection here. */
+    @synchronized (self)
+    {
+        if (current_frame != nil) {
+            /* Collect frame info. */
+            const uint32_t pixel_format =
+                _QTtoFOURCC(CVPixelBufferGetPixelFormatType(current_frame));
+            const int frame_width = CVPixelBufferGetWidth(current_frame);
+            const int frame_height = CVPixelBufferGetHeight(current_frame);
+            const size_t frame_size =
+                CVPixelBufferGetBytesPerRow(current_frame) * frame_height;
+
+            /* Get framebuffer pointer. The base address must be locked
+             * before it can be dereferenced, and unlocked when done. */
+            CVPixelBufferLockBaseAddress(current_frame, 0);
+            const void* pixels = CVPixelBufferGetBaseAddress(current_frame);
+            if (pixels != nil) {
+                /* Convert framebuffer. */
+                res = convert_frame(pixels, pixel_format, frame_size,
+                                    frame_width, frame_height,
+                                    framebuffers, fbs_num);
+            } else {
+                E("%s: Unable to obtain framebuffer", __FUNCTION__);
+                res = -1;
+            }
+            CVPixelBufferUnlockBaseAddress(current_frame, 0);
+        } else {
+            /* First frame didn't come in just yet. Let the caller repeat. */
+            res = 1;
+        }
+    }
+
+    return res;
+}
+
+/* QTCaptureVideoPreviewOutput delegate: caches the most recent frame,
+ * releasing the frame it replaces outside the lock. */
+- (void)captureOutput:(QTCaptureOutput*) captureOutput
+  didOutputVideoFrame:(CVImageBufferRef)videoFrame
+     withSampleBuffer:(QTSampleBuffer*) sampleBuffer
+       fromConnection:(QTCaptureConnection*) connection
+{
+    CVImageBufferRef to_release;
+    CVBufferRetain(videoFrame);
+
+    /* Frames are pulled by the client in another thread.
+     * So we need a protection here. */
+    @synchronized (self)
+    {
+        to_release = current_frame;
+        current_frame = videoFrame;
+    }
+    CVBufferRelease(to_release);
+}
+
+@end
+
+/*******************************************************************************
+ * CameraDevice routines
+ ******************************************************************************/
+
+typedef struct MacCameraDevice MacCameraDevice;
+/* MacOS-specific camera device descriptor. */
+struct MacCameraDevice {
+    /* Common camera device descriptor. Its 'opaque' field points back to
+     * this structure (set in _camera_device_alloc). */
+    CameraDevice header;
+    /* Actual camera device object (nil when the device is not opened). */
+    MacCamera* device;
+};
+
+/* Allocates an instance of MacCameraDevice structure.
+ * Return:
+ *  Allocated instance of MacCameraDevice structure. Note that this routine
+ *  also sets 'opaque' field in the 'header' structure to point back to the
+ *  containing MacCameraDevice instance.
+ */
+static MacCameraDevice*
+_camera_device_alloc(void)
+{
+    /* calloc yields the zero-initialized descriptor in a single step. */
+    MacCameraDevice* cd = (MacCameraDevice*)calloc(1, sizeof(MacCameraDevice));
+    if (cd == NULL) {
+        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
+        return NULL;
+    }
+    cd->header.opaque = cd;
+    return cd;
+}
+
+/* Uninitializes and frees MacCameraDevice descriptor.
+ * Note that upon return from this routine memory allocated for the descriptor
+ * will be freed.
+ */
+static void
+_camera_device_free(MacCameraDevice* cd)
+{
+    if (cd != NULL) {
+        if (cd->device != NULL) {
+            /* 'free' tears down the capture session internals; 'release'
+             * then disposes of the MacCamera object itself. */
+            [cd->device free];
+            [cd->device release];
+            cd->device = nil;
+        }
+        AFREE(cd);
+    } else {
+        W("%s: No descriptor", __FUNCTION__);
+    }
+}
+
+/* Resets camera device after capturing.
+ * Since new capture request may require different frame dimensions we must
+ * reset frame info cached in the capture window. The only way to do that would
+ * be closing, and reopening it again. */
+static void
+_camera_device_reset(MacCameraDevice* cd)
+{
+    if (cd != NULL && cd->device) {
+        /* NOTE(review): 'free' only releases the session internals (it is
+         * not dealloc), after which the same instance is re-sent 'init'.
+         * If that init fails it releases self and returns nil, leaving
+         * cd->device nil — confirm callers tolerate this. */
+        [cd->device free];
+        cd->device = [cd->device init];
+    }
+}
+
+/*******************************************************************************
+ * CameraDevice API
+ ******************************************************************************/
+
+/* Opens the default Mac camera device. The 'name' and 'inp_channel'
+ * parameters are accepted for API compatibility but not used: only the
+ * default QTKit video device is supported here.
+ * Return:
+ *  Initialized CameraDevice header on success, NULL on failure.
+ */
+CameraDevice*
+camera_device_open(const char* name, int inp_channel)
+{
+    MacCameraDevice* mcd;
+
+    mcd = _camera_device_alloc();
+    if (mcd == NULL) {
+        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
+        return NULL;
+    }
+    mcd->device = [[MacCamera alloc] init];
+    if (mcd->device == nil) {
+        E("%s: Unable to initialize camera device.", __FUNCTION__);
+        /* Don't leak the descriptor on failure. */
+        _camera_device_free(mcd);
+        return NULL;
+    }
+    return &mcd->header;
+}
+
+int
+camera_device_start_capturing(CameraDevice* cd,
+                              uint32_t pixel_format,
+                              int frame_width,
+                              int frame_height)
+{
+    MacCameraDevice* dev;
+
+    /* Validate the descriptor before dereferencing it. */
+    if (cd == NULL || cd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    dev = (MacCameraDevice*)cd->opaque;
+    if (dev->device == nil) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    /* Frame dimension negotiation is delegated to the MacCamera object;
+     * the pixel format is dictated by the device itself. */
+    return [dev->device start_capturing:frame_width:frame_height];
+}
+
+int
+camera_device_stop_capturing(CameraDevice* cd)
+{
+    MacCameraDevice* dev;
+
+    /* Validate the descriptor before dereferencing it. */
+    if (cd == NULL || cd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    dev = (MacCameraDevice*)cd->opaque;
+    if (dev->device == nil) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    /* Reset capture settings, so next call to capture can set its own. */
+    _camera_device_reset(dev);
+
+    return 0;
+}
+
+int
+camera_device_read_frame(CameraDevice* cd,
+                         ClientFrameBuffer* framebuffers,
+                         int fbs_num)
+{
+    MacCameraDevice* dev;
+
+    /* Validate the descriptor before dereferencing it. */
+    if (cd == NULL || cd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return -1;
+    }
+    dev = (MacCameraDevice*)cd->opaque;
+    if (dev->device == nil) {
+        E("%s: Camera device is not opened", __FUNCTION__);
+        return -1;
+    }
+
+    /* Frame retrieval and conversion happen inside the MacCamera object. */
+    return [dev->device read_frame:framebuffers:fbs_num];
+}
+
+void
+camera_device_close(CameraDevice* cd)
+{
+    /* Reject bogus descriptors up front; otherwise hand the platform
+     * descriptor over to the common cleanup routine. */
+    if (cd == NULL || cd->opaque == NULL) {
+        E("%s: Invalid camera device descriptor", __FUNCTION__);
+        return;
+    }
+    _camera_device_free((MacCameraDevice*)cd->opaque);
+}
+
+int
+enumerate_camera_devices(CameraInfo* cis, int max)
+{
+/* Array containing emulated webcam frame dimensions.
+ * QT API provides device independent frame dimensions, by scaling frames
+ * received from the device to whatever dimensions were requested for the
+ * output device. So, we can just use a small set of frame dimensions to
+ * emulate.
+ */
+static const CameraFrameDim _emulate_dims[] =
+{
+    /* Emulates 640x480 frame. */
+    {640, 480},
+    /* Emulates 320x240 frame. */
+    {320, 240},
+};
+
+    /* Obtain default video device. QT API doesn't really provide a reliable
+     * way to identify camera devices. There is a QTCaptureDevice::uniqueId
+     * method that supposedly does that, but in some cases it just doesn't
+     * work. Until we figure out a reliable device identification, we will
+     * stick to using only one (default) camera for emulation. */
+    QTCaptureDevice* video_dev =
+        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
+    if (video_dev == nil) {
+        D("No web cameras are connected to the host.");
+        return 0;
+    }
+
+    /* Obtain pixel format for the device. */
+    NSArray* pix_formats = [video_dev formatDescriptions];
+    if (pix_formats == nil || [pix_formats count] == 0) {
+        E("Unable to obtain pixel format for the default camera device.");
+        /* NOTE(review): video_dev and pix_formats come from methods that do
+         * not transfer ownership by Cocoa naming conventions, so the
+         * release calls in this routine may over-release — confirm. */
+        [video_dev release];
+        return 0;
+    }
+    /* The first reported format becomes the emulated camera's format. */
+    const uint32_t qt_pix_format = [[pix_formats objectAtIndex:0] formatType];
+    [pix_formats release];
+
+    /* Obtain FOURCC pixel format for the device. */
+    cis[0].pixel_format = _QTtoFOURCC(qt_pix_format);
+    if (cis[0].pixel_format == 0) {
+        /* Unsupported pixel format. */
+        E("Pixel format '%.4s' reported by the camera device is unsupported",
+          (const char*)&qt_pix_format);
+        [video_dev release];
+        return 0;
+    }
+
+    /* Initialize camera info structure. Regardless of 'max', at most one
+     * (the default) camera is ever reported on Mac. */
+    cis[0].frame_sizes = (CameraFrameDim*)malloc(sizeof(_emulate_dims));
+    if (cis[0].frame_sizes != NULL) {
+        cis[0].frame_sizes_num = sizeof(_emulate_dims) / sizeof(*_emulate_dims);
+        memcpy(cis[0].frame_sizes, _emulate_dims, sizeof(_emulate_dims));
+        cis[0].device_name = ASTRDUP("webcam0");
+        cis[0].inp_channel = 0;
+        cis[0].display_name = ASTRDUP("webcam0");
+        cis[0].in_use = 0;
+        [video_dev release];
+        return 1;
+    } else {
+        E("Unable to allocate memory for camera information.");
+        [video_dev release];
+        return 0;
+    }
+}
diff --git a/android/camera/camera-capture-windows.c b/android/camera/camera-capture-windows.c
new file mode 100755
index 0000000..28820a8
--- /dev/null
+++ b/android/camera/camera-capture-windows.c
@@ -0,0 +1,598 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains code capturing video frames from a camera device on Windows.
+ * This code uses capXxx API, available via capCreateCaptureWindow.
+ */
+
+#include <vfw.h>
+#include "android/camera/camera-capture.h"
+#include "android/camera/camera-format-converters.h"
+
+#define E(...) derror(__VA_ARGS__)
+#define W(...) dwarning(__VA_ARGS__)
+#define D(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#define D_ACTIVE VERBOSE_CHECK(camera)
+
+/* the T(...) macro is used to dump traffic */
+#define T_ACTIVE 0
+
+#if T_ACTIVE
+#define T(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#else
+#define T(...) ((void)0)
+#endif
+
+/* Default name for the capture window. */
+static const char* _default_window_name = "AndroidEmulatorVC";
+
+typedef struct WndCameraDevice WndCameraDevice;
+/* Windows-specific camera device descriptor. */
+struct WndCameraDevice {
+ /* Common camera device descriptor. */
+ CameraDevice header;
+ /* Capture window name. (default is AndroidEmulatorVC) */
+ char* window_name;
+ /* Input channel (video driver index). (default is 0) */
+ int input_channel;
+
+ /*
+ * Set when framework gets initialized.
+ */
+
+ /* Video capturing window. Null indicates that device is not connected. */
+ HWND cap_window;
+    /* DC for frame bitmap manipulation. Null indicates that frames are not
+     * being captured. */
+ HDC dc;
+ /* Bitmap info for the frames obtained from the video capture driver. */
+ BITMAPINFO* frame_bitmap;
+ /* Bitmap info to use for GetDIBits calls. We can't really use bitmap info
+     * obtained from the video capture driver, because of two issues. First,
+     * the driver may return an incompatible 'biCompression' value. For instance,
+     * sometimes it returns a 'fourcc' pixel format value instead of BI_XXX,
+     * which causes GetDIBits to fail. Second, the bitmap that represents a frame
+     * that has actually been obtained from the device does not necessarily match
+     * the bitmap info that the capture driver returned. Sometimes the captured bitmap
+ * is a 32-bit RGB, while bit count reported by the driver is 16. So, to
+ * address these issues we need to have another bitmap info, that can be used
+ * in GetDIBits calls. */
+ BITMAPINFO* gdi_bitmap;
+ /* Framebuffer large enough to fit the frame. */
+ uint8_t* framebuffer;
+ /* Framebuffer size. */
+ size_t framebuffer_size;
+ /* Framebuffer's pixel format. */
+ uint32_t pixel_format;
+ /* If != 0, frame bitmap is "top-down". If 0, frame bitmap is "bottom-up". */
+ int is_top_down;
+};
+
+/*******************************************************************************
+ * CameraDevice routines
+ ******************************************************************************/
+
+/* Allocates an instance of WndCameraDevice structure.
+ * Return:
+ * Allocated instance of WndCameraDevice structure. Note that this routine
+ * also sets 'opaque' field in the 'header' structure to point back to the
+ * containing WndCameraDevice instance.
+ */
+static WndCameraDevice*
+_camera_device_alloc(void)
+{
+ WndCameraDevice* cd = (WndCameraDevice*)malloc(sizeof(WndCameraDevice));
+ if (cd != NULL) {
+ memset(cd, 0, sizeof(WndCameraDevice));
+ cd->header.opaque = cd;
+ } else {
+ E("%s: Unable to allocate WndCameraDevice instance", __FUNCTION__);
+ }
+ return cd;
+}
+
+/* Uninitializes and frees WndCameraDevice descriptor.
+ * Note that upon return from this routine memory allocated for the descriptor
+ * will be freed.
+ */
+static void
+_camera_device_free(WndCameraDevice* cd)
+{
+ if (cd != NULL) {
+ if (cd->cap_window != NULL) {
+ /* Disconnect from the driver. */
+ capDriverDisconnect(cd->cap_window);
+
+ if (cd->dc != NULL) {
+ W("%s: Frames should not be capturing at this point",
+ __FUNCTION__);
+ ReleaseDC(cd->cap_window, cd->dc);
+ cd->dc = NULL;
+ }
+ /* Destroy the capturing window. */
+ DestroyWindow(cd->cap_window);
+ cd->cap_window = NULL;
+ }
+ if (cd->gdi_bitmap != NULL) {
+ free(cd->gdi_bitmap);
+ }
+ if (cd->frame_bitmap != NULL) {
+ free(cd->frame_bitmap);
+ }
+ if (cd->window_name != NULL) {
+ free(cd->window_name);
+ }
+ if (cd->framebuffer != NULL) {
+ free(cd->framebuffer);
+ }
+ AFREE(cd);
+ } else {
+ W("%s: No descriptor", __FUNCTION__);
+ }
+}
+
+/* Resets camera device after capturing.
+ * Since new capture request may require different frame dimensions we must
+ * reset frame info cached in the capture window. The only way to do that would
+ * be closing, and reopening it again. */
+static void
+_camera_device_reset(WndCameraDevice* cd)
+{
+ if (cd != NULL && cd->cap_window != NULL) {
+ capDriverDisconnect(cd->cap_window);
+ if (cd->dc != NULL) {
+ ReleaseDC(cd->cap_window, cd->dc);
+ cd->dc = NULL;
+ }
+ if (cd->gdi_bitmap != NULL) {
+ free(cd->gdi_bitmap);
+ cd->gdi_bitmap = NULL;
+ }
+ if (cd->frame_bitmap != NULL) {
+ free(cd->frame_bitmap);
+ cd->frame_bitmap = NULL;
+ }
+ if (cd->framebuffer != NULL) {
+ free(cd->framebuffer);
+ cd->framebuffer = NULL;
+ }
+
+ /* Recreate the capturing window. */
+ DestroyWindow(cd->cap_window);
+ cd->cap_window = capCreateCaptureWindow(cd->window_name, WS_CHILD, 0, 0,
+ 0, 0, HWND_MESSAGE, 1);
+ }
+}
+
+/* Gets an absolute value out of a signed integer. */
+static __inline__ int
+_abs(int val)
+{
+ return (val < 0) ? -val : val;
+}
+
+/*******************************************************************************
+ * CameraDevice API
+ ******************************************************************************/
+
+CameraDevice*
+camera_device_open(const char* name, int inp_channel)
+{
+ WndCameraDevice* wcd;
+
+ /* Allocate descriptor and initialize windows-specific fields. */
+ wcd = _camera_device_alloc();
+ if (wcd == NULL) {
+ E("%s: Unable to allocate WndCameraDevice instance", __FUNCTION__);
+ return NULL;
+ }
+ wcd->window_name = (name != NULL) ? ASTRDUP(name) :
+ ASTRDUP(_default_window_name);
+ if (wcd->window_name == NULL) {
+ E("%s: Unable to save window name", __FUNCTION__);
+ _camera_device_free(wcd);
+ return NULL;
+ }
+ wcd->input_channel = inp_channel;
+
+ /* Create capture window that is a child of HWND_MESSAGE window.
+ * We make it invisible, so it doesn't mess with the UI. Also
+ * note that we supply standard HWND_MESSAGE window handle as
+ * the parent window, since we don't want video capturing
+ * machinery to be dependent on the details of our UI. */
+ wcd->cap_window = capCreateCaptureWindow(wcd->window_name, WS_CHILD, 0, 0,
+ 0, 0, HWND_MESSAGE, 1);
+ if (wcd->cap_window == NULL) {
+ E("%s: Unable to create video capturing window '%s': %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ _camera_device_free(wcd);
+ return NULL;
+ }
+
+ return &wcd->header;
+}
+
+int
+camera_device_start_capturing(CameraDevice* cd,
+ uint32_t pixel_format,
+ int frame_width,
+ int frame_height)
+{
+ WndCameraDevice* wcd;
+ HBITMAP bm_handle;
+ BITMAP bitmap;
+ size_t format_info_size;
+
+ if (cd == NULL || cd->opaque == NULL) {
+ E("%s: Invalid camera device descriptor", __FUNCTION__);
+ return -1;
+ }
+ wcd = (WndCameraDevice*)cd->opaque;
+
+ /* wcd->dc is an indicator of capturing: !NULL - capturing, NULL - not */
+ if (wcd->dc != NULL) {
+ W("%s: Capturing is already on on device '%s'",
+ __FUNCTION__, wcd->window_name);
+ return 0;
+ }
+
+ /* Connect capture window to the video capture driver. */
+ if (!capDriverConnect(wcd->cap_window, wcd->input_channel)) {
+ return -1;
+ }
+
+ /* Get current frame information from the driver. */
+ format_info_size = capGetVideoFormatSize(wcd->cap_window);
+ if (format_info_size == 0) {
+ E("%s: Unable to get video format size: %d",
+ __FUNCTION__, GetLastError());
+ _camera_device_reset(wcd);
+ return -1;
+ }
+ wcd->frame_bitmap = (BITMAPINFO*)malloc(format_info_size);
+ if (wcd->frame_bitmap == NULL) {
+ E("%s: Unable to allocate frame bitmap info buffer", __FUNCTION__);
+ _camera_device_reset(wcd);
+ return -1;
+ }
+ if (!capGetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
+ format_info_size)) {
+ E("%s: Unable to obtain video format: %d", __FUNCTION__, GetLastError());
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Lets see if we need to set different frame dimensions */
+ if (wcd->frame_bitmap->bmiHeader.biWidth != frame_width ||
+ abs(wcd->frame_bitmap->bmiHeader.biHeight) != frame_height) {
+ /* Dimensions don't match. Set new frame info. */
+ wcd->frame_bitmap->bmiHeader.biWidth = frame_width;
+ wcd->frame_bitmap->bmiHeader.biHeight = frame_height;
+ /* We need to recalculate image size, since the capture window / driver
+ * will use image size provided by us. */
+ if (wcd->frame_bitmap->bmiHeader.biBitCount == 24) {
+ /* Special case that may require WORD boundary alignment. */
+ uint32_t bpl = (frame_width * 3 + 1) & ~1;
+ wcd->frame_bitmap->bmiHeader.biSizeImage = bpl * frame_height;
+ } else {
+ wcd->frame_bitmap->bmiHeader.biSizeImage =
+ (frame_width * frame_height * wcd->frame_bitmap->bmiHeader.biBitCount) / 8;
+ }
+ if (!capSetVideoFormat(wcd->cap_window, wcd->frame_bitmap,
+ format_info_size)) {
+ E("%s: Unable to set video format: %d", __FUNCTION__, GetLastError());
+ _camera_device_reset(wcd);
+ return -1;
+ }
+ }
+
+ if (wcd->frame_bitmap->bmiHeader.biCompression > BI_PNG) {
+ D("%s: Video capturing driver has reported pixel format %.4s",
+ __FUNCTION__, (const char*)&wcd->frame_bitmap->bmiHeader.biCompression);
+ }
+
+ /* Most of the time frame bitmaps come in "bottom-up" form, where its origin
+ * is the lower-left corner. However, it could be in the normal "top-down"
+ * form with the origin in the upper-left corner. So, we must adjust the
+ * biHeight field, since the way "top-down" form is reported here is by
+ * setting biHeight to a negative value. */
+ if (wcd->frame_bitmap->bmiHeader.biHeight < 0) {
+ wcd->frame_bitmap->bmiHeader.biHeight =
+ -wcd->frame_bitmap->bmiHeader.biHeight;
+ wcd->is_top_down = 1;
+ } else {
+ wcd->is_top_down = 0;
+ }
+
+ /* Get DC for the capturing window that will be used when we deal with
+ * bitmaps obtained from the camera device during frame capturing. */
+ wcd->dc = GetDC(wcd->cap_window);
+ if (wcd->dc == NULL) {
+ E("%s: Unable to obtain DC for %s: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /*
+ * At this point we need to grab a frame to properly setup framebuffer, and
+ * calculate pixel format. The problem is that bitmap information obtained
+ * from the driver doesn't necessarily match the actual bitmap we're going to
+ * obtain via capGrabFrame / capEditCopy / GetClipboardData
+ */
+
+ /* Grab a frame, and post it to the clipboard. Not very effective, but this
+ * is how capXxx API is operating. */
+ if (!capGrabFrameNoStop(wcd->cap_window) ||
+ !capEditCopy(wcd->cap_window) ||
+ !OpenClipboard(wcd->cap_window)) {
+ E("%s: Device '%s' is unable to save frame to the clipboard: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Get bitmap handle saved into clipboard. Note that bitmap is still
+ * owned by the clipboard here! */
+ bm_handle = (HBITMAP)GetClipboardData(CF_BITMAP);
+ if (bm_handle == NULL) {
+ E("%s: Device '%s' is unable to obtain frame from the clipboard: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ CloseClipboard();
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Get bitmap object that is initialized with the actual bitmap info. */
+ if (!GetObject(bm_handle, sizeof(BITMAP), &bitmap)) {
+ E("%s: Device '%s' is unable to obtain frame's bitmap: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ CloseClipboard();
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Now that we have all we need in 'bitmap' */
+ CloseClipboard();
+
+    /* Make sure that dimensions match. Otherwise - fail. */
+ if (wcd->frame_bitmap->bmiHeader.biWidth != bitmap.bmWidth ||
+ wcd->frame_bitmap->bmiHeader.biHeight != bitmap.bmHeight ) {
+ E("%s: Requested dimensions %dx%d do not match the actual %dx%d",
+ __FUNCTION__, frame_width, frame_height,
+ wcd->frame_bitmap->bmiHeader.biWidth,
+ wcd->frame_bitmap->bmiHeader.biHeight);
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Create bitmap info that will be used with GetDIBits. */
+ wcd->gdi_bitmap = (BITMAPINFO*)malloc(wcd->frame_bitmap->bmiHeader.biSize);
+ if (wcd->gdi_bitmap == NULL) {
+ E("%s: Unable to allocate gdi bitmap info", __FUNCTION__);
+ _camera_device_reset(wcd);
+ return -1;
+ }
+ memcpy(wcd->gdi_bitmap, wcd->frame_bitmap,
+ wcd->frame_bitmap->bmiHeader.biSize);
+ wcd->gdi_bitmap->bmiHeader.biCompression = BI_RGB;
+ wcd->gdi_bitmap->bmiHeader.biBitCount = bitmap.bmBitsPixel;
+ wcd->gdi_bitmap->bmiHeader.biSizeImage = bitmap.bmWidthBytes * bitmap.bmWidth;
+    /* Adjust GDI's bitmap biHeight for proper frame direction ("top-down", or
+     * "bottom-up"). We do this trick in order to simplify pixel format conversion
+     * routines, where we always assume "top-down" frames. The trick here is to
+     * have negative biHeight in 'gdi_bitmap' if driver provides "bottom-up"
+     * frames, and positive biHeight in 'gdi_bitmap' if driver provides "top-down"
+     * frames. This way GetDIBits will always return "top-down" frames. */
+ if (wcd->is_top_down) {
+ wcd->gdi_bitmap->bmiHeader.biHeight =
+ wcd->frame_bitmap->bmiHeader.biHeight;
+ } else {
+ wcd->gdi_bitmap->bmiHeader.biHeight =
+ -wcd->frame_bitmap->bmiHeader.biHeight;
+ }
+
+ /* Allocate framebuffer. */
+ wcd->framebuffer = (uint8_t*)malloc(wcd->gdi_bitmap->bmiHeader.biSizeImage);
+ if (wcd->framebuffer == NULL) {
+ E("%s: Unable to allocate %d bytes for framebuffer",
+ __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biSizeImage);
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ /* Lets see what pixel format we will use. */
+ if (wcd->gdi_bitmap->bmiHeader.biBitCount == 16) {
+ wcd->pixel_format = V4L2_PIX_FMT_RGB565;
+ } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 24) {
+ wcd->pixel_format = V4L2_PIX_FMT_BGR24;
+ } else if (wcd->gdi_bitmap->bmiHeader.biBitCount == 32) {
+ wcd->pixel_format = V4L2_PIX_FMT_BGR32;
+ } else {
+ E("%s: Unsupported number of bits per pixel %d",
+ __FUNCTION__, wcd->gdi_bitmap->bmiHeader.biBitCount);
+ _camera_device_reset(wcd);
+ return -1;
+ }
+
+ D("%s: Capturing device '%s': %d bits per pixel in %.4s [%dx%d] frame",
+ __FUNCTION__, wcd->window_name, wcd->gdi_bitmap->bmiHeader.biBitCount,
+ (const char*)&wcd->pixel_format, wcd->frame_bitmap->bmiHeader.biWidth,
+ wcd->frame_bitmap->bmiHeader.biHeight);
+
+ return 0;
+}
+
+int
+camera_device_stop_capturing(CameraDevice* cd)
+{
+ WndCameraDevice* wcd;
+ if (cd == NULL || cd->opaque == NULL) {
+ E("%s: Invalid camera device descriptor", __FUNCTION__);
+ return -1;
+ }
+ wcd = (WndCameraDevice*)cd->opaque;
+
+ /* wcd->dc is the indicator of capture. */
+ if (wcd->dc == NULL) {
+ W("%s: Device '%s' is not capturing video",
+ __FUNCTION__, wcd->window_name);
+ return 0;
+ }
+ ReleaseDC(wcd->cap_window, wcd->dc);
+ wcd->dc = NULL;
+
+ /* Reset the device in preparation for the next capture. */
+ _camera_device_reset(wcd);
+
+ return 0;
+}
+
+int
+camera_device_read_frame(CameraDevice* cd,
+ ClientFrameBuffer* framebuffers,
+ int fbs_num)
+{
+ WndCameraDevice* wcd;
+ HBITMAP bm_handle;
+
+ /* Sanity checks. */
+ if (cd == NULL || cd->opaque == NULL) {
+ E("%s: Invalid camera device descriptor", __FUNCTION__);
+ return -1;
+ }
+ wcd = (WndCameraDevice*)cd->opaque;
+ if (wcd->dc == NULL) {
+ W("%s: Device '%s' is not captuing video",
+ __FUNCTION__, wcd->window_name);
+ return -1;
+ }
+
+ /* Grab a frame, and post it to the clipboard. Not very effective, but this
+ * is how capXxx API is operating. */
+ if (!capGrabFrameNoStop(wcd->cap_window) ||
+ !capEditCopy(wcd->cap_window) ||
+ !OpenClipboard(wcd->cap_window)) {
+ E("%s: Device '%s' is unable to save frame to the clipboard: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ return -1;
+ }
+
+ /* Get bitmap handle saved into clipboard. Note that bitmap is still
+ * owned by the clipboard here! */
+ bm_handle = (HBITMAP)GetClipboardData(CF_BITMAP);
+ if (bm_handle == NULL) {
+ E("%s: Device '%s' is unable to obtain frame from the clipboard: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ CloseClipboard();
+ return -1;
+ }
+
+ /* Get bitmap buffer. */
+ if (wcd->gdi_bitmap->bmiHeader.biHeight > 0) {
+ wcd->gdi_bitmap->bmiHeader.biHeight = -wcd->gdi_bitmap->bmiHeader.biHeight;
+ }
+
+ if (!GetDIBits(wcd->dc, bm_handle, 0, wcd->frame_bitmap->bmiHeader.biHeight,
+ wcd->framebuffer, wcd->gdi_bitmap, DIB_RGB_COLORS)) {
+ E("%s: Device '%s' is unable to transfer frame to the framebuffer: %d",
+ __FUNCTION__, wcd->window_name, GetLastError());
+ CloseClipboard();
+ return -1;
+ }
+
+ if (wcd->gdi_bitmap->bmiHeader.biHeight < 0) {
+ wcd->gdi_bitmap->bmiHeader.biHeight = -wcd->gdi_bitmap->bmiHeader.biHeight;
+ }
+
+ CloseClipboard();
+
+ /* Convert framebuffer. */
+ return convert_frame(wcd->framebuffer,
+ wcd->pixel_format,
+ wcd->gdi_bitmap->bmiHeader.biSizeImage,
+ wcd->frame_bitmap->bmiHeader.biWidth,
+ wcd->frame_bitmap->bmiHeader.biHeight,
+ framebuffers, fbs_num);
+}
+
+void
+camera_device_close(CameraDevice* cd)
+{
+ /* Sanity checks. */
+ if (cd == NULL || cd->opaque == NULL) {
+ E("%s: Invalid camera device descriptor", __FUNCTION__);
+ } else {
+ WndCameraDevice* wcd = (WndCameraDevice*)cd->opaque;
+ _camera_device_free(wcd);
+ }
+}
+
+int
+enumerate_camera_devices(CameraInfo* cis, int max)
+{
+ int inp_channel, found = 0;
+
+ for (inp_channel = 0; inp_channel < 10 && found < max; inp_channel++) {
+ char name[256];
+ CameraDevice* cd;
+
+ snprintf(name, sizeof(name), "%s%d", _default_window_name, found);
+ cd = camera_device_open(name, inp_channel);
+ if (cd != NULL) {
+ WndCameraDevice* wcd = (WndCameraDevice*)cd->opaque;
+
+ /* Unfortunately, on Windows we have to start capturing in order to get the
+ * actual frame properties. Note that on Windows camera_device_start_capturing
+ * will ignore the pixel format parameter, since it will be determined during
+ * the course of the routine. Also note that on Windows all frames will be
+ * 640x480. */
+ if (!camera_device_start_capturing(cd, V4L2_PIX_FMT_RGB32, 640, 480)) {
+ /* capXxx API supports only single frame size (always observed 640x480,
+ * but the actual numbers may vary). */
+ cis[found].frame_sizes = (CameraFrameDim*)malloc(sizeof(CameraFrameDim));
+ if (cis[found].frame_sizes != NULL) {
+ char disp_name[24];
+ sprintf(disp_name, "webcam%d", found);
+ cis[found].display_name = ASTRDUP(disp_name);
+ cis[found].device_name = ASTRDUP(name);
+ cis[found].direction = ASTRDUP("front");
+ cis[found].inp_channel = inp_channel;
+ cis[found].frame_sizes->width = wcd->frame_bitmap->bmiHeader.biWidth;
+ cis[found].frame_sizes->height = wcd->frame_bitmap->bmiHeader.biHeight;
+ cis[found].frame_sizes_num = 1;
+ cis[found].pixel_format = wcd->pixel_format;
+ cis[found].in_use = 0;
+ found++;
+ } else {
+ E("%s: Unable to allocate dimensions", __FUNCTION__);
+ }
+ camera_device_stop_capturing(cd);
+ } else {
+ /* No more cameras. */
+ camera_device_close(cd);
+ break;
+ }
+ camera_device_close(cd);
+ } else {
+ /* No more cameras. */
+ break;
+ }
+ }
+
+ return found;
+}
diff --git a/android/camera/camera-capture.h b/android/camera/camera-capture.h
new file mode 100644
index 0000000..3025a23
--- /dev/null
+++ b/android/camera/camera-capture.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_CAMERA_CAPTURE_H
+#define ANDROID_CAMERA_CAMERA_CAPTURE_H
+
+/*
+ * Contains declarations for video capturing API that is used by the camera
+ * emulator.
+ */
+
+#include "camera-common.h"
+
+/* Initializes camera device descriptor, and connects to the camera device.
+ * Param:
+ * name - On Linux contains name of the device to be used to capture video.
+ * On Windows contains name to assign to the capturing window. This parameter
+ * can be NULL, in which case '/dev/video0' will be used as device name on
+ * Linux, or 'AndroidEmulatorVC' on Windows.
+ * inp_channel - On Linux defines input channel to use when communicating with
+ * the camera driver. On Windows contains an index (up to 10) of the driver
+ * to use to communicate with the camera device.
+ * Return:
+ * Initialized camera device descriptor on success, or NULL on failure.
+ */
+extern CameraDevice* camera_device_open(const char* name, int inp_channel);
+
+/* Starts capturing frames from the camera device.
+ * Param:
+ * cd - Camera descriptor representing a camera device opened in
+ * camera_device_open routine.
+ * pixel_format - Defines pixel format for the captured frames. Must be one of
+ * the formats, supported by the camera device.
+ * width, height - Frame dimensions for the captured video frame. Must match
+ * dimensions supported by the camera for the pixel format defined by the
+ * 'pixel_format' parameter.
+ * Return:
+ * 0 on success, or non-zero value on failure.
+ */
+extern int camera_device_start_capturing(CameraDevice* cd,
+ uint32_t pixel_format,
+ int frame_width,
+ int frame_height);
+
+/* Stops capturing frames from the camera device.
+ * Param:
+ * cd - Camera descriptor representing a camera device opened in
+ * camera_device_open routine.
+ * Return:
+ * 0 on success, or non-zero value on failure.
+ */
+extern int camera_device_stop_capturing(CameraDevice* cd);
+
+/* Captures a frame from the camera device.
+ * Param:
+ * cd - Camera descriptor representing a camera device opened in
+ * camera_device_open routine.
+ * framebuffers - Array of framebuffers where to read the frame. Size of this
+ * array is defined by the 'fbs_num' parameter. Note that the caller must
+ * make sure that buffers are large enough to contain entire frame captured
+ * from the device.
+ * fbs_num - Number of entries in the 'framebuffers' array.
+ * Return:
+ *  0 on success, or non-zero value on failure. There is a special value 1
+ * returned from this routine which indicates that frames were not available in
+ * the device. This value is returned on Linux implementation when frame ioctl
+ * has returned EAGAIN error. The client should respond to this value by
+ * repeating the read, rather than reporting an error.
+ */
+extern int camera_device_read_frame(CameraDevice* cd,
+ ClientFrameBuffer* framebuffers,
+ int fbs_num);
+
+/* Closes camera device, opened in camera_device_open routine.
+ * Param:
+ * cd - Camera descriptor representing a camera device opened in
+ * camera_device_open routine.
+ */
+extern void camera_device_close(CameraDevice* cd);
+
+/* Enumerates camera devices connected to the host, and collects information
+ * about each device.
+ * Apparently, camera framework in the guest will only accept the YV12
+ * (V4L2_PIX_FMT_YVU420) pixel format. So, we don't really need to report all the
+ * pixel formats supported by the camera device back to the guest. We can simply
+ * pick any format that is supported by the device, and collect frame dimensions
+ * available for it. The only thing we can do is to specifically check if the
+ * camera supports YV12, and choose it, in order to spare some CPU cycles on the
+ * conversion.
+ * Param:
+ *  cis - An allocated array where to store information about found camera
+ * devices. For each found camera device an entry will be initialized in the
+ * array. It's responsibility of the caller to free the memory allocated for
+ * the entries.
+ * max - Maximum number of entries that can fit into the array.
+ * Return:
+ * Number of entries added to the 'cis' array on success, or < 0 on failure.
+ */
+extern int enumerate_camera_devices(CameraInfo* cis, int max);
+
+#endif /* ANDROID_CAMERA_CAMERA_CAPTURE_H */
diff --git a/android/camera/camera-common.h b/android/camera/camera-common.h
new file mode 100755
index 0000000..de09045
--- /dev/null
+++ b/android/camera/camera-common.h
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_CAMERA_COMMON_H_
+#define ANDROID_CAMERA_CAMERA_COMMON_H_
+
+/*
+ * Contains declarations of the platform-independent stuff that is used in
+ * camera emulation.
+ */
+
+#include "qemu-common.h"
+#include "android/utils/debug.h"
+#include "android/utils/misc.h"
+#include "android/utils/system.h"
+#ifdef _WIN32
+/* Include declarations that are missing in non-Linux headers. */
+#include "android/camera/camera-win.h"
+#elif _DARWIN_C_SOURCE
+/* Include declarations that are missing in non-Linux headers. */
+#include "android/camera/camera-win.h"
+#else
+#include <linux/videodev2.h>
+#endif /* _WIN32 */
+
+/*
+ * These are missing in the current linux/videodev2.h
+ */
+
+#ifndef V4L2_PIX_FMT_YVYU
+#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U')
+#endif /* V4L2_PIX_FMT_YVYU */
+#ifndef V4L2_PIX_FMT_VYUY
+#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y')
+#endif /* V4L2_PIX_FMT_VYUY */
+#ifndef V4L2_PIX_FMT_YUY2
+#define V4L2_PIX_FMT_YUY2 v4l2_fourcc('Y', 'U', 'Y', '2')
+#endif /* V4L2_PIX_FMT_YUY2 */
+#ifndef V4L2_PIX_FMT_YUNV
+#define V4L2_PIX_FMT_YUNV v4l2_fourcc('Y', 'U', 'N', 'V')
+#endif /* V4L2_PIX_FMT_YUNV */
+#ifndef V4L2_PIX_FMT_V422
+#define V4L2_PIX_FMT_V422 v4l2_fourcc('V', '4', '2', '2')
+#endif /* V4L2_PIX_FMT_V422 */
+#ifndef V4L2_PIX_FMT_YYVU
+#define V4L2_PIX_FMT_YYVU v4l2_fourcc('Y', 'Y', 'V', 'U')
+#endif /* V4L2_PIX_FMT_YYVU */
+#ifndef V4L2_PIX_FMT_SGBRG8
+#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G')
+#endif /* V4L2_PIX_FMT_SGBRG8 */
+#ifndef V4L2_PIX_FMT_SGRBG8
+#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G')
+#endif /* V4L2_PIX_FMT_SGRBG8 */
+#ifndef V4L2_PIX_FMT_SRGGB8
+#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B')
+#endif /* V4L2_PIX_FMT_SRGGB8 */
+#ifndef V4L2_PIX_FMT_SBGGR10
+#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '\0')
+#endif /* V4L2_PIX_FMT_SBGGR10 */
+#ifndef V4L2_PIX_FMT_SGBRG10
+#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '\0')
+#endif /* V4L2_PIX_FMT_SGBRG10 */
+#ifndef V4L2_PIX_FMT_SGRBG10
+#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '\0')
+#endif /* V4L2_PIX_FMT_SGRBG10 */
+#ifndef V4L2_PIX_FMT_SRGGB10
+#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '\0')
+#endif /* V4L2_PIX_FMT_SRGGB10 */
+#ifndef V4L2_PIX_FMT_SBGGR12
+#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2')
+#endif /* V4L2_PIX_FMT_SBGGR12 */
+#ifndef V4L2_PIX_FMT_SGBRG12
+#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2')
+#endif /* V4L2_PIX_FMT_SGBRG12 */
+#ifndef V4L2_PIX_FMT_SGRBG12
+#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2')
+#endif /* V4L2_PIX_FMT_SGRBG12 */
+#ifndef V4L2_PIX_FMT_SRGGB12
+#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2')
+#endif /* V4L2_PIX_FMT_SRGGB12 */
+
+/* Describes framebuffer, used by the client of camera capturing API.
+ * This descriptor is used in camera_device_read_frame call.
+ */
+typedef struct ClientFrameBuffer {
+ /* Pixel format used in the client framebuffer. */
+ uint32_t pixel_format;
+ /* Address of the client framebuffer. */
+ void* framebuffer;
+} ClientFrameBuffer;
+
+/* Describes frame dimensions.
+ */
+typedef struct CameraFrameDim {
+ /* Frame width. */
+ int width;
+ /* Frame height. */
+ int height;
+} CameraFrameDim;
+
+/* Camera information descriptor, containing properties of a camera connected
+ * to the host.
+ *
+ * Instances of this structure are created during camera device enumerations,
+ * and are considered to be constant everywhere else. The only exception to this
+ * rule is changing the 'in_use' flag during creation / destruction of a service
+ * representing that camera.
+ */
+typedef struct CameraInfo {
+ /* User-friendly camera display name. */
+ char* display_name;
+ /* Device name for the camera. */
+ char* device_name;
+ /* Input channel for the camera. */
+ int inp_channel;
+ /* Pixel format chosen for the camera. */
+ uint32_t pixel_format;
+ /* Direction the camera is facing: 'front', or 'back' */
+ char* direction;
+ /* Array of frame sizes supported for the pixel format chosen for the camera.
+ * The size of the array is defined by the frame_sizes_num field of this
+ * structure. */
+ CameraFrameDim* frame_sizes;
+ /* Number of frame sizes supported for the pixel format chosen
+ * for the camera. */
+ int frame_sizes_num;
+ /* In use status. When there is a camera service created for this camera,
+     * "in use" is set to one. Otherwise this flag is set to 0. */
+ int in_use;
+} CameraInfo;
+
+/* Allocates CameraInfo instance. */
+static __inline__ CameraInfo* _camera_info_alloc(void)
+{
+ CameraInfo* ci;
+ ANEW0(ci);
+ return ci;
+}
+
+/* Frees all resources allocated for CameraInfo instance (including the
+ * instance itself).
+ */
+static __inline__ void _camera_info_free(CameraInfo* ci)
+{
+ if (ci != NULL) {
+ if (ci->display_name != NULL)
+ free(ci->display_name);
+ if (ci->device_name != NULL)
+ free(ci->device_name);
+ if (ci->direction != NULL)
+ free(ci->direction);
+ if (ci->frame_sizes != NULL)
+ free(ci->frame_sizes);
+ AFREE(ci);
+ }
+}
+
+/* Describes a connected camera device.
+ * This is a platform-independent camera device descriptor that is used in
+ * the camera API.
+ */
+typedef struct CameraDevice {
+ /* Opaque pointer used by the camera capturing API. */
+ void* opaque;
+} CameraDevice;
+
+/* Returns current time in microseconds. */
+static __inline__ uint64_t
+_get_timestamp(void)
+{
+ struct timeval t;
+ t.tv_sec = t.tv_usec = 0;
+ gettimeofday(&t, NULL);
+ return (uint64_t)t.tv_sec * 1000000LL + t.tv_usec;
+}
+
+/* Sleeps for the given amount of milliseconds */
+static __inline__ void
+_camera_sleep(int millisec)
+{
+ struct timeval t;
+ const uint64_t wake_at = _get_timestamp() + (uint64_t)millisec * 1000;
+ do {
+ const uint64_t stamp = _get_timestamp();
+ if ((stamp / 1000) >= (wake_at / 1000)) {
+ break;
+ }
+ t.tv_sec = (wake_at - stamp) / 1000000;
+ t.tv_usec = (wake_at - stamp) - (uint64_t)t.tv_sec * 1000000;
+ } while (select(0, NULL, NULL, NULL, &t) < 0 && errno == EINTR);
+}
+
+#endif /* ANDROID_CAMERA_CAMERA_COMMON_H_ */
diff --git a/android/camera/camera-format-converters.c b/android/camera/camera-format-converters.c
new file mode 100755
index 0000000..3366a44
--- /dev/null
+++ b/android/camera/camera-format-converters.c
@@ -0,0 +1,1558 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _WIN32
+#elif _DARWIN_C_SOURCE
+#else
+#include <linux/videodev2.h>
+#endif
+#include "android/camera/camera-format-converters.h"
+
+#define E(...) derror(__VA_ARGS__)
+#define W(...) dwarning(__VA_ARGS__)
+#define D(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#define D_ACTIVE VERBOSE_CHECK(camera)
+
+ /*
+ * NOTE: RGB and big/little endian considerations. Wherever in this code RGB
+ * pixels are represented as WORD, or DWORD, the color order inside the
+ * WORD / DWORD matches the one that would occur if that WORD / DWORD would have
+ * been read from the typecasted framebuffer:
+ *
+ * const uint32_t rgb = *reinterpret_cast<const uint32_t*>(framebuffer);
+ *
+ * So, if this code runs on the little endian CPU, red color in 'rgb' would be
+ * masked as 0x000000ff, and blue color would be masked as 0x00ff0000, while if
+ * the code runs on a big endian CPU, the red color in 'rgb' would be masked as
+ * 0xff000000, and blue color would be masked as 0x0000ff00,
+ */
+
+/*
+ * RGB565 color masks
+ */
+
+#ifndef HOST_WORDS_BIGENDIAN
+static const uint16_t kRed5 = 0x001f;
+static const uint16_t kGreen6 = 0x07e0;
+static const uint16_t kBlue5 = 0xf800;
+#else // !HOST_WORDS_BIGENDIAN
+static const uint16_t kRed5 = 0xf800;
+static const uint16_t kGreen6 = 0x07e0;
+static const uint16_t kBlue5 = 0x001f;
+#endif // !HOST_WORDS_BIGENDIAN
+
+/*
+ * RGB32 color masks
+ */
+
+#ifndef HOST_WORDS_BIGENDIAN
+static const uint32_t kRed8 = 0x000000ff;
+static const uint32_t kGreen8 = 0x0000ff00;
+static const uint32_t kBlue8 = 0x00ff0000;
+#else // !HOST_WORDS_BIGENDIAN
+static const uint32_t kRed8 = 0x00ff0000;
+static const uint32_t kGreen8 = 0x0000ff00;
+static const uint32_t kBlue8 = 0x000000ff;
+#endif // !HOST_WORDS_BIGENDIAN
+
+/*
+ * Extracting, and saving color bytes from / to WORD / DWORD RGB.
+ */
+
+#ifndef HOST_WORDS_BIGENDIAN
+/* Extract red, green, and blue bytes from RGB565 word. */
+#define R16(rgb) (uint8_t)((rgb) & kRed5)
+#define G16(rgb) (uint8_t)(((rgb) & kGreen6) >> 5)
+#define B16(rgb) (uint8_t)(((rgb) & kBlue5) >> 11)
+/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
+#define R16_32(rgb) (uint8_t)((((rgb) & kRed5) << 3) | (((rgb) & kRed5) >> 2))
+#define G16_32(rgb) (uint8_t)((((rgb) & kGreen6) >> 3) | (((rgb) & kGreen6) >> 9))
+#define B16_32(rgb) (uint8_t)((((rgb) & kBlue5) >> 8) | (((rgb) & kBlue5) >> 14))
+/* Extract red, green, and blue bytes from RGB32 dword. */
+#define R32(rgb) (uint8_t)((rgb) & kRed8)
+#define G32(rgb) (uint8_t)((((rgb) & kGreen8) >> 8) & 0xff)
+#define B32(rgb) (uint8_t)((((rgb) & kBlue8) >> 16) & 0xff)
+/* Build RGB565 word from red, green, and blue bytes. */
+#define RGB565(r, g, b) (uint16_t)(((((uint16_t)(b) << 6) | (g)) << 5) | (r))
+/* Build RGB32 dword from red, green, and blue bytes. */
+#define RGB32(r, g, b) (uint32_t)(((((uint32_t)(b) << 8) | (g)) << 8) | (r))
+#else // !HOST_WORDS_BIGENDIAN
+/* Extract red, green, and blue bytes from RGB565 word. */
+#define R16(rgb) (uint8_t)(((rgb) & kRed5) >> 11)
+#define G16(rgb) (uint8_t)(((rgb) & kGreen6) >> 5)
+#define B16(rgb) (uint8_t)((rgb) & kBlue5)
+/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
+#define R16_32(rgb) (uint8_t)((((rgb) & kRed5) >> 8) | (((rgb) & kRed5) >> 14))
+#define G16_32(rgb) (uint8_t)((((rgb) & kGreen6) >> 3) | (((rgb) & kGreen6) >> 9))
+#define B16_32(rgb) (uint8_t)((((rgb) & kBlue5) << 3) | (((rgb) & kBlue5) >> 2))
+/* Extract red, green, and blue bytes from RGB32 dword. */
+#define R32(rgb) (uint8_t)(((rgb) & kRed8) >> 16)
+#define G32(rgb) (uint8_t)(((rgb) & kGreen8) >> 8)
+#define B32(rgb) (uint8_t)((rgb) & kBlue8)
+/* Build RGB565 word from red, green, and blue bytes. */
+#define RGB565(r, g, b) (uint16_t)(((((uint16_t)(r) << 6) | (g)) << 5) | (b))
+/* Build RGB32 dword from red, green, and blue bytes. */
+#define RGB32(r, g, b) (uint32_t)(((((uint32_t)(r) << 8) | (g)) << 8) | (b))
+#endif // !HOST_WORDS_BIGENDIAN
+
+/*
+ * BAYER bitmasks
+ */
+
+/* Bitmask for 8-bits BAYER pixel. */
+#define kBayer8 0xff
+/* Bitmask for 10-bits BAYER pixel. */
+#define kBayer10 0x3ff
+/* Bitmask for 12-bits BAYER pixel. */
+#define kBayer12 0xfff
+
+/* An union that simplifies breaking 32 bit RGB into separate R, G, and B colors.
+ */
+typedef union RGB32_t {
+ uint32_t color;
+ struct {
+#ifndef HOST_WORDS_BIGENDIAN
+ uint8_t r; uint8_t g; uint8_t b; uint8_t a;
+#else // !HOST_WORDS_BIGENDIAN
+ uint8_t a; uint8_t b; uint8_t g; uint8_t r;
+#endif // HOST_WORDS_BIGENDIAN
+ };
+} RGB32_t;
+
+/* Clips a value to the unsigned 0-255 range, treating negative values as zero.
+ */
+static __inline__ int
+clamp(int x)
+{
+ if (x > 255) return 255;
+ if (x < 0) return 0;
+ return x;
+}
+
+/********************************************************************************
+ * Basics of RGB -> YUV conversion
+ *******************************************************************************/
+
+/*
+ * RGB -> YUV conversion macros
+ */
+#define RGB2Y(r, g, b) (uint8_t)(((66 * (r) + 129 * (g) + 25 * (b) + 128) >> 8) + 16)
+#define RGB2U(r, g, b) (uint8_t)(((-38 * (r) - 74 * (g) + 112 * (b) + 128) >> 8) + 128)
+#define RGB2V(r, g, b) (uint8_t)(((112 * (r) - 94 * (g) - 18 * (b) + 128) >> 8) + 128)
+
+/* Converts R8 G8 B8 color to YUV. */
+static __inline__ void
+R8G8B8ToYUV(uint8_t r, uint8_t g, uint8_t b, uint8_t* y, uint8_t* u, uint8_t* v)
+{
+ *y = RGB2Y((int)r, (int)g, (int)b);
+ *u = RGB2U((int)r, (int)g, (int)b);
+ *v = RGB2V((int)r, (int)g, (int)b);
+}
+
+/* Converts RGB565 color to YUV. */
+static __inline__ void
+RGB565ToYUV(uint16_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
+{
+ R8G8B8ToYUV(R16_32(rgb), G16_32(rgb), B16_32(rgb), y, u, v);
+}
+
+/* Converts RGB32 color to YUV. */
+static __inline__ void
+RGB32ToYUV(uint32_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
+{
+ RGB32_t rgb_c;
+ rgb_c.color = rgb;
+ R8G8B8ToYUV(rgb_c.r, rgb_c.g, rgb_c.b, y, u, v);
+}
+
+/********************************************************************************
+ * Basics of YUV -> RGB conversion.
+ *******************************************************************************/
+
+/*
+ * YUV -> RGB conversion macros
+ */
+
+/* "Optimized" macros that take specially prepared Y, U, and V values:
+ * C = Y - 16
+ * D = U - 128
+ * E = V - 128
+ */
+#define YUV2RO(C, D, E) clamp((298 * (C) + 409 * (E) + 128) >> 8)
+#define YUV2GO(C, D, E) clamp((298 * (C) - 100 * (D) - 208 * (E) + 128) >> 8)
+#define YUV2BO(C, D, E) clamp((298 * (C) + 516 * (D) + 128) >> 8)
+
+/*
+ * Main macros that take the original Y, U, and V values
+ */
+#define YUV2R(y, u, v) clamp((298 * ((y)-16) + 409 * ((v)-128) + 128) >> 8)
+#define YUV2G(y, u, v) clamp((298 * ((y)-16) - 100 * ((u)-128) - 208 * ((v)-128) + 128) >> 8)
+#define YUV2B(y, u, v) clamp((298 * ((y)-16) + 516 * ((u)-128) + 128) >> 8)
+
+
+/* Converts YUV color to RGB565. */
+static __inline__ uint16_t
+YUVToRGB565(int y, int u, int v)
+{
+ /* Calculate C, D, and E values for the optimized macro. */
+ y -= 16; u -= 128; v -= 128;
+ const uint16_t r = YUV2RO(y,u,v) >> 3;
+ const uint16_t g = YUV2GO(y,u,v) >> 2;
+ const uint16_t b = YUV2BO(y,u,v) >> 3;
+ return RGB565(r, g, b);
+}
+
+/* Converts YUV color to RGB32. */
+static __inline__ uint32_t
+YUVToRGB32(int y, int u, int v)
+{
+ /* Calculate C, D, and E values for the optimized macro. */
+ y -= 16; u -= 128; v -= 128;
+ RGB32_t rgb;
+ rgb.r = YUV2RO(y,u,v);
+ rgb.g = YUV2GO(y,u,v);
+ rgb.b = YUV2BO(y,u,v);
+ return rgb.color;
+}
+
+/* Converts YUV color to separated RGB32 colors. */
+static __inline__ void
+YUVToRGBPix(int y, int u, int v, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ /* Calculate C, D, and E values for the optimized macro. */
+ y -= 16; u -= 128; v -= 128;
+ *r = (uint8_t)YUV2RO(y,u,v);
+ *g = (uint8_t)YUV2GO(y,u,v);
+ *b = (uint8_t)YUV2BO(y,u,v);
+}
+
+/********************************************************************************
+ * Generic converters between YUV and RGB formats
+ *******************************************************************************/
+
+/*
+ * The converters go line by line, converting one frame format to another.
+ * It's pretty much straightforward for RGB/BRG, where all colors are
+ * grouped next to each other in memory. The only two things that differ one RGB
+ * format from another are:
+ * - Is it an RGB, or BRG (i.e. color ordering)
+ * - Is it 16, 24, or 32 bits format.
+ * All these differences are addressed by load_rgb / save_rgb routines, provided
+ * for each format in the RGB descriptor to load / save RGB color bytes from / to
+ * the buffer. As far as moving from one RGB pixel to the next, there
+ * are two question to consider:
+ * - How many bytes it takes to encode one RGB pixel (could be 2, 3, or 4)
+ * - How many bytes it takes to encode a line (i.e. line alignment issue, which
+ * makes sense only for 24-bit formats, since 16, and 32 bit formats provide
+ * automatic word alignment.)
+ * The first question is answered with the 'rgb_inc' field of the RGB descriptor,
+ * and the second one is done by aligning rgb pointer up to the nearest 16 bit
+ * boundaries at the end of each line.
+ * YUV format has much greater complexity for conversion. in YUV color encoding
+ * is divided into three separate panes that can be mixed together in any way
+ * imaginable. Fortunately, there are still some consistent patterns in different
+
+ * YUV formats that can be abstracted through a descriptor:
+ * - At the line-by-line level, colors are always grouped around pairs of pixels,
+ * where each pixel in the pair has its own Y value, and each pair of pixels
+ * share the same U, and V values.
+ * - Position of Y, U, and V is the same for each pair, so the distance between
+ * Ys, and U/V for adjacent pairs is the same.
+ * - Inside the pair, the distance between two Ys is always the same.
+
+ * Moving between the lines in YUV can also be easily formalized. Essentially,
+ * there are three ways in which color panes are arranged:
+ * 1. All interleaved, where all three Y, U, and V values are encoded together in
+ * one block:
+ * 1,2 3,4 5,6 n,n+1
+ * YUVY YUVY YUVY .... YUVY
+ *
+ * This type is used to encode YUV 4:2:2 formats.
+ *
+ * 2. One separate block of memory for Y pane, and one separate block of memory
+ * containing interleaved U, and V panes.
+ *
+ * YY | YY | YY | YY
+ * YY | YY | YY | YY
+ * -----------------
+ * UV | UV | UV | UV
+ * -----------------
+ *
+ * This type is used to encode 4:2:0 formats.
+ *
+ * 3. Three separate blocks of memory for each pane.
+ *
+ * YY | YY | YY | YY
+ * YY | YY | YY | YY
+ * -----------------
+ * U | U | U | U
+ * V | V | V | V
+ * -----------------
+ *
+ * This type is also used to encode 4:2:0 formats.
+ *
+ * Note that in cases 2, and 3 each pair of U and V is shared among four pixels,
+ * grouped together as they are grouped in the framebuffer: divide the frame's
+ * rectangle into 2x2 pixels squares, starting from 0,0 corner - and each square
+ * represents the group of pixels that share same pair of UV values. So, each
+ * line in the U/V panes table is shared between two adjacent lines in Y pane,
+ * which provides a pattern on how to move between U/V lines as we move between
+ * Y lines.
+ *
+ * So, all these patterns can be coded in a YUV format descriptor, so there can
+ * be just one generic way of walking a YUV frame.
+ *
+ * BAYER format.
+ * We don't use BAYER inside the guest system, so there is no need to have a
+ * converter to the BAYER formats, only from it. The color approximation used in
+ * the BAYER format converters implemented here simply averages corresponding
+ * color values found in pixels surrounding the one for which RGB colors are
+ * calculated.
+ *
+ * Performance considerations:
+ * Since converters implemented here are intended to work as part of the camera
+ * emulation, making the code super performant is not a priority at all. There
+ * will be enough losses in other parts of the emulation to overlook any slight
+ * inefficiencies in the conversion algorithm as negligible.
+ */
+
+typedef struct RGBDesc RGBDesc;
+typedef struct YUVDesc YUVDesc;
+typedef struct BayerDesc BayerDesc;
+
+/* Prototype for a routine that loads RGB colors from an RGB/BRG stream.
+ * Param:
+ * rgb - Pointer to a pixel inside the stream where to load colors from.
+ * r, g, b - Upon return will contain red, green, and blue colors for the pixel
+ * addressed by 'rgb' pointer.
+ * Return:
+ * Pointer to the next pixel in the stream.
+ */
+typedef const void* (*load_rgb_func)(const void* rgb,
+ uint8_t* r,
+ uint8_t* g,
+ uint8_t* b);
+
+/* Prototype for a routine that saves RGB colors to an RGB/BRG stream.
+ * Param:
+ * rgb - Pointer to a pixel inside the stream where to save colors.
+ * r, g, b - Red, green, and blue colors to save to the pixel addressed by
+ * 'rgb' pointer.
+ * Return:
+ * Pointer to the next pixel in the stream.
+ */
+typedef void* (*save_rgb_func)(void* rgb, uint8_t r, uint8_t g, uint8_t b);
+
+/* Prototype for a routine that calculates an offset of the first U value for the
+ * given line in a YUV framebuffer.
+ * Param:
+ * desc - Descriptor for the YUV frame for which the offset is being calculated.
+ * line - Zero-based line number for which to calculate the offset.
+ * width, height - Frame dimensions.
+ * Return:
+ * Offset of the first U value for the given frame line. The offset returned
+ * here is relative to the beginning of the YUV framebuffer.
+ */
+typedef int (*u_offset_func)(const YUVDesc* desc, int line, int width, int height);
+
+/* Prototype for a routine that calculates an offset of the first V value for the
+ * given line in a YUV framebuffer.
+ * Param:
+ * desc - Descriptor for the YUV frame for which the offset is being calculated.
+ * line - Zero-based line number for which to calculate the offset.
+ * width, height - Frame dimensions.
+ * Return:
+ * Offset of the first V value for the given frame line. The offset returned
+ * here is relative to the beginning of the YUV framebuffer.
+ */
+typedef int (*v_offset_func)(const YUVDesc* desc, int line, int width, int height);
+
+/* RGB/BRG format descriptor. */
+struct RGBDesc {
+ /* Routine that loads RGB colors from a buffer. */
+ load_rgb_func load_rgb;
+ /* Routine that saves RGB colors into a buffer. */
+ save_rgb_func save_rgb;
+ /* Byte size of an encoded RGB pixel. */
+ int rgb_inc;
+};
+
+/* YUV format descriptor. */
+struct YUVDesc {
+ /* Offset of the first Y value in a fully interleaved YUV framebuffer. */
+ int Y_offset;
+ /* Distance between two Y values inside a pair of pixels in a fully
+ * interleaved YUV framebuffer. */
+ int Y_inc;
+ /* Distance between first Y values of the adjacent pixel pairs in a fully
+ * interleaved YUV framebuffer. */
+ int Y_next_pair;
+ /* Increment between adjacent U/V values in a YUV framebuffer. */
+ int UV_inc;
+ /* Controls location of the first U value in YUV framebuffer. Depending on
+ * the actual YUV format can mean three things:
+ * - For fully interleaved YUV formats contains offset of the first U value
+ * in each line.
+ * - For YUV format that use separate, but interleaved UV pane, this field
+ * contains an offset of the first U value in the UV pane.
+ * - For YUV format that use fully separated Y, U, and V panes this field
+ * defines order of U and V panes in the framebuffer:
+ * = 1 - U pane comes first, right after Y pane.
+ * = 0 - U pane follows V pane that starts right after Y pane. */
+ int U_offset;
+ /* Controls location of the first V value in YUV framebuffer.
+ * See comments to U_offset for more info. */
+ int V_offset;
+ /* Routine that calculates an offset of the first U value for the given line
+ * in a YUV framebuffer. */
+ u_offset_func u_offset;
+ /* Routine that calculates an offset of the first V value for the given line
+ * in a YUV framebuffer. */
+ v_offset_func v_offset;
+};
+
+/* Bayer format descriptor. */
+struct BayerDesc {
+ /* Defines color ordering in the BAYER framebuffer. Can be one of the four:
+ * - "GBRG" for GBGBGB / RGRGRG
+ * - "GRBG" for GRGRGR / BGBGBG
+ * - "RGGB" for RGRGRG / GBGBGB
+ * - "BGGR" for BGBGBG / GRGRGR
+ */
+ const char* color_order;
+ /* Bitmask for valid bits in the pixel:
+ * - 0xff For a 8-bit BAYER format
+ * - 0x3ff For a 10-bit BAYER format
+ * - 0xfff For a 12-bit BAYER format
+ */
+ int mask;
+};
+
+/********************************************************************************
+ * RGB/BRG load / save routines.
+ *******************************************************************************/
+
+/* Loads R, G, and B colors from a RGB32 framebuffer. */
+static const void*
+_load_RGB32(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint8_t* rgb_ptr = (const uint8_t*)rgb;
+ *r = rgb_ptr[0]; *g = rgb_ptr[1]; *b = rgb_ptr[2];
+ return rgb_ptr + 4;
+}
+
+/* Saves R, G, and B colors to a RGB32 framebuffer. */
+static void*
+_save_RGB32(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ uint8_t* rgb_ptr = (uint8_t*)rgb;
+ rgb_ptr[0] = r; rgb_ptr[1] = g; rgb_ptr[2] = b;
+ return rgb_ptr + 4;
+}
+
+/* Loads R, G, and B colors from a BRG32 framebuffer. */
+static const void*
+_load_BRG32(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint8_t* rgb_ptr = (const uint8_t*)rgb;
+ *r = rgb_ptr[2]; *g = rgb_ptr[1]; *b = rgb_ptr[0];
+ return rgb_ptr + 4;
+}
+
+/* Saves R, G, and B colors to a BRG32 framebuffer. */
+static void*
+_save_BRG32(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ uint8_t* rgb_ptr = (uint8_t*)rgb;
+ rgb_ptr[2] = r; rgb_ptr[1] = g; rgb_ptr[0] = b;
+ return rgb_ptr + 4;
+}
+
+/* Loads R, G, and B colors from a RGB24 framebuffer.
+ * Note that it's the caller's responsibility to ensure proper alignment of the
+ * returned pointer at the line's break. */
+static const void*
+_load_RGB24(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint8_t* rgb_ptr = (const uint8_t*)rgb;
+ *r = rgb_ptr[0]; *g = rgb_ptr[1]; *b = rgb_ptr[2];
+ return rgb_ptr + 3;
+}
+
+/* Saves R, G, and B colors to a RGB24 framebuffer.
+ * Note that it's the caller's responsibility to ensure proper alignment of the
+ * returned pointer at the line's break. */
+static void*
+_save_RGB24(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ uint8_t* rgb_ptr = (uint8_t*)rgb;
+ rgb_ptr[0] = r; rgb_ptr[1] = g; rgb_ptr[2] = b;
+ return rgb_ptr + 3;
+}
+
+/* Loads R, G, and B colors from a BRG24 framebuffer.
+ * Note that it's the caller's responsibility to ensure proper alignment of the
+ * returned pointer at the line's break. */
+static const void*
+_load_BRG24(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint8_t* rgb_ptr = (const uint8_t*)rgb;
+ *r = rgb_ptr[2]; *g = rgb_ptr[1]; *b = rgb_ptr[0];
+ return rgb_ptr + 3;
+}
+
+/* Saves R, G, and B colors to a BRG24 framebuffer.
+ * Note that it's the caller's responsibility to ensure proper alignment of the
+ * returned pointer at the line's break. */
+static void*
+_save_BRG24(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ uint8_t* rgb_ptr = (uint8_t*)rgb;
+ rgb_ptr[2] = r; rgb_ptr[1] = g; rgb_ptr[0] = b;
+ return rgb_ptr + 3;
+}
+
+/* Loads R, G, and B colors from a RGB565 framebuffer. */
+static const void*
+_load_RGB16(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint16_t rgb16 = *(const uint16_t*)rgb;
+ *r = R16(rgb16); *g = G16(rgb16); *b = B16(rgb16);
+ return (const uint8_t*)rgb + 2;
+}
+
+/* Saves R, G, and B colors to a RGB565 framebuffer. */
+static void*
+_save_RGB16(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ *(uint16_t*)rgb = RGB565(r & 0x1f, g & 0x3f, b & 0x1f);
+ return (uint8_t*)rgb + 2;
+}
+
+/* Loads R, G, and B colors from a BRG565 framebuffer. */
+static const void*
+_load_BRG16(const void* rgb, uint8_t* r, uint8_t* g, uint8_t* b)
+{
+ const uint16_t rgb16 = *(const uint16_t*)rgb;
+ *r = B16(rgb16); *g = G16(rgb16); *b = R16(rgb16);
+ return (const uint8_t*)rgb + 2;
+}
+
+/* Saves R, G, and B colors to a BRG565 framebuffer. */
+static void*
+_save_BRG16(void* rgb, uint8_t r, uint8_t g, uint8_t b)
+{
+ *(uint16_t*)rgb = RGB565(b & 0x1f, g & 0x3f, r & 0x1f);
+ return (uint8_t*)rgb + 2;
+}
+
+/********************************************************************************
+ * YUV's U/V offset calculation routines.
+ *******************************************************************************/
+
+/* U offset in a fully interleaved YUV 4:2:2 */
+static int
+_UOffIntrlYUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* In interleaved YUV 4:2:2 each pair of pixels is encoded with 4 consecutive
+ * bytes (or 2 bytes per pixel). So line size in a fully interleaved YUV 4:2:2
+ * is twice its width. */
+ return line * width * 2 + desc->U_offset;
+}
+
+/* V offset in a fully interleaved YUV 4:2:2 */
+static int
+_VOffIntrlYUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* See _UOffIntrlYUV comments. */
+ return line * width * 2 + desc->V_offset;
+}
+
+/* U offset in an interleaved UV pane of YUV 4:2:0 */
+static int
+_UOffIntrlUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* UV pane starts right after the Y pane, that occupies 'height * width'
+ * bytes. Each line in UV pane contains width / 2 'UV' pairs, which makes UV
+ * pane to contain as many bytes, as the width is.
+ * Each line in the UV pane is shared between two Y lines. So, final formula
+ * for the beginning of the UV pane's line for the given line in YUV
+ * framebuffer is:
+ *
+ * height * width + (line / 2) * width = (height + line / 2) * width
+ */
+ return (height + line / 2) * width + desc->U_offset;
+}
+
+/* V offset in an interleaved UV pane of YUV 4:2:0 */
+static int
+_VOffIntrlUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* See comments in _UOffIntrlUV. */
+ return (height + line / 2) * width + desc->V_offset;
+}
+
+/* U offset in a 3-pane YUV 4:2:0 */
+static int
+_UOffSepYUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* U, or V pane starts right after the Y pane, that occupies 'height * width'
+ * bytes. Each line in each of U and V panes contains width / 2 elements.
+ * Also, each line in each of U and V panes is shared between two Y lines.
+ * So, final formula for the beginning of a line in the U/V pane is:
+ *
+ * <Y pane size> + (line / 2) * width / 2
+ *
+ * for the pane that follows right after the Y pane, or
+ *
+ * <Y pane size> + <Y pane size> / 4 + (line / 2) * width / 2
+ *
+ * for the second pane.
+ */
+ const int y_pane_size = height * width;
+ if (desc->U_offset) {
+ /* U pane comes right after the Y pane. */
+ return y_pane_size + (line / 2) * width / 2;
+ } else {
+ /* U pane follows V pane. */
+ return y_pane_size + y_pane_size / 4 + (line / 2) * width / 2;
+ }
+}
+
+/* V offset in a 3-pane YUV 4:2:0 */
+static int
+_VOffSepYUV(const YUVDesc* desc, int line, int width, int height)
+{
+ /* See comment for _UOffSepYUV. */
+ const int y_pane_size = height * width;
+ if (desc->V_offset) {
+ /* V pane comes right after the Y pane. */
+ return y_pane_size + (line / 2) * width / 2;
+ } else {
+ /* V pane follows U pane. */
+ return y_pane_size + y_pane_size / 4 + (line / 2) * width / 2;
+ }
+}
+
+/********************************************************************************
+ * Bayer routines.
+ *******************************************************************************/
+
+/* Gets a color value for the given pixel in a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer to get the color for.
+ * width - Number of pixel in a line inside the framebuffer.
+ * Return:
+ * Given pixel color.
+ */
+static __inline__ int
+_get_bayer_color(const BayerDesc* desc, const void* buf, int x, int y, int width)
+{
+ if (desc->mask == kBayer8) {
+ /* Each pixel is represented with one byte. */
+ return *((const uint8_t*)buf + y * width + x);
+ } else {
+#ifndef HOST_WORDS_BIGENDIAN
+ return *((const int16_t*)buf + y * width + x) & desc->mask;
+#else
+ const uint8_t* pixel = (const uint8_t*)buf + (y * width + x) * 2;
+ return (((uint16_t)pixel[1] << 8) | pixel[0]) & desc->mask;
+#endif /* !HOST_WORDS_BIGENDIAN */
+ }
+}
+
+/* Gets an average value of colors that are horizontally adjacent to a pixel in
+ * a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer that is the center for
+ * the calculation.
+ * width, height - Framebuffer dimensions.
+ * Return:
+ * Average color for horizontally adjacent pixels.
+ */
+static int
+_get_bayer_ave_hor(const BayerDesc* desc,
+ const void* buf,
+ int x,
+ int y,
+ int width,
+ int height)
+{
+ if (x == 0) {
+ return _get_bayer_color(desc, buf, x + 1, y, width);
+ } else if (x == (width - 1)) {
+ return _get_bayer_color(desc, buf, x - 1, y, width);
+ } else {
+ return (_get_bayer_color(desc, buf, x - 1, y, width) +
+ _get_bayer_color(desc, buf, x + 1, y, width)) / 2;
+ }
+}
+
+/* Gets an average value of colors that are vertically adjacent to a pixel in
+ * a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer that is the center for
+ * the calculation.
+ * width, height - Framebuffer dimensions.
+ * Return:
+ * Average color for vertically adjacent pixels.
+ */
+static int
+_get_bayer_ave_vert(const BayerDesc* desc,
+ const void* buf,
+ int x,
+ int y,
+ int width,
+ int height)
+{
+ if (y == 0) {
+ return _get_bayer_color(desc, buf, x, y + 1, width);
+ } else if (y == (height - 1)) {
+ return _get_bayer_color(desc, buf, x, y - 1, width);
+ } else {
+ return (_get_bayer_color(desc, buf, x, y - 1, width) +
+ _get_bayer_color(desc, buf, x, y + 1, width)) / 2;
+ }
+}
+
+/* Gets an average value of colors that are horizontally and vertically adjacent
+ * to a pixel in a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer that is the center for
+ * the calculation.
+ * width, height - Framebuffer dimensions.
+ * Return:
+ * Average color for horizontally and vertically adjacent pixels.
+ */
+static int
+_get_bayer_ave_cross(const BayerDesc* desc,
+ const void* buf,
+ int x,
+ int y,
+ int width,
+ int height)
+{
+ if (x > 0 && x < (width - 1) && y > 0 && y < (height - 1)) {
+ /* Most of the time the code will take this path. So it makes sense to
+ * special case it for performance reasons. */
+ return (_get_bayer_color(desc, buf, x - 1, y, width) +
+ _get_bayer_color(desc, buf, x + 1, y, width) +
+ _get_bayer_color(desc, buf, x, y - 1, width) +
+ _get_bayer_color(desc, buf, x, y + 1, width)) / 4;
+ } else {
+ int sum = 0;
+ int num = 0;
+
+ /* Horizontal sum */
+ if (x == 0) {
+ sum += _get_bayer_color(desc, buf, x + 1, y, width);
+ num++;
+ } else if (x == (width - 1)) {
+ sum += _get_bayer_color(desc, buf, x - 1, y, width);
+ num++;
+ } else {
+ sum += _get_bayer_color(desc, buf, x - 1, y, width) +
+ _get_bayer_color(desc, buf, x + 1, y, width);
+ num += 2;
+ }
+
+ /* Vertical sum */
+ if (y == 0) {
+ sum += _get_bayer_color(desc, buf, x, y + 1, width);
+ num++;
+ } else if (y == (height - 1)) {
+ sum += _get_bayer_color(desc, buf, x, y - 1, width);
+ num++;
+ } else {
+ sum += _get_bayer_color(desc, buf, x, y - 1, width) +
+ _get_bayer_color(desc, buf, x, y + 1, width);
+ num += 2;
+ }
+
+ return sum / num;
+ }
+}
+
+/* Gets an average value of colors that are diagonally adjacent to a pixel in a
+ * bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer that is the center for
+ * the calculation.
+ * width, height - Framebuffer dimensions.
+ * Return:
+ * Average color for diagonally adjacent pixels.
+ */
+static int
+_get_bayer_ave_diag(const BayerDesc* desc,
+ const void* buf,
+ int x,
+ int y,
+ int width,
+ int height)
+{
+ if (x > 0 && x < (width - 1) && y > 0 && y < (height - 1)) {
+ /* Most of the time the code will take this path. So it makes sense to
+ * special case it for performance reasons. */
+ return (_get_bayer_color(desc, buf, x - 1, y - 1, width) +
+ _get_bayer_color(desc, buf, x + 1, y - 1, width) +
+ _get_bayer_color(desc, buf, x - 1, y + 1, width) +
+ _get_bayer_color(desc, buf, x + 1, y + 1, width)) / 4;
+ } else {
+ int sum = 0;
+ int num = 0;
+ int xx, yy;
+ for (xx = x - 1; xx < (x + 2); xx += 2) {
+ for (yy = y - 1; yy < (y + 2); yy += 2) {
+ if (xx >= 0 && yy >= 0 && xx < width && yy < height) {
+ sum += _get_bayer_color(desc, buf, xx, yy, width);
+ num++;
+ }
+ }
+ }
+ return sum / num;
+ }
+}
+
+/* Gets pixel color selector for the given pixel in a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * x, y - Coordinates of the pixel inside the framebuffer to get the color
+ * selector for.
+ * Return:
+ * Pixel color selector:
+ * - 'R' - pixel is red.
+ * - 'G' - pixel is green.
+ * - 'B' - pixel is blue.
+ */
+static __inline__ char
+_get_bayer_color_sel(const BayerDesc* desc, int x, int y)
+{
+ return desc->color_order[((y & 1) << 1) | (x & 1)];
+}
+
+/* Calculates RGB colors for a pixel in a bayer framebuffer.
+ * Param:
+ * desc - Bayer framebuffer descriptor.
+ * buf - Beginning of the framebuffer.
+ * x, y - Coordinates of the pixel inside the framebuffer to get the colors for.
+ * width, height - Framebuffer dimensions.
+ * red, green, blue - Upon return will contain RGB colors calculated for the pixel.
+ */
+static void
+_get_bayerRGB(const BayerDesc* desc,
+ const void* buf,
+ int x,
+ int y,
+ int width,
+ int height,
+ int* red,
+ int* green,
+ int* blue)
+{
+ const char pixel_color = _get_bayer_color_sel(desc, x, y);
+
+ if (pixel_color == 'G') {
+ /* This is a green pixel. */
+ const char next_pixel_color = _get_bayer_color_sel(desc, x + 1, y);
+ *green = _get_bayer_color(desc, buf, x, y, width);
+ if (next_pixel_color == 'R') {
+ *red = _get_bayer_ave_hor(desc, buf, x, y, width, height);
+ *blue = _get_bayer_ave_vert(desc, buf, x, y, width, height);
+ } else {
+ *red = _get_bayer_ave_vert(desc, buf, x, y, width, height);
+ *blue = _get_bayer_ave_hor(desc, buf, x, y, width, height);
+ }
+ } else if (pixel_color == 'R') {
+ /* This is a red pixel. */
+ *red = _get_bayer_color(desc, buf, x, y, width);
+ *green = _get_bayer_ave_cross(desc, buf, x, y, width, height);
+ *blue = _get_bayer_ave_diag(desc, buf, x, y, width, height);
+ } else {
+ /* This is a blue pixel. */
+ *blue = _get_bayer_color(desc, buf, x, y, width);
+ *green = _get_bayer_ave_cross(desc, buf, x, y, width, height);
+ *red = _get_bayer_ave_diag(desc, buf, x, y, width, height);
+ }
+}
+
+/********************************************************************************
+ * Generic YUV/RGB/BAYER converters
+ *******************************************************************************/
+
+/* Generic converter from an RGB/BRG format to a YUV format.
+ * Pixels are processed in horizontal pairs: each pixel gets its own Y value,
+ * while U and V for the pair are taken from the first pixel only (chroma
+ * subsampling). Assumes 'width' is even — TODO confirm with callers.
+ * Param:
+ *  rgb_fmt - Descriptor of the source RGB/BRG format.
+ *  yuv_fmt - Descriptor of the destination YUV format.
+ *  rgb, yuv - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+RGBToYUV(const RGBDesc* rgb_fmt,
+         const YUVDesc* yuv_fmt,
+         const void* rgb,
+         void* yuv,
+         int width,
+         int height)
+{
+    int y, x;
+    const int Y_Inc = yuv_fmt->Y_inc;
+    const int UV_inc = yuv_fmt->UV_inc;
+    const int Y_next_pair = yuv_fmt->Y_next_pair;
+    uint8_t* pY = (uint8_t*)yuv + yuv_fmt->Y_offset;
+    for (y = 0; y < height; y++) {
+        uint8_t* pU =
+            (uint8_t*)yuv + yuv_fmt->u_offset(yuv_fmt, y, width, height);
+        uint8_t* pV =
+            (uint8_t*)yuv + yuv_fmt->v_offset(yuv_fmt, y, width, height);
+        for (x = 0; x < width; x += 2,
+                               pY += Y_next_pair, pU += UV_inc, pV += UV_inc) {
+            uint8_t r, g, b;
+            /* First pixel of the pair contributes Y, U and V. */
+            rgb = rgb_fmt->load_rgb(rgb, &r, &g, &b);
+            R8G8B8ToYUV(r, g, b, pY, pU, pV);
+            /* Second pixel of the pair contributes only its luminance. */
+            rgb = rgb_fmt->load_rgb(rgb, &r, &g, &b);
+            pY[Y_Inc] = RGB2Y((int)r, (int)g, (int)b);
+        }
+        /* Align the source RGB pointer to a 16-bit boundary at the end of
+         * each row. */
+        if (((uintptr_t)rgb & 1) != 0) rgb = (const uint8_t*)rgb + 1;
+    }
+}
+
+/* Generic converter from one RGB/BRG format to another RGB/BRG format.
+ * Param:
+ *  src_rgb_fmt - Descriptor of the source RGB/BRG format.
+ *  dst_rgb_fmt - Descriptor of the destination RGB/BRG format.
+ *  src_rgb, dst_rgb - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+RGBToRGB(const RGBDesc* src_rgb_fmt,
+         const RGBDesc* dst_rgb_fmt,
+         const void* src_rgb,
+         void* dst_rgb,
+         int width,
+         int height)
+{
+    int x, y;
+    for (y = 0; y < height; y++) {
+        for (x = 0; x < width; x++) {
+            uint8_t r, g, b;
+            src_rgb = src_rgb_fmt->load_rgb(src_rgb, &r, &g, &b);
+            dst_rgb = dst_rgb_fmt->save_rgb(dst_rgb, r, g, b);
+        }
+        /* Align both RGB pointers to a 16-bit boundary at the end of
+         * each row. */
+        if (((uintptr_t)src_rgb & 1) != 0) src_rgb = (uint8_t*)src_rgb + 1;
+        if (((uintptr_t)dst_rgb & 1) != 0) dst_rgb = (uint8_t*)dst_rgb + 1;
+    }
+}
+
+/* Generic converter from a YUV format to an RGB/BRG format.
+ * Pixels are processed in horizontal pairs: each pixel has its own Y value,
+ * while U and V are shared by both pixels of the pair. Assumes 'width' is
+ * even — TODO confirm with callers.
+ * Param:
+ *  yuv_fmt - Descriptor of the source YUV format.
+ *  rgb_fmt - Descriptor of the destination RGB/BRG format.
+ *  yuv, rgb - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+YUVToRGB(const YUVDesc* yuv_fmt,
+         const RGBDesc* rgb_fmt,
+         const void* yuv,
+         void* rgb,
+         int width,
+         int height)
+{
+    int y, x;
+    const int Y_Inc = yuv_fmt->Y_inc;
+    const int UV_inc = yuv_fmt->UV_inc;
+    const int Y_next_pair = yuv_fmt->Y_next_pair;
+    const uint8_t* pY = (const uint8_t*)yuv + yuv_fmt->Y_offset;
+    for (y = 0; y < height; y++) {
+        const uint8_t* pU =
+            (const uint8_t*)yuv + yuv_fmt->u_offset(yuv_fmt, y, width, height);
+        const uint8_t* pV =
+            (const uint8_t*)yuv + yuv_fmt->v_offset(yuv_fmt, y, width, height);
+        for (x = 0; x < width; x += 2,
+                               pY += Y_next_pair, pU += UV_inc, pV += UV_inc) {
+            uint8_t r, g, b;
+            /* Both pixels of the pair are reconstructed from the same U/V. */
+            const uint8_t U = *pU;
+            const uint8_t V = *pV;
+            YUVToRGBPix(*pY, U, V, &r, &g, &b);
+            rgb = rgb_fmt->save_rgb(rgb, r, g, b);
+            YUVToRGBPix(pY[Y_Inc], U, V, &r, &g, &b);
+            rgb = rgb_fmt->save_rgb(rgb, r, g, b);
+        }
+        /* Align the destination RGB pointer to a 16-bit boundary at the end
+         * of each row. */
+        if (((uintptr_t)rgb & 1) != 0) rgb = (uint8_t*)rgb + 1;
+    }
+}
+
+/* Generic converter from one YUV format to another YUV format.
+ * Pixels are copied in horizontal pairs: both Y values of the pair are
+ * copied, but only one U and one V sample per pair is transferred (both
+ * formats are chroma-subsampled). Assumes 'width' is even — TODO confirm
+ * with callers.
+ * Param:
+ *  src_fmt - Descriptor of the source YUV format.
+ *  dst_fmt - Descriptor of the destination YUV format.
+ *  src, dst - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+YUVToYUV(const YUVDesc* src_fmt,
+         const YUVDesc* dst_fmt,
+         const void* src,
+         void* dst,
+         int width,
+         int height)
+{
+    int y, x;
+    /* Per-format layout strides: distance between the two Y samples of a
+     * pair, between consecutive U/V samples, and to the next pair's Y. */
+    const int Y_Inc_src = src_fmt->Y_inc;
+    const int UV_inc_src = src_fmt->UV_inc;
+    const int Y_next_pair_src = src_fmt->Y_next_pair;
+    const int Y_Inc_dst = dst_fmt->Y_inc;
+    const int UV_inc_dst = dst_fmt->UV_inc;
+    const int Y_next_pair_dst = dst_fmt->Y_next_pair;
+    const uint8_t* pYsrc = (const uint8_t*)src + src_fmt->Y_offset;
+    uint8_t* pYdst = (uint8_t*)dst + dst_fmt->Y_offset;
+    for (y = 0; y < height; y++) {
+        /* U/V row starts are format-specific (interleaved vs. planar), so
+         * they are recomputed through the descriptor callbacks per row. */
+        const uint8_t* pUsrc =
+            (const uint8_t*)src + src_fmt->u_offset(src_fmt, y, width, height);
+        const uint8_t* pVsrc =
+            (const uint8_t*)src + src_fmt->v_offset(src_fmt, y, width, height);
+        uint8_t* pUdst =
+            (uint8_t*)dst + dst_fmt->u_offset(dst_fmt, y, width, height);
+        uint8_t* pVdst =
+            (uint8_t*)dst + dst_fmt->v_offset(dst_fmt, y, width, height);
+        for (x = 0; x < width; x += 2, pYsrc += Y_next_pair_src,
+                                       pUsrc += UV_inc_src,
+                                       pVsrc += UV_inc_src,
+                                       pYdst += Y_next_pair_dst,
+                                       pUdst += UV_inc_dst,
+                                       pVdst += UV_inc_dst) {
+            *pYdst = *pYsrc; *pUdst = *pUsrc; *pVdst = *pVsrc;
+            /* Second Y of the pair; no second U/V is transferred. */
+            pYdst[Y_Inc_dst] = pYsrc[Y_Inc_src];
+        }
+    }
+}
+
+/* Generic converter from a BAYER format to an RGB/BRG format.
+ * Param:
+ *  bayer_fmt - Descriptor of the source BAYER format.
+ *  rgb_fmt - Descriptor of the destination RGB/BRG format.
+ *  bayer, rgb - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+BAYERToRGB(const BayerDesc* bayer_fmt,
+           const RGBDesc* rgb_fmt,
+           const void* bayer,
+           void* rgb,
+           int width,
+           int height)
+{
+    int y, x;
+    for (y = 0; y < height; y++) {
+        for (x = 0; x < width; x++) {
+            int r, g, b;
+            _get_bayerRGB(bayer_fmt, bayer, x, y, width, height, &r, &g, &b);
+            /* 10 and 12-bit bayer samples must be scaled down to the 8 bits
+             * per channel that the RGB 'save_rgb' routines expect. */
+            if (bayer_fmt->mask == kBayer10) {
+                r >>= 2; g >>= 2; b >>= 2;
+            } else if (bayer_fmt->mask == kBayer12) {
+                r >>= 4; g >>= 4; b >>= 4;
+            }
+            rgb = rgb_fmt->save_rgb(rgb, r, g, b);
+        }
+        /* Align the destination RGB pointer to a 16-bit boundary at the end
+         * of each row. */
+        if (((uintptr_t)rgb & 1) != 0) rgb = (uint8_t*)rgb + 1;
+    }
+}
+
+/* Generic converter from a BAYER format to a YUV format.
+ * Pixels are processed in horizontal pairs: each pixel gets its own Y value,
+ * while U and V are taken from the first pixel of the pair only.
+ * Param:
+ *  bayer_fmt - Descriptor of the source BAYER format.
+ *  yuv_fmt - Descriptor of the destination YUV format.
+ *  bayer, yuv - Source and destination framebuffers.
+ *  width, height - Dimensions (in pixels) of both framebuffers.
+ */
+static void
+BAYERToYUV(const BayerDesc* bayer_fmt,
+           const YUVDesc* yuv_fmt,
+           const void* bayer,
+           void* yuv,
+           int width,
+           int height)
+{
+    int y, x;
+    /* Number of bits to shift demosaiced colors down to 8 bits per channel:
+     * R8G8B8ToYUV and RGB2Y expect 8-bit color components, while 10 and
+     * 12-bit bayer formats produce wider values. This mirrors the
+     * normalization performed in BAYERToRGB, which was missing here. */
+    const int shift = (bayer_fmt->mask == kBayer10) ? 2 :
+                      (bayer_fmt->mask == kBayer12) ? 4 : 0;
+    const int Y_Inc = yuv_fmt->Y_inc;
+    const int UV_inc = yuv_fmt->UV_inc;
+    const int Y_next_pair = yuv_fmt->Y_next_pair;
+    uint8_t* pY = (uint8_t*)yuv + yuv_fmt->Y_offset;
+    for (y = 0; y < height; y++) {
+        uint8_t* pU =
+            (uint8_t*)yuv + yuv_fmt->u_offset(yuv_fmt, y, width, height);
+        uint8_t* pV =
+            (uint8_t*)yuv + yuv_fmt->v_offset(yuv_fmt, y, width, height);
+        for (x = 0; x < width; x += 2,
+                               pY += Y_next_pair, pU += UV_inc, pV += UV_inc) {
+            int r, g, b;
+            /* First pixel of the pair contributes Y, U and V. */
+            _get_bayerRGB(bayer_fmt, bayer, x, y, width, height, &r, &g, &b);
+            r >>= shift; g >>= shift; b >>= shift;
+            R8G8B8ToYUV(r, g, b, pY, pU, pV);
+            /* Second pixel of the pair contributes only its luminance. */
+            _get_bayerRGB(bayer_fmt, bayer, x + 1, y, width, height, &r, &g, &b);
+            r >>= shift; g >>= shift; b >>= shift;
+            pY[Y_Inc] = RGB2Y(r, g, b);
+        }
+    }
+}
+
+/********************************************************************************
+ * RGB format descriptors.
+ *******************************************************************************/
+
+/* Describes RGB32 format (4 bytes per pixel). */
+static const RGBDesc _RGB32 =
+{
+    .load_rgb = _load_RGB32,
+    .save_rgb = _save_RGB32,
+    .rgb_inc  = 4
+};
+
+/* Describes BRG32 format (4 bytes per pixel, reversed color order). */
+static const RGBDesc _BRG32 =
+{
+    .load_rgb = _load_BRG32,
+    .save_rgb = _save_BRG32,
+    .rgb_inc  = 4
+};
+
+/* Describes RGB24 format (3 bytes per pixel). */
+static const RGBDesc _RGB24 =
+{
+    .load_rgb = _load_RGB24,
+    .save_rgb = _save_RGB24,
+    .rgb_inc  = 3
+};
+
+/* Describes BRG24 format (3 bytes per pixel, reversed color order). */
+static const RGBDesc _BRG24 =
+{
+    .load_rgb = _load_BRG24,
+    .save_rgb = _save_BRG24,
+    .rgb_inc  = 3
+};
+
+/* Describes RGB16 format (2 bytes per pixel). */
+static const RGBDesc _RGB16 =
+{
+    .load_rgb = _load_RGB16,
+    .save_rgb = _save_RGB16,
+    .rgb_inc  = 2
+};
+
+/* Describes BRG16 format (2 bytes per pixel, reversed color order). */
+static const RGBDesc _BRG16 =
+{
+    .load_rgb = _load_BRG16,
+    .save_rgb = _save_BRG16,
+    .rgb_inc  = 2
+};
+
+/********************************************************************************
+ * YUV 4:2:2 format descriptors.
+ *******************************************************************************/
+
+/* YUYV (also known as YUY2, YUNV, V422 - see the _PIXFormats table):
+ * 4:2:2, YUV are interleaved. */
+static const YUVDesc _YUYV =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 2,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 1,
+    .V_offset    = 3,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/* UYVY: 4:2:2, YUV are interleaved. */
+static const YUVDesc _UYVY =
+{
+    .Y_offset    = 1,
+    .Y_inc       = 2,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 0,
+    .V_offset    = 2,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/* YVYU: 4:2:2, YUV are interleaved. */
+static const YUVDesc _YVYU =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 2,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 3,
+    .V_offset    = 1,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/* VYUY: 4:2:2, YUV are interleaved. */
+static const YUVDesc _VYUY =
+{
+    .Y_offset    = 1,
+    .Y_inc       = 2,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 2,
+    .V_offset    = 0,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/* YYUV: 4:2:2, YUV are interleaved (both Y samples precede U and V). */
+static const YUVDesc _YYUV =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 2,
+    .V_offset    = 3,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/* YYVU: 4:2:2, YUV are interleaved (both Y samples precede V and U). */
+static const YUVDesc _YYVU =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 4,
+    .UV_inc      = 4,
+    .U_offset    = 3,
+    .V_offset    = 2,
+    .u_offset    = &_UOffIntrlYUV,
+    .v_offset    = &_VOffIntrlYUV
+};
+
+/********************************************************************************
+ * YUV 4:2:0 descriptors.
+ *******************************************************************************/
+
+/* YV12: 4:2:0, YUV are fully separated, U plane follows V plane */
+static const YUVDesc _YV12 =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 2,
+    .UV_inc      = 1,
+    .U_offset    = 0,
+    .V_offset    = 1,
+    .u_offset    = &_UOffSepYUV,
+    .v_offset    = &_VOffSepYUV
+};
+
+/* YU12: 4:2:0, YUV are fully separated, V plane follows U plane */
+static const YUVDesc _YU12 =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 2,
+    .UV_inc      = 1,
+    .U_offset    = 1,
+    .V_offset    = 0,
+    .u_offset    = &_UOffSepYUV,
+    .v_offset    = &_VOffSepYUV
+};
+
+/* NV12: 4:2:0, UV are interleaved, V follows U in UV plane */
+static const YUVDesc _NV12 =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 2,
+    .UV_inc      = 2,
+    .U_offset    = 0,
+    .V_offset    = 1,
+    .u_offset    = &_UOffIntrlUV,
+    .v_offset    = &_VOffIntrlUV
+};
+
+/* NV21: 4:2:0, UV are interleaved, U follows V in UV plane */
+static const YUVDesc _NV21 =
+{
+    .Y_offset    = 0,
+    .Y_inc       = 1,
+    .Y_next_pair = 2,
+    .UV_inc      = 2,
+    .U_offset    = 1,
+    .V_offset    = 0,
+    .u_offset    = &_UOffIntrlUV,
+    .v_offset    = &_VOffIntrlUV
+};
+
+/********************************************************************************
+ * RGB bayer format descriptors.
+ * Note: judging by the pairing of the comments below with the 'color_order'
+ * values (e.g. "GBGB / RGRG" <-> "GBRG"), the four characters of
+ * 'color_order' appear to describe the repeating 2x2 bayer tile: first two
+ * characters for the even rows' pixel pair, last two for the odd rows' —
+ * TODO confirm against _get_bayer_color_sel.
+ *******************************************************************************/
+
+/* Descriptor for a 8-bit GBGB / RGRG format. */
+static const BayerDesc _GB8 =
+{
+    .mask        = kBayer8,
+    .color_order = "GBRG"
+};
+
+/* Descriptor for a 8-bit GRGR / BGBG format. */
+static const BayerDesc _GR8 =
+{
+    .mask        = kBayer8,
+    .color_order = "GRBG"
+};
+
+/* Descriptor for a 8-bit BGBG / GRGR format. */
+static const BayerDesc _BG8 =
+{
+    .mask        = kBayer8,
+    .color_order = "BGGR"
+};
+
+/* Descriptor for a 8-bit RGRG / GBGB format. */
+static const BayerDesc _RG8 =
+{
+    .mask        = kBayer8,
+    .color_order = "RGGB"
+};
+
+/* Descriptor for a 10-bit GBGB / RGRG format. */
+static const BayerDesc _GB10 =
+{
+    .mask        = kBayer10,
+    .color_order = "GBRG"
+};
+
+/* Descriptor for a 10-bit GRGR / BGBG format. */
+static const BayerDesc _GR10 =
+{
+    .mask        = kBayer10,
+    .color_order = "GRBG"
+};
+
+/* Descriptor for a 10-bit BGBG / GRGR format. */
+static const BayerDesc _BG10 =
+{
+    .mask        = kBayer10,
+    .color_order = "BGGR"
+};
+
+/* Descriptor for a 10-bit RGRG / GBGB format. */
+static const BayerDesc _RG10 =
+{
+    .mask        = kBayer10,
+    .color_order = "RGGB"
+};
+
+/* Descriptor for a 12-bit GBGB / RGRG format. */
+static const BayerDesc _GB12 =
+{
+    .mask        = kBayer12,
+    .color_order = "GBRG"
+};
+
+/* Descriptor for a 12-bit GRGR / BGBG format. */
+static const BayerDesc _GR12 =
+{
+    .mask        = kBayer12,
+    .color_order = "GRBG"
+};
+
+/* Descriptor for a 12-bit BGBG / GRGR format. */
+static const BayerDesc _BG12 =
+{
+    .mask        = kBayer12,
+    .color_order = "BGGR"
+};
+
+/* Descriptor for a 12-bit RGRG / GBGB format. */
+static const BayerDesc _RG12 =
+{
+    .mask        = kBayer12,
+    .color_order = "RGGB"
+};
+
+
+
+/********************************************************************************
+ * List of descriptors for supported formats.
+ *******************************************************************************/
+
+/* Enumerates pixel formats supported by converters. */
+typedef enum PIXFormatSel {
+    /* Pixel format is RGB/BGR */
+    PIX_FMT_RGB,
+    /* Pixel format is YUV */
+    PIX_FMT_YUV,
+    /* Pixel format is BAYER */
+    PIX_FMT_BAYER
+} PIXFormatSel;
+
+/* Formats entry in the list of descriptors for supported formats. */
+typedef struct PIXFormat {
+    /* "FOURCC" (V4L2_PIX_FMT_XXX) format type. */
+    uint32_t fourcc_type;
+    /* RGB/YUV/BAYER format selector */
+    PIXFormatSel format_sel;
+    /* Format descriptor; which member is valid is determined by 'format_sel'. */
+    union {
+        /* References RGB format descriptor for that format. */
+        const RGBDesc* rgb_desc;
+        /* References YUV format descriptor for that format. */
+        const YUVDesc* yuv_desc;
+        /* References BAYER format descriptor for that format. */
+        const BayerDesc* bayer_desc;
+    } desc;
+} PIXFormat;
+
+/* Array of supported pixel format descriptors.
+ * Note: _get_pixel_format_descriptor returns the first entry that matches the
+ * looked-up "fourcc" type, so each V4L2_PIX_FMT_XXX value must be listed here
+ * at most once (duplicate YVYU / VYUY entries were dead and have been
+ * removed). */
+static const PIXFormat _PIXFormats[] = {
+    /* RGB/BRG formats. */
+    { V4L2_PIX_FMT_RGB32,   PIX_FMT_RGB,   .desc.rgb_desc = &_RGB32   },
+    { V4L2_PIX_FMT_BGR32,   PIX_FMT_RGB,   .desc.rgb_desc = &_BRG32   },
+    { V4L2_PIX_FMT_RGB565,  PIX_FMT_RGB,   .desc.rgb_desc = &_RGB16   },
+    { V4L2_PIX_FMT_RGB24,   PIX_FMT_RGB,   .desc.rgb_desc = &_RGB24   },
+    { V4L2_PIX_FMT_BGR24,   PIX_FMT_RGB,   .desc.rgb_desc = &_BRG24   },
+
+    /* YUV 4:2:0 formats. */
+    { V4L2_PIX_FMT_YVU420,  PIX_FMT_YUV,   .desc.yuv_desc = &_YV12    },
+    { V4L2_PIX_FMT_YUV420,  PIX_FMT_YUV,   .desc.yuv_desc = &_YU12    },
+    { V4L2_PIX_FMT_NV12,    PIX_FMT_YUV,   .desc.yuv_desc = &_NV12    },
+    { V4L2_PIX_FMT_NV21,    PIX_FMT_YUV,   .desc.yuv_desc = &_NV21    },
+
+    /* YUV 4:2:2 formats. */
+    { V4L2_PIX_FMT_YUYV,    PIX_FMT_YUV,   .desc.yuv_desc = &_YUYV    },
+    { V4L2_PIX_FMT_YYUV,    PIX_FMT_YUV,   .desc.yuv_desc = &_YYUV    },
+    { V4L2_PIX_FMT_YVYU,    PIX_FMT_YUV,   .desc.yuv_desc = &_YVYU    },
+    { V4L2_PIX_FMT_UYVY,    PIX_FMT_YUV,   .desc.yuv_desc = &_UYVY    },
+    { V4L2_PIX_FMT_VYUY,    PIX_FMT_YUV,   .desc.yuv_desc = &_VYUY    },
+    { V4L2_PIX_FMT_YYVU,    PIX_FMT_YUV,   .desc.yuv_desc = &_YYVU    },
+    { V4L2_PIX_FMT_YUY2,    PIX_FMT_YUV,   .desc.yuv_desc = &_YUYV    },
+    { V4L2_PIX_FMT_YUNV,    PIX_FMT_YUV,   .desc.yuv_desc = &_YUYV    },
+    { V4L2_PIX_FMT_V422,    PIX_FMT_YUV,   .desc.yuv_desc = &_YUYV    },
+
+    /* BAYER formats. */
+    { V4L2_PIX_FMT_SBGGR8,  PIX_FMT_BAYER, .desc.bayer_desc = &_BG8   },
+    { V4L2_PIX_FMT_SGBRG8,  PIX_FMT_BAYER, .desc.bayer_desc = &_GB8   },
+    { V4L2_PIX_FMT_SGRBG8,  PIX_FMT_BAYER, .desc.bayer_desc = &_GR8   },
+    { V4L2_PIX_FMT_SRGGB8,  PIX_FMT_BAYER, .desc.bayer_desc = &_RG8   },
+    { V4L2_PIX_FMT_SBGGR10, PIX_FMT_BAYER, .desc.bayer_desc = &_BG10  },
+    { V4L2_PIX_FMT_SGBRG10, PIX_FMT_BAYER, .desc.bayer_desc = &_GB10  },
+    { V4L2_PIX_FMT_SGRBG10, PIX_FMT_BAYER, .desc.bayer_desc = &_GR10  },
+    { V4L2_PIX_FMT_SRGGB10, PIX_FMT_BAYER, .desc.bayer_desc = &_RG10  },
+    { V4L2_PIX_FMT_SBGGR12, PIX_FMT_BAYER, .desc.bayer_desc = &_BG12  },
+    { V4L2_PIX_FMT_SGBRG12, PIX_FMT_BAYER, .desc.bayer_desc = &_GB12  },
+    { V4L2_PIX_FMT_SGRBG12, PIX_FMT_BAYER, .desc.bayer_desc = &_GR12  },
+    { V4L2_PIX_FMT_SRGGB12, PIX_FMT_BAYER, .desc.bayer_desc = &_RG12  },
+};
+static const int _PIXFormats_num = sizeof(_PIXFormats) / sizeof(*_PIXFormats);
+
+/* Get an entry in the array of supported pixel format descriptors.
+ * Param:
+ *  pixel_format - "fourcc" pixel format to lookup an entry for.
+ * Return:
+ *  Pointer to the found entry, or NULL if no entry exists for the given pixel
+ *  format. A warning is logged in the latter case.
+ */
+static const PIXFormat*
+_get_pixel_format_descriptor(uint32_t pixel_format)
+{
+    int f;
+    /* Linear search: the first matching entry wins. */
+    for (f = 0; f < _PIXFormats_num; f++) {
+        if (_PIXFormats[f].fourcc_type == pixel_format) {
+            return &_PIXFormats[f];
+        }
+    }
+    /* "fourcc" values are four ASCII characters packed into a uint32_t, so
+     * the format is printed via its raw bytes with "%.4s". */
+    W("%s: Pixel format %.4s is unknown",
+      __FUNCTION__, (const char*)&pixel_format);
+    return NULL;
+}
+
+/********************************************************************************
+ * Public API
+ *******************************************************************************/
+
+int
+has_converter(uint32_t from, uint32_t to)
+{
+    /* When the formats match no conversion is required (the frame is simply
+     * copied), so a "converter" always exists. */
+    if (from == to)
+        return 1;
+    /* Otherwise a conversion exists only when both formats are known. Keep
+     * the checks sequential so the second lookup (and its logging) is skipped
+     * when the first one fails. */
+    if (_get_pixel_format_descriptor(from) == NULL)
+        return 0;
+    return _get_pixel_format_descriptor(to) != NULL;
+}
+
+/* Converts a captured frame into each of the given client framebuffers.
+ * See camera-format-converters.h for the full contract.
+ * Param:
+ *  frame - Frame to convert.
+ *  pixel_format - "fourcc" format of the source frame.
+ *  framebuffer_size - Byte size of the source frame (used for the plain-copy
+ *      path when source and destination formats match).
+ *  width, height - Frame dimensions in pixels.
+ *  framebuffers - Array of destination framebuffers; each must be large
+ *      enough to hold the converted frame.
+ *  fbs_num - Number of entries in the 'framebuffers' array.
+ * Return:
+ *  0 on success, or -1 if either format is unknown.
+ */
+int
+convert_frame(const void* frame,
+              uint32_t pixel_format,
+              size_t framebuffer_size,
+              int width,
+              int height,
+              ClientFrameBuffer* framebuffers,
+              int fbs_num)
+{
+    int n;
+    const PIXFormat* src_desc = _get_pixel_format_descriptor(pixel_format);
+    if (src_desc == NULL) {
+        E("%s: Source pixel format %.4s is unknown",
+          __FUNCTION__, (const char*)&pixel_format);
+        return -1;
+    }
+
+    for (n = 0; n < fbs_num; n++) {
+        if (framebuffers[n].pixel_format == pixel_format) {
+            /* Same pixel format. No conversion needed: just make a copy. */
+            memcpy(framebuffers[n].framebuffer, frame, framebuffer_size);
+        } else {
+            const PIXFormat* dst_desc =
+                _get_pixel_format_descriptor(framebuffers[n].pixel_format);
+            if (dst_desc == NULL) {
+                E("%s: Destination pixel format %.4s is unknown",
+                  __FUNCTION__, (const char*)&framebuffers[n].pixel_format);
+                return -1;
+            }
+            /* Dispatch on the (source, destination) format family pair. */
+            switch (src_desc->format_sel) {
+                case PIX_FMT_RGB:
+                    if (dst_desc->format_sel == PIX_FMT_RGB) {
+                        RGBToRGB(src_desc->desc.rgb_desc, dst_desc->desc.rgb_desc,
+                                 frame, framebuffers[n].framebuffer, width, height);
+                    } else if (dst_desc->format_sel == PIX_FMT_YUV) {
+                        RGBToYUV(src_desc->desc.rgb_desc, dst_desc->desc.yuv_desc,
+                                 frame, framebuffers[n].framebuffer, width, height);
+                    } else {
+                        /* Note: BAYER destinations are not supported. */
+                        E("%s: Unexpected destination pixel format %d",
+                          __FUNCTION__, dst_desc->format_sel);
+                        return -1;
+                    }
+                    break;
+                case PIX_FMT_YUV:
+                    if (dst_desc->format_sel == PIX_FMT_RGB) {
+                        YUVToRGB(src_desc->desc.yuv_desc, dst_desc->desc.rgb_desc,
+                                 frame, framebuffers[n].framebuffer, width, height);
+                    } else if (dst_desc->format_sel == PIX_FMT_YUV) {
+                        YUVToYUV(src_desc->desc.yuv_desc, dst_desc->desc.yuv_desc,
+                                 frame, framebuffers[n].framebuffer, width, height);
+                    } else {
+                        E("%s: Unexpected destination pixel format %d",
+                          __FUNCTION__, dst_desc->format_sel);
+                        return -1;
+                    }
+                    break;
+                case PIX_FMT_BAYER:
+                    if (dst_desc->format_sel == PIX_FMT_RGB) {
+                        BAYERToRGB(src_desc->desc.bayer_desc, dst_desc->desc.rgb_desc,
+                                   frame, framebuffers[n].framebuffer, width, height);
+                    } else if (dst_desc->format_sel == PIX_FMT_YUV) {
+                        BAYERToYUV(src_desc->desc.bayer_desc, dst_desc->desc.yuv_desc,
+                                   frame, framebuffers[n].framebuffer, width, height);
+                    } else {
+                        E("%s: Unexpected destination pixel format %d",
+                          __FUNCTION__, dst_desc->format_sel);
+                        return -1;
+                    }
+                    break;
+                default:
+                    /* Fixed: this branch previously reported the *destination*
+                     * selector while complaining about the source format. */
+                    E("%s: Unexpected source pixel format %d",
+                      __FUNCTION__, src_desc->format_sel);
+                    return -1;
+            }
+        }
+    }
+
+    return 0;
+}
diff --git a/android/camera/camera-format-converters.h b/android/camera/camera-format-converters.h
new file mode 100755
index 0000000..6f1b492
--- /dev/null
+++ b/android/camera/camera-format-converters.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_CAMERA_FORMAT_CONVERTERS_H
+#define ANDROID_CAMERA_CAMERA_FORMAT_CONVERTERS_H
+
+/*
+ * Contains declaration of the API that allows converting frames from one
+ * pixel format to another.
+ *
+ * For the emulator, we really need to convert into two formats: YV12, which is
+ * used by the camera framework for video, and RGB32 for preview window.
+ */
+
+#include "camera-common.h"
+
+/* Checks if conversion between two pixel formats is available.
+ * Param:
+ * from - Pixel format to convert from.
+ * to - Pixel format to convert to.
+ * Return:
+ * boolean: 1 if converter is available, or 0 if no conversion exists.
+ */
+extern int has_converter(uint32_t from, uint32_t to);
+
+/* Converts a frame into multiple framebuffers.
+ * When camera service replies to a framebuffer request from the client, it
+ * usually sends two framebuffers in the reply: one for video, and another for
+ * preview window. Since these two framebuffers have different pixel formats
+ * (most of the time), we need to do two conversions for each frame received from
+ * the camera. This is the main intention behind this routine: to have a one call
+ * that produces as many conversions as needed.
+ * Param:
+ * frame - Frame to convert.
+ * pixel_format - Defines pixel format for the converting framebuffer.
+ * framebuffer_size, width, height - Converting framebuffer byte size, width,
+ * and height.
+ * framebuffers - Array of framebuffers where to convert the frame. Size of this
+ * array is defined by the 'fbs_num' parameter. Note that the caller must
+ * make sure that buffers are large enough to contain entire frame captured
+ * from the device.
+ * fbs_num - Number of entries in the 'framebuffers' array.
+ * Return:
+ * 0 on success, or non-zero value on failure.
+*/
+extern int convert_frame(const void* frame,
+ uint32_t pixel_format,
+ size_t framebuffer_size,
+ int width,
+ int height,
+ ClientFrameBuffer* framebuffers,
+ int fbs_num);
+
+#endif /* ANDROID_CAMERA_CAMERA_FORMAT_CONVERTERS_H */
diff --git a/android/camera/camera-service.c b/android/camera/camera-service.c
new file mode 100644
index 0000000..e551dca
--- /dev/null
+++ b/android/camera/camera-service.c
@@ -0,0 +1,1468 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains emulated camera service implementation.
+ */
+
+#include "qemu-common.h"
+#include "android/globals.h" /* for android_hw */
+#include "android/hw-qemud.h"
+#include "android/utils/misc.h"
+#include "android/utils/system.h"
+#include "android/utils/debug.h"
+#include "android/camera/camera-capture.h"
+#include "android/camera/camera-format-converters.h"
+#include "android/camera/camera-service.h"
+
+#define E(...) derror(__VA_ARGS__)
+#define W(...) dwarning(__VA_ARGS__)
+#define D(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#define D_ACTIVE VERBOSE_CHECK(camera)
+
+/* the T(...) macro is used to dump traffic */
+#define T_ACTIVE 0
+
+#if T_ACTIVE
+#define T(...) VERBOSE_PRINT(camera,__VA_ARGS__)
+#else
+#define T(...) ((void)0)
+#endif
+
+/* Defines name of the camera service. */
+#define SERVICE_NAME        "camera"
+
+/* Maximum number of supported emulated cameras: host cameras beyond this
+ * count cannot be exposed to the guest. */
+#define MAX_CAMERA          8
+
+/* Camera sevice descriptor. */
+typedef struct CameraServiceDesc CameraServiceDesc;
+struct CameraServiceDesc {
+    /* Information about camera devices connected to the host.
+     * Note that once initialized, entries in this array are considered to be
+     * constant. */
+    CameraInfo  camera_info[MAX_CAMERA];
+    /* Number of camera devices connected to the host. */
+    int         camera_count;
+};
+
+/* One and only one camera service. */
+static CameraServiceDesc    _camera_service_desc;
+
+/********************************************************************************
+ * Helper routines
+ *******************************************************************************/
+
+/* A strict 'int' version of the 'strtol'.
+ * This routine is implemented on top of the standard 'strtol' for 32/64 bit
+ * portability: values that overflow the 'int' range are clamped to
+ * INT_MAX / INT_MIN with errno set to ERANGE.
+ */
+static int
+strtoi(const char *nptr, char **endptr, int base)
+{
+    long parsed;
+
+    errno = 0;
+    parsed = strtol(nptr, endptr, base);
+    if (errno != 0) {
+        /* 'strtol' itself overflowed: clamp to the matching 'int' limit. */
+        return (parsed == LONG_MAX) ? INT_MAX : INT_MIN;
+    }
+    if ((long)(int)parsed != parsed) {
+        /* Fits in a 'long', but not in an 'int' (64-bit 'long' case). */
+        errno = ERANGE;
+        return (parsed > 0) ? INT_MAX : INT_MIN;
+    }
+    return (int)parsed;
+}
+
+/* Gets a parameter value out of the parameter string.
+ * All parameters that are passed to the camera service are formatted as such:
+ *      "<name1>=<value1> <name2>=<value2> ... <nameN>=<valueN>"
+ * I.e.:
+ *  - Every parameter must have a name, and a value.
+ *  - Name and value must be separated with '='.
+ *  - No spaces are allowed around '=' separating name and value.
+ *  - Parameters must be separated with a single ' ' character.
+ *  - No '=' character is allowed in name and in value.
+ * Param:
+ *  params - String, containing the parameters.
+ *  name - Parameter name.
+ *  value - Upon success contains value for the given parameter.
+ *  val_size - Size of the 'value' string buffer.
+ * Return:
+ *  0 on success, -1 if requested parameter is not found, or (a positive) number
+ *  of bytes, required to make a copy of the parameter's value if 'value' string
+ *  was too small to contain it.
+ */
+static int
+_get_param_value(const char* params, const char* name, char* value, int val_size)
+{
+    const char* val_end;
+    int len = strlen(name);
+    const char* par_end = params + strlen(params);
+    const char* par_start = strstr(params, name);
+
+    /* Search for 'name=': a substring match alone is not enough, since 'name'
+     * could also occur inside another parameter's name or value. */
+    while (par_start != NULL) {
+        /* Make sure that we're within the parameters buffer. */
+        if ((par_end - par_start) < len) {
+            par_start = NULL;
+            break;
+        }
+        /* Make sure that par_start starts at the beginning of <name>, and only
+         * then check for '=' value separator. */
+        if ((par_start == params || (*(par_start - 1) == ' ')) &&
+                par_start[len] == '=') {
+            break;
+        }
+        /* False positive. Move on... */
+        par_start = strstr(par_start + 1, name);
+    }
+    if (par_start == NULL) {
+        return -1;
+    }
+
+    /* Advance past 'name=', and calculate value's string length: the value
+     * extends to the next ' ' separator, or to the end of the string. */
+    par_start += len + 1;
+    val_end = strchr(par_start, ' ');
+    if (val_end == NULL) {
+        val_end = par_start + strlen(par_start);
+    }
+    len = val_end - par_start;
+
+    /* Check if fits... */
+    if ((len + 1) <= val_size) {
+        memcpy(value, par_start, len);
+        value[len] = '\0';
+        return 0;
+    } else {
+        return len + 1;
+    }
+}
+
+/* Gets a parameter value out of the parameter string.
+ * This routine is similar to _get_param_value, except it will always allocate
+ * a string buffer for the value.
+ * Param:
+ *  params - String, containing the parameters.
+ *  name - Parameter name.
+ *  value - Upon success contains an allocated string containing the value for
+ *      the given parameter. The caller is responsible for freeing the buffer
+ *      returned in this parameter on success.
+ * Return:
+ *  0 on success, -1 if requested parameter is not found, or -2 on
+ *  memory failure.
+ */
+static int
+_get_param_value_alloc(const char* params, const char* name, char** value)
+{
+    char tmp;
+    int res;
+
+    /* Calculate size of string buffer required for the value: a zero-sized
+     * probe makes _get_param_value return the required byte count (or -1 if
+     * the parameter is missing). */
+    const int val_size = _get_param_value(params, name, &tmp, 0);
+    if (val_size < 0) {
+        *value = NULL;
+        return val_size;
+    }
+
+    /* Allocate string buffer, and retrieve the value. */
+    *value = (char*)malloc(val_size);
+    if (*value == NULL) {
+        E("%s: Unable to allocated %d bytes for string buffer.",
+          __FUNCTION__, val_size);
+        return -2;
+    }
+    /* Must succeed given the sizing probe above; guard anyway. */
+    res = _get_param_value(params, name, *value, val_size);
+    if (res) {
+        E("%s: Unable to retrieve value into allocated buffer.", __FUNCTION__);
+        free(*value);
+        *value = NULL;
+    }
+
+    return res;
+}
+
+/* Gets an integer parameter value out of the parameter string.
+ * Param:
+ *  params - String, containing the parameters. See comments to _get_param_value
+ *      routine on the parameters format.
+ *  name - Parameter name. Parameter value must be a decimal number.
+ *  value - Upon success contains integer value for the given parameter.
+ * Return:
+ *  0 on success, or -1 if requested parameter is not found, or -2 if parameter's
+ *  format was bad (i.e. value was not a decimal number).
+ */
+static int
+_get_param_value_int(const char* params, const char* name, int* value)
+{
+    char val_str[64];   // Should be enough for all numeric values.
+    if (!_get_param_value(params, name, val_str, sizeof(val_str))) {
+        char* end;
+        errno = 0;
+        *value = strtoi(val_str, &end, 10);
+        /* 'strtol' (and thus 'strtoi') leaves errno untouched when no
+         * conversion is performed, so non-numeric values such as "abc" (or
+         * trailing garbage as in "12abc") must be rejected via the end
+         * pointer, not errno alone. */
+        if (errno || end == val_str || *end != '\0') {
+            E("%s: Value '%s' of the parameter '%s' in '%s' is not a decimal number.",
+              __FUNCTION__, val_str, name, params);
+            return -2;
+        }
+        return 0;
+    } else {
+        return -1;
+    }
+}
+
+/* Extracts query name, and (optionally) query parameters from the query string.
+ * Queries are formatted as "<query name>[ <parameters>]": parameters are
+ * optional, and if present are separated from the name with a single ' '.
+ * See the comments to the _get_param_value routine for the format of the
+ * parameters string.
+ * Param:
+ *  query - Query string to parse.
+ *  query_name - Upon success contains the query name.
+ *  query_name_size - Byte size of the 'query_name' buffer.
+ *  query_param - Optional (can be NULL). Upon success contains a pointer to
+ *      the beginning of the query parameters, or NULL if the query has no
+ *      parameters.
+ * Return:
+ *  0 on success, or the number of bytes required for the query name if the
+ *  'query_name' buffer was too small to contain it.
+ */
+static int
+_parse_query(const char* query,
+             char* query_name,
+             int query_name_size,
+             const char** query_param)
+{
+    /* The name is everything up to the first ' ', or the whole string. */
+    const char* name_end = strchr(query, ' ');
+    int name_len;
+    if (name_end == NULL) {
+        name_end = query + strlen(query);
+    }
+    name_len = name_end - query;
+
+    /* Report the required buffer size when the name doesn't fit. */
+    if (name_len >= query_name_size) {
+        return name_len + 1;
+    }
+    memcpy(query_name, query, name_len);
+    query_name[name_len] = '\0';
+
+    if (query_param != NULL) {
+        /* Skip the separating ' '; whatever remains is the parameters. */
+        const char* params = name_end;
+        if (*params == ' ') {
+            params++;
+        }
+        *query_param = (*params == '\0') ? NULL : params;
+    }
+
+    return 0;
+}
+
+/* Appends one string to another, growing the destination string buffer if
+ * needed.
+ * Param:
+ *  str_buf - Contains pointer to the destination string buffer. Content of
+ *      this parameter can be NULL. Note that content of this parameter will
+ *      change if string buffer has been reallocated.
+ *  str_buf_size - Contains current buffer size of the string, addressed by
+ *      'str_buf' parameter. Note that content of this parameter will change
+ *      if string buffer has been reallocated.
+ *  str - String to append.
+ * Return:
+ *  0 on success, or -1 on failure (memory allocation).
+ */
+static int
+_append_string(char** str_buf, size_t* str_buf_size, const char* str)
+{
+    const size_t offset = (*str_buf != NULL) ? strlen(*str_buf) : 0;
+    const size_t append_bytes = strlen(str) + 1;
+
+    /* Make sure these two match. */
+    if (*str_buf == NULL) {
+        *str_buf_size = 0;
+    }
+
+    if ((offset + append_bytes) > *str_buf_size) {
+        /* Reallocate string, so it can fit what's being append to it. Note that
+         * we reallocate a bit bigger buffer than is needed in order to minimize
+         * number of memory allocation calls in case there are more "appends"
+         * coming. */
+        const size_t required_mem = offset + append_bytes + 256;
+        char* new_buf = (char*)realloc(*str_buf, required_mem);
+        if (new_buf == NULL) {
+            /* Cast explicitly: passing a 'size_t' for an 'int' conversion
+             * specifier is undefined behavior on LP64 targets. */
+            E("%s: Unable to allocate %u bytes for a string",
+              __FUNCTION__, (unsigned int)required_mem);
+            return -1;
+        }
+        *str_buf = new_buf;
+        *str_buf_size = required_mem;
+    }
+    memcpy(*str_buf + offset, str, append_bytes);
+
+    return 0;
+}
+
+/* Represents camera information as a string formatted as follows:
+ *  'name=<devname> channel=<num> pix=<format> dir=<direction> framedims=<width1xheight1,...>\n'
+ * Param:
+ *  ci - Camera information descriptor to convert into a string.
+ *  str - Pointer to the string buffer where to save the converted camera
+ *      information descriptor. On entry, content of this parameter can be NULL.
+ *      Note that string buffer addressed with this parameter may be reallocated
+ *      in this routine, so (if not NULL) it must contain a buffer allocated with
+ *      malloc. The caller is responsible for freeing string buffer returned in
+ *      this parameter.
+ *  str_size - Contains byte size of the buffer addressed by 'str' parameter.
+ * Return:
+ *  0 on success, or != 0 on failure.
+ */
+static int
+_camera_info_to_string(const CameraInfo* ci, char** str, size_t* str_size) {
+    int res;
+    int n;
+    char tmp[128];
+
+    /* Append device name. */
+    snprintf(tmp, sizeof(tmp), "name=%s ", ci->device_name);
+    res = _append_string(str, str_size, tmp);
+    if (res) {
+        return res;
+    }
+    /* Append input channel. */
+    snprintf(tmp, sizeof(tmp), "channel=%d ", ci->inp_channel);
+    res = _append_string(str, str_size, tmp);
+    if (res) {
+        return res;
+    }
+    /* Append pixel format. */
+    snprintf(tmp, sizeof(tmp), "pix=%d ", ci->pixel_format);
+    res = _append_string(str, str_size, tmp);
+    if (res) {
+        return res;
+    }
+    /* Append direction. */
+    snprintf(tmp, sizeof(tmp), "dir=%s ", ci->direction);
+    res = _append_string(str, str_size, tmp);
+    if (res) {
+        return res;
+    }
+    /* Append supported frame sizes.
+     * NOTE(review): frame_sizes[0] is accessed unconditionally — assumes
+     * ci->frame_sizes_num >= 1 for every enumerated camera; confirm with the
+     * enumeration code. */
+    snprintf(tmp, sizeof(tmp), "framedims=%dx%d",
+             ci->frame_sizes[0].width, ci->frame_sizes[0].height);
+    res = _append_string(str, str_size, tmp);
+    if (res) {
+        return res;
+    }
+    for (n = 1; n < ci->frame_sizes_num; n++) {
+        snprintf(tmp, sizeof(tmp), ",%dx%d",
+                 ci->frame_sizes[n].width, ci->frame_sizes[n].height);
+        res = _append_string(str, str_size, tmp);
+        if (res) {
+            return res;
+        }
+    }
+
+    /* Stringified camera properties should end with EOL. */
+    return _append_string(str, str_size, "\n");
+}
+
+/* Looks up a camera information record by its display name.
+ * Param:
+ *  disp_name - Display name to look for.
+ *  arr - Array of camera information records to search in.
+ *  num - Number of records in the array.
+ * Return:
+ *  Pointer to the matching record, or NULL if no record in the array carries
+ *  the given display name.
+ */
+static CameraInfo*
+_camera_info_get_by_display_name(const char* disp_name, CameraInfo* arr, int num)
+{
+    int i = 0;
+    while (i < num) {
+        CameraInfo* const ci = &arr[i];
+        if (ci->display_name != NULL && strcmp(ci->display_name, disp_name) == 0) {
+            return ci;
+        }
+        i++;
+    }
+    return NULL;
+}
+
+/* Looks up a camera information record by its device name.
+ * Param:
+ *  device_name - Device name to look for.
+ *  arr - Array of camera information records to search in.
+ *  num - Number of records in the array.
+ * Return:
+ *  Pointer to the matching record, or NULL if no record in the array carries
+ *  the given device name.
+ */
+static CameraInfo*
+_camera_info_get_by_device_name(const char* device_name, CameraInfo* arr, int num)
+{
+    int i = 0;
+    while (i < num) {
+        CameraInfo* const ci = &arr[i];
+        if (ci->device_name != NULL && strcmp(ci->device_name, device_name) == 0) {
+            return ci;
+        }
+        i++;
+    }
+    return NULL;
+}
+
+/********************************************************************************
+ * CameraServiceDesc API
+ *******************************************************************************/
+
+/* Initializes camera service descriptor.
+ * Enumerates cameras connected to the host, matches them against the webcam
+ * entries declared in the AVD hardware configuration (android_hw), and
+ * registers every match with the service descriptor. Declared webcams that are
+ * not actually connected are skipped with a warning.
+ */
+static void
+_camera_service_init(CameraServiceDesc* csd)
+{
+ CameraInfo ci[MAX_CAMERA];
+ int connected_cnt;
+ int i;
+
+ /* Enumerate camera devices connected to the host. */
+ memset(ci, 0, sizeof(CameraInfo) * MAX_CAMERA);
+ memset(csd->camera_info, 0, sizeof(CameraInfo) * MAX_CAMERA);
+ csd->camera_count = 0;
+ connected_cnt = enumerate_camera_devices(ci, MAX_CAMERA);
+ if (connected_cnt <= 0) {
+ /* Nothing is connected - nothing to emulate. */
+ return;
+ }
+
+ /* For each webcam declared in hw.ini find an actual camera information
+ * descriptor, and save it into the service descriptor for the emulation.
+ * Stop the loop when all the connected cameras have been added to the
+ * service. */
+ for (i = 0; i < android_hw->hw_webcam_count &&
+ csd->camera_count < connected_cnt; i++) {
+ const char* disp_name;
+ const char* dir;
+ CameraInfo* found;
+
+ /* Each webcam index has a dedicated name/direction field pair in the
+ * hardware configuration. NOTE(review): 'case 5' doubles as 'default',
+ * so an index above 5 would silently alias webcam 5 - presumably
+ * hw_webcam_count never exceeds MAX_CAMERA; confirm. */
+ switch (i) {
+ case 0:
+ disp_name = android_hw->hw_webcam_0_name;
+ dir = android_hw->hw_webcam_0_direction;
+ break;
+ case 1:
+ disp_name = android_hw->hw_webcam_1_name;
+ dir = android_hw->hw_webcam_1_direction;
+ break;
+ case 2:
+ disp_name = android_hw->hw_webcam_2_name;
+ dir = android_hw->hw_webcam_2_direction;
+ break;
+ case 3:
+ disp_name = android_hw->hw_webcam_3_name;
+ dir = android_hw->hw_webcam_3_direction;
+ break;
+ case 4:
+ disp_name = android_hw->hw_webcam_4_name;
+ dir = android_hw->hw_webcam_4_direction;
+ break;
+ case 5:
+ default:
+ disp_name = android_hw->hw_webcam_5_name;
+ dir = android_hw->hw_webcam_5_direction;
+ break;
+ }
+ found = _camera_info_get_by_display_name(disp_name, ci, connected_cnt);
+ if (found != NULL) {
+ /* Save to the camera info array that will be used by the service.
+ * Note that we just copy everything over, and NULL the source
+ * record. */
+ memcpy(csd->camera_info + csd->camera_count, found, sizeof(CameraInfo));
+ /* Update direction parameter: the direction configured in hw.ini
+ * replaces whatever the copied record carried. The copy now owns
+ * the old string, so free it before overwriting. */
+ if (csd->camera_info[csd->camera_count].direction != NULL) {
+ free(csd->camera_info[csd->camera_count].direction);
+ }
+ csd->camera_info[csd->camera_count].direction = ASTRDUP(dir);
+ D("Camera %d '%s' connected to '%s' facing %s using %.4s pixel format",
+ csd->camera_count, csd->camera_info[csd->camera_count].display_name,
+ csd->camera_info[csd->camera_count].device_name,
+ csd->camera_info[csd->camera_count].direction,
+ (const char*)(&csd->camera_info[csd->camera_count].pixel_format));
+ csd->camera_count++;
+ /* Zero the source record so the same physical camera cannot match
+ * twice, and so pointers now owned by csd->camera_info are not
+ * also reachable through the source array. */
+ memset(found, 0, sizeof(CameraInfo));
+ } else {
+ W("Camera name '%s' is not found in the list of connected cameras.\n"
+ "Use '-webcam list' emulator option to obtain the list of connected camera names.\n",
+ disp_name);
+ }
+ }
+
+ /* Make sure that camera 0 and camera 1 are facing in opposite directions.
+ * If they don't the camera application will crash on an attempt to switch
+ * cameras. */
+ if (csd->camera_count > 0) {
+ const char* cam2_dir = NULL;
+ const char* cam2_name = NULL;
+ if (csd->camera_count >= 2) {
+ cam2_dir = csd->camera_info[1].direction;
+ cam2_name = csd->camera_info[1].display_name;
+ } else if (strcmp(android_hw->hw_fakeCamera, "off")) {
+ /* Only one real camera: when the fake camera is enabled, its
+ * configured value acts as the second camera's facing direction. */
+ cam2_dir = android_hw->hw_fakeCamera;
+ cam2_name = "fake camera";
+ }
+ if (cam2_dir != NULL && !strcmp(csd->camera_info[0].direction, cam2_dir)) {
+ W("Cameras '%s' and '%s' are both facing %s.\n"
+ "It is required by the camera application that first two emulated cameras\n"
+ "are facing in opposite directions. If they both are facing in the same direction,\n"
+ "the camera application will crash on an attempt to switch the camera.\n",
+ csd->camera_info[0].display_name, cam2_name, cam2_dir);
+
+ }
+ }
+}
+
+/* Looks up camera information for the given camera device name.
+ * Param:
+ *  cs - Initialized camera service descriptor.
+ *  device_name - Device name of the camera to find.
+ * Return:
+ *  Camera information pointer on success, or NULL if the service knows of no
+ *  camera with the given device name.
+ */
+static CameraInfo*
+_camera_service_get_camera_info_by_device_name(CameraServiceDesc* cs,
+                                               const char* device_name)
+{
+    /* Delegate to the generic lookup over the cameras registered with the
+     * service. */
+    return _camera_info_get_by_device_name(device_name, cs->camera_info,
+                                           cs->camera_count);
+}
+
+/********************************************************************************
+ * Helpers for handling camera client queries
+ *******************************************************************************/
+
+/* Formats payload size according to the protocol, and sends it to the client.
+ * To simplify endianness handling we convert payload size to an eight character
+ * string, representing payload size value in hexadecimal format.
+ * Param:
+ *  qc - Qemu client to send the payload size to.
+ *  payload_size - Payload size to report to the client.
+ */
+static void
+_qemu_client_reply_payload(QemudClient* qc, size_t payload_size)
+{
+    char payload_size_str[9];
+    /* 'payload_size' is a size_t, while '%08x' consumes an unsigned int:
+     * passing it unconverted is undefined behavior on platforms where the two
+     * types differ in size (e.g. LP64), so cast it explicitly. */
+    snprintf(payload_size_str, sizeof(payload_size_str), "%08x",
+             (unsigned int)payload_size);
+    qemud_client_send(qc, (const uint8_t*)payload_size_str, 8);
+}
+
+/*
+ * Prefixes for replies to camera client queries.
+ * Each prefix is two characters; together with the ':' separator (or the
+ * zero-terminator when there is no extra data) a reply prefix is always
+ * transmitted as exactly three bytes (see _qemu_client_query_reply).
+ */
+
+/* Success, no data to send in reply. */
+#define OK_REPLY "ok"
+/* Failure, no data to send in reply. */
+#define KO_REPLY "ko"
+/* Success, there are data to send in reply. */
+#define OK_REPLY_DATA OK_REPLY ":"
+/* Failure, there are data to send in reply. */
+#define KO_REPLY_DATA KO_REPLY ":"
+
+/* Builds and sends a reply to a query.
+ * Every reply starts with a prefix that tells whether the query succeeded
+ * ("ok") or failed ("ko"). When extra response data is present it follows the
+ * prefix, separated from it by a ':' character; otherwise the prefix is sent
+ * zero-terminated. Either way the prefix occupies exactly 3 bytes.
+ * Param:
+ *  qc - Qemu client to send the reply to.
+ *  ok_ko - Success/failure selector: 0 replies "ko", anything else "ok".
+ *  extra - Optional extra response data. Can be NULL.
+ *  extra_size - Byte size of the extra data.
+ */
+static void
+_qemu_client_query_reply(QemudClient* qc,
+                         int ok_ko,
+                         const void* extra,
+                         size_t extra_size)
+{
+    const char* prefix;
+    size_t payload_size;
+
+    /* A non-zero size with no data is a caller bug: warn, and ignore it. */
+    if (extra == NULL && extra_size != 0) {
+        W("%s: 'extra' = NULL, while 'extra_size' = %d",
+          __FUNCTION__, (int)extra_size);
+        extra_size = 0;
+    }
+
+    if (extra_size == 0) {
+        /* No extra data: just the zero-terminated 'ok'/'ko'. */
+        prefix = ok_ko ? OK_REPLY : KO_REPLY;
+        payload_size = 3;
+    } else {
+        /* 2 'ok'/'ko' bytes + 1 ':' separator byte + the extra data. */
+        prefix = ok_ko ? OK_REPLY_DATA : KO_REPLY_DATA;
+        payload_size = extra_size + 3;
+    }
+
+    /* Wire order: payload size first, then the 3-byte prefix, then the
+     * optional extra data. */
+    _qemu_client_reply_payload(qc, payload_size);
+    qemud_client_send(qc, (const uint8_t*)prefix, 3);
+    if (extra != NULL) {
+        qemud_client_send(qc, (const uint8_t*)extra, extra_size);
+    }
+}
+
+/* Replies query success ("OK") back to the client.
+ * Param:
+ *  qc - Qemu client to send the reply to.
+ *  ok_str - An optional string containing query results. Can be NULL.
+ */
+static void
+_qemu_client_reply_ok(QemudClient* qc, const char* ok_str)
+{
+    /* When a result string is given, its zero-terminator travels with it. */
+    const size_t data_size = (ok_str == NULL) ? 0 : strlen(ok_str) + 1;
+    _qemu_client_query_reply(qc, 1, ok_str, data_size);
+}
+
+/* Replies query failure ("KO") back to the client.
+ * Param:
+ *  qc - Qemu client to send the reply to.
+ *  ko_str - An optional string containing reason for failure. Can be NULL.
+ */
+static void
+_qemu_client_reply_ko(QemudClient* qc, const char* ko_str)
+{
+    /* When a reason string is given, its zero-terminator travels with it. */
+    const size_t data_size = (ko_str == NULL) ? 0 : strlen(ko_str) + 1;
+    _qemu_client_query_reply(qc, 0, ko_str, data_size);
+}
+
+/********************************************************************************
+ * Camera Factory API
+ *******************************************************************************/
+
+/* Handles the 'list' query received from the factory client.
+ * The response is a string where each connected camera is represented as:
+ * 'name=devname framedims=widh1xheight1,widh2xheight2,widhNxheightN\n'
+ * (one EOL-terminated entry per camera).
+ * Param:
+ *  csd, client - Factory serivice, and client.
+ * Return:
+ *  0 on success, or != 0 on failure.
+ */
+static int
+_factory_client_list_cameras(CameraServiceDesc* csd, QemudClient* client)
+{
+    char* reply = NULL;
+    size_t reply_size = 0;
+    int n;
+
+    /* With no cameras connected there is nothing to list: reply with "\n". */
+    if (csd->camera_count == 0) {
+        _qemu_client_reply_ok(client, "\n");
+        return 0;
+    }
+
+    /* Serialize every emulated camera into the reply string. */
+    for (n = 0; n < csd->camera_count; n++) {
+        const int err =
+            _camera_info_to_string(csd->camera_info + n, &reply, &reply_size);
+        if (err) {
+            /* free(NULL) is well defined, so 'reply' needs no check here. */
+            free(reply);
+            _qemu_client_reply_ko(client, "Memory allocation error");
+            return err;
+        }
+    }
+
+    D("%s Replied: %s", __FUNCTION__, reply);
+    _qemu_client_reply_ok(client, reply);
+    free(reply);
+
+    return 0;
+}
+
+/* Handles a message received from the emulated camera factory client.
+ * Queries received here are represented as strings:
+ *  'list' - Queries list of cameras connected to the host.
+ * Param:
+ *  opaque - Camera service descriptor.
+ *  msg, msglen - Message received from the camera factory client.
+ *  client - Camera factory client pipe.
+ */
+static void
+_factory_client_recv(void* opaque,
+                     uint8_t* msg,
+                     int msglen,
+                     QemudClient* client)
+{
+    /* The only query currently understood by the factory: list the cameras
+     * connected to the host. */
+    static const char _query_list[] = "list";
+
+    CameraServiceDesc* csd = (CameraServiceDesc*)opaque;
+    char query_name[64];
+    const char* query_param = NULL;
+
+    /* Split the query into its name and (optional) parameters. */
+    if (_parse_query((const char*)msg, query_name, sizeof(query_name),
+                     &query_param)) {
+        E("%s: Invalid format in query '%s'", __FUNCTION__, (const char*)msg);
+        _qemu_client_reply_ko(client, "Invalid query format");
+        return;
+    }
+
+    D("%s Camera factory query '%s'", __FUNCTION__, query_name);
+
+    /* Route the query to its handler. */
+    if (strcmp(query_name, _query_list) == 0) {
+        _factory_client_list_cameras(csd, client);
+    } else {
+        E("%s: Unknown camera factory query name in '%s'",
+          __FUNCTION__, (const char*)msg);
+        _qemu_client_reply_ko(client, "Unknown query name");
+    }
+}
+
+/* Emulated camera factory client has been disconnected from the service. */
+static void
+_factory_client_close(void* opaque)
+{
+    /* Nothing to release here: the factory service is merely an alias for the
+     * "root" camera service, and everything it needs is already owned by the
+     * camera service descriptor. */
+}
+
+/********************************************************************************
+ * Camera client API
+ *******************************************************************************/
+
+/* Describes an emulated camera client.
+ */
+typedef struct CameraClient CameraClient;
+struct CameraClient
+{
+ /* Client name.
+ * On Linux this is the name of the camera device.
+ * On Windows this is the name of capturing window.
+ */
+ char* device_name;
+ /* Input channel to use to connect to the camera. */
+ int inp_channel;
+ /* Camera information. Treated as read-only, except that its 'in_use' flag
+ * is cleared when this client is destroyed. */
+ const CameraInfo* camera_info;
+ /* Emulated camera device descriptor. NULL while disconnected. */
+ CameraDevice* camera;
+ /* Buffer allocated for video frames.
+ * Note that memory allocated for this buffer
+ * also contains preview framebuffer. NULL while capturing is stopped. */
+ uint8_t* video_frame;
+ /* Preview frame buffer.
+ * This address points inside the 'video_frame' buffer. */
+ uint16_t* preview_frame;
+ /* Byte size of the videoframe buffer. */
+ size_t video_frame_size;
+ /* Byte size of the preview frame buffer. */
+ size_t preview_frame_size;
+ /* Pixel format required by the guest (a V4L2_PIX_FMT_XXX value). */
+ uint32_t pixel_format;
+ /* Frame width. */
+ int width;
+ /* Frame height. */
+ int height;
+ /* Number of pixels in a frame buffer (width * height). */
+ int pixel_num;
+ /* Status of video and preview frame cache: set to 1 once at least one
+ * frame has been successfully read from the device. */
+ int frames_cached;
+};
+
+/* Frees emulated camera client descriptor, releasing everything it owns. */
+static void
+_camera_client_free(CameraClient* cc)
+{
+    /* The only exception to the "read only" camera info rule: the camera must
+     * be marked as no longer in use when its client is destroyed. */
+    if (cc->camera_info != NULL) {
+        ((CameraInfo*)cc->camera_info)->in_use = 0;
+    }
+    if (cc->camera != NULL) {
+        camera_device_close(cc->camera);
+    }
+    /* free(NULL) is well defined, so the buffers can be released without
+     * checking. */
+    free(cc->video_frame);
+    free(cc->device_name);
+
+    AFREE(cc);
+}
+
+/* Creates descriptor for a connecting emulated camera client.
+ * Param:
+ *  csd - Camera service descriptor.
+ *  param - Client parameters. Must be formatted as described in comments to
+ *      _get_param_value routine, and must contain at least 'name' parameter,
+ *      identifiying the camera device to create the service for. Also parameters
+ *      may contain a decimal 'inp_channel' parameter, selecting the input
+ *      channel to use when communicating with the camera device.
+ * Return:
+ *  Emulated camera client descriptor on success, or NULL on failure.
+ */
+static CameraClient*
+_camera_client_create(CameraServiceDesc* csd, const char* param)
+{
+    CameraClient* cc;
+    CameraInfo* ci;
+    int res;
+    ANEW0(cc);
+
+    /*
+     * Parse parameter string, containing camera client properties.
+     */
+
+    /* Pull required device name. */
+    if (_get_param_value_alloc(param, "name", &cc->device_name)) {
+        E("%s: Allocation failure, or required 'name' parameter is missing, or misformed in '%s'",
+          __FUNCTION__, param);
+        /* BUGFIX: this path used to return without freeing 'cc', leaking the
+         * allocation made by ANEW0. */
+        _camera_client_free(cc);
+        return NULL;
+    }
+
+    /* Pull optional input channel. */
+    res = _get_param_value_int(param, "inp_channel", &cc->inp_channel);
+    if (res != 0) {
+        if (res == -1) {
+            /* 'inp_channel' parameter has been ommited. Use default input
+             * channel, which is zero. */
+            cc->inp_channel = 0;
+        } else {
+            E("%s: 'inp_channel' parameter is misformed in '%s'",
+              __FUNCTION__, param);
+            /* BUGFIX: this path used to leak 'cc' and the device name it
+             * already owns. */
+            _camera_client_free(cc);
+            return NULL;
+        }
+    }
+
+    /* Get camera info for the emulated camera represented with this service.
+     * Array of camera information records has been created when the camera
+     * service was enumerating camera devices during the service initialization.
+     * By the camera service protocol, camera service clients must first obtain
+     * list of enumerated cameras via the 'list' query to the camera service, and
+     * then use device name reported in the list to connect to an emulated camera
+     * service. So, if camera information for the given device name is not found
+     * in the array, we fail this connection due to protocol violation. */
+    ci = _camera_service_get_camera_info_by_device_name(csd, cc->device_name);
+    if (ci == NULL) {
+        E("%s: Cannot find camera info for device '%s'",
+          __FUNCTION__, cc->device_name);
+        _camera_client_free(cc);
+        return NULL;
+    }
+
+    /* We can't allow multiple camera services for a single camera device, Lets
+     * make sure that there is no client created for this camera. */
+    if (ci->in_use) {
+        E("%s: Camera device '%s' is in use", __FUNCTION__, cc->device_name);
+        _camera_client_free(cc);
+        return NULL;
+    }
+
+    /* We're done. Set camera in use, and succeed the connection. */
+    ci->in_use = 1;
+    cc->camera_info = ci;
+
+    D("%s: Camera service is created for device '%s' using input channel %d",
+      __FUNCTION__, cc->device_name, cc->inp_channel);
+
+    return cc;
+}
+
+/********************************************************************************
+ * Camera client queries
+ *******************************************************************************/
+
+/* Client has queried connection to the camera.
+ * Param:
+ *  cc - Queried camera client descriptor.
+ *  qc - Qemu client for the emulated camera.
+ *  param - Query parameters. There are no parameters expected for this query.
+ */
+static void
+_camera_client_query_connect(CameraClient* cc, QemudClient* qc, const char* param)
+{
+    /* Connecting twice is not an error: just report success. */
+    if (cc->camera != NULL) {
+        W("%s: Camera '%s' is already connected", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ok(qc, "Camera is already connected");
+        return;
+    }
+
+    /* Open camera device. */
+    cc->camera = camera_device_open(cc->device_name, cc->inp_channel);
+    if (cc->camera == NULL) {
+        E("%s: Unable to open camera device '%s'", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ko(qc, "Unable to open camera device.");
+        return;
+    }
+
+    D("%s: Camera device '%s' is now connected", __FUNCTION__, cc->device_name);
+
+    _qemu_client_reply_ok(qc, NULL);
+}
+
+/* Client has queried disconnection from the camera.
+ * Param:
+ *  cc - Queried camera client descriptor.
+ *  qc - Qemu client for the emulated camera.
+ *  param - Query parameters. There are no parameters expected for this query.
+ */
+static void
+_camera_client_query_disconnect(CameraClient* cc,
+                                QemudClient* qc,
+                                const char* param)
+{
+    /* Disconnecting twice is not an error: just report success. */
+    if (cc->camera == NULL) {
+        W("%s: Camera '%s' is already disconnected", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ok(qc, "Camera is not connected");
+        return;
+    }
+
+    /* A non-NULL video frame buffer means capturing is still in progress: the
+     * camera must be stopped before it can be disconnected. */
+    if (cc->video_frame != NULL) {
+        E("%s: Cannot disconnect camera '%s' while it is not stopped",
+          __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ko(qc, "Camera is not stopped");
+        return;
+    }
+
+    /* Close camera device. */
+    camera_device_close(cc->camera);
+    cc->camera = NULL;
+
+    D("Camera device '%s' is now disconnected", cc->device_name);
+
+    _qemu_client_reply_ok(qc, NULL);
+}
+
+/* Client has queried the service to start capturing video.
+ * Param:
+ *  cc - Queried camera client descriptor.
+ *  qc - Qemu client for the emulated camera.
+ *  param - Query parameters. Parameters for this query must contain a 'dim', and
+ *      a 'pix' parameters, where 'dim' must be "dim=<width>x<height>", and 'pix'
+ *      must be "pix=<format>", where 'width' and 'height' must be numerical
+ *      values for the capturing video frame width, and height, and 'format' must
+ *      be a numerical value for the pixel format of the video frames expected by
+ *      the client. 'format' must be one of the V4L2_PIX_FMT_XXX values.
+ */
+static void
+_camera_client_query_start(CameraClient* cc, QemudClient* qc, const char* param)
+{
+    char* w;
+    char dim[64];
+    int width, height, pix_format;
+
+    /* Sanity check: the camera must be connected before it can be started. */
+    if (cc->camera == NULL) {
+        E("%s: Camera '%s' is not connected", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ko(qc, "Camera is not connected");
+        return;
+    }
+
+    /*
+     * Parse parameters.
+     */
+
+    if (param == NULL) {
+        E("%s: Missing parameters for the query", __FUNCTION__);
+        _qemu_client_reply_ko(qc, "Missing parameters for the query");
+        return;
+    }
+
+    /* Pull required 'dim' parameter. */
+    if (_get_param_value(param, "dim", dim, sizeof(dim))) {
+        E("%s: Invalid or missing 'dim' parameter in '%s'", __FUNCTION__, param);
+        _qemu_client_reply_ko(qc, "Invalid or missing 'dim' parameter");
+        return;
+    }
+
+    /* Pull required 'pix' parameter. */
+    if (_get_param_value_int(param, "pix", &pix_format)) {
+        E("%s: Invalid or missing 'pix' parameter in '%s'", __FUNCTION__, param);
+        _qemu_client_reply_ko(qc, "Invalid or missing 'pix' parameter");
+        return;
+    }
+
+    /* Parse 'dim' parameter ("<width>x<height>"), splitting it at the 'x'. */
+    w = strchr(dim, 'x');
+    if (w == NULL || w[1] == '\0') {
+        E("%s: Invalid 'dim' parameter in '%s'", __FUNCTION__, param);
+        _qemu_client_reply_ko(qc, "Invalid 'dim' parameter");
+        return;
+    }
+    *w = '\0'; w++;
+    errno = 0;
+    width = strtoi(dim, NULL, 10);
+    height = strtoi(w, NULL, 10);
+    if (errno) {
+        E("%s: Invalid 'dim' parameter in '%s'", __FUNCTION__, param);
+        _qemu_client_reply_ko(qc, "Invalid 'dim' parameter");
+        return;
+    }
+
+    /* After collecting capture parameters lets see if camera has already
+     * started, and if so, lets see if parameters match. */
+    if (cc->video_frame != NULL) {
+        /* Already started. Match capture parameters.
+         * BUGFIX: the condition used to test for a MISMATCH while the branches
+         * assumed a match, so identical restarts were rejected and mismatched
+         * ones were accepted. It now tests for equality, as the branches
+         * expect. */
+        if (cc->pixel_format == pix_format && cc->width == width &&
+            cc->height == height) {
+            /* Parameters match. Succeed the query. */
+            W("%s: Camera '%s' is already started", __FUNCTION__, cc->device_name);
+            _qemu_client_reply_ok(qc, "Camera is already started");
+        } else {
+            /* Parameters don't match. Fail the query. */
+            E("%s: Camera '%s' is already started, and parameters don't match:\n"
+              "Current %.4s[%dx%d] != requested %.4s[%dx%d]",
+              __FUNCTION__, cc->device_name, (const char*)&cc->pixel_format,
+              cc->width, cc->height, (const char*)&pix_format, width, height);
+            _qemu_client_reply_ko(qc,
+                "Camera is already started with different capturing parameters");
+        }
+        return;
+    }
+
+    /*
+     * Start the camera.
+     */
+
+    /* Save capturing parameters. */
+    cc->pixel_format = pix_format;
+    cc->width = width;
+    cc->height = height;
+    cc->pixel_num = cc->width * cc->height;
+    cc->frames_cached = 0;
+
+    /* Make sure that pixel format is known, and calculate video framebuffer size
+     * along the lines. All supported formats carry 12 bits per pixel. */
+    switch (cc->pixel_format) {
+        case V4L2_PIX_FMT_YUV420:
+        case V4L2_PIX_FMT_YVU420:
+        case V4L2_PIX_FMT_NV12:
+        case V4L2_PIX_FMT_NV21:
+            cc->video_frame_size = (cc->pixel_num * 12) / 8;
+            break;
+
+        default:
+            E("%s: Unknown pixel format %.4s",
+              __FUNCTION__, (char*)&cc->pixel_format);
+            _qemu_client_reply_ko(qc, "Pixel format is unknown");
+            return;
+    }
+
+    /* Make sure that we have a converters between the original camera pixel
+     * format and the one that the client expects. Also a converter must exist
+     * for the preview window pixel format (RGB32) */
+    if (!has_converter(cc->camera_info->pixel_format, cc->pixel_format) ||
+        !has_converter(cc->camera_info->pixel_format, V4L2_PIX_FMT_RGB32)) {
+        E("%s: No conversion exist between %.4s and %.4s (or RGB32) pixel formats",
+          __FUNCTION__, (char*)&cc->camera_info->pixel_format, (char*)&cc->pixel_format);
+        _qemu_client_reply_ko(qc, "No conversion exist for the requested pixel format");
+        return;
+    }
+
+    /* TODO: At the moment camera framework in the emulator requires RGB32 pixel
+     * format for preview window. So, we need to keep two framebuffers here: one
+     * for the video, and another for the preview window. Watch out when this
+     * changes (if changes). */
+    cc->preview_frame_size = cc->pixel_num * 4;
+
+    /* Allocate buffer large enough to contain both, video and preview
+     * framebuffers. */
+    cc->video_frame =
+        (uint8_t*)malloc(cc->video_frame_size + cc->preview_frame_size);
+    if (cc->video_frame == NULL) {
+        /* BUGFIX: size_t values must not be passed for '%d'; cast them so the
+         * varargs match the format on 64-bit builds. */
+        E("%s: Not enough memory for framebuffers %d + %d",
+          __FUNCTION__, (int)cc->video_frame_size, (int)cc->preview_frame_size);
+        _qemu_client_reply_ko(qc, "Out of memory");
+        return;
+    }
+
+    /* Set framebuffer pointers: the preview frame follows the video frame in
+     * the single allocation made above. */
+    cc->preview_frame = (uint16_t*)(cc->video_frame + cc->video_frame_size);
+
+    /* Start the camera. */
+    if (camera_device_start_capturing(cc->camera, cc->camera_info->pixel_format,
+                                      cc->width, cc->height)) {
+        E("%s: Cannot start camera '%s' for %.4s[%dx%d]: %s",
+          __FUNCTION__, cc->device_name, (const char*)&cc->pixel_format,
+          cc->width, cc->height, strerror(errno));
+        free(cc->video_frame);
+        cc->video_frame = NULL;
+        _qemu_client_reply_ko(qc, "Cannot start the camera");
+        return;
+    }
+
+    D("%s: Camera '%s' is now started for %.4s[%dx%d]",
+      __FUNCTION__, cc->device_name, (char*)&cc->pixel_format, cc->width,
+      cc->height);
+
+    _qemu_client_reply_ok(qc, NULL);
+}
+
+/* Client has queried the service to stop capturing video.
+ * Param:
+ *  cc - Queried camera client descriptor.
+ *  qc - Qemu client for the emulated camera.
+ *  param - Query parameters. There are no parameters expected for this query.
+ */
+static void
+_camera_client_query_stop(CameraClient* cc, QemudClient* qc, const char* param)
+{
+    /* Stopping a camera that has not been started is not an error. */
+    if (cc->video_frame == NULL) {
+        W("%s: Camera '%s' is not started", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ok(qc, "Camera is not started");
+        return;
+    }
+
+    /* Stop the camera. */
+    if (camera_device_stop_capturing(cc->camera)) {
+        E("%s: Cannot stop camera device '%s': %s",
+          __FUNCTION__, cc->device_name, strerror(errno));
+        _qemu_client_reply_ko(qc, "Cannot stop camera device");
+        return;
+    }
+
+    /* Release the frame buffers: a NULL video frame is what marks the camera
+     * as stopped. */
+    free(cc->video_frame);
+    cc->video_frame = NULL;
+
+    D("%s: Camera device '%s' is now stopped.", __FUNCTION__, cc->device_name);
+    _qemu_client_reply_ok(qc, NULL);
+}
+
+/* Client has queried next frame.
+ * Param:
+ *  cc - Queried camera client descriptor.
+ *  qc - Qemu client for the emulated camera.
+ *  param - Query parameters. Parameters for this query must contain a 'video',
+ *      and a 'preview' parameters, both must be decimal values, defining size of
+ *      requested video, and preview frames respectively. Zero value for any of
+ *      the parameters means that this particular frame is not requested.
+ */
+static void
+_camera_client_query_frame(CameraClient* cc, QemudClient* qc, const char* param)
+{
+    int video_size = 0;
+    int preview_size = 0;
+    int repeat;
+    ClientFrameBuffer fbs[2];
+    int fbs_num = 0;
+    size_t payload_size;
+    uint64_t tick;
+
+    /* Sanity check: frames can only be queried from a started camera. */
+    if (cc->video_frame == NULL) {
+        E("%s: Camera '%s' is not started", __FUNCTION__, cc->device_name);
+        _qemu_client_reply_ko(qc, "Camera is not started");
+        return;
+    }
+
+    /* Pull required parameters. */
+    if (_get_param_value_int(param, "video", &video_size) ||
+        _get_param_value_int(param, "preview", &preview_size)) {
+        E("%s: Invalid or missing 'video', or 'preview' parameter in '%s'",
+          __FUNCTION__, param);
+        _qemu_client_reply_ko(qc,
+            "Invalid or missing 'video', or 'preview' parameter");
+        return;
+    }
+
+    /* Verify that framebuffer sizes match the ones that the started camera
+     * operates with. */
+    if ((video_size != 0 && cc->video_frame_size != video_size) ||
+        (preview_size != 0 && cc->preview_frame_size != preview_size)) {
+        /* BUGFIX: the frame sizes are size_t, and passing them for '%d' is
+         * undefined behavior (it garbles the following varargs on LP64
+         * builds). Cast them to int for the report. */
+        E("%s: Frame sizes don't match for camera '%s':\n"
+          "Expected %d for video, and %d for preview. Requested %d, and %d",
+          __FUNCTION__, cc->device_name, (int)cc->video_frame_size,
+          (int)cc->preview_frame_size, video_size, preview_size);
+        _qemu_client_reply_ko(qc, "Frame size mismatch");
+        return;
+    }
+
+    /*
+     * Initialize framebuffer array for frame read.
+     */
+
+    if (video_size) {
+        fbs[fbs_num].pixel_format = cc->pixel_format;
+        fbs[fbs_num].framebuffer = cc->video_frame;
+        fbs_num++;
+    }
+    if (preview_size) {
+        /* TODO: Watch out for preview format changes! */
+        fbs[fbs_num].pixel_format = V4L2_PIX_FMT_RGB32;
+        fbs[fbs_num].framebuffer = cc->preview_frame;
+        fbs_num++;
+    }
+
+    /* Capture new frame. */
+    tick = _get_timestamp();
+    repeat = camera_device_read_frame(cc->camera, fbs, fbs_num);
+
+    /* Note that there is no (known) way how to wait on next frame being
+     * available, so we could dequeue frame buffer from the device only when we
+     * know it's available. Instead we're shooting in the dark, and quite often
+     * device will response with EAGAIN, indicating that it doesn't have frame
+     * ready. In turn, it means that the last frame we have obtained from the
+     * device is still good, and we can reply with the cached frames. The only
+     * case when we need to keep trying to obtain a new frame is when frame cache
+     * is empty. To prevent ourselves from an indefinite loop in case device got
+     * stuck on something (observed with some Microsoft devices) we will limit
+     * the loop by 2 second time period (which is more than enough to obtain
+     * something from the device) */
+    while (repeat == 1 && !cc->frames_cached &&
+           (_get_timestamp() - tick) < 2000000LL) {
+        /* Sleep for 10 millisec before repeating the attempt. */
+        _camera_sleep(10);
+        repeat = camera_device_read_frame(cc->camera, fbs, fbs_num);
+    }
+    if (repeat == 1 && !cc->frames_cached) {
+        /* Waited too long for the first frame. */
+        E("%s: Unable to obtain first video frame from the camera '%s' in %d milliseconds: %s.",
+          __FUNCTION__, cc->device_name,
+          (uint32_t)(_get_timestamp() - tick) / 1000, strerror(errno));
+        _qemu_client_reply_ko(qc, "Unable to obtain video frame from the camera");
+        return;
+    } else if (repeat < 0) {
+        /* An I/O error. */
+        E("%s: Unable to obtain video frame from the camera '%s': %s.",
+          __FUNCTION__, cc->device_name, strerror(errno));
+        _qemu_client_reply_ko(qc, strerror(errno));
+        return;
+    }
+
+    /* We have cached something... */
+    cc->frames_cached = 1;
+
+    /*
+     * Build the reply.
+     */
+
+    /* Payload includes "ok:" + requested video and preview frames. */
+    payload_size = 3 + video_size + preview_size;
+
+    /* Send payload size first. */
+    _qemu_client_reply_payload(qc, payload_size);
+
+    /* After that send the 'ok:'. Note that if there is no frames sent, we should
+     * use prefix "ok" instead of "ok:" */
+    if (video_size || preview_size) {
+        qemud_client_send(qc, (const uint8_t*)"ok:", 3);
+    } else {
+        /* Still 3 bytes: zero terminator is required in this case. */
+        qemud_client_send(qc, (const uint8_t*)"ok", 3);
+    }
+
+    /* After that send video frame (if requested). */
+    if (video_size) {
+        qemud_client_send(qc, cc->video_frame, video_size);
+    }
+
+    /* After that send preview frame (if requested). */
+    if (preview_size) {
+        qemud_client_send(qc, (const uint8_t*)cc->preview_frame, preview_size);
+    }
+}
+
+/* Handles a message received from the emulated camera client.
+ * Queries received here are represented as strings:
+ * - 'connect' - Connects to the camera device (opens it).
+ * - 'disconnect' - Disconnexts from the camera device (closes it).
+ * - 'start' - Starts capturing video from the connected camera device.
+ * - 'stop' - Stop capturing video from the connected camera device.
+ * - 'frame' - Queries video and preview frames captured from the camera.
+ * Param:
+ * opaque - Camera service descriptor.
+ * msg, msglen - Message received from the camera factory client.
+ * client - Camera factory client pipe.
+ */
+static void
+_camera_client_recv(void* opaque,
+ uint8_t* msg,
+ int msglen,
+ QemudClient* client)
+{
+ /*
+ * Emulated camera client queries.
+ */
+
+ /* Connect to the camera. */
+ static const char _query_connect[] = "connect";
+ /* Disconnect from the camera. */
+ static const char _query_disconnect[] = "disconnect";
+ /* Start video capturing. */
+ static const char _query_start[] = "start";
+ /* Stop video capturing. */
+ static const char _query_stop[] = "stop";
+ /* Query frame(s). */
+ static const char _query_frame[] = "frame";
+
+ char query_name[64];
+ const char* query_param;
+ CameraClient* cc = (CameraClient*)opaque;
+
+ /*
+ * Emulated camera queries are formatted as such:
+ * "<query name> [<parameters>]"
+ */
+
+ T("%s: Camera client query: '%s'", __FUNCTION__, (char*)msg);
+ if (_parse_query((const char*)msg, query_name, sizeof(query_name),
+ &query_param)) {
+ E("%s: Invalid query '%s'", __FUNCTION__, (char*)msg);
+ _qemu_client_reply_ko(client, "Invalid query");
+ return;
+ }
+
+ /* Dispatch the query to an appropriate handler. */
+ if (!strcmp(query_name, _query_frame)) {
+ /* A frame is queried. */
+ _camera_client_query_frame(cc, client, query_param);
+ } else if (!strcmp(query_name, _query_connect)) {
+ /* Camera connection is queried. */
+ _camera_client_query_connect(cc, client, query_param);
+ } else if (!strcmp(query_name, _query_disconnect)) {
+ /* Camera disconnection is queried. */
+ _camera_client_query_disconnect(cc, client, query_param);
+ } else if (!strcmp(query_name, _query_start)) {
+ /* Start capturing is queried. */
+ _camera_client_query_start(cc, client, query_param);
+ } else if (!strcmp(query_name, _query_stop)) {
+ /* Stop capturing is queried. */
+ _camera_client_query_stop(cc, client, query_param);
+ } else {
+ E("%s: Unknown query '%s'", __FUNCTION__, (char*)msg);
+ _qemu_client_reply_ko(client, "Unknown query");
+ }
+}
+
+/* Emulated camera client has been disconnected from the service. */
+static void
+_camera_client_close(void* opaque)
+{
+ CameraClient* cc = (CameraClient*)opaque;
+
+ D("%s: Camera client for device '%s' on input channel %d is now closed",
+ __FUNCTION__, cc->device_name, cc->inp_channel);
+
+ _camera_client_free(cc);
+}
+
+/********************************************************************************
+ * Camera service API
+ *******************************************************************************/
+
+/* Connects a client to the camera service.
+ * There are two classes of the client that can connect to the service:
+ * - Camera factory that is interested only in listing camera devices attached
+ * to the host.
+ * - Camera device emulators that attach to the actual camera devices.
+ * The distinction between these two classes is made by looking at extra
+ * parameters passed in client_param variable. If it's NULL, or empty, the client
+ * connects to a camera factory. Otherwise, parameters describe the camera device
+ * the client wants to connect to.
+ */
+static QemudClient*
+_camera_service_connect(void* opaque,
+ QemudService* serv,
+ int channel,
+ const char* client_param)
+{
+ QemudClient* client = NULL;
+ CameraServiceDesc* csd = (CameraServiceDesc*)opaque;
+
+ D("%s: Connecting camera client '%s'",
+ __FUNCTION__, client_param ? client_param : "Factory");
+ if (client_param == NULL || *client_param == '\0') {
+ /* This is an emulated camera factory client. */
+ client = qemud_client_new(serv, channel, client_param, csd,
+ _factory_client_recv, _factory_client_close,
+ NULL, NULL);
+ } else {
+ /* This is an emulated camera client. */
+ CameraClient* cc = _camera_client_create(csd, client_param);
+ if (cc != NULL) {
+ client = qemud_client_new(serv, channel, client_param, cc,
+ _camera_client_recv, _camera_client_close,
+ NULL, NULL);
+ }
+ }
+
+ return client;
+}
+
+void
+android_camera_service_init(void)
+{
+ static int _inited = 0;
+
+ if (!_inited) {
+ _camera_service_init(&_camera_service_desc);
+ QemudService* serv = qemud_service_register( SERVICE_NAME, 0,
+ &_camera_service_desc,
+ _camera_service_connect,
+ NULL, NULL);
+ if (serv == NULL) {
+ derror("%s: Could not register '%s' service",
+ __FUNCTION__, SERVICE_NAME);
+ return;
+ }
+ D("%s: Registered '%s' qemud service", __FUNCTION__, SERVICE_NAME);
+ }
+}
+
+void
+android_list_web_cameras(void)
+{
+ CameraInfo ci[MAX_CAMERA];
+ int connected_cnt;
+ int i;
+
+ /* Enumerate camera devices connected to the host. */
+ connected_cnt = enumerate_camera_devices(ci, MAX_CAMERA);
+ if (connected_cnt <= 0) {
+ return;
+ }
+
+ printf("List of web cameras connected to the computer:\n");
+ for (i = 0; i < connected_cnt; i++) {
+ printf(" Camera '%s' is connected to device '%s' on channel %d using pixel format '%.4s'\n",
+ ci[i].display_name, ci[i].device_name, ci[i].inp_channel,
+ (const char*)&ci[i].pixel_format);
+ }
+ printf("\n");
+}
diff --git a/android/camera/camera-service.h b/android/camera/camera-service.h
new file mode 100644
index 0000000..e8df288
--- /dev/null
+++ b/android/camera/camera-service.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_CAMERA_SERVICE_H_
+#define ANDROID_CAMERA_CAMERA_SERVICE_H_
+
+/*
+ * Contains public camera service API.
+ */
+
+/* Initializes camera emulation service over qemu pipe. */
+extern void android_camera_service_init(void);
+
+/* Lists available web cameras. */
+extern void android_list_web_cameras(void);
+
+#endif /* ANDROID_CAMERA_CAMERA_SERVICE_H_ */
diff --git a/android/camera/camera-win.h b/android/camera/camera-win.h
new file mode 100644
index 0000000..664adcc
--- /dev/null
+++ b/android/camera/camera-win.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CAMERA_CAMERA_WIN_H_
+#define ANDROID_CAMERA_CAMERA_WIN_H_
+
+/*
+ * Contains declarations that are missing in non-Linux headers.
+ */
+
+/* Four-character-code (FOURCC) */
+#define v4l2_fourcc(a,b,c,d)\
+ (((uint32_t)(a)<<0)|((uint32_t)(b)<<8)|((uint32_t)(c)<<16)|((uint32_t)(d)<<24))
+
+/* Pixel format FOURCC depth Description */
+#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R','G','B','1') /* 8 RGB-3-3-2 */
+#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R','4','4','4') /* 16 xxxxrrrr ggggbbbb */
+#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R','G','B','O') /* 16 RGB-5-5-5 */
+#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R','G','B','P') /* 16 RGB-5-6-5 */
+#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R','G','B','Q') /* 16 RGB-5-5-5 BE */
+#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R','G','B','R') /* 16 RGB-5-6-5 BE */
+#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B','G','R','3') /* 24 BGR-8-8-8 */
+#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R','G','B','3') /* 24 RGB-8-8-8 */
+#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B','G','R','4') /* 32 BGR-8-8-8-8 */
+#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R','G','B','4') /* 32 RGB-8-8-8-8 */
+#define V4L2_PIX_FMT_GREY v4l2_fourcc('G','R','E','Y') /* 8 Greyscale */
+#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P','A','L','8') /* 8 8-bit palette */
+#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y','V','U','9') /* 9 YVU 4:1:0 */
+#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y','V','1','2') /* 12 YVU 4:2:0 */
+#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y','U','Y','V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U','Y','V','Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4','2','2','P') /* 16 YVU422 planar */
+#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4','1','1','P') /* 16 YVU411 planar */
+#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y','4','1','P') /* 12 YUV 4:1:1 */
+#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y','4','4','4') /* 16 xxxxyyyy uuuuvvvv */
+#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y','U','V','O') /* 16 YUV-5-5-5 */
+#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y','U','V','P') /* 16 YUV-5-6-5 */
+#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y','U','V','4') /* 32 YUV-8-8-8-8 */
+
+/* two planes -- one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N','V','1','2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N','V','2','1') /* 12 Y/CrCb 4:2:0 */
+
+/* The following formats are not defined in the V4L2 specification */
+#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y','U','V','9') /* 9 YUV 4:1:0 */
+#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y','U','1','2') /* 12 YUV 4:2:0 */
+#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y','Y','U','V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H','I','2','4') /* 8 8-bit color */
+#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H','M','1','2') /* 8 YUV 4:2:0 16x16 macroblocks */
+
+/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
+#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') /* 8 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '\0') /* 10 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '\0') /* 10 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '\0') /* 10 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '\0') /* 10 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') /* 12 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
+
+#endif /* ANDROID_CAMERA_CAMERA_WIN_H_ */
diff --git a/android/cmdline-options.h b/android/cmdline-options.h
index b7d7bfa..0cf358f 100644
--- a/android/cmdline-options.h
+++ b/android/cmdline-options.h
@@ -154,6 +154,11 @@ OPT_PARAM( list_cores, "<host>", "list running core process" )
OPT_PARAM( attach_core, "<console socket>", "attach to a running core process" )
#endif // CONFIG_STANDALONE_UI
+OPT_PARAM( gpu, "<mode>", "set hardware OpenGLES emulation mode" )
+
+OPT_PARAM( fake_camera, "<mode>", "set fake camera emulation mode" )
+OPT_LIST( webcam, "name=<name>[,dir=<direction>]", "setup web camera emulation" )
+
#undef CFG_FLAG
#undef CFG_PARAM
#undef OPT_FLAG
diff --git a/android/help.c b/android/help.c
index cd5a3a6..4087911 100644
--- a/android/help.c
+++ b/android/help.c
@@ -1428,6 +1428,65 @@ help_shared_net_id(stralloc_t* out)
);
}
+static void
+help_gpu(stralloc_t* out)
+{
+ PRINTF(
+ " Use -gpu <mode> to force the mode of hardware OpenGLES emulation.\n"
+ " Valid values for <mode> are:\n\n"
+
+ " on -> enable GPU emulation\n"
+ " off -> disable GPU emulation\n"
+ " auto -> automatic detection\n"
+ " enabled -> same as 'on'\n"
+ " disabled -> same as 'off'\n\n"
+
+ " Note that enabling GPU emulation if the system image does not support it\n"
+ " will prevent the proper display of the emulated framebuffer.\n\n"
+
+ " You can always disable GPU emulation (i.e. '-gpu off'), and this will\n"
+ " force the virtual device to use the slow software renderer instead.\n"
+ " Note that OpenGLES 2.0 is _not_ supported by it.\n\n"
+
+ " The 'auto' mode is the default. It will only enable GPU emulation if the\n"
+ " virtual device supports it, and the host-side OpenGLES emulation library\n"
+ " could be properly initialized (this can fail when you run the emulator\n"
+ " under certain restricted environments where the program can't access the\n"
+ " graphics sub-system (e.g. head-less servers).\n"
+ );
+}
+
+static void
+help_fake_camera(stralloc_t* out)
+{
+ PRINTF(
+ " Use -fake-camera <mode> to control fake camera emulation.\n"
+ " Valid values for <mode> are:\n\n"
+
+ " off -> disable fake camera emulation\n"
+ " back -> fake camera is facing back\n"
+ " front -> fake camera is facing front\n\n"
+ );
+}
+
+static void
+help_webcam(stralloc_t* out)
+{
+ PRINTF(
+ " Use -webcam off to disable web camera emulation.\n"
+ " Use -webcam list to list web cameras available for emulation.\n"
+ " Use -webcam name=<name>[,dir=<direction>] to setup parameters for web camera emulation.\n"
+
+ " <name> platform-independent name identifying emulated camera device.\n"
+ " use '-webcam list' to obtain the list of emulated camera devices.\n"
+ " <direction> defines direction the camera is facing. Valid values are:\n\n"
+
+ " front -> emulate camera as facing front\n"
+ " back -> emulate camera as facing back\n\n"
+
+ " Default direction value for emulated web camera is 'front'\n\n"
+ );
+}
#define help_no_skin NULL
#define help_netspeed help_shaper
diff --git a/android/hw-control.c b/android/hw-control.c
index 8164bc0..e010c6f 100644
--- a/android/hw-control.c
+++ b/android/hw-control.c
@@ -62,11 +62,14 @@ _hw_control_qemud_client_recv( void* opaque,
/* called when a qemud client connects to the service */
static QemudClient*
-_hw_control_qemud_connect( void* opaque, QemudService* service, int channel )
+_hw_control_qemud_connect( void* opaque,
+ QemudService* service,
+ int channel,
+ const char* client_param )
{
QemudClient* client;
- client = qemud_client_new( service, channel,
+ client = qemud_client_new( service, channel, client_param,
opaque,
_hw_control_qemud_client_recv,
NULL, NULL, NULL );
diff --git a/android/hw-pipe-net.c b/android/hw-pipe-net.c
index dade446..193d60b 100644
--- a/android/hw-pipe-net.c
+++ b/android/hw-pipe-net.c
@@ -23,6 +23,7 @@
#include "android/utils/panic.h"
#include "android/utils/system.h"
#include "android/async-utils.h"
+#include "android/opengles.h"
#include "android/looper.h"
#include "hw/goldfish_pipe.h"
@@ -68,7 +69,6 @@ typedef struct {
int wakeWanted;
LoopIo io[1];
AsyncConnector connector[1];
- int shouldSetSocketOpt;
} NetPipe;
static void
@@ -192,7 +192,6 @@ netPipe_initFromAddress( void* hwpipe, const SockAddress* address, Looper* loop
pipe->hwpipe = hwpipe;
pipe->state = STATE_INIT;
- pipe->shouldSetSocketOpt = 0;
{
AsyncStatus status;
@@ -246,19 +245,6 @@ netPipe_sendBuffers( void* opaque, const GoldfishPipeBuffer* buffers, int numBuf
const GoldfishPipeBuffer* buff = buffers;
const GoldfishPipeBuffer* buffEnd = buff + numBuffers;
-#ifdef _WIN32
- if (pipe->shouldSetSocketOpt == 1) {
- int sndbuf = 128 * 1024;
- int len = sizeof(sndbuf);
- if (setsockopt(pipe->io->fd, SOL_SOCKET, SO_SNDBUF,
- (char*)&sndbuf, len) == SOCKET_ERROR) {
- D("Failed to set SO_SNDBUF to %d error=0x%x\n",
- sndbuf, WSAGetLastError());
- }
- pipe->shouldSetSocketOpt = 0;
- }
-#endif
-
for (; buff < buffEnd; buff++)
count += buff->size;
@@ -364,9 +350,9 @@ netPipe_poll( void* opaque )
unsigned ret = 0;
if (mask & LOOP_IO_READ)
- ret |= PIPE_WAKE_READ;
+ ret |= PIPE_POLL_IN;
if (mask & LOOP_IO_WRITE)
- ret |= PIPE_WAKE_WRITE;
+ ret |= PIPE_POLL_OUT;
return ret;
}
@@ -469,7 +455,11 @@ static const GoldfishPipeFuncs netPipeUnix_funcs = {
};
#endif
-#define DEFAULT_OPENGLES_PORT 22468
+/* This is set to 1 in android_init_opengles() below, and tested
+ * by openglesPipe_init() to refuse a pipe connection if the function
+ * was never called.
+ */
+static int _opengles_init;
static void*
openglesPipe_init( void* hwpipe, void* _looper, const char* args )
@@ -477,10 +467,45 @@ openglesPipe_init( void* hwpipe, void* _looper, const char* args )
char temp[32];
NetPipe *pipe;
- /* For now, simply connect through tcp */
- snprintf(temp, sizeof temp, "%d", DEFAULT_OPENGLES_PORT);
- pipe = (NetPipe *)netPipe_initTcp(hwpipe, _looper, temp);
- pipe->shouldSetSocketOpt = 1;
+ if (!_opengles_init) {
+ /* This should never happen, unless there is a bug in the
+ * emulator's initialization, or the system image. */
+ D("Trying to open the OpenGLES pipe without GPU emulation!");
+ return NULL;
+ }
+
+#ifndef _WIN32
+ if (android_gles_fast_pipes) {
+ char unix_path[PATH_MAX];
+ android_gles_unix_path(unix_path, sizeof(unix_path), ANDROID_OPENGLES_BASE_PORT);
+ pipe = (NetPipe *)netPipe_initUnix(hwpipe, _looper, unix_path);
+ D("Creating Unix OpenGLES pipe for GPU emulation: %s", unix_path);
+ } else {
+#else /* _WIN32 */
+ {
+#endif
+ /* Connect through TCP as a fallback */
+ snprintf(temp, sizeof temp, "%d", ANDROID_OPENGLES_BASE_PORT);
+ pipe = (NetPipe *)netPipe_initTcp(hwpipe, _looper, temp);
+ D("Creating TCP OpenGLES pipe for GPU emulation!");
+ }
+ if (pipe != NULL) {
+ // Disable TCP nagle algorithm to improve throughput of small packets
+ socket_set_nodelay(pipe->io->fd);
+
+ // On Win32, adjust buffer sizes
+#ifdef _WIN32
+ {
+ int sndbuf = 128 * 1024;
+ int len = sizeof(sndbuf);
+ if (setsockopt(pipe->io->fd, SOL_SOCKET, SO_SNDBUF,
+ (char*)&sndbuf, len) == SOCKET_ERROR) {
+ D("Failed to set SO_SNDBUF to %d error=0x%x\n",
+ sndbuf, WSAGetLastError());
+ }
+ }
+#endif /* _WIN32 */
+ }
return pipe;
}
@@ -494,7 +519,6 @@ static const GoldfishPipeFuncs openglesPipe_funcs = {
netPipe_wakeOn,
};
-
void
android_net_pipes_init(void)
{
@@ -506,3 +530,13 @@ android_net_pipes_init(void)
#endif
goldfish_pipe_add_type( "opengles", looper, &openglesPipe_funcs );
}
+
+int
+android_init_opengles_pipes(void)
+{
+ /* TODO: Check that we can load and initialize the host emulation
+ * libraries, and return -1 in case of error.
+ */
+ _opengles_init = 1;
+ return 0;
+}
diff --git a/android/hw-qemud.c b/android/hw-qemud.c
index e91ec78..0820e4c 100644
--- a/android/hw-qemud.c
+++ b/android/hw-qemud.c
@@ -14,7 +14,9 @@
#include "android/utils/misc.h"
#include "android/utils/system.h"
#include "android/utils/bufprint.h"
+#include "android/looper.h"
#include "hw/hw.h"
+#include "hw/goldfish_pipe.h"
#include "qemu-char.h"
#include "charpipe.h"
#include "cbuffer.h"
@@ -43,7 +45,9 @@
/* Version number of snapshots code. Increment whenever the data saved
* or the layout in which it is saved is changed.
*/
-#define QEMUD_SAVE_VERSION 1
+#define QEMUD_SAVE_VERSION 2
+
+#define min(a, b) (((a) < (b)) ? (a) : (b))
/* define SUPPORT_LEGACY_QEMUD to 1 if you want to support
@@ -575,18 +579,71 @@ qemud_serial_send( QemudSerial* s,
/** CLIENTS
**/
+/* Descriptor for a data buffer pending to be sent to a qemud pipe client.
+ *
+ * When a service decides to send data to the client, there could be cases when
+ * client is not ready to read them. In this case there is no GoldfishPipeBuffer
+ * available to write service's data to, So, we need to cache that data into the
+ * client descriptor, and "send" them over to the client in _qemudPipe_recvBuffers
+ * callback. Pending service data is stored in the client descriptor as a list
+ * of QemudPipeMessage instances.
+ */
+typedef struct QemudPipeMessage QemudPipeMessage;
+struct QemudPipeMessage {
+ /* Message to send. */
+ uint8_t* message;
+ /* Message size. */
+ size_t size;
+ /* Offset in the message buffer of the chunk, that has not been sent
+ * to the pipe yet. */
+ size_t offset;
+ /* Links next message in the client. */
+ QemudPipeMessage* next;
+};
+
+
/* A QemudClient models a single client as seen by the emulator.
- * Each client has its own channel id, and belongs to a given
- * QemudService (see below).
+ * Each client has its own channel id (for the serial qemud), or pipe descriptor
+ * (for the pipe based qemud), and belongs to a given QemudService (see below).
*
- * There is a global list of clients used to multiplex incoming
- * messages from the channel id (see qemud_multiplexer_serial_recv()).
+ * There is a global list of serial clients used to multiplex incoming
+ * messages from the channel id (see qemud_multiplexer_serial_recv()). Pipe
+ * clients don't need multiplexing, because they are communicated via qemud pipes
+ * that are unique for each client.
*
*/
+/* Defines type of the client: pipe, or serial.
+ */
+typedef enum QemudProtocol {
+ /* Client is communicating via pipe. */
+ QEMUD_PROTOCOL_PIPE,
+ /* Client is communicating via serial port. */
+ QEMUD_PROTOCOL_SERIAL
+} QemudProtocol;
+
+/* Descriptor for a QEMUD pipe connection.
+ *
+ * Every time a client connects to the QEMUD via pipe, an instance of this
+ * structure is created to represent a connection used by new pipe client.
+ */
+typedef struct QemudPipe {
+ /* Pipe descriptor. */
+ void* hwpipe;
+ /* Looper used for I/O */
+ void* looper;
+ /* Service for this pipe. */
+ QemudService* service;
+ /* Client for this pipe. */
+ QemudClient* client;
+} QemudPipe;
+
struct QemudClient {
- int channel;
- QemudSerial* serial;
+ /* Defines protocol, used by the client. */
+ QemudProtocol protocol;
+
+ /* Fields that are common for all protocols. */
+ char* param;
void* clie_opaque;
QemudClientRecv clie_recv;
QemudClientClose clie_close;
@@ -604,8 +661,28 @@ struct QemudClient {
QemudSink header[1];
uint8_t header0[FRAME_HEADER_SIZE];
QemudSink payload[1];
+
+ /* Fields that are protocol-specific. */
+ union {
+ /* Serial-specific fields. */
+ struct {
+ int channel;
+ QemudSerial* serial;
+ } Serial;
+ /* Pipe-specific fields. */
+ struct {
+ QemudPipe* qemud_pipe;
+ QemudPipeMessage* messages;
+ } Pipe;
+ } ProtocolSelector;
};
+static ABool
+_is_pipe_client(QemudClient* client)
+{
+ return (client-> protocol == QEMUD_PROTOCOL_PIPE) ? true : false;
+}
+
static void qemud_service_remove_client( QemudService* service,
QemudClient* client );
@@ -716,6 +793,33 @@ qemud_client_recv( void* opaque, uint8_t* msg, int msglen )
}
}
+/* Sends data to a pipe-based client.
+ */
+static void
+_qemud_pipe_send(QemudClient* client, const uint8_t* msg, int msglen);
+
+/* Frees memory allocated for the qemud client.
+ */
+static void
+_qemud_client_free(QemudClient* c)
+{
+ if ( c != NULL) {
+ if (_is_pipe_client(c)) {
+ /* Free outstanding messages. */
+ QemudPipeMessage** msg_list = &c->ProtocolSelector.Pipe.messages;
+ while (*msg_list != NULL) {
+ QemudPipeMessage* to_free = *msg_list;
+ *msg_list = to_free->next;
+ free(to_free);
+ }
+ }
+ if (c->param != NULL) {
+ free(c->param);
+ }
+ AFREE(c);
+ }
+}
+
/* disconnect a client. this automatically frees the QemudClient.
* note that this also removes the client from the global list
* and from its service's list, if any.
@@ -734,10 +838,19 @@ qemud_client_disconnect( void* opaque )
qemud_client_remove(c);
/* send a disconnect command to the daemon */
- if (c->channel > 0) {
+ if (_is_pipe_client(c)) {
+ char tmp[128], *p=tmp, *end=p+sizeof(tmp);
+ p = bufprint(tmp, end, "disconnect:00");
+ _qemud_pipe_send(c, (uint8_t*)tmp, p-tmp);
+ /* We must NULL the client reference in the QemuPipe for this connection,
+ * so if a sudden receive request comes after client has been closed, we
+ * don't blow up. */
+ c->ProtocolSelector.Pipe.qemud_pipe->client = NULL;
+ } else if (c->ProtocolSelector.Serial.channel > 0) {
char tmp[128], *p=tmp, *end=p+sizeof(tmp);
- p = bufprint(tmp, end, "disconnect:%02x", c->channel);
- qemud_serial_send(c->serial, 0, 0, (uint8_t*)tmp, p-tmp);
+ p = bufprint(tmp, end, "disconnect:%02x",
+ c->ProtocolSelector.Serial.channel);
+ qemud_serial_send(c->ProtocolSelector.Serial.serial, 0, 0, (uint8_t*)tmp, p-tmp);
}
/* call the client close callback */
@@ -753,12 +866,16 @@ qemud_client_disconnect( void* opaque )
c->service = NULL;
}
- AFREE(c);
+ _qemud_client_free(c);
}
-/* allocate a new QemudClient object */
+/* allocate a new QemudClient object
+ * NOTE: channel_id value is used as a selector between serial and pipe clients.
+ * Since channel_id < 0 is an invalid value for a serial client, it would
+ * indicate that creating client is a pipe client. */
static QemudClient*
qemud_client_alloc( int channel_id,
+ const char* client_param,
void* clie_opaque,
QemudClientRecv clie_recv,
QemudClientClose clie_close,
@@ -771,14 +888,26 @@ qemud_client_alloc( int channel_id,
ANEW0(c);
- c->serial = serial;
- c->channel = channel_id;
+ if (channel_id < 0) {
+ /* Allocating a pipe client. */
+ c->protocol = QEMUD_PROTOCOL_PIPE;
+ c->ProtocolSelector.Pipe.messages = NULL;
+ c->ProtocolSelector.Pipe.qemud_pipe = NULL;
+ } else {
+ /* Allocating a serial client. */
+ c->protocol = QEMUD_PROTOCOL_SERIAL;
+ c->ProtocolSelector.Serial.serial = serial;
+ c->ProtocolSelector.Serial.channel = channel_id;
+ }
+ c->param = client_param ? ASTRDUP(client_param) : NULL;
c->clie_opaque = clie_opaque;
c->clie_recv = clie_recv;
c->clie_close = clie_close;
c->clie_save = clie_save;
c->clie_load = clie_load;
-
+ c->service = NULL;
+ c->next_serv = NULL;
+ c->next = NULL;
c->framing = 0;
c->need_header = 1;
qemud_sink_reset(c->header, FRAME_HEADER_SIZE, c->header0);
@@ -794,7 +923,8 @@ static char* qemud_service_load_name( QEMUFile* f );
static QemudService* qemud_service_find( QemudService* service_list,
const char* service_name );
static QemudClient* qemud_service_connect_client( QemudService *sv,
- int channel_id );
+ int channel_id,
+ const char* client_param);
/* Saves the client state needed to re-establish connections on load.
*/
@@ -803,7 +933,10 @@ qemud_client_save(QEMUFile* f, QemudClient* c)
{
/* save generic information */
qemud_service_save_name(f, c->service);
- qemu_put_be32(f, c->channel);
+ qemu_put_be32(f, c->protocol);
+ if (!_is_pipe_client(c)) {
+ qemu_put_be32(f, c->ProtocolSelector.Serial.channel);
+ }
/* save client-specific state */
if (c->clie_save)
@@ -826,7 +959,7 @@ qemud_client_save(QEMUFile* f, QemudClient* c)
* corresponding service.
*/
static int
-qemud_client_load(QEMUFile* f, QemudService* current_services )
+qemud_client_load(QEMUFile* f, QemudService* current_services, int version )
{
char *service_name = qemud_service_load_name(f);
if (service_name == NULL)
@@ -840,8 +973,19 @@ qemud_client_load(QEMUFile* f, QemudService* current_services )
return -EIO;
}
- /* get channel id */
- int channel = qemu_get_be32(f);
+ int channel = -1;
+
+ if (version >= 2) {
+ /* get protocol. */
+ QemudProtocol protocol = qemu_get_be32(f);
+ /* get channel id */
+ if (protocol == QEMUD_PROTOCOL_SERIAL) {
+ channel = qemu_get_be32(f);
+ }
+ } else {
+ channel = qemu_get_be32(f);
+ }
+
if (channel == 0) {
D("%s: illegal snapshot: client for control channel must no be saved\n",
__FUNCTION__);
@@ -849,7 +993,7 @@ qemud_client_load(QEMUFile* f, QemudService* current_services )
}
/* re-connect client */
- QemudClient* c = qemud_service_connect_client(sv, channel);
+ QemudClient* c = qemud_service_connect_client(sv, channel, NULL);
if(c == NULL)
return -EIO;
@@ -976,8 +1120,8 @@ qemud_service_remove_client( QemudService* s, QemudClient* c )
for (;;) {
node = *pnode;
if (node == NULL) {
- D("%s: could not find client %d for service '%s'",
- __FUNCTION__, c->channel, s->name);
+ D("%s: could not find client for service '%s'",
+ __FUNCTION__, s->name);
return;
}
if (node == c)
@@ -996,15 +1140,17 @@ qemud_service_remove_client( QemudService* s, QemudClient* c )
* returns the client or NULL if an error occurred
*/
static QemudClient*
-qemud_service_connect_client(QemudService *sv, int channel_id)
+qemud_service_connect_client(QemudService *sv,
+ int channel_id,
+ const char* client_param)
{
- QemudClient* client = sv->serv_connect( sv->serv_opaque, sv, channel_id );
+ QemudClient* client =
+ sv->serv_connect( sv->serv_opaque, sv, channel_id, client_param );
if (client == NULL) {
D("%s: registration failed for '%s' service",
__FUNCTION__, sv->name);
return NULL;
}
-
D("%s: registered client channel %d for '%s' service",
__FUNCTION__, channel_id, sv->name);
return client;
@@ -1153,7 +1299,7 @@ qemud_multiplexer_serial_recv( void* opaque,
* QemudClient that is setup in qemud_multiplexer_init()
*/
for ( ; c != NULL; c = c->next ) {
- if (c->channel == channel) {
+ if (!_is_pipe_client(c) && c->ProtocolSelector.Serial.channel == channel) {
qemud_client_recv(c, msg, msglen);
return;
}
@@ -1188,7 +1334,7 @@ qemud_multiplexer_connect( QemudMultiplexer* m,
}
/* connect a new client to the service on the given channel */
- if (qemud_service_connect_client(sv, channel_id) == NULL)
+ if (qemud_service_connect_client(sv, channel_id, NULL) == NULL)
return -1;
return 0;
@@ -1203,13 +1349,13 @@ qemud_multiplexer_disconnect( QemudMultiplexer* m,
/* find the client by its channel id, then disconnect it */
for (c = m->clients; c; c = c->next) {
- if (c->channel == channel) {
+ if (!_is_pipe_client(c) && c->ProtocolSelector.Serial.channel == channel) {
D("%s: disconnecting client %d",
__FUNCTION__, channel);
/* note thatt this removes the client from
* m->clients automatically.
*/
- c->channel = -1; /* no need to send disconnect:<id> */
+ c->ProtocolSelector.Serial.channel = -1; /* no need to send disconnect:<id> */
qemud_client_disconnect(c);
return;
}
@@ -1233,12 +1379,13 @@ qemud_multiplexer_disconnect_noncontrol( QemudMultiplexer* m )
c = next;
next = c->next; /* disconnect frees c, remember next in advance */
- if (c->channel > 0) { /* skip control channel */
+ if (!_is_pipe_client(c) && c->ProtocolSelector.Serial.channel > 0) {
+ /* skip control channel */
D("%s: disconnecting client %d",
- __FUNCTION__, c->channel);
+ __FUNCTION__, c->ProtocolSelector.Serial.channel);
D("%s: disconnecting client %d\n",
- __FUNCTION__, c->channel);
- c->channel = -1; /* do not send disconnect:<id> */
+ __FUNCTION__, c->ProtocolSelector.Serial.channel);
+ c->ProtocolSelector.Serial.channel = -1; /* do not send disconnect:<id> */
qemud_client_disconnect(c);
}
}
@@ -1396,6 +1543,7 @@ qemud_multiplexer_init( QemudMultiplexer* mult,
/* setup listener for channel 0 */
control = qemud_client_alloc( 0,
+ NULL,
mult,
qemud_multiplexer_control_recv,
NULL, NULL, NULL,
@@ -1422,6 +1570,7 @@ static QemudMultiplexer _multiplexer[1];
QemudClient*
qemud_client_new( QemudService* service,
int channelId,
+ const char* client_param,
void* clie_opaque,
QemudClientRecv clie_recv,
QemudClientClose clie_close,
@@ -1430,6 +1579,7 @@ qemud_client_new( QemudService* service,
{
QemudMultiplexer* m = _multiplexer;
QemudClient* c = qemud_client_alloc( channelId,
+ client_param,
clie_opaque,
clie_recv,
clie_close,
@@ -1442,13 +1592,92 @@ qemud_client_new( QemudService* service,
return c;
}
+/* Caches a service message into the client's descriptor.
+ *
+ * See comments on QemudPipeMessage structure for more info.
+ */
+static void
+_qemud_pipe_cache_buffer(QemudClient* client, const uint8_t* msg, int msglen)
+{
+ QemudPipeMessage* buf;
+ QemudPipeMessage** ins_at = &client->ProtocolSelector.Pipe.messages;
+
+ /* Allocate descriptor big enough to contain message as well. */
+ buf = (QemudPipeMessage*)malloc(msglen + sizeof(QemudPipeMessage));
+ if (buf != NULL) {
+ /* Message starts right after the descriptor. */
+ buf->message = (uint8_t*)buf + sizeof(QemudPipeMessage);
+ buf->size = msglen;
+ memcpy(buf->message, msg, msglen);
+ buf->offset = 0;
+ buf->next = NULL;
+ while (*ins_at != NULL) {
+ ins_at = &(*ins_at)->next;
+ }
+ *ins_at = buf;
+ /* Notify the pipe that there is data to read. */
+ goldfish_pipe_wake(client->ProtocolSelector.Pipe.qemud_pipe->hwpipe,
+ PIPE_WAKE_READ);
+ }
+}
+
+/* Sends service message to the client.
+ */
+static void
+_qemud_pipe_send(QemudClient* client, const uint8_t* msg, int msglen)
+{
+ uint8_t frame[FRAME_HEADER_SIZE];
+ int avail, len = msglen;
+ int framing = client->framing;
+
+ if (msglen <= 0)
+ return;
+
+ D("%s: len=%3d '%s'",
+ __FUNCTION__, msglen, quote_bytes((const void*)msg, msglen));
+
+ if (framing) {
+ len += FRAME_HEADER_SIZE;
+ }
+
+ /* packetize the payload for the serial MTU */
+ while (len > 0)
+ {
+ avail = len;
+ if (avail > MAX_SERIAL_PAYLOAD)
+ avail = MAX_SERIAL_PAYLOAD;
+
+ /* insert frame header when needed */
+ if (framing) {
+ int2hex(frame, FRAME_HEADER_SIZE, msglen);
+ T("%s: '%.*s'", __FUNCTION__, FRAME_HEADER_SIZE, frame);
+ _qemud_pipe_cache_buffer(client, frame, FRAME_HEADER_SIZE);
+ avail -= FRAME_HEADER_SIZE;
+ len -= FRAME_HEADER_SIZE;
+ framing = 0;
+ }
+
+ /* write message content */
+ T("%s: '%.*s'", __FUNCTION__, avail, msg);
+ _qemud_pipe_cache_buffer(client, msg, avail);
+ msg += avail;
+ len -= avail;
+ }
+}
+
/* this can be used by a service implementation to send an answer
* or message to a specific client.
*/
void
qemud_client_send ( QemudClient* client, const uint8_t* msg, int msglen )
{
- qemud_serial_send(client->serial, client->channel, client->framing != 0, msg, msglen);
+ if (_is_pipe_client(client)) {
+ _qemud_pipe_send(client, msg, msglen);
+ } else {
+ qemud_serial_send(client->ProtocolSelector.Serial.serial,
+ client->ProtocolSelector.Serial.channel,
+ client->framing != 0, msg, msglen);
+ }
}
/* enable framing for this client. When TRUE, this will
@@ -1488,7 +1717,8 @@ qemud_client_save_count(QEMUFile* f, QemudClient* c)
{
unsigned int client_count = 0;
for( ; c; c = c->next) // walk over linked list
- if (c->channel > 0) // skip control channel, which is not saved
+ // skip control channel, which is not saved
+ if (_is_pipe_client(c) || c->ProtocolSelector.Serial.channel > 0)
client_count++;
qemu_put_be32(f, client_count);
@@ -1530,7 +1760,8 @@ qemud_save(QEMUFile* f, void* opaque)
qemud_client_save_count(f, m->clients);
QemudClient *c;
for (c = m->clients; c; c = c->next) {
- if (c->channel > 0) { /* skip control channel client */
+ /* skip control channel client */
+ if (_is_pipe_client(c) || c->ProtocolSelector.Serial.channel > 0) {
qemud_client_save(f, c);
}
}
@@ -1566,7 +1797,7 @@ qemud_load_services( QEMUFile* f, QemudService* current_services )
* changes, there is no communication with the guest.
*/
static int
-qemud_load_clients(QEMUFile* f, QemudMultiplexer* m )
+qemud_load_clients(QEMUFile* f, QemudMultiplexer* m, int version )
{
/* Remove all clients, except on the control channel.*/
qemud_multiplexer_disconnect_noncontrol(m);
@@ -1575,7 +1806,7 @@ qemud_load_clients(QEMUFile* f, QemudMultiplexer* m )
int client_count = qemu_get_be32(f);
int i, ret;
for (i = 0; i < client_count; i++) {
- if ((ret = qemud_client_load(f, m->services))) {
+ if ((ret = qemud_client_load(f, m->services, version))) {
return ret;
}
}
@@ -1591,19 +1822,246 @@ qemud_load(QEMUFile *f, void* opaque, int version)
QemudMultiplexer *m = opaque;
int ret;
- if (version != QEMUD_SAVE_VERSION)
- return -1;
if ((ret = qemud_serial_load(f, m->serial)))
return ret;
if ((ret = qemud_load_services(f, m->services)))
return ret;
- if ((ret = qemud_load_clients(f, m)))
+ if ((ret = qemud_load_clients(f, m, version)))
return ret;
return 0;
}
+/*------------------------------------------------------------------------------
+ *
+ * QEMUD PIPE service callbacks
+ *
+ * ----------------------------------------------------------------------------*/
+
+/* This is a callback that gets invoked when guest is connecting to the service.
+ *
+ * Here we will create a new client as well as pipe descriptor representing new
+ * connection.
+ */
+static void*
+_qemudPipe_init(void* hwpipe, void* _looper, const char* args)
+{
+ QemudMultiplexer *m = _multiplexer;
+ QemudService* sv = m->services;
+ QemudClient* client;
+ QemudPipe* pipe = NULL;
+ char service_name[512];
+ const char* client_args;
+ size_t srv_name_len;
+
+ /* 'args' passed in this callback represents name of the service the guest is
+ * connecting to. It can't be NULL. */
+ if (args == NULL) {
+ D("%s: Missing address!", __FUNCTION__);
+ return NULL;
+ }
+
+ /* 'args' contain service name, and optional parameters for the client that
+ * is about to be created in this call. The parameters are separated from the
+ * service name with ':'. Separate service name from the client param. */
+ client_args = strchr(args, ':');
+ if (client_args != NULL) {
+ srv_name_len = min(client_args - args, sizeof(service_name) - 1);
+ client_args++; // Past the ':'
+ if (*client_args == '\0') {
+ /* No actual parameters. */
+ client_args = NULL;
+ }
+ } else {
+ srv_name_len = min(strlen(args), sizeof(service_name) - 1);
+ }
+ memcpy(service_name, args, srv_name_len);
+ service_name[srv_name_len] = '\0';
+
+ /* Lookup registered service by its name. */
+ while (sv != NULL && strcmp(sv->name, service_name)) {
+ sv = sv->next;
+ }
+ if (sv == NULL) {
+ D("%s: Service '%s' has not been registered!", __FUNCTION__, service_name);
+ return NULL;
+ }
+
+ /* Create a client for this connection. -1 as a channel ID signals that this
+ * is a pipe client. */
+ client = qemud_service_connect_client(sv, -1, client_args);
+ if (client != NULL) {
+ ANEW0(pipe);
+ pipe->hwpipe = hwpipe;
+ pipe->looper = _looper;
+ pipe->service = sv;
+ pipe->client = client;
+ client->ProtocolSelector.Pipe.qemud_pipe = pipe;
+ }
+
+ return pipe;
+}
+
+/* Called when the guest wants to close the channel.
+*/
+static void
+_qemudPipe_closeFromGuest( void* opaque )
+{
+ QemudPipe* pipe = opaque;
+ QemudClient* client = pipe->client;
+ D("%s", __FUNCTION__);
+ if (client != NULL) {
+ qemud_client_disconnect(client);
+ } else {
+ D("%s: Unexpected NULL client", __FUNCTION__);
+ }
+}
+
+/* Called when the guest has sent some data to the client.
+ */
+static int
+_qemudPipe_sendBuffers(void* opaque,
+ const GoldfishPipeBuffer* buffers,
+ int numBuffers)
+{
+ QemudPipe* pipe = opaque;
+ QemudClient* client = pipe->client;
+ size_t transferred = 0;
+
+ if (client == NULL) {
+ D("%s: Unexpected NULL client", __FUNCTION__);
+ return -1;
+ }
+
+ if (numBuffers == 1) {
+ /* Simple case: all data are in one buffer. */
+ D("%s: %s", __FUNCTION__, quote_bytes((char*)buffers->data, buffers->size));
+ qemud_client_recv(client, buffers->data, buffers->size);
+ transferred = buffers->size;
+ } else {
+ /* If there are multiple buffers involved, collect all data in one buffer
+ * before calling the high level client. */
+ uint8_t* msg, *wrk;
+ int n;
+ for (n = 0; n < numBuffers; n++) {
+ transferred += buffers[n].size;
+ }
+ msg = malloc(transferred);
+ wrk = msg;
+ for (n = 0; n < numBuffers; n++) {
+ memcpy(wrk, buffers[n].data, buffers[n].size);
+ wrk += buffers[n].size;
+ }
+ D("%s: %s", __FUNCTION__, quote_bytes((char*)msg, transferred));
+ qemud_client_recv(client, msg, transferred);
+ free(msg);
+ }
+
+ return transferred;
+}
+
+/* Called when the guest is reading data from the client.
+ */
+static int
+_qemudPipe_recvBuffers(void* opaque, GoldfishPipeBuffer* buffers, int numBuffers)
+{
+ QemudPipe* pipe = opaque;
+ QemudClient* client = pipe->client;
+ QemudPipeMessage** msg_list;
+ GoldfishPipeBuffer* buff = buffers;
+ GoldfishPipeBuffer* endbuff = buffers + numBuffers;
+ size_t sent_bytes = 0;
+ size_t off_in_buff = 0;
+
+ if (client == NULL) {
+ D("%s: Unexpected NULL client", __FUNCTION__);
+ return -1;
+ }
+
+ msg_list = &client->ProtocolSelector.Pipe.messages;
+ if (*msg_list == NULL) {
+ /* No data to send. Let it block until we wake it up with
+ * PIPE_WAKE_READ when service sends data to the client. */
+ return PIPE_ERROR_AGAIN;
+ }
+
+ /* Fill in goldfish buffers while they are still available, and there are
+ * messages in the client's message list. */
+ while (buff != endbuff && *msg_list != NULL) {
+ QemudPipeMessage* msg = *msg_list;
+ /* Message data fitting the current pipe's buffer. */
+ size_t to_copy = min(msg->size - msg->offset, buff->size - off_in_buff);
+ memcpy(buff->data + off_in_buff, msg->message + msg->offset, to_copy);
+ /* Update offsets. */
+ off_in_buff += to_copy;
+ msg->offset += to_copy;
+ sent_bytes += to_copy;
+ if (msg->size == msg->offset) {
+ /* We're done with the current message. Go to the next one. */
+ *msg_list = msg->next;
+ free(msg);
+ }
+ if (off_in_buff == buff->size) {
+ /* Current pipe buffer is full. Continue with the next one. */
+ buff++;
+ off_in_buff = 0;
+ }
+ }
+
+ D("%s: -> %u (of %u)", __FUNCTION__, sent_bytes, buffers->size);
+
+ return sent_bytes;
+}
+
+static unsigned
+_qemudPipe_poll(void* opaque)
+{
+ QemudPipe* pipe = opaque;
+ QemudClient* client = pipe->client;
+ unsigned ret = 0;
+
+ if (client != NULL) {
+ ret |= PIPE_POLL_OUT;
+ if (client->ProtocolSelector.Pipe.messages != NULL) {
+ ret |= PIPE_POLL_IN;
+ }
+ } else {
+ D("%s: Unexpected NULL client", __FUNCTION__);
+ }
+
+ return ret;
+}
+
+static void
+_qemudPipe_wakeOn(void* opaque, int flags)
+{
+ D("%s: -> %X", __FUNCTION__, flags);
+}
+
+/* QEMUD pipe functions.
+ */
+static const GoldfishPipeFuncs _qemudPipe_funcs = {
+ _qemudPipe_init,
+ _qemudPipe_closeFromGuest,
+ _qemudPipe_sendBuffers,
+ _qemudPipe_recvBuffers,
+ _qemudPipe_poll,
+ _qemudPipe_wakeOn,
+};
+
+/* Initializes QEMUD pipe interface.
+ */
+static void
+_android_qemud_pipe_init(void)
+{
+ static ABool _qemud_pipe_initialized = false;
+
+ if (!_qemud_pipe_initialized) {
+ goldfish_pipe_add_type( "qemud", looper_newCore(), &_qemudPipe_funcs );
+ _qemud_pipe_initialized = true;
+ }
+}
/* this is the end of the serial charpipe that must be passed
* to the emulated tty implementation. The other end of the
@@ -1611,8 +2069,10 @@ qemud_load(QEMUFile *f, void* opaque, int version)
*/
static CharDriverState* android_qemud_cs;
-extern void
-android_qemud_init( void )
+/* Initializes QEMUD serial interface.
+ */
+static void
+_android_qemud_serial_init(void)
{
CharDriverState* cs;
@@ -1631,6 +2091,18 @@ android_qemud_init( void )
qemud_save, qemud_load, _multiplexer);
}
+extern void
+android_qemud_init( void )
+{
+ D("%s", __FUNCTION__);
+ /* We don't know in advance whether the guest system supports qemud pipes,
+ * so we will initialize both qemud machineries, the legacy (over serial
+ * port), and the new one (over qemu pipe). Then we let the guest connect
+ * via one, or the other. */
+ _android_qemud_serial_init();
+ _android_qemud_pipe_init();
+}
+
/* return the serial charpipe endpoint that must be used
* by the emulated tty implementation.
*/
@@ -1663,20 +2135,19 @@ qemud_service_register( const char* service_name,
QemudServiceSave serv_save,
QemudServiceLoad serv_load )
{
- QemudMultiplexer* m = _multiplexer;
QemudService* sv;
+ QemudMultiplexer* m = _multiplexer;
- if (android_qemud_cs == NULL)
- android_qemud_init();
+ android_qemud_init();
sv = qemud_service_new(service_name,
- max_clients,
- serv_opaque,
- serv_connect,
- serv_save,
- serv_load,
- &m->services);
-
+ max_clients,
+ serv_opaque,
+ serv_connect,
+ serv_save,
+ serv_load,
+ &m->services);
+ D("Registered QEMUD service %s", service_name);
return sv;
}
@@ -1736,8 +2207,22 @@ _qemud_char_client_recv( void* opaque, uint8_t* msg, int msglen,
*/
static void
_qemud_char_client_close( void* opaque )
+
{
- derror("unexpected qemud char. channel close");
+ QemudClient* client = opaque;
+
+ /* At this point modem driver still uses char pipe to communicate with
+ * hw-qemud, while communication with the guest is done over qemu pipe.
+ * So, when guest disconnects from the qemu pipe, and emulator-side client
+ * goes through the disconnection process, this routine is called, since it
+ * has been set to be called during service registration. Unless modem driver
+ * is changed to drop char pipe communication, this routine will be called
+ * due to guest disconnection. As long as the client was a qemu pipe - based
+ * client, it's fine, since we don't really need to do anything in this case.
+ */
+ if (!_is_pipe_client(client)) {
+ derror("unexpected qemud char. channel close");
+ }
}
@@ -1767,10 +2252,13 @@ _qemud_char_service_read( void* opaque, const uint8_t* from, int len )
* data from it.
*/
static QemudClient*
-_qemud_char_service_connect( void* opaque, QemudService* sv, int channel )
+_qemud_char_service_connect(void* opaque,
+ QemudService* sv,
+ int channel,
+ const char* client_param )
{
CharDriverState* cs = opaque;
- QemudClient* c = qemud_client_new( sv, channel,
+ QemudClient* c = qemud_client_new( sv, channel, client_param,
cs,
_qemud_char_client_recv,
_qemud_char_client_close,
diff --git a/android/hw-qemud.h b/android/hw-qemud.h
index cd05737..6a2cc66 100644
--- a/android/hw-qemud.h
+++ b/android/hw-qemud.h
@@ -89,8 +89,9 @@ typedef int (*QemudClientLoad) ( QEMUFile* f, QemudClient* client, void* opaq
* You should typically use this function within a QemudServiceConnect callback
* (see below).
*/
-extern QemudClient* qemud_client_new( QemudService* service,
+extern QemudClient* qemud_client_new( QemudService* service,
int channel_id,
+ const char* client_param,
void* clie_opaque,
QemudClientRecv clie_recv,
QemudClientClose clie_close,
@@ -114,7 +115,10 @@ extern void qemud_client_close( QemudClient* client );
* system tries to connect to a given qemud service. This should typically
* call qemud_client_new() to register a new client.
*/
-typedef QemudClient* (*QemudServiceConnect)( void* opaque, QemudService* service, int channel );
+typedef QemudClient* (*QemudServiceConnect)( void* opaque,
+ QemudService* service,
+ int channel,
+ const char* client_param );
/* A function that will be called when the state of the service should be
* saved to a snapshot.
diff --git a/android/hw-sensors.c b/android/hw-sensors.c
index 7d77b62..9c39196 100644
--- a/android/hw-sensors.c
+++ b/android/hw-sensors.c
@@ -462,11 +462,14 @@ _hwSensorClient_load( QEMUFile* f, QemudClient* client, void* opaque )
}
static QemudClient*
-_hwSensors_connect( void* opaque, QemudService* service, int channel )
+_hwSensors_connect( void* opaque,
+ QemudService* service,
+ int channel,
+ const char* client_param )
{
HwSensors* sensors = opaque;
HwSensorClient* cl = _hwSensorClient_new(sensors);
- QemudClient* client = qemud_client_new(service, channel, cl,
+ QemudClient* client = qemud_client_new(service, channel, client_param, cl,
_hwSensorClient_recv,
_hwSensorClient_close,
_hwSensorClient_save,
diff --git a/android/main-emulator.c b/android/main-emulator.c
index 39e5e1e..a6e95c7 100644
--- a/android/main-emulator.c
+++ b/android/main-emulator.c
@@ -41,8 +41,21 @@ int android_verbose;
# define D(...) do{}while(0)
#endif
+/* The extension used by dynamic libraries on the host platform */
+#ifdef _WIN32
+# define DLL_EXTENSION ".dll"
+#elif defined(__APPLE__)
+# define DLL_EXTENSION ".dylib"
+#else
+# define DLL_EXTENSION ".so"
+#endif
+
+#define GLES_EMULATION_LIB "libOpenglRender" DLL_EXTENSION
+
/* Forward declarations */
static char* getTargetEmulatorPath(const char* progName, const char* avdArch);
+static char* getSharedLibraryPath(const char* progName, const char* libName);
+static void prependSharedLibraryPath(const char* prefix);
#ifdef _WIN32
static char* quotePath(const char* path);
@@ -138,6 +151,20 @@ int main(int argc, char** argv)
}
#endif
+ /* We need to find the location of the GLES emulation shared libraries
+ * and modify either LD_LIBRARY_PATH or PATH accordingly
+ */
+ {
+ char* sharedLibPath = getSharedLibraryPath(emulatorPath, GLES_EMULATION_LIB);
+
+ if (sharedLibPath != NULL) {
+ D("Found OpenGLES emulation libraries in %s\n", sharedLibPath);
+ prependSharedLibraryPath(sharedLibPath);
+ } else {
+ D("Could not find OpenGLES emulation host libraries!\n");
+ }
+ }
+
/* Launch it with the same set of options ! */
safe_execv(emulatorPath, argv);
@@ -196,6 +223,107 @@ getTargetEmulatorPath(const char* progName, const char* avdArch)
return NULL;
}
+/* return 1 iff <path>/<filename> exists */
+static int
+probePathForFile(const char* path, const char* filename)
+{
+ char temp[PATH_MAX], *p=temp, *end=p+sizeof(temp);
+ p = bufprint(temp, end, "%s/%s", path, filename);
+ D("Probing for: %s\n", temp);
+ return (p < end && path_exists(temp));
+}
+
+/* Find the directory containing a given shared library required by the
+ * emulator (for GLES emulation). We will probe several directories
+ * that correspond to various use-cases.
+ *
+ * Caller must free() result string. NULL if not found.
+ */
+
+static char*
+getSharedLibraryPath(const char* progName, const char* libName)
+{
+ char* progDir;
+ char* result = NULL;
+ char temp[PATH_MAX], *p=temp, *end=p+sizeof(temp);
+
+ /* Get program's directory name */
+ path_split(progName, &progDir, NULL);
+
+ /* First, try to probe the program's directory itself, this corresponds
+ * to the standalone build with ./android-configure.sh where the script
+ * will copy the host shared library under external/qemu/objs where
+ * the binaries are located.
+ */
+ if (probePathForFile(progDir, libName)) {
+ return progDir;
+ }
+
+ /* Try under $progDir/lib/, this should correspond to the SDK installation
+ * where the binary is under tools/, and the libraries under tools/lib/
+ */
+ {
+ p = bufprint(temp, end, "%s/lib", progDir);
+ if (p < end && probePathForFile(temp, libName)) {
+ result = strdup(temp);
+ goto EXIT;
+ }
+ }
+
+ /* try in $progDir/../lib, this corresponds to the platform build
+ * where the emulator binary is under out/host/<system>/lib and
+ * the libraries are under out/host/<system>/lib
+ */
+ {
+ char* parentDir = path_parent(progDir, 1);
+
+ if (parentDir == NULL) {
+ parentDir = strdup(".");
+ }
+ p = bufprint(temp, end, "%s/lib", parentDir);
+ free(parentDir);
+ if (p < end && probePathForFile(temp, libName)) {
+ result = strdup(temp);
+ goto EXIT;
+ }
+ }
+
+ /* Nothing found! */
+EXIT:
+ free(progDir);
+ return result;
+}
+
+/* Prepend the path in 'prefix' to either LD_LIBRARY_PATH or PATH to
+ * ensure that the shared libraries inside the path will be available
+ * through dlopen() to the emulator program being launched.
+ */
+static void
+prependSharedLibraryPath(const char* prefix)
+{
+ char temp[2048], *p=temp, *end=p+sizeof(temp);
+#ifdef _WIN32
+ const char* path = getenv("PATH");
+ if (path == NULL || path[0] == '\0') {
+ p = bufprint(temp, end, "PATH=%s", prefix);
+ } else {
+ p = bufprint(temp, end, "PATH=%s;%s", path, prefix);
+ }
+ /* Ignore overflow, this will push some paths out of the variable, but
+ * so be it. */
+ D("Setting %s\n", temp);
+ putenv(strdup(temp));
+#else
+ const char* path = getenv("LD_LIBRARY_PATH");
+ if (path != NULL && path[0] != '\0') {
+ p = bufprint(temp, end, "%s:%s", prefix, path);
+ prefix = temp;
+ }
+ setenv("LD_LIBRARY_PATH",prefix,1);
+ D("Setting LD_LIBRARY_PATH=%s\n", prefix);
+#endif
+}
+
#ifdef _WIN32
static char*
quotePath(const char* path)
diff --git a/android/main.c b/android/main.c
index f20ea7b..56ae698 100644
--- a/android/main.c
+++ b/android/main.c
@@ -155,6 +155,65 @@ _adjustPartitionSize( const char* description,
return convertMBToBytes(imageMB);
}
+/* Parses a -webcam option, extracting 'name', and 'dir' values.
+ * Param:
+ * param - -webcam option, that should be formatted as such:
+ * name=<name>[,dir=<direction>]
+ * name, name_size - buffer (and its size) where to receive <name>
+ * dir, dir_size - buffer (and its size) where to receive <direction>
+ */
+static void
+_parseWebcamOption(const char* param,
+ char* name, size_t name_size,
+ char* dir, size_t dir_size)
+{
+ const char* dr;
+ const char* wc_opt = param;
+
+ /* Must start with 'name=' */
+ if (strlen(wc_opt) <= 5 || memcmp(wc_opt, "name=", 5)) {
+ derror("Invalid value for -webcam parameter: %s\n", param);
+ exit(1);
+ }
+
+ /* Move on to 'name' value. */
+ wc_opt += 5;
+ dr = strchr(wc_opt, ',');
+ if (dr == NULL) {
+ dr = wc_opt + strlen(wc_opt);
+ }
+
+ /* Make sure that <name> fits */
+ if ((dr - wc_opt) < name_size) {
+ memcpy(name, wc_opt, dr - wc_opt);
+ name[dr - wc_opt] = '\0';
+ if (*dr == '\0') {
+ /* Default direction value is 'front' */
+ strcpy(dir, "front");
+ return;
+ } else {
+ dr++;
+ }
+ } else {
+ derror("Invalid <name> value for -webcam parameter: %s\n", param);
+ exit(1);
+ }
+
+ /* Parse 'dir'. Must begin with 'dir=' */
+ if (strlen(dr) <= 4 || memcmp(dr, "dir=", 4)) {
+ derror("Invalid value for -webcam parameter: %s\n", param);
+ exit(1);
+ }
+ dr += 4;
+ /* Check the bounds, and the values */
+ if (strlen(dr) >= dir_size || (strcmp(dr, "front") && strcmp(dr, "back"))) {
+ derror("Invalid <direction> value for -webcam parameter: %s\n"
+ "Valid values are: 'front', or 'back'\n", param);
+ exit(1);
+ }
+ strcpy(dir, dr);
+}
+
int main(int argc, char **argv)
{
char tmp[MAX_PATH];
@@ -396,6 +455,17 @@ int main(int argc, char **argv)
opts->trace = tracePath;
}
+ /* Update CPU architecture for HW configs created from build dir. */
+ if (inAndroidBuild) {
+#if defined(TARGET_ARM)
+ free(android_hw->hw_cpu_arch);
+ android_hw->hw_cpu_arch = ASTRDUP("arm");
+#elif defined(TARGET_I386)
+ free(android_hw->hw_cpu_arch);
+ android_hw->hw_cpu_arch = ASTRDUP("x86");
+#endif
+ }
+
n = 1;
/* generate arguments for the underlying qemu main() */
{
@@ -485,8 +555,15 @@ int main(int argc, char **argv)
args[n++] = opts->dns_server;
}
- hw->disk_ramdisk_path = avdInfo_getRamdiskPath(avd);
- D("autoconfig: -ramdisk %s", hw->disk_ramdisk_path);
+ /* opts->ramdisk is never NULL (see createAVD) here */
+ if (opts->ramdisk) {
+ AFREE(hw->disk_ramdisk_path);
+ hw->disk_ramdisk_path = ASTRDUP(opts->ramdisk);
+ }
+ else if (!hw->disk_ramdisk_path[0]) {
+ hw->disk_ramdisk_path = avdInfo_getRamdiskPath(avd);
+ D("autoconfig: -ramdisk %s", hw->disk_ramdisk_path);
+ }
/* -partition-size is used to specify the max size of both the system
* and data partition sizes.
@@ -1063,6 +1140,95 @@ int main(int argc, char **argv)
args[n++] = opts->memcheck;
}
+ if (opts->gpu) {
+ const char* gpu = opts->gpu;
+ if (!strcmp(gpu,"on") || !strcmp(gpu,"enable")) {
+ hw->hw_gpu_enabled = 1;
+ } else if (!strcmp(gpu,"off") || !strcmp(gpu,"disable")) {
+ hw->hw_gpu_enabled = 0;
+ } else if (!strcmp(gpu,"auto")) {
+ /* Nothing to do */
+ } else {
+ derror("Invalid value for -gpu <mode> parameter: %s\n", gpu);
+ derror("Valid values are: on, off or auto\n");
+ exit(1);
+ }
+ }
+
+ if (opts->fake_camera) {
+ if (!strcmp(opts->fake_camera, "back") ||
+ !strcmp(opts->fake_camera, "front") ||
+ !strcmp(opts->fake_camera, "off")) {
+ hw->hw_fakeCamera = ASTRDUP(opts->fake_camera);
+ } else {
+ derror("Invalid value for -fake-camera <mode> parameter: %s\n",
+ opts->fake_camera);
+ derror("Valid values are: back, front, or off\n");
+ exit(1);
+ }
+ }
+
+ if (opts->webcam != NULL) {
+ ParamList* pl = opts->webcam;
+ int webcam_num = 0;
+ for ( ; pl != NULL; pl = pl->next ) {
+ char webcam_name[64];
+ char webcam_dir[16];
+ if (!strcmp(pl->param, "off")) {
+ /* If 'off' is passed, there must be no other -webcam options. */
+ if (webcam_num || pl->next != NULL) {
+ derror("'-webcam off' cannot be combined with other -webcam otions\n");
+ exit(1);
+ }
+ break;
+ }
+ if (!strcmp(pl->param, "list")) {
+ /* If 'list' is passed, there must be no other -webcam options. */
+ if (webcam_num || pl->next != NULL) {
+ derror("'-webcam list' cannot be combined with other -webcam otions\n");
+ exit(1);
+ }
+ args[n++] = "-list-webcam";
+ break;
+ }
+ /* Extract name, and direction */
+ _parseWebcamOption(pl->param, webcam_name, sizeof(webcam_name),
+ webcam_dir, sizeof(webcam_dir));
+ /* Save them to appropriate field in hw.ini */
+ switch (webcam_num) {
+ case 0:
+ hw->hw_webcam_0_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_0_direction = ASTRDUP(webcam_dir);
+ break;
+ case 1:
+ hw->hw_webcam_1_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_1_direction = ASTRDUP(webcam_dir);
+ break;
+ case 2:
+ hw->hw_webcam_2_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_2_direction = ASTRDUP(webcam_dir);
+ break;
+ case 3:
+ hw->hw_webcam_3_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_3_direction = ASTRDUP(webcam_dir);
+ break;
+ case 4:
+ hw->hw_webcam_4_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_4_direction = ASTRDUP(webcam_dir);
+ break;
+ case 5:
+ hw->hw_webcam_5_name = ASTRDUP(webcam_name);
+ hw->hw_webcam_5_direction = ASTRDUP(webcam_dir);
+ break;
+ default:
+ derror("Too many -webcam options. Maximum number of -webcam options is 6\n");
+ exit(1);
+ }
+ webcam_num++;
+ }
+ hw->hw_webcam_count = webcam_num;
+ }
+
/* physical memory is now in hw->hw_ramSize */
hw->avd_name = ASTRDUP(avdInfo_getName(avd));
diff --git a/android/opengles.c b/android/opengles.c
new file mode 100644
index 0000000..4913d0c
--- /dev/null
+++ b/android/opengles.c
@@ -0,0 +1,201 @@
+/* Copyright (C) 2011 The Android Open Source Project
+**
+** This software is licensed under the terms of the GNU General Public
+** License version 2, as published by the Free Software Foundation, and
+** may be copied, distributed, and modified under those terms.
+**
+** This program is distributed in the hope that it will be useful,
+** but WITHOUT ANY WARRANTY; without even the implied warranty of
+** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+** GNU General Public License for more details.
+*/
+
+#include "config-host.h"
+#include "android/opengles.h"
+#include "android/globals.h"
+#include <android/utils/debug.h>
+#include <android/utils/path.h>
+#include <android/utils/bufprint.h>
+#include <android/utils/dll.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define D(...) VERBOSE_PRINT(init,__VA_ARGS__)
+#define DD(...) VERBOSE_PRINT(gles,__VA_ARGS__)
+
+/* Declared in "android/globals.h" */
+int android_gles_fast_pipes = 1;
+
+/* Name of the GLES rendering library we're going to use */
+#define RENDERER_LIB_NAME "libOpenglRender"
+
+/* These definitions *must* match those under:
+ * development/tools/emulator/opengl/host/include/libOpenglRender/render_api.h
+ */
+#define DYNLINK_FUNCTIONS \
+ DYNLINK_FUNC(int,initLibrary,(void),(),return) \
+ DYNLINK_FUNC(int,setStreamMode,(int a),(a),return) \
+ DYNLINK_FUNC(int,initOpenGLRenderer,(int width, int height, int port),(width,height,port),return) \
+ DYNLINK_FUNC(int,createOpenGLSubwindow,(void* window, int x, int y, int width, int height, float zRot),(window,x,y,width,height,zRot),return)\
+ DYNLINK_FUNC(int,destroyOpenGLSubwindow,(void),(),return)\
+ DYNLINK_FUNC(void,repaintOpenGLDisplay,(void),(),)\
+ DYNLINK_FUNC(void,stopOpenGLRenderer,(void),(),)
+
+#define STREAM_MODE_DEFAULT 0
+#define STREAM_MODE_TCP 1
+#define STREAM_MODE_UNIX 2
+#define STREAM_MODE_PIPE 3
+
+#ifndef CONFIG_STANDALONE_UI
+/* Defined in android/hw-pipe-net.c */
+extern int android_init_opengles_pipes(void);
+#endif
+
+static ADynamicLibrary* rendererLib;
+
+/* Define the pointers and the wrapper functions to call them */
+#define DYNLINK_FUNC(result,name,sig,params,ret) \
+ static result (*_ptr_##name) sig; \
+ static result name sig { \
+ ret (*_ptr_##name) params ; \
+ }
+
+DYNLINK_FUNCTIONS
+
+#undef DYNLINK_FUNC
+
+static int
+initOpenglesEmulationFuncs(ADynamicLibrary* rendererLib)
+{
+ void* symbol;
+ char* error;
+#define DYNLINK_FUNC(result,name,sig,params,ret) \
+ symbol = adynamicLibrary_findSymbol( rendererLib, #name, &error ); \
+ if (symbol != NULL) { \
+ _ptr_##name = symbol; \
+ } else { \
+ derror("GLES emulation: Could not find required symbol (%s): %s", #name, error); \
+ free(error); \
+ return -1; \
+ }
+DYNLINK_FUNCTIONS
+#undef DYNLINK_FUNC
+ return 0;
+}
+
+int
+android_initOpenglesEmulation(void)
+{
+ char* error = NULL;
+
+ if (rendererLib != NULL)
+ return 0;
+
+ D("Initializing hardware OpenGLES emulation support");
+
+ rendererLib = adynamicLibrary_open(RENDERER_LIB_NAME, &error);
+ if (rendererLib == NULL) {
+ derror("Could not load OpenGLES emulation library: %s", error);
+ return -1;
+ }
+
+#ifndef CONFIG_STANDALONE_UI
+ android_init_opengles_pipes();
+#endif
+
+
+ /* Resolve the functions */
+ if (initOpenglesEmulationFuncs(rendererLib) < 0) {
+ derror("OpenGLES emulation library mismatch. Be sure to use the correct version!");
+ goto BAD_EXIT;
+ }
+
+ if (!initLibrary()) {
+ derror("OpenGLES initialization failed!");
+ goto BAD_EXIT;
+ }
+
+ if (android_gles_fast_pipes) {
+#ifdef _WIN32
+ /* XXX: NEED Win32 pipe implementation */
+ setStreamMode(STREAM_MODE_TCP);
+#else
+ setStreamMode(STREAM_MODE_UNIX);
+#endif
+ } else {
+ setStreamMode(STREAM_MODE_TCP);
+ }
+ return 0;
+
+BAD_EXIT:
+ derror("OpenGLES emulation library could not be initialized!");
+ adynamicLibrary_close(rendererLib);
+ rendererLib = NULL;
+ return -1;
+}
+
+int
+android_startOpenglesRenderer(int width, int height)
+{
+ if (!rendererLib) {
+ D("Can't start OpenGLES renderer without support libraries");
+ return -1;
+ }
+
+ if (initOpenGLRenderer(width, height,ANDROID_OPENGLES_BASE_PORT) != 0) {
+ D("Can't start OpenGLES renderer?");
+ return -1;
+ }
+ return 0;
+}
+
+void
+android_stopOpenglesRenderer(void)
+{
+ if (rendererLib) {
+ stopOpenGLRenderer();
+ }
+}
+
+int
+android_showOpenglesWindow(void* window, int x, int y, int width, int height, float rotation)
+{
+ if (rendererLib) {
+ return createOpenGLSubwindow(window, x, y, width, height, rotation);
+ } else {
+ return -1;
+ }
+}
+
+int
+android_hideOpenglesWindow(void)
+{
+ if (rendererLib) {
+ return destroyOpenGLSubwindow();
+ } else {
+ return -1;
+ }
+}
+
+void
+android_redrawOpenglesWindow(void)
+{
+ if (rendererLib) {
+ repaintOpenGLDisplay();
+ }
+}
+
+void
+android_gles_unix_path(char* buff, size_t buffsize, int port)
+{
+ const char* user = getenv("USER");
+ char *p = buff, *end = buff + buffsize;
+
+ /* The logic here must correspond to the one inside
+ * development/tools/emulator/opengl/shared/libOpenglCodecCommon/UnixStream.cpp */
+ p = bufprint(p, end, "/tmp/");
+ if (user && user[0]) {
+ p = bufprint(p, end, "android-%s/", user);
+ }
+ p = bufprint(p, end, "qemu-gles-%d", port);
+}
diff --git a/android/opengles.h b/android/opengles.h
new file mode 100644
index 0000000..2202e92
--- /dev/null
+++ b/android/opengles.h
@@ -0,0 +1,48 @@
+/* Copyright (C) 2011 The Android Open Source Project
+**
+** This software is licensed under the terms of the GNU General Public
+** License version 2, as published by the Free Software Foundation, and
+** may be copied, distributed, and modified under those terms.
+**
+** This program is distributed in the hope that it will be useful,
+** but WITHOUT ANY WARRANTY; without even the implied warranty of
+** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+** GNU General Public License for more details.
+*/
+#ifndef ANDROID_OPENGLES_H
+#define ANDROID_OPENGLES_H
+
+#include <stddef.h>
+
+#define ANDROID_OPENGLES_BASE_PORT 22468
+
+/* Call this function to initialize the hardware opengles emulation.
+ * This function returns -1 if we can't find the corresponding host
+ * libraries through dlopen() or equivalent.
+ */
+int android_initOpenglesEmulation(void);
+
+/* Tries to start the renderer process. Returns 0 on success, -1 on error.
+ * At the moment, this must be done before the VM starts.
+ */
+int android_startOpenglesRenderer(int width, int height);
+
+int android_showOpenglesWindow(void* window, int x, int y, int width, int height, float rotation);
+
+int android_hideOpenglesWindow(void);
+
+void android_redrawOpenglesWindow(void);
+
+/* Stop the renderer process */
+void android_stopOpenglesRenderer(void);
+
+/* set to 1 (TRUE) if you want to use fast GLES pipes, or 0 if you want to
+ * fall back to local TCP ones
+ */
+extern int android_gles_fast_pipes;
+
+/* Write the path of the Unix socket we're going to use to access GLES on a given <port> */
+/* The result is only valid on Unix systems */
+void android_gles_unix_path(char* buff, size_t buffsize, int port);
+
+#endif /* ANDROID_OPENGLES_H */
diff --git a/android/skin/scaler.c b/android/skin/scaler.c
index 907c5ca..5672869 100644
--- a/android/skin/scaler.c
+++ b/android/skin/scaler.c
@@ -81,6 +81,28 @@ typedef struct {
void
+skin_scaler_get_scaled_rect( SkinScaler* scaler,
+ SkinRect* srect,
+ SkinRect* drect )
+{
+ int sx = srect->pos.x;
+ int sy = srect->pos.y;
+ int sw = srect->size.w;
+ int sh = srect->size.h;
+ double scale = scaler->scale;
+
+ if (!scaler->valid) {
+ drect[0] = srect[0];
+ return;
+ }
+
+ drect->pos.x = (int)(sx * scale + scaler->xdisp);
+ drect->pos.y = (int)(sy * scale + scaler->ydisp);
+ drect->size.w = (int)(ceil((sx + sw) * scale + scaler->xdisp)) - drect->pos.x;
+ drect->size.h = (int)(ceil((sy + sh) * scale + scaler->ydisp)) - drect->pos.y;
+}
+
+void
skin_scaler_scale( SkinScaler* scaler,
SDL_Surface* dst_surface,
SDL_Surface* src_surface,
diff --git a/android/skin/scaler.h b/android/skin/scaler.h
index 4e0ec5a..e2d7641 100644
--- a/android/skin/scaler.h
+++ b/android/skin/scaler.h
@@ -26,6 +26,12 @@ extern int skin_scaler_set( SkinScaler* scaler,
double xDisp,
double yDisp );
+/* Retrieve the position of the scaled source rectangle 'srect' into 'drect'.
+ * You can use the same pointer for both parameters. */
+extern void skin_scaler_get_scaled_rect( SkinScaler* scaler,
+ SkinRect* srect,
+ SkinRect* drect );
+
extern void skin_scaler_free( SkinScaler* scaler );
extern void skin_scaler_scale( SkinScaler* scaler,
diff --git a/android/skin/window.c b/android/skin/window.c
index 9a72db5..5d8c684 100644
--- a/android/skin/window.c
+++ b/android/skin/window.c
@@ -22,6 +22,7 @@
#include <math.h>
#include "android/framebuffer.h"
+#include "android/opengles.h"
/* when shrinking, we reduce the pixel ratio by this fixed amount */
#define SHRINK_SCALE 0.6
@@ -1140,6 +1141,44 @@ skin_window_show_trackball( SkinWindow* window, int enable )
}
}
+/* Hide the OpenGL ES framebuffer */
+static void
+skin_window_hide_opengles( SkinWindow* window )
+{
+ android_hideOpenglesWindow();
+}
+
+/* Show the OpenGL ES framebuffer window */
+static void
+skin_window_show_opengles( SkinWindow* window )
+{
+ {
+ SDL_SysWMinfo wminfo;
+ void* winhandle;
+ ADisplay* disp = window->layout.displays;
+ SkinRect drect = disp->rect;
+
+ memset(&wminfo, 0, sizeof(wminfo));
+ SDL_GetWMInfo(&wminfo);
+#ifdef _WIN32
+ winhandle = (void*)wminfo.window;
+#elif defined(CONFIG_DARWIN)
+ winhandle = (void*)wminfo.nsWindowPtr;
+#else
+ winhandle = (void*)wminfo.info.x11.window;
+#endif
+ skin_scaler_get_scaled_rect(window->scaler, &drect, &drect);
+
+ android_showOpenglesWindow(winhandle, drect.pos.x, drect.pos.y,
+ drect.size.w, drect.size.h, disp->rotation * -90.);
+ }
+}
+
+static void
+skin_window_redraw_opengles( SkinWindow* window )
+{
+ android_redrawOpenglesWindow();
+}
static int skin_window_reset_internal (SkinWindow*, SkinLayout*);
@@ -1224,6 +1263,8 @@ skin_window_create( SkinLayout* slayout, int x, int y, double scale, int no
dprint( "emulator window was out of view and was recentered\n" );
}
+ skin_window_show_opengles(window);
+
return window;
}
@@ -1261,6 +1302,9 @@ skin_window_set_title( SkinWindow* window, const char* title )
static void
skin_window_resize( SkinWindow* window )
{
+ if ( !window->no_display )
+ skin_window_hide_opengles(window);
+
/* now resize window */
if (window->surface) {
SDL_FreeSurface(window->surface);
@@ -1342,7 +1386,10 @@ skin_window_resize( SkinWindow* window )
}
if (scale == 1.0)
+ {
window->surface = surface;
+ skin_scaler_set( window->scaler, 1.0, 0, 0 );
+ }
else
{
window_w = (int) ceil(window_w / scale );
@@ -1361,6 +1408,8 @@ skin_window_resize( SkinWindow* window )
}
skin_scaler_set( window->scaler, scale, window->effective_x, window->effective_y );
}
+
+ skin_window_show_opengles(window);
}
}
@@ -1552,6 +1601,7 @@ skin_window_redraw( SkinWindow* window, SkinRect* rect )
SDL_UpdateRects( window->surface, 1, &rd );
}
+ skin_window_redraw_opengles( window );
}
}
@@ -1681,6 +1731,10 @@ skin_window_process_event( SkinWindow* window, SDL_Event* ev )
}
}
break;
+
+ case SDL_VIDEOEXPOSE:
+ skin_window_redraw_opengles(window);
+ break;
}
}
diff --git a/android/utils/debug.h b/android/utils/debug.h
index 06b9baf..096b002 100644
--- a/android/utils/debug.h
+++ b/android/utils/debug.h
@@ -35,6 +35,8 @@
_VERBOSE_TAG(avd_config, "android virtual device configuration") \
_VERBOSE_TAG(sensors, "emulated sensors") \
_VERBOSE_TAG(memcheck, "memory checker") \
+ _VERBOSE_TAG(camera, "camera") \
+ _VERBOSE_TAG(gles, "hardware OpenGLES emulation") \
#define _VERBOSE_TAG(x,y) VERBOSE_##x,
typedef enum {
diff --git a/android/utils/dll.c b/android/utils/dll.c
new file mode 100644
index 0000000..a46a462
--- /dev/null
+++ b/android/utils/dll.c
@@ -0,0 +1,194 @@
+/* Copyright (C) 2011 The Android Open Source Project
+**
+** This software is licensed under the terms of the GNU General Public
+** License version 2, as published by the Free Software Foundation, and
+** may be copied, distributed, and modified under those terms.
+**
+** This program is distributed in the hope that it will be useful,
+** but WITHOUT ANY WARRANTY; without even the implied warranty of
+** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+** GNU General Public License for more details.
+*/
+#include <android/utils/dll.h>
+#include <android/utils/system.h>
+#include <android/utils/path.h>
+
+#include <stdlib.h>
+
+/* Utility function, append one string to another, caller must free result */
+static char*
+append_string( const char* str1, const char* str2 )
+{
+ int len1 = strlen(str1);
+ int len2 = strlen(str2);
+ char* result = malloc(len1+len2+1);
+
+ if (result != NULL) {
+ memcpy(result, str1, len1);
+ memcpy(result + len1, str2, len2);
+ result[len1+len2] = '\0';
+ }
+ return result;
+}
+
+#ifdef _WIN32
+
+#include <windows.h>
+
+/* This function is used to convert all forward slashes (/) in a path
+ * string into backslashes (\). This is necessary because
+ * LoadLibrary() and AddDllDirectory() do not support forward slashes.
+ *
+ * Caller must free the result string
+ */
+static char*
+reverse_slashes( const char* path )
+{
+ int len = strlen(path);
+ char* result = malloc(len+1);
+ int nn;
+
+ for (nn = 0; nn < len; nn++) {
+ int ch = path[nn];
+ if (ch == '/') {
+ ch = '\\';
+ }
+ result[nn] = (char)ch;
+ }
+ result[nn] = '\0';
+
+ return result;
+}
+
+ADynamicLibrary*
+adynamicLibrary_open( const char* libraryName,
+ char** pError)
+{
+ char* libName = (char*) libraryName;
+ void* result;
+
+ /* Append a .dll to the library name if it doesn't have an extension */
+ if (strchr(libraryName,'.') == NULL) {
+ libName = append_string(libraryName, ".dll");
+ }
+
+ /* Now do our magic */
+ *pError = NULL;
+ result = (ADynamicLibrary*) LoadLibrary( libName );
+ if (result == NULL) {
+ *pError = ASTRDUP("Could not load DLL!");
+ }
+
+ /* Free the library name if we modified it */
+ if (libName != libraryName) {
+ free(libName);
+ }
+
+ return (ADynamicLibrary*) result;
+}
+
+void*
+adynamicLibrary_findSymbol( ADynamicLibrary* lib,
+ const char* symbolName,
+ char** pError)
+{
+ void* result;
+
+ *pError = NULL;
+
+ if (lib == NULL) {
+ *pError = strdup("NULL library pointer");
+ return NULL;
+ }
+ if (symbolName == NULL || symbolName[0] == '\0') {
+ *pError = strdup("NULL or empty symbolName");
+ return NULL;
+ }
+ result = GetProcAddress( (HMODULE)lib, symbolName );
+ if (result == NULL) {
+ *pError = ASTRDUP("Could not find symbol");
+ }
+ return result;
+}
+
+/* Close/unload a given dynamic library */
+void
+adynamicLibrary_close( ADynamicLibrary* lib )
+{
+ if (lib != NULL) {
+ FreeLibrary( (HMODULE)lib );
+ }
+}
+
+#else /* !_WIN32 */
+
+#include <dlfcn.h>
+#include <stdlib.h>
+
+ADynamicLibrary*
+adynamicLibrary_open( const char* libraryName,
+ char** pError)
+{
+ char* libName = (char*) libraryName;
+ void* result;
+
+#ifdef __APPLE__
+# define SO_EXTENSION ".dylib"
+#else
+# define SO_EXTENSION ".so"
+#endif
+
+ /* Append a .so to the library name if it doesn't have an extension */
+ if (strchr(libraryName,'.') == NULL) {
+ libName = append_string(libraryName, SO_EXTENSION);
+ }
+
+ /* Now do our magic */
+ *pError = NULL;
+ result = dlopen( libName, RTLD_LAZY );
+ if (result == NULL) {
+ *pError = strdup(dlerror());
+ }
+
+ /* Free the library name if we modified it */
+ if (libName != (char*)libraryName) {
+ free(libName);
+ }
+
+ return (ADynamicLibrary*) result;
+}
+
+void*
+adynamicLibrary_findSymbol( ADynamicLibrary* lib,
+ const char* symbolName,
+ char** pError)
+{
+ void* result;
+
+ *pError = NULL;
+
+ if (lib == NULL) {
+ *pError = strdup("NULL library pointer");
+ return NULL;
+ }
+ if (symbolName == NULL || symbolName[0] == '\0') {
+ *pError = strdup("NULL or empty symbolName");
+ return NULL;
+ }
+ result = dlsym(lib, symbolName);
+ if (result == NULL) {
+ *pError = strdup(dlerror());
+ }
+ return result;
+}
+
+/* Close/unload a given dynamic library */
+void
+adynamicLibrary_close( ADynamicLibrary* lib )
+{
+ if (lib != NULL) {
+ dlclose(lib);
+ }
+}
+
+#endif /* !_WIN32 */
diff --git a/android/utils/dll.h b/android/utils/dll.h
new file mode 100644
index 0000000..66f3a6d
--- /dev/null
+++ b/android/utils/dll.h
@@ -0,0 +1,44 @@
+/* Copyright (C) 2011 The Android Open Source Project
+**
+** This software is licensed under the terms of the GNU General Public
+** License version 2, as published by the Free Software Foundation, and
+** may be copied, distributed, and modified under those terms.
+**
+** This program is distributed in the hope that it will be useful,
+** but WITHOUT ANY WARRANTY; without even the implied warranty of
+** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+** GNU General Public License for more details.
+*/
+#ifndef ANDROID_UTILS_DLL_H
+#define ANDROID_UTILS_DLL_H
+
+/* Opaque type to model a dynamic library handle */
+typedef struct ADynamicLibrary ADynamicLibrary;
+
+/* Try to load/open a dynamic library named 'libraryName', looking for
+ * it in the optional paths listed by 'libraryPaths'.
+ *
+ * Once opened, you can use adynamicLibrary_findSymbol() and
+ * adynamicLibrary_close() on it.
+ *
+ * libraryName :: library name, if no extension is provided, then '.so'
+ * ('.dylib' on OS X) will be appended on Unix systems, or '.dll' on Windows.
+ *
+ * pError :: On success, '*pError' will be set to NULL. On error, it will
+ * point to a string describing the error, which must be freed by
+ * the caller.
+ *
+ * returns an ADynamicLibrary pointer.
+ */
+ADynamicLibrary* adynamicLibrary_open( const char* libraryName,
+ char** pError);
+
+/* Find a symbol inside a dynamic library. */
+void* adynamicLibrary_findSymbol( ADynamicLibrary* lib,
+ const char* symbolName,
+ char** pError);
+
+/* Close/unload a given dynamic library */
+void adynamicLibrary_close( ADynamicLibrary* lib );
+
+#endif /* ANDROID_UTILS_DLL_H */
diff --git a/android/utils/path.c b/android/utils/path.c
index f64e517..1bcdc4e 100644
--- a/android/utils/path.c
+++ b/android/utils/path.c
@@ -427,6 +427,61 @@ path_is_absolute( const char* path )
#endif
}
+char*
+path_get_absolute( const char* path )
+{
+ if (path_is_absolute(path)) {
+ return ASTRDUP(path);
+ }
+
+#ifdef _WIN32
+ {
+ char* result;
+ int pathLen = strlen(path);
+ int currentLen = GetCurrentDirectory(0, NULL);
+
+ if (currentLen <= 0) {
+ /* Could not get size of working directory. Something is
+ * really fishy here, so return a simple copy */
+ return ASTRDUP(path);
+ }
+ result = malloc(currentLen + pathLen + 2);
+
+ GetCurrentDirectory(currentLen+1, result);
+ if (currentLen == 0 || result[currentLen-1] != '\\') {
+ result[currentLen++] = '\\';
+ }
+ memcpy(result + currentLen, path, pathLen+1);
+
+ return result;
+ }
+#else
+ {
+ int pathLen = strlen(path);
+ char currentDir[PATH_MAX];
+ int currentLen;
+ char* result;
+
+ if (getcwd(currentDir, sizeof(currentDir)) == NULL) {
+ /* Could not get the current working directory. Something is really
+ * fishy here, so don't do anything and return a copy */
+ return ASTRDUP(path);
+ }
+
+ /* Make a new path with <current-path>/<path> */
+ currentLen = strlen(currentDir);
+ result = malloc(currentLen + pathLen + 2);
+
+ memcpy(result, currentDir, currentLen);
+ if (currentLen == 0 || result[currentLen-1] != '/') {
+ result[currentLen++] = '/';
+ }
+ memcpy(result + currentLen, path, pathLen+1);
+
+ return result;
+ }
+#endif
+}
/** OTHER FILE UTILITIES
**
diff --git a/android/utils/path.h b/android/utils/path.h
index 419e6bf..2926e5e 100644
--- a/android/utils/path.h
+++ b/android/utils/path.h
@@ -118,6 +118,12 @@ extern char* path_basename( const char* path );
*/
extern char* path_search_exec( const char* filename );
+/* Return the absolute version of a path. If 'path' is already absolute,
+ * this will be a simple copy. Otherwise, this function will prepend the
+ * current working directory to the result.
+ */
+extern char* path_get_absolute( const char* path );
+
/** OTHER FILE UTILITIES
**
** path_empty_file() creates an empty file at a given path location.
diff --git a/arch_init.c b/arch_init.c
index b966d8a..7285d27 100644
--- a/arch_init.c
+++ b/arch_init.c
@@ -382,7 +382,7 @@ int ram_load(QEMUFile *f, void *opaque, int version_id)
addr &= TARGET_PAGE_MASK;
if (flags & RAM_SAVE_FLAG_MEM_SIZE) {
- if (version_id == 3) {
+ if (version_id != 3) {
if (addr != ram_bytes_total()) {
return -EINVAL;
}
@@ -418,13 +418,11 @@ int ram_load(QEMUFile *f, void *opaque, int version_id)
total_ram_bytes -= length;
}
}
- }
-
- if (flags & RAM_SAVE_FLAG_COMPRESS) {
+ } else if (flags & RAM_SAVE_FLAG_COMPRESS) {
void *host;
uint8_t ch;
- if (version_id == 3)
+ if (version_id != 3)
host = qemu_get_ram_ptr(addr);
else
host = host_from_stream_offset(f, addr, flags);
@@ -443,7 +441,7 @@ int ram_load(QEMUFile *f, void *opaque, int version_id)
} else if (flags & RAM_SAVE_FLAG_PAGE) {
void *host;
- if (version_id == 3)
+ if (version_id != 3)
host = qemu_get_ram_ptr(addr);
else
host = host_from_stream_offset(f, addr, flags);
diff --git a/docs/KERNEL.TXT b/docs/ANDROID-KERNEL.TXT
index 35d1f1f..d5a1930 100644
--- a/docs/KERNEL.TXT
+++ b/docs/ANDROID-KERNEL.TXT
@@ -7,14 +7,19 @@ I. Helper script:
We now provide a helper script to rebuild the kernel,
it is under distrib/rebuild-kernel.sh.
-You need the sources in android.git.kernel.org/kernel/common.git,
-in branch origin/archive/android-gldfish-2.6.29 (note the typo!)
+You need the sources in android.git.kernel.org/kernel/qemu.git,
+in branch origin/android-goldfish-2.6.29
-To rebuild the ARM kernel:
+To rebuild the ARMv5TE kernel:
cd $KERNEL_SOURCES
/path/to/rebuild-kernel.sh --out=$ANDROID/prebuilt/android-arm/kernel
+To rebuild the ARMv7-A one:
+
+ cd $KERNEL_SOURCES
+ /path/to/rebuild-kernel.sh --armv7 --out=$ANDROID/prebuilt/android-arm/kernel
+
To rebuild the x86 kernel:
cd $KERNEL_SOURCES
diff --git a/docs/ANDROID-QEMUD.TXT b/docs/ANDROID-QEMUD.TXT
index 7841399..8b4f8d6 100644
--- a/docs/ANDROID-QEMUD.TXT
+++ b/docs/ANDROID-QEMUD.TXT
@@ -6,7 +6,9 @@ I. Overview:
The Android system image includes a small daemon program named "qemud"
which is started at boot time. Its purpose is to provide a multiplexing
communication channel between the emulated system and the emulator program
-itself.
+itself. Another way to support communication between the emulated system and
+the emulator program is using qemu pipes (see ANDROID-QEMU-PIPE.TXT for details
+on qemu pipes).
Its goal is to allow certain parts of the system to talk directly to the
emulator without requiring special kernel support; this simplifies a lot of
@@ -169,6 +171,17 @@ Since the "cupcake" platform, this works as follows:
Certain services do not need it at all (GSM, GPS) so it is optional and
must be used depending on which service you talk to by clients.
+- QEMU pipe communication model works similarly to the serial port multiplexing,
+ but also has some differences as far as connecting client with the service is
+ concerned:
+
+ emulator <-+--> /dev/qemu_pipe/qemud:srv1 <---> client1
+ |
+ +--> /dev/qemu_pipe/qemud:srv2 <---> client2
+
+ In the pipe model each client gets connected to the emulator through a unique
+ handle to /dev/qemu_pipe (a "pipe"), so there is no need for multiplexing the
+ channels.
III. Legacy 'qemud':
--------------------
@@ -254,7 +267,7 @@ only uses a single socket and allows concurrent clients for a all services.
IV. State snapshots:
--------------------
-Support for snapshots relies on the symmetric qemud_*_save and qemud_*_load
+Support for snapshots relies on the symmetric qemud_*_save and qemud_*_load
functions which save the state of the various Qemud* structs defined in
android/hw-qemud.c. The high-level process is as follows.
diff --git a/hw/android_arm.c b/hw/android_arm.c
index 188051b..c93a4db 100644
--- a/hw/android_arm.c
+++ b/hw/android_arm.c
@@ -77,7 +77,6 @@ static void android_arm_init_(ram_addr_t ram_size,
cpu_model = "arm926";
env = cpu_init(cpu_model);
- register_savevm( "cpu", 0, ARM_CPU_SAVE_VERSION, cpu_save, cpu_load, env );
ram_offset = qemu_ram_alloc(NULL,"android_arm",ram_size);
cpu_register_physical_memory(0, ram_size, ram_offset | IO_MEM_RAM);
diff --git a/hw/goldfish_audio.c b/hw/goldfish_audio.c
index 434522c..60e652c 100644
--- a/hw/goldfish_audio.c
+++ b/hw/goldfish_audio.c
@@ -263,7 +263,7 @@ static int audio_state_load( QEMUFile* f, void* opaque, int version_id )
goldfish_audio_buff_get( s->out_buff2, f );
goldfish_audio_buff_get (s->in_buff, f);
}
- return -1;
+ return ret;
}
static void enable_audio(struct goldfish_audio_state *s, int enable)
diff --git a/hw/goldfish_device.c b/hw/goldfish_device.c
index e98161a..e3dbfcb 100644
--- a/hw/goldfish_device.c
+++ b/hw/goldfish_device.c
@@ -12,6 +12,7 @@
#include "qemu_file.h"
#include "arm_pic.h"
#include "goldfish_device.h"
+#include "android/utils/debug.h"
#ifdef TARGET_I386
#include "kvm.h"
#endif
@@ -59,6 +60,18 @@ int goldfish_add_device_no_io(struct goldfish_device *dev)
if(dev->irq == 0 && dev->irq_count > 0) {
dev->irq = goldfish_free_irq;
goldfish_free_irq += dev->irq_count;
+#ifdef TARGET_I386
+ /* Make sure that we pass by the reserved IRQs. */
+ while (goldfish_free_irq == GFD_KBD_IRQ ||
+ goldfish_free_irq == GFD_MOUSE_IRQ ||
+ goldfish_free_irq == GFD_ERR_IRQ) {
+ goldfish_free_irq++;
+ }
+#endif
+ if (goldfish_free_irq >= GFD_MAX_IRQ) {
+ derror("Goldfish device has exceeded available IRQ number.");
+ exit(1);
+ }
}
//printf("goldfish_add_device: %s, base %x %x, irq %d %d\n",
// dev->name, dev->base, dev->size, dev->irq, dev->irq_count);
diff --git a/hw/goldfish_device.h b/hw/goldfish_device.h
index 19f4b32..a9e3f83 100644
--- a/hw/goldfish_device.h
+++ b/hw/goldfish_device.h
@@ -55,4 +55,18 @@ void trace_dev_init();
void events_dev_init(uint32_t base, qemu_irq irq);
void nand_dev_init(uint32_t base);
+#ifdef TARGET_I386
+/* Maximum IRQ number available for a device on x86. */
+#define GFD_MAX_IRQ 16
+/* IRQ reserved for keyboard. */
+#define GFD_KBD_IRQ 1
+/* IRQ reserved for mouse. */
+#define GFD_MOUSE_IRQ 12
+/* IRQ reserved for error (raising an exception in TB code). */
+#define GFD_ERR_IRQ 13
+#else
+/* Maximum IRQ number available for a device on ARM. */
+#define GFD_MAX_IRQ 32
#endif
+
+#endif /* GOLDFISH_DEVICE_H */
diff --git a/hw/goldfish_interrupt.c b/hw/goldfish_interrupt.c
index c620664..f4c5a89 100644
--- a/hw/goldfish_interrupt.c
+++ b/hw/goldfish_interrupt.c
@@ -166,7 +166,7 @@ qemu_irq* goldfish_interrupt_init(uint32_t base, qemu_irq parent_irq, qemu_irq
qemu_irq* qi;
s = qemu_mallocz(sizeof(*s));
- qi = qemu_allocate_irqs(goldfish_int_set_irq, s, 32);
+ qi = qemu_allocate_irqs(goldfish_int_set_irq, s, GFD_MAX_IRQ);
s->dev.name = "goldfish_interrupt_controller";
s->dev.id = -1;
s->dev.base = base;
diff --git a/hw/goldfish_nand.c b/hw/goldfish_nand.c
index eb0c355..31e814b 100644
--- a/hw/goldfish_nand.c
+++ b/hw/goldfish_nand.c
@@ -130,6 +130,8 @@ typedef struct {
uint32_t addr_high;
uint32_t transfer_size;
uint32_t data;
+ uint32_t batch_addr_low;
+ uint32_t batch_addr_high;
uint32_t result;
} nand_dev_controller_state;
@@ -457,6 +459,18 @@ uint32_t nand_dev_do_cmd(nand_dev_controller_state *s, uint32_t cmd)
uint64_t addr;
nand_dev *dev;
+ if (cmd == NAND_CMD_WRITE_BATCH || cmd == NAND_CMD_READ_BATCH ||
+ cmd == NAND_CMD_ERASE_BATCH) {
+ struct batch_data bd;
+ uint64_t bd_addr = ((uint64_t)s->batch_addr_high << 32) | s->batch_addr_low;
+
+ cpu_physical_memory_read(bd_addr, (void*)&bd, sizeof(struct batch_data));
+ s->dev = bd.dev;
+ s->addr_low = bd.addr_low;
+ s->addr_high = bd.addr_high;
+ s->transfer_size = bd.transfer_size;
+ s->data = bd.data;
+ }
addr = s->addr_low | ((uint64_t)s->addr_high << 32);
size = s->transfer_size;
if(s->dev >= nand_dev_count)
@@ -473,6 +487,7 @@ uint32_t nand_dev_do_cmd(nand_dev_controller_state *s, uint32_t cmd)
#endif
cpu_memory_rw_debug(cpu_single_env, s->data, (uint8_t*)dev->devname, size, 1);
return size;
+ case NAND_CMD_READ_BATCH:
case NAND_CMD_READ:
if(addr >= dev->max_size)
return 0;
@@ -486,6 +501,7 @@ uint32_t nand_dev_do_cmd(nand_dev_controller_state *s, uint32_t cmd)
#endif
cpu_memory_rw_debug(cpu_single_env,s->data, &dev->data[addr], size, 1);
return size;
+ case NAND_CMD_WRITE_BATCH:
case NAND_CMD_WRITE:
if(dev->flags & NAND_DEV_FLAG_READ_ONLY)
return 0;
@@ -501,6 +517,7 @@ uint32_t nand_dev_do_cmd(nand_dev_controller_state *s, uint32_t cmd)
#endif
cpu_memory_rw_debug(cpu_single_env,s->data, &dev->data[addr], size, 0);
return size;
+ case NAND_CMD_ERASE_BATCH:
case NAND_CMD_ERASE:
if(dev->flags & NAND_DEV_FLAG_READ_ONLY)
return 0;
@@ -542,6 +559,12 @@ static void nand_dev_write(void *opaque, target_phys_addr_t offset, uint32_t val
case NAND_ADDR_LOW:
s->addr_low = value;
break;
+ case NAND_BATCH_ADDR_LOW:
+ s->batch_addr_low = value;
+ break;
+ case NAND_BATCH_ADDR_HIGH:
+ s->batch_addr_high = value;
+ break;
case NAND_TRANSFER_SIZE:
s->transfer_size = value;
break;
@@ -550,6 +573,13 @@ static void nand_dev_write(void *opaque, target_phys_addr_t offset, uint32_t val
break;
case NAND_COMMAND:
s->result = nand_dev_do_cmd(s, value);
+ if (value == NAND_CMD_WRITE_BATCH || value == NAND_CMD_READ_BATCH ||
+ value == NAND_CMD_ERASE_BATCH) {
+ struct batch_data bd;
+ uint64_t bd_addr = ((uint64_t)s->batch_addr_high << 32) | s->batch_addr_low;
+ bd.result = s->result;
+ cpu_physical_memory_write(bd_addr, (void*)&bd, sizeof(struct batch_data));
+ }
break;
default:
cpu_abort(cpu_single_env, "nand_dev_write: Bad offset %x\n", offset);
@@ -765,7 +795,18 @@ void nand_add_dev(const char *arg)
}
if(rwfilename) {
- rwfd = open(rwfilename, O_BINARY | (read_only ? O_RDONLY : O_RDWR));
+ if (initfilename) {
+ /* Overwrite with content of the 'initfilename'. */
+ if (read_only) {
+ /* Cannot be readonly when initializing the device from another file. */
+ XLOG("incompatible read only option is requested while initializing %.*s from %s\n",
+ devname_len, devname, initfilename);
+ exit(1);
+ }
+ rwfd = open(rwfilename, O_BINARY | O_TRUNC | O_RDWR);
+ } else {
+ rwfd = open(rwfilename, O_BINARY | (read_only ? O_RDONLY : O_RDWR));
+ }
if(rwfd < 0) {
XLOG("could not open file %s, %s\n", rwfilename, strerror(errno));
exit(1);
@@ -810,6 +851,9 @@ void nand_add_dev(const char *arg)
if(dev->data == NULL)
goto out_of_memory;
dev->flags = read_only ? NAND_DEV_FLAG_READ_ONLY : 0;
+#ifdef TARGET_I386
+ dev->flags |= NAND_DEV_FLAG_BATCH_CAP;
+#endif
if (initfd >= 0) {
do {
diff --git a/hw/goldfish_nand_reg.h b/hw/goldfish_nand_reg.h
index ea91461..34d7c44 100644
--- a/hw/goldfish_nand_reg.h
+++ b/hw/goldfish_nand_reg.h
@@ -18,11 +18,24 @@ enum nand_cmd {
NAND_CMD_WRITE,
NAND_CMD_ERASE,
NAND_CMD_BLOCK_BAD_GET, // NAND_RESULT is 1 if block is bad, 0 if it is not
- NAND_CMD_BLOCK_BAD_SET
+ NAND_CMD_BLOCK_BAD_SET,
+ NAND_CMD_READ_BATCH, // BATCH OP extensions.
+ NAND_CMD_WRITE_BATCH,
+ NAND_CMD_ERASE_BATCH
+};
+
+struct batch_data{
+ uint32_t dev;
+ uint32_t addr_low;
+ uint32_t addr_high;
+ uint32_t transfer_size;
+ uint32_t data;
+ uint32_t result;
};
enum nand_dev_flags {
- NAND_DEV_FLAG_READ_ONLY = 0x00000001
+ NAND_DEV_FLAG_READ_ONLY = 0x00000001,
+ NAND_DEV_FLAG_BATCH_CAP = 0x00000002
};
#define NAND_VERSION_CURRENT (1)
@@ -49,6 +62,8 @@ enum nand_reg {
NAND_TRANSFER_SIZE = 0x04c,
NAND_ADDR_LOW = 0x050,
NAND_ADDR_HIGH = 0x054,
+ NAND_BATCH_ADDR_LOW = 0x058,
+ NAND_BATCH_ADDR_HIGH= 0x05c,
};
#endif
diff --git a/hw/goldfish_pipe.c b/hw/goldfish_pipe.c
index fd31a2b..b3c6975 100644
--- a/hw/goldfish_pipe.c
+++ b/hw/goldfish_pipe.c
@@ -14,8 +14,11 @@
#include "hw/goldfish_pipe.h"
#include "hw/goldfish_device.h"
#include "qemu-timer.h"
+#ifdef CONFIG_KVM
+#include "kvm.h"
+#endif
-#define DEBUG 1
+#define DEBUG 0
/* Set to 1 to debug i/o register reads/writes */
#define DEBUG_REGS 0
@@ -323,7 +326,7 @@ pipeConnector_recvBuffers( void* opaque, GoldfishPipeBuffer* buffers, int numBuf
static unsigned
pipeConnector_poll( void* opaque )
{
- return PIPE_WAKE_WRITE;
+ return PIPE_POLL_OUT;
}
static void
@@ -406,7 +409,7 @@ zeroPipe_recvBuffers( void* opaque, GoldfishPipeBuffer* buffers, int numBuffers
static unsigned
zeroPipe_poll( void* opaque )
{
- return PIPE_WAKE_READ | PIPE_WAKE_WRITE;
+ return PIPE_POLL_IN | PIPE_POLL_OUT;
}
static void
@@ -597,10 +600,10 @@ pingPongPipe_poll( void* opaque )
unsigned ret = 0;
if (pipe->count < pipe->size)
- ret |= PIPE_WAKE_WRITE;
+ ret |= PIPE_POLL_OUT;
if (pipe->count > 0)
- ret |= PIPE_WAKE_READ;
+ ret |= PIPE_POLL_IN;
return ret;
}
@@ -768,10 +771,10 @@ throttlePipe_poll( void* opaque )
unsigned ret = pingPongPipe_poll(&pipe->pingpong);
if (pipe->sendExpiration > 0)
- ret &= ~PIPE_WAKE_WRITE;
+ ret &= ~PIPE_POLL_OUT;
if (pipe->recvExpiration > 0)
- ret &= ~PIPE_WAKE_READ;
+ ret &= ~PIPE_POLL_IN;
return ret;
}
@@ -875,7 +878,13 @@ pipeDevice_doCommand( PipeDevice* dev, uint32_t command )
GoldfishPipeBuffer buffer;
uint32_t address = dev->address;
uint32_t page = address & TARGET_PAGE_MASK;
- target_phys_addr_t phys = cpu_get_phys_page_debug(env, page);
+ target_phys_addr_t phys;
+#ifdef CONFIG_KVM
+ if(kvm_enabled()) {
+ cpu_synchronize_state(env, 0);
+ }
+#endif
+ phys = cpu_get_phys_page_debug(env, page);
buffer.data = qemu_get_ram_ptr(phys) + (address - page);
buffer.size = dev->size;
dev->status = pipe->funcs->recvBuffers(pipe->opaque, &buffer, 1);
@@ -889,7 +898,13 @@ pipeDevice_doCommand( PipeDevice* dev, uint32_t command )
GoldfishPipeBuffer buffer;
uint32_t address = dev->address;
uint32_t page = address & TARGET_PAGE_MASK;
- target_phys_addr_t phys = cpu_get_phys_page_debug(env, page);
+ target_phys_addr_t phys;
+#ifdef CONFIG_KVM
+ if(kvm_enabled()) {
+ cpu_synchronize_state(env, 0);
+ }
+#endif
+ phys = cpu_get_phys_page_debug(env, page);
buffer.data = qemu_get_ram_ptr(phys) + (address - page);
buffer.size = dev->size;
dev->status = pipe->funcs->sendBuffers(pipe->opaque, &buffer, 1);
diff --git a/hw/goldfish_pipe.h b/hw/goldfish_pipe.h
index be5c449..8074619 100644
--- a/hw/goldfish_pipe.h
+++ b/hw/goldfish_pipe.h
@@ -85,7 +85,7 @@ typedef struct {
int (*recvBuffers)( void* pipe, GoldfishPipeBuffer* buffers, int numBuffers );
/* Called when guest wants to poll the read/write status for the pipe.
- * Should return a combination of PIPE_WAKE_XXX flags.
+ * Should return a combination of PIPE_POLL_XXX flags.
*/
unsigned (*poll)( void* pipe );
diff --git a/hw/i8259.c b/hw/i8259.c
index 091ba7a..0049e73 100644
--- a/hw/i8259.c
+++ b/hw/i8259.c
@@ -25,6 +25,7 @@
#include "pc.h"
#include "isa.h"
#include "monitor.h"
+#include "goldfish_device.h"
/* debug PIC */
//#define DEBUG_PIC
@@ -559,7 +560,7 @@ qemu_irq *i8259_init(qemu_irq parent_irq)
s->pics[0].pics_state = s;
s->pics[1].pics_state = s;
isa_pic = s;
- return qemu_allocate_irqs(i8259_set_irq, s, 16);
+ return qemu_allocate_irqs(i8259_set_irq, s, GFD_MAX_IRQ);
}
void pic_set_alt_irq_func(PicState2 *s, SetIRQFunc *alt_irq_func,
diff --git a/hw/pc.c b/hw/pc.c
index 0114ff5..f44c44e 100644
--- a/hw/pc.c
+++ b/hw/pc.c
@@ -41,6 +41,7 @@
#include "console.h"
#include "goldfish_device.h"
+#include "goldfish_pipe.h"
char* audio_input_source = NULL;
/* output Bochs bios info messages */
@@ -1032,7 +1033,7 @@ static void pc_init1(ram_addr_t ram_size,
cpu_irq = qemu_allocate_irqs(pic_irq_request, NULL, 1);
i8259 = i8259_init(cpu_irq[0]);
- ferr_irq = i8259[13];
+ ferr_irq = i8259[GFD_ERR_IRQ];
#define IRQ_PDEV_BUS 4
goldfish_device_init(i8259, 0xff010000, 0x7f0000, 5, 5);
@@ -1046,6 +1047,7 @@ static void pc_init1(ram_addr_t ram_size,
#ifdef CONFIG_NAND
goldfish_add_device_no_io(&nand_device);
nand_dev_init(nand_device.base);
+ pipe_dev_init();
#endif
{
@@ -1175,7 +1177,7 @@ static void pc_init1(ram_addr_t ram_size,
}
#endif
- i8042_init(i8259[1], i8259[12], 0x60);
+ i8042_init(i8259[GFD_KBD_IRQ], i8259[GFD_MOUSE_IRQ], 0x60);
DMA_init(0);
goldfish_fb_init(0);
diff --git a/proxy/proxy_http_rewriter.c b/proxy/proxy_http_rewriter.c
index af3f5e7..859e560 100644
--- a/proxy/proxy_http_rewriter.c
+++ b/proxy/proxy_http_rewriter.c
@@ -337,6 +337,13 @@ static const char* const body_mode_str[BODY_MODE_MAX] = {
"NONE", "KNOWN_LENGTH", "UNTIL_CLOSE", "CHUNKED"
};
+enum {
+ CHUNK_HEADER, // Waiting for a chunk header + CR LF
+ CHUNK_DATA, // Waiting for chunk data
+ CHUNK_DATA_END, // Waiting for the CR LF after the chunk data
+ CHUNK_TRAILER // Waiting for the chunk trailer + CR LF
+};
+
typedef struct {
ProxyConnection root[1];
int slirp_fd;
@@ -348,6 +355,7 @@ typedef struct {
int64_t body_sent;
int64_t chunk_length;
int64_t chunk_total;
+ int chunk_state;
char body_has_data;
char body_is_full;
char body_is_closed;
@@ -660,6 +668,7 @@ rewrite_connection_get_body_length( RewriteConnection* conn,
conn->parse_chunk_trailer = 0;
conn->chunk_length = -1;
conn->chunk_total = 0;
+ conn->chunk_state = CHUNK_HEADER;
}
}
if (conn->body_mode == BODY_NONE) {
@@ -725,9 +734,32 @@ rewrite_connection_read_body( RewriteConnection* conn, int fd )
break;
case BODY_CHUNKED:
- if (conn->chunk_length < 0) {
- /* chunk_length < 0 means we need to read a chunk header */
- /* ensure that 'str' is flushed before doing this */
+ if (conn->chunk_state == CHUNK_DATA_END) {
+ /* We're waiting for the CR LF after the chunk data */
+ ret = proxy_connection_receive_line(root, fd);
+ if (ret != DATA_COMPLETED)
+ return ret;
+
+ if (str->s[0] != 0) { /* this should be an empty line */
+ PROXY_LOG("%s: invalid chunk data end: '%s'",
+ root->name, str->s);
+ return DATA_ERROR;
+ }
+ /* proxy_connection_receive_line() did remove the
+ * trailing \r\n, but we must preserve it when we
+                 * send the end of the chunk data to the proxy.
+ */
+ stralloc_add_str(root->str, "\r\n");
+ conn->chunk_state = CHUNK_HEADER;
+ /* fall-through */
+ }
+
+ if (conn->chunk_state == CHUNK_HEADER) {
+ char* line;
+ char* end;
+ long long length;
+ /* Ensure that the previous chunk was flushed before
+ * accepting a new header */
if (!conn->parse_chunk_header) {
if (conn->body_has_data)
return DATA_NEED_MORE;
@@ -735,40 +767,40 @@ rewrite_connection_read_body( RewriteConnection* conn, int fd )
conn->parse_chunk_header = 1;
}
ret = proxy_connection_receive_line(root, fd);
- if (ret == DATA_COMPLETED) {
- char* line = str->s;
- char* end;
- long long length;
-
- length = strtoll(line, &end, 16);
- if (line[0] == ' ' || (end[0] != '\0' && end[0] != ';')) {
- PROXY_LOG("%s: invalid chunk header: %s",
- root->name, line);
- return DATA_ERROR;
- }
- if (length < 0) {
- PROXY_LOG("%s: invalid chunk length %lld",
- root->name, length);
- return DATA_ERROR;
- }
- /* proxy_connection_receive_line() did remove the
- * trailing \r\n, but we must preserve it when we
- * send the chunk size to the proxy.
- */
- stralloc_add_str(root->str, "\r\n");
-
- conn->chunk_length = length;
- conn->chunk_total = 0;
- if (length == 0) {
- /* the last chunk, no we need to add the trailer */
- conn->parse_chunk_trailer = 0;
- }
- conn->parse_chunk_header = 0;
+ if (ret != DATA_COMPLETED) {
+ return ret;
+ }
+ conn->parse_chunk_header = 0;
+
+ line = str->s;
+ length = strtoll(line, &end, 16);
+ if (line[0] == ' ' || (end[0] != '\0' && end[0] != ';')) {
+ PROXY_LOG("%s: invalid chunk header: %s",
+ root->name, line);
+ return DATA_ERROR;
+ }
+ if (length < 0) {
+ PROXY_LOG("%s: invalid chunk length %lld",
+ root->name, length);
+ return DATA_ERROR;
+ }
+ /* proxy_connection_receive_line() did remove the
+ * trailing \r\n, but we must preserve it when we
+ * send the chunk size to the proxy.
+ */
+ stralloc_add_str(root->str, "\r\n");
+
+ conn->chunk_length = length;
+ conn->chunk_total = 0;
+ conn->chunk_state = CHUNK_DATA;
+ if (length == 0) {
+                /* the last chunk, now we need to add the trailer */
+ conn->chunk_state = CHUNK_TRAILER;
+ conn->parse_chunk_trailer = 0;
}
}
- if (conn->chunk_length == 0) {
- /* chunk_length == 0 means we're reading the chunk trailer */
+ if (conn->chunk_state == CHUNK_TRAILER) {
/* ensure that 'str' is flushed before reading the trailer */
if (!conn->parse_chunk_trailer) {
if (conn->body_has_data)
@@ -833,10 +865,11 @@ rewrite_connection_read_body( RewriteConnection* conn, int fd )
if (conn->chunk_length == 0) {
D("%s: chunk completed (%lld bytes)",
- root->name, conn->chunk_length);
+ root->name, conn->chunk_total);
conn->body_total += conn->chunk_total;
conn->chunk_total = 0;
conn->chunk_length = -1;
+ conn->chunk_state = CHUNK_DATA;
}
break;
diff --git a/qemu-options.hx b/qemu-options.hx
index 76432a5..880025e 100644
--- a/qemu-options.hx
+++ b/qemu-options.hx
@@ -1787,4 +1787,7 @@ DEF("audio-test-out", 0, QEMU_OPTION_audio_test_out, \
DEF("snapshot-no-time-update", 0, QEMU_OPTION_snapshot_no_time_update, \
"-snapshot-no-time-update Disable time update when restoring snapshots\n")
+DEF("list-webcam", 0, QEMU_OPTION_list_webcam, \
+ "-list-webcam List web cameras available for emulation\n")
+
#endif /* ANDROID */
diff --git a/savevm.c b/savevm.c
index 5da7a8c..c08e8fa 100644
--- a/savevm.c
+++ b/savevm.c
@@ -1121,7 +1121,11 @@ int qemu_loadvm_state(QEMUFile *f)
le->next = first_le;
first_le = le;
- le->se->load_state(f, le->se->opaque, le->version_id);
+ if (le->se->load_state(f, le->se->opaque, le->version_id)) {
+ fprintf(stderr, "savevm: unable to load section %s\n", idstr);
+ ret = -EINVAL;
+ goto out;
+ }
break;
case QEMU_VM_SECTION_PART:
case QEMU_VM_SECTION_END:
diff --git a/slirp-android/tcp_subr.c b/slirp-android/tcp_subr.c
index cfcc3cb..a679013 100644
--- a/slirp-android/tcp_subr.c
+++ b/slirp-android/tcp_subr.c
@@ -413,6 +413,7 @@ int is_qemu_special_address(unsigned long dst_addr, unsigned long *redir_addr)
int tcp_fconnect(struct socket *so)
{
int ret=0;
+ int try_proxy = 0;
SockAddress sockaddr;
unsigned long sock_ip;
int sock_port;
@@ -518,13 +519,7 @@ int tcp_fconnect(struct socket *so)
/* A normal connection - keep the original destination addr/port */
else {
- if (!proxy_manager_add(&sockaddr, SOCKET_STREAM,
- (ProxyEventFunc) tcp_proxy_event, so)) {
- soisfconnecting(so);
- so->s = -1;
- so->so_state |= SS_PROXIFIED;
- return 0;
- }
+ try_proxy = 1;
sock_ip = so->so_faddr_ip; /* original dst addr */
sock_port= so->so_faddr_port; /* original dst port */
@@ -536,6 +531,16 @@ int tcp_fconnect(struct socket *so)
sock_address_init_inet( &sockaddr, sock_ip, sock_port );
+ if (try_proxy) {
+ if (!proxy_manager_add(&sockaddr, SOCKET_STREAM,
+ (ProxyEventFunc) tcp_proxy_event, so)) {
+ soisfconnecting(so);
+ so->s = -1;
+ so->so_state |= SS_PROXIFIED;
+ return 0;
+ }
+ }
+
/* We don't care what port we get */
socket_connect(s, &sockaddr);
diff --git a/target-i386/helper.c b/target-i386/helper.c
index 550b348..6d0f18c 100644
--- a/target-i386/helper.c
+++ b/target-i386/helper.c
@@ -1590,8 +1590,15 @@ void cpu_x86_cpuid(CPUX86State *env, uint32_t index, uint32_t count,
break;
case 1:
*eax = env->cpuid_version;
+ if (kvm_enabled() && !env->cpuid_vendor_override) {
+ /* take only subset of ext features which processor can handle */
+ uint32_t unused;
+ host_cpuid(1, 0, NULL, &unused, ecx, &unused);
+ } else {
+ *ecx = UINT32_MAX;
+ }
*ebx = (env->cpuid_apic_id << 24) | 8 << 8; /* CLFLUSH size in quad words, Linux wants it. */
- *ecx = env->cpuid_ext_features;
+ *ecx &= env->cpuid_ext_features;
*edx = env->cpuid_features;
/* "Hypervisor present" bit required for Microsoft SVVP */
diff --git a/vl-android.c b/vl-android.c
index 32b5eac..322ddca 100644
--- a/vl-android.c
+++ b/vl-android.c
@@ -54,6 +54,7 @@
#include "android/hw-kmsg.h"
#include "android/hw-pipe-net.h"
#include "android/hw-qemud.h"
+#include "android/camera/camera-service.h"
#include "android/charmap.h"
#include "android/globals.h"
#include "android/utils/bufprint.h"
@@ -65,6 +66,7 @@
#include "android/display-core.h"
#include "android/utils/timezone.h"
#include "android/snapshot.h"
+#include "android/opengles.h"
#include "targphys.h"
#include "tcpdump.h"
@@ -3465,6 +3467,11 @@ int main(int argc, char **argv, char **envp)
case QEMU_OPTION_snapshot_no_time_update:
android_snapshot_update_time = 0;
break;
+
+ case QEMU_OPTION_list_webcam:
+ android_list_web_cameras();
+ exit(0);
+
default:
os_parse_cmd_args(popt->index, optarg);
}
@@ -3682,6 +3689,9 @@ int main(int argc, char **argv, char **envp)
hwLcd_setBootProperty(density);
}
+ /* Initialize presence of hardware nav button */
+ boot_property_add("qemu.hw.mainkeys", android_hw->hw_mainKeys ? "1" : "0");
+
/* Initialize TCP dump */
if (android_op_tcpdump) {
if (qemu_tcpdump_start(android_op_tcpdump) < 0) {
@@ -3731,6 +3741,16 @@ int main(int argc, char **argv, char **envp)
/* Initialize OpenGLES emulation */
//android_hw_opengles_init();
+ /* Initialize fake camera */
+ if (android_hw->hw_fakeCamera) {
+ boot_property_add("qemu.sf.fake_camera", android_hw->hw_fakeCamera);
+ } else {
+ boot_property_add("qemu.sf.fake_camera", "back");
+ }
+
+ /* Initialize camera emulation. */
+ android_camera_service_init();
+
if (android_op_cpu_delay) {
char* end;
long delay = strtol(android_op_cpu_delay, &end, 0);
@@ -3813,6 +3833,29 @@ int main(int argc, char **argv, char **envp)
nand_add_dev(tmp);
}
+ /* qemu.gles will be read by the OpenGLES emulation libraries.
+ * If set to 0, the software GLES renderer will be used as a fallback.
+ * If the parameter is undefined, this means the system image runs
+ * inside an emulator that doesn't support GPU emulation at all.
+ */
+ {
+ int gles_emul = 0;
+
+ if (android_hw->hw_gpu_enabled) {
+ if (android_initOpenglesEmulation() == 0) {
+ gles_emul = 1;
+ android_startOpenglesRenderer(android_hw->hw_lcd_width, android_hw->hw_lcd_height);
+ } else {
+ dwarning("Could not initialize OpenglES emulation, using software renderer.");
+ }
+ }
+ if (gles_emul) {
+ stralloc_add_str(kernel_params, " qemu.gles=1");
+ } else {
+ stralloc_add_str(kernel_params, " qemu.gles=0");
+ }
+ }
+
/* We always force qemu=1 when running inside QEMU */
stralloc_add_str(kernel_params, " qemu=1");
@@ -4129,7 +4172,7 @@ int main(int argc, char **argv, char **envp)
android_hw->hw_cpu_arch);
exit(1);
}
-#elif defined(TARGET_X86)
+#elif defined(TARGET_I386)
if (strcmp(android_hw->hw_cpu_arch,"x86") != 0) {
fprintf(stderr, "-- Invalid CPU architecture: %s, expected 'x86'\n",
android_hw->hw_cpu_arch);