diff options
Diffstat (limited to 'core/jni')
-rw-r--r-- | core/jni/android_hardware_Camera.cpp       |  2 |
-rw-r--r-- | core/jni/android_media_AudioRecord.cpp     | 19 |
-rw-r--r-- | core/jni/android_media_AudioTrack.cpp      | 43 |
-rw-r--r-- | core/jni/android_view_GLRenderer.cpp       | 11 |
-rw-r--r-- | core/jni/android_view_ThreadedRenderer.cpp |  8 |
5 files changed, 53 insertions, 30 deletions
diff --git a/core/jni/android_hardware_Camera.cpp b/core/jni/android_hardware_Camera.cpp index 58b61ba..307293f 100644 --- a/core/jni/android_hardware_Camera.cpp +++ b/core/jni/android_hardware_Camera.cpp @@ -230,7 +230,7 @@ void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int ssize_t offset; size_t size; sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size); - ALOGV("copyAndPost: off=%ld, size=%d", offset, size); + ALOGV("copyAndPost: off=%zd, size=%zu", offset, size); uint8_t *heapBase = (uint8_t*)heap->base(); if (heapBase != NULL) { diff --git a/core/jni/android_media_AudioRecord.cpp b/core/jni/android_media_AudioRecord.cpp index 0132b5f..d8faaf3 100644 --- a/core/jni/android_media_AudioRecord.cpp +++ b/core/jni/android_media_AudioRecord.cpp @@ -166,7 +166,7 @@ android_media_AudioRecord_setup(JNIEnv *env, jobject thiz, jobject weak_this, ALOGE("Error creating AudioRecord: channel mask %#x is not valid.", channelMask); return (jint) AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK; } - uint32_t nbChannels = popcount(channelMask); + uint32_t channelCount = popcount(channelMask); // compare the format against the Java constants audio_format_t format = audioFormatToNative(audioFormat); @@ -181,7 +181,7 @@ android_media_AudioRecord_setup(JNIEnv *env, jobject thiz, jobject weak_this, ALOGE("Error creating AudioRecord: frameCount is 0."); return (jint) AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT; } - size_t frameSize = nbChannels * bytesPerSample; + size_t frameSize = channelCount * bytesPerSample; size_t frameCount = buffSizeInBytes / frameSize; if ((uint32_t(source) >= AUDIO_SOURCE_CNT) && (uint32_t(source) != AUDIO_SOURCE_HOTWORD)) { @@ -220,7 +220,7 @@ android_media_AudioRecord_setup(JNIEnv *env, jobject thiz, jobject weak_this, lpCallbackData->audioRecord_ref = env->NewGlobalRef(weak_this); lpCallbackData->busy = false; - lpRecorder->set((audio_source_t) source, + const status_t status = lpRecorder->set((audio_source_t) source, 
sampleRateInHertz, format, // word length, PCM channelMask, @@ -231,7 +231,6 @@ android_media_AudioRecord_setup(JNIEnv *env, jobject thiz, jobject weak_this, true, // threadCanCallJava sessionId); - const status_t status = lpRecorder->initCheck(); if (status != NO_ERROR) { ALOGE("Error creating AudioRecord instance: initialization check failed with status %d.", status); @@ -312,7 +311,7 @@ static void android_media_AudioRecord_release(JNIEnv *env, jobject thiz) { if (lpRecorder == NULL) { return; } - ALOGV("About to delete lpRecorder: %" PRIxPTR "\n", lpRecorder.get()); + ALOGV("About to delete lpRecorder: %p", lpRecorder.get()); lpRecorder->stop(); audiorecord_callback_cookie *lpCookie = (audiorecord_callback_cookie *)env->GetLongField( @@ -325,7 +324,7 @@ static void android_media_AudioRecord_release(JNIEnv *env, jobject thiz) { // delete the callback information if (lpCookie) { Mutex::Autolock l(sLock); - ALOGV("deleting lpCookie: %" PRIxPTR "\n", lpCookie); + ALOGV("deleting lpCookie: %p", lpCookie); while (lpCookie->busy) { if (lpCookie->cond.waitRelative(sLock, milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) != @@ -501,17 +500,17 @@ static jint android_media_AudioRecord_get_pos_update_period(JNIEnv *env, jobjec // returns 0 if the parameter combination is not supported. // return -1 if there was an error querying the buffer size. 
static jint android_media_AudioRecord_get_min_buff_size(JNIEnv *env, jobject thiz, - jint sampleRateInHertz, jint nbChannels, jint audioFormat) { + jint sampleRateInHertz, jint channelCount, jint audioFormat) { ALOGV(">> android_media_AudioRecord_get_min_buff_size(%d, %d, %d)", - sampleRateInHertz, nbChannels, audioFormat); + sampleRateInHertz, channelCount, audioFormat); size_t frameCount = 0; audio_format_t format = audioFormatToNative(audioFormat); status_t result = AudioRecord::getMinFrameCount(&frameCount, sampleRateInHertz, format, - audio_channel_in_mask_from_count(nbChannels)); + audio_channel_in_mask_from_count(channelCount)); if (result == BAD_VALUE) { return 0; @@ -519,7 +518,7 @@ static jint android_media_AudioRecord_get_min_buff_size(JNIEnv *env, jobject th if (result != NO_ERROR) { return -1; } - return frameCount * nbChannels * audio_bytes_per_sample(format); + return frameCount * channelCount * audio_bytes_per_sample(format); } diff --git a/core/jni/android_media_AudioTrack.cpp b/core/jni/android_media_AudioTrack.cpp index 3a5b566..79a46fa 100644 --- a/core/jni/android_media_AudioTrack.cpp +++ b/core/jni/android_media_AudioTrack.cpp @@ -203,25 +203,30 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, uint32_t afSampleRate; size_t afFrameCount; - if (AudioSystem::getOutputFrameCount(&afFrameCount, (audio_stream_type_t) streamType) != NO_ERROR) { - ALOGE("Error creating AudioTrack: Could not get AudioSystem frame count."); + status_t status = AudioSystem::getOutputFrameCount(&afFrameCount, + (audio_stream_type_t) streamType); + if (status != NO_ERROR) { + ALOGE("Error %d creating AudioTrack: Could not get AudioSystem frame count " + "for stream type %d.", status, streamType); return (jint) AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM; } - if (AudioSystem::getOutputSamplingRate(&afSampleRate, (audio_stream_type_t) streamType) != NO_ERROR) { - ALOGE("Error creating AudioTrack: Could not get AudioSystem sampling rate."); + status = 
AudioSystem::getOutputSamplingRate(&afSampleRate, (audio_stream_type_t) streamType); + if (status != NO_ERROR) { + ALOGE("Error %d creating AudioTrack: Could not get AudioSystem sampling rate " + "for stream type %d.", status, streamType); return (jint) AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM; } // Java channel masks don't map directly to the native definition, but it's a simple shift // to skip the two deprecated channel configurations "default" and "mono". - uint32_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2; + audio_channel_mask_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2; if (!audio_is_output_channel(nativeChannelMask)) { ALOGE("Error creating AudioTrack: invalid channel mask %#x.", javaChannelMask); return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK; } - int nbChannels = popcount(nativeChannelMask); + uint32_t channelCount = popcount(nativeChannelMask); // check the stream type audio_stream_type_t atStreamType; @@ -237,7 +242,7 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, atStreamType = (audio_stream_type_t) streamType; break; default: - ALOGE("Error creating AudioTrack: unknown stream type."); + ALOGE("Error creating AudioTrack: unknown stream type %d.", streamType); return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE; } @@ -245,8 +250,7 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, // This function was called from Java, so we compare the format against the Java constants audio_format_t format = audioFormatToNative(audioFormat); if (format == AUDIO_FORMAT_INVALID) { - - ALOGE("Error creating AudioTrack: unsupported audio format."); + ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat); return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT; } @@ -265,7 +269,7 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, // compute the frame count const size_t bytesPerSample = audio_bytes_per_sample(format); - size_t frameCount 
= buffSizeInBytes / (nbChannels * bytesPerSample); + size_t frameCount = buffSizeInBytes / (channelCount * bytesPerSample); jclass clazz = env->GetObjectClass(thiz); if (clazz == NULL) { @@ -303,7 +307,7 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, switch (memoryMode) { case MODE_STREAM: - lpTrack->set( + status = lpTrack->set( atStreamType,// stream type sampleRateInHertz, format,// word length, PCM @@ -325,7 +329,7 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, goto native_init_failure; } - lpTrack->set( + status = lpTrack->set( atStreamType,// stream type sampleRateInHertz, format,// word length, PCM @@ -344,8 +348,8 @@ android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, goto native_init_failure; } - if (lpTrack->initCheck() != NO_ERROR) { - ALOGE("Error initializing AudioTrack"); + if (status != NO_ERROR) { + ALOGE("Error %d initializing AudioTrack", status); goto native_init_failure; } @@ -853,9 +857,10 @@ static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobjec break; } - if (AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType) != NO_ERROR) { - ALOGE("AudioSystem::getOutputSamplingRate() for stream type %d failed in AudioTrack JNI", - nativeStreamType); + status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType); + if (status != NO_ERROR) { + ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d " + "in AudioTrack JNI", status, nativeStreamType); return DEFAULT_OUTPUT_SAMPLE_RATE; } else { return afSamplingRate; @@ -867,7 +872,7 @@ static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobjec // returns the minimum required size for the successful creation of a streaming AudioTrack // returns -1 if there was an error querying the hardware. 
static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz, - jint sampleRateInHertz, jint nbChannels, jint audioFormat) { + jint sampleRateInHertz, jint channelCount, jint audioFormat) { size_t frameCount; const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT, @@ -879,7 +884,7 @@ static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thi } const audio_format_t format = audioFormatToNative(audioFormat); const size_t bytesPerSample = audio_bytes_per_sample(format); - return frameCount * nbChannels * bytesPerSample; + return frameCount * channelCount * bytesPerSample; } // ---------------------------------------------------------------------------- diff --git a/core/jni/android_view_GLRenderer.cpp b/core/jni/android_view_GLRenderer.cpp index e45c1b9..180c625 100644 --- a/core/jni/android_view_GLRenderer.cpp +++ b/core/jni/android_view_GLRenderer.cpp @@ -26,6 +26,8 @@ #include <utils/Timers.h> +#include <private/hwui/DrawGlInfo.h> + #include <Caches.h> #include <Extensions.h> #include <LayerRenderer.h> @@ -155,6 +157,14 @@ static void android_view_GLRenderer_updateRenderNodeProperties(JNIEnv* env, jobj renderNode->updateProperties(); } +static void android_view_GLRenderer_invokeFunctor(JNIEnv* env, jobject clazz, + jlong functorPtr, jboolean hasContext) { + using namespace android::uirenderer; + Functor* functor = reinterpret_cast<Functor*>(functorPtr); + DrawGlInfo::Mode mode = hasContext ? 
DrawGlInfo::kModeProcess : DrawGlInfo::kModeProcessNoContext; + (*functor)(mode, NULL); +} + #endif // USE_OPENGL_RENDERER // ---------------------------------------------------------------------------- @@ -187,6 +197,7 @@ static JNINativeMethod gMethods[] = { { "nDestroyLayer", "(J)V", (void*) android_view_GLRenderer_destroyLayer }, { "nSetDisplayListData", "(JJ)V", (void*) android_view_GLRenderer_setDisplayListData }, { "nUpdateRenderNodeProperties", "(J)V", (void*) android_view_GLRenderer_updateRenderNodeProperties }, + { "nInvokeFunctor", "(JZ)V", (void*) android_view_GLRenderer_invokeFunctor }, #endif { "setupShadersDiskCache", "(Ljava/lang/String;)V", diff --git a/core/jni/android_view_ThreadedRenderer.cpp b/core/jni/android_view_ThreadedRenderer.cpp index 28cee4b..98bec1b 100644 --- a/core/jni/android_view_ThreadedRenderer.cpp +++ b/core/jni/android_view_ThreadedRenderer.cpp @@ -139,6 +139,13 @@ static void android_view_ThreadedRenderer_detachFunctor(JNIEnv* env, jobject cla proxy->detachFunctor(functor); } +static void android_view_ThreadedRenderer_invokeFunctor(JNIEnv* env, jobject clazz, + jlong proxyPtr, jlong functorPtr, jboolean waitForCompletion) { + RenderProxy* proxy = reinterpret_cast<RenderProxy*>(proxyPtr); + Functor* functor = reinterpret_cast<Functor*>(functorPtr); + proxy->invokeFunctor(functor, waitForCompletion); +} + static void android_view_ThreadedRenderer_runWithGlContext(JNIEnv* env, jobject clazz, jlong proxyPtr, jobject jrunnable) { RenderProxy* proxy = reinterpret_cast<RenderProxy*>(proxyPtr); @@ -196,6 +203,7 @@ static JNINativeMethod gMethods[] = { { "nDestroyCanvas", "(J)V", (void*) android_view_ThreadedRenderer_destroyCanvas }, { "nAttachFunctor", "(JJ)V", (void*) android_view_ThreadedRenderer_attachFunctor }, { "nDetachFunctor", "(JJ)V", (void*) android_view_ThreadedRenderer_detachFunctor }, + { "nInvokeFunctor", "(JJZ)V", (void*) android_view_ThreadedRenderer_invokeFunctor }, { "nRunWithGlContext", "(JLjava/lang/Runnable;)V", 
(void*) android_view_ThreadedRenderer_runWithGlContext }, { "nCreateDisplayListLayer", "(JII)J", (void*) android_view_ThreadedRenderer_createDisplayListLayer }, { "nCreateTextureLayer", "(J)J", (void*) android_view_ThreadedRenderer_createTextureLayer },