/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0

#define LOG_TAG "AudioTrack-JNI"

#include <JNIHelp.h>
#include <JniConstants.h>
#include "core_jni_helpers.h"

#include "ScopedBytes.h"

#include <utils/Log.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <audio_utils/primitives.h>

#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>

#include "android_media_AudioFormat.h"
#include "android_media_AudioErrors.h"

// ----------------------------------------------------------------------------

using namespace android;

// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/AudioTrack";
static const char* const kAudioAttributesClassPathName = "android/media/AudioAttributes";

struct audio_track_fields_t {
    // these fields provide access from C++ to the...
    jmethodID postNativeEventInJava; //... event post callback method
    jfieldID  nativeTrackInJavaObj;  // stores in Java the native AudioTrack object
    jfieldID  jniData;      // stores in Java additional resources used by the native AudioTrack
    jfieldID  fieldStreamType; // ... mStreamType field in the AudioTrack Java object
};
struct audio_attributes_fields_t {
    jfieldID  fieldUsage;        // AudioAttributes.mUsage
    jfieldID  fieldContentType;  // AudioAttributes.mContentType
    jfieldID  fieldFlags;        // AudioAttributes.mFlags
    jfieldID  fieldFormattedTags;// AudioAttributes.mFormattedTags
};
static audio_track_fields_t      javaAudioTrackFields;
static audio_attributes_fields_t javaAudioAttrFields;

struct audiotrack_callback_cookie {
    jclass      audioTrack_class;
    jobject     audioTrack_ref;
    bool        busy;
    Condition   cond;
};

// keep these values in sync with AudioTrack.java
#define MODE_STATIC 0
#define MODE_STREAM 1

// ----------------------------------------------------------------------------
class AudioTrackJniStorage {
    public:
        sp<MemoryHeapBase>         mMemHeap;
        sp<MemoryBase>             mMemBase;
        audiotrack_callback_cookie mCallbackData;

    AudioTrackJniStorage() {
        mCallbackData.audioTrack_class = 0;
        mCallbackData.audioTrack_ref = 0;
    }

    ~AudioTrackJniStorage() {
        mMemBase.clear();
        mMemHeap.clear();
    }

    bool allocSharedMem(int sizeInBytes) {
        mMemHeap = new MemoryHeapBase(sizeInBytes, 0, "AudioTrack Heap Base");
        if (mMemHeap->getHeapID() < 0) {
            return false;
        }
        mMemBase = new MemoryBase(mMemHeap, 0, sizeInBytes);
        return true;
    }
};

static Mutex sLock;
static SortedVector <audiotrack_callback_cookie *> sAudioTrackCallBackCookies;

// ----------------------------------------------------------------------------
#define DEFAULT_OUTPUT_SAMPLE_RATE   44100

#define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM         -16
#define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK  -17
#define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT       -18
#define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE   -19
#define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED    -20

// ----------------------------------------------------------------------------
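// Callback dispatched by the native AudioTrack on its callback thread: forwards
// EVENT_MARKER and EVENT_NEW_POS to Java via postEventFromNative. The busy flag and
// condition below let native_release() wait for an in-flight callback before the
// cookie is destroyed.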
static void audioCallback(int event, void* user, void *info) {

    audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user;
    {
        Mutex::Autolock l(sLock);
        if (sAudioTrackCallBackCookies.indexOf(callbackInfo) < 0) {
            return;
        }
        callbackInfo->busy = true;
    }

    switch (event) {
    case AudioTrack::EVENT_MARKER: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;

    case AudioTrack::EVENT_NEW_POS: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;
    }

    {
        Mutex::Autolock l(sLock);
        callbackInfo->busy = false;
        callbackInfo->cond.broadcast();
    }
}


// ----------------------------------------------------------------------------
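// Accessors for the native AudioTrack strong pointer cached as a jlong in the Java
// object's mNativeTrackInJavaObj field. setAudioTrack() balances the reference count
// manually with incStrong()/decStrong() while holding sLock.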
static sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz)
{
    Mutex::Autolock l(sLock);
    AudioTrack* const at =
            (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    return sp<AudioTrack>(at);
}

static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTrack>& at)
{
    Mutex::Autolock l(sLock);
    sp<AudioTrack> old =
            (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    if (at.get()) {
        at->incStrong((void*)setAudioTrack);
    }
    if (old != 0) {
        old->decStrong((void*)setAudioTrack);
    }
    env->SetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (jlong)at.get());
    return old;
}
// ----------------------------------------------------------------------------
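// native_setup: validates the Java-side parameters (audio attributes, channel mask,
// format), promotes 8-bit PCM MODE_STATIC requests to 16-bit, computes the frame count,
// configures the native AudioTrack in streaming or static (shared memory) mode, and
// publishes the session id, stream type and native pointers back to the Java object.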
static jint
android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
        jobject jaa,
        jint sampleRateInHertz, jint javaChannelMask,
        jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession) {

    ALOGV("sampleRate=%d, audioFormat(from Java)=%d, channel mask=%x, buffSize=%d",
        sampleRateInHertz, audioFormat, javaChannelMask, buffSizeInBytes);

    if (jaa == 0) {
        ALOGE("Error creating AudioTrack: invalid audio attributes");
        return (jint) AUDIO_JAVA_ERROR;
    }

    // Java channel masks don't map directly to the native definition, but it's a simple shift
    // to skip the two deprecated channel configurations "default" and "mono".
    audio_channel_mask_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2;

    if (!audio_is_output_channel(nativeChannelMask)) {
        ALOGE("Error creating AudioTrack: invalid channel mask %#x.", javaChannelMask);
        return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK;
    }

    uint32_t channelCount = audio_channel_count_from_out_mask(nativeChannelMask);

    // check the format.
    // This function was called from Java, so we compare the format against the Java constants
    audio_format_t format = audioFormatToNative(audioFormat);
    if (format == AUDIO_FORMAT_INVALID) {
        ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);
        return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT;
    }

    // for the moment 8bitPCM in MODE_STATIC is not supported natively in the AudioTrack C++ class
    // so we declare everything as 16bitPCM, the 8->16bit conversion for MODE_STATIC will be handled
    // in android_media_AudioTrack_native_write_byte()
    if ((format == AUDIO_FORMAT_PCM_8_BIT)
        && (memoryMode == MODE_STATIC)) {
        ALOGV("android_media_AudioTrack_setup(): requesting MODE_STATIC for 8bit "
            "buff size of %dbytes, switching to 16bit, buff size of %dbytes",
            buffSizeInBytes, 2*buffSizeInBytes);
        format = AUDIO_FORMAT_PCM_16_BIT;
        // we will need twice the memory to store the data
        buffSizeInBytes *= 2;
    }

    // compute the frame count
    size_t frameCount;
    if (audio_is_linear_pcm(format)) {
        const size_t bytesPerSample = audio_bytes_per_sample(format);
        frameCount = buffSizeInBytes / (channelCount * bytesPerSample);
    } else {
        frameCount = buffSizeInBytes;
    }

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        ALOGE("Can't find %s when setting up callback.", kClassPathName);
        return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
    }

    if (jSession == NULL) {
        ALOGE("Error creating AudioTrack: invalid session ID pointer");
        return (jint) AUDIO_JAVA_ERROR;
    }

    jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        return (jint) AUDIO_JAVA_ERROR;
    }
    int sessionId = nSession[0];
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    // create the native AudioTrack object
    sp<AudioTrack> lpTrack = new AudioTrack();

    audio_attributes_t *paa = NULL;
    // read the AudioAttributes values
    paa = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
    const jstring jtags =
            (jstring) env->GetObjectField(jaa, javaAudioAttrFields.fieldFormattedTags);
    const char* tags = env->GetStringUTFChars(jtags, NULL);
    // copy at most size - 1 characters; paa was calloc'd, so the tags array is already NUL-terminated
    strncpy(paa->tags, tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
    env->ReleaseStringUTFChars(jtags, tags);
    paa->usage = (audio_usage_t) env->GetIntField(jaa, javaAudioAttrFields.fieldUsage);
    paa->content_type =
            (audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType);
    paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);

    ALOGV("AudioTrack_setup for usage=%d content=%d flags=%#x tags=%s",
            paa->usage, paa->content_type, paa->flags, paa->tags);

    // initialize the callback information:
    // this data will be passed with every AudioTrack callback
    AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage();
    lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
    // we use a weak reference so the AudioTrack object can be garbage collected.
    lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
    lpJniStorage->mCallbackData.busy = false;

    // initialize the native AudioTrack object
    status_t status = NO_ERROR;
    switch (memoryMode) {
    case MODE_STREAM:

        status = lpTrack->set(
                AUDIO_STREAM_DEFAULT,// stream type, but more info conveyed in paa (last argument)
                sampleRateInHertz,
                format,// word length, PCM
                nativeChannelMask,
                frameCount,
                AUDIO_OUTPUT_FLAG_NONE,
                audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
                0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
                0,// shared mem
                true,// thread can call Java
                sessionId,// audio session ID
                AudioTrack::TRANSFER_SYNC,
                NULL,                         // default offloadInfo
                -1, -1,                       // default uid, pid values
                paa);
        break;

    case MODE_STATIC:
        // AudioTrack is using shared memory

        if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) {
            ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
            goto native_init_failure;
        }

        status = lpTrack->set(
                AUDIO_STREAM_DEFAULT,// stream type, but more info conveyed in paa (last argument)
                sampleRateInHertz,
                format,// word length, PCM
                nativeChannelMask,
                frameCount,
                AUDIO_OUTPUT_FLAG_NONE,
                audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
                0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
                lpJniStorage->mMemBase,// shared mem
                true,// thread can call Java
                sessionId,// audio session ID
                AudioTrack::TRANSFER_SHARED,
                NULL,                         // default offloadInfo
                -1, -1,                       // default uid, pid values
                paa);
        break;

    default:
        ALOGE("Unknown mode %d", memoryMode);
        goto native_init_failure;
    }

    if (status != NO_ERROR) {
        ALOGE("Error %d initializing AudioTrack", status);
        goto native_init_failure;
    }

    nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        goto native_init_failure;
    }
    // read the audio session ID back from AudioTrack in case we create a new session
    nSession[0] = lpTrack->getSessionId();
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    {   // scope for the lock
        Mutex::Autolock l(sLock);
        sAudioTrackCallBackCookies.add(&lpJniStorage->mCallbackData);
    }
    // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
    // of the Java object (in mNativeTrackInJavaObj)
    setAudioTrack(env, thiz, lpTrack);

    // save the JNI resources so we can free them later
    //ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage);
    env->SetLongField(thiz, javaAudioTrackFields.jniData, (jlong)lpJniStorage);

    // since we had audio attributes, the stream type was derived from them during the
    // creation of the native AudioTrack: push the same value to the Java object
    env->SetIntField(thiz, javaAudioTrackFields.fieldStreamType, (jint) lpTrack->streamType());
    // audio attributes were copied in AudioTrack creation
    free(paa);
    paa = NULL;


    return (jint) AUDIO_JAVA_SUCCESS;

    // failures:
native_init_failure:
    if (paa != NULL) {
        free(paa);
    }
    if (nSession != NULL) {
        env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    }
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class);
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref);
    delete lpJniStorage;
    env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);

    return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_start(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for start()");
        return;
    }

    lpTrack->start();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_stop(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for stop()");
        return;
    }

    lpTrack->stop();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for pause()");
        return;
    }

    lpTrack->pause();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_flush(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for flush()");
        return;
    }

    lpTrack->flush();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setVolume()");
        return;
    }

    lpTrack->setVolume(leftVol, rightVol);
}

// ----------------------------------------------------------------------------

#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
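// native_release: detaches the native AudioTrack from the Java object, stops it, waits
// (up to CALLBACK_COND_WAIT_TIMEOUT_MS) for any callback still marked busy, then frees
// the global references and JNI storage created in native_setup.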
static void android_media_AudioTrack_release(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = setAudioTrack(env, thiz, 0);
    if (lpTrack == NULL) {
        return;
    }
    //ALOGV("deleting lpTrack: %x\n", (int)lpTrack);
    lpTrack->stop();

    // delete the JNI data
    AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField(
        thiz, javaAudioTrackFields.jniData);
    // reset the native resources in the Java object so any attempt to access
    // them after a call to release fails.
    env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);

    if (pJniStorage) {
        Mutex::Autolock l(sLock);
        audiotrack_callback_cookie *lpCookie = &pJniStorage->mCallbackData;
        //ALOGV("deleting pJniStorage: %x\n", (int)pJniStorage);
        while (lpCookie->busy) {
            if (lpCookie->cond.waitRelative(sLock,
                                            milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
                                                    NO_ERROR) {
                break;
            }
        }
        sAudioTrackCallBackCookies.remove(lpCookie);
        // delete global refs created in native_setup
        env->DeleteGlobalRef(lpCookie->audioTrack_class);
        env->DeleteGlobalRef(lpCookie->audioTrack_ref);
        delete pJniStorage;
    }
}


// ----------------------------------------------------------------------------
static void android_media_AudioTrack_finalize(JNIEnv *env,  jobject thiz) {
    //ALOGV("android_media_AudioTrack_finalize jobject: %x\n", (int)thiz);
    android_media_AudioTrack_release(env, thiz);
}

// ----------------------------------------------------------------------------
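// Pushes audio data to the track. Streaming tracks (no shared buffer) go through
// AudioTrack::write(), with WOULD_BLOCK mapped to 0 for compatibility with the earlier
// write() behavior; static tracks are filled by copying straight into the shared memory
// buffer, expanding 8-bit data to 16-bit samples on the way.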
jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const jbyte* data,
                  jint offsetInBytes, jint sizeInBytes, bool blocking = true) {
    // give the data to the native AudioTrack object (the data starts at the offset)
    ssize_t written = 0;
    // regular write() or copy the data to the AudioTrack's shared memory?
    if (track->sharedBuffer() == 0) {
        written = track->write(data + offsetInBytes, sizeInBytes, blocking);
        // for compatibility with earlier behavior of write(), return 0 in this case
        if (written == (ssize_t) WOULD_BLOCK) {
            written = 0;
        }
    } else {
        const audio_format_t format = audioFormatToNative(audioFormat);
        switch (format) {

        default:
        case AUDIO_FORMAT_PCM_FLOAT:
        case AUDIO_FORMAT_PCM_16_BIT: {
            // writing to shared memory, check for capacity
            if ((size_t)sizeInBytes > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size();
            }
            memcpy(track->sharedBuffer()->pointer(), data + offsetInBytes, sizeInBytes);
            written = sizeInBytes;
            } break;

        case AUDIO_FORMAT_PCM_8_BIT: {
            // data contains 8bit data we need to expand to 16bit before copying
            // to the shared memory
            // writing to shared memory, check for capacity,
            // note that input data will occupy 2X the input space due to 8 to 16bit conversion
            if (((size_t)sizeInBytes)*2 > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size() / 2;
            }
            int count = sizeInBytes;
            int16_t *dst = (int16_t *)track->sharedBuffer()->pointer();
            const uint8_t *src = (const uint8_t *)(data + offsetInBytes);
            memcpy_to_i16_from_u8(dst, src, count);
            // even though we wrote 2*sizeInBytes, we only report sizeInBytes as written to hide
            // the 8bit mixer restriction from the user of this function
            written = sizeInBytes;
            } break;

        }
    }
    return written;

}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_byte(JNIEnv *env,  jobject thiz,
                                                  jbyteArray javaAudioData,
                                                  jint offsetInBytes, jint sizeInBytes,
                                                  jint javaAudioFormat,
                                                  jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_write_byte(offset=%d, sizeInBytes=%d) called",
    //    offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    // get the pointer for the audio data from the java array
    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback from being called while in a critical section (in case the media
    // server process crashes, for instance).
    jbyte* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jbyte *)env->GetByteArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, cAudioData, offsetInBytes, sizeInBytes,
            isWriteBlocking == JNI_TRUE /* blocking */);

    env->ReleaseByteArrayElements(javaAudioData, cAudioData, 0);

    //ALOGV("write wrote %d (tried %d) bytes in the native AudioTrack with offset %d",
    //     (int)written, (int)(sizeInBytes), (int)offsetInBytes);
    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_native_bytes(JNIEnv *env,  jobject thiz,
        jbyteArray javaBytes, jint byteOffset, jint sizeInBytes,
        jint javaAudioFormat, jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_write_native_bytes(offset=%d, sizeInBytes=%d) called",
    //    offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    ScopedBytesRO bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        ALOGE("Error retrieving source of audio data to play, can't play");
        return (jint)AUDIO_JAVA_BAD_VALUE;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, bytes.get(), byteOffset,
            sizeInBytes, isWriteBlocking == JNI_TRUE /* blocking */);

    return written;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_short(JNIEnv *env,  jobject thiz,
                                                  jshortArray javaAudioData,
                                                  jint offsetInShorts, jint sizeInShorts,
                                                  jint javaAudioFormat) {

    //ALOGV("android_media_AudioTrack_write_short(offset=%d, sizeInShorts=%d) called",
    //    offsetInShorts, sizeInShorts);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    // get the pointer for the audio data from the java array
    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback from being called while in a critical section (in case the media
    // server process crashes, for instance).
    jshort* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jshort *)env->GetShortArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }
    jint written = writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData,
                                offsetInShorts * sizeof(short), sizeInShorts * sizeof(short),
            true /*blocking write, legacy behavior*/);
    env->ReleaseShortArrayElements(javaAudioData, cAudioData, 0);

    if (written > 0) {
        written /= sizeof(short);
    }
    //ALOGV("write wrote %d (tried %d) shorts in the native AudioTrack with offset %d",
    //     (int)written, (int)(sizeInShorts), (int)offsetInShorts);

    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_float(JNIEnv *env,  jobject thiz,
                                                  jfloatArray javaAudioData,
                                                  jint offsetInFloats, jint sizeInFloats,
                                                  jint javaAudioFormat,
                                                  jboolean isWriteBlocking) {

    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    jfloat* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jfloat *)env->GetFloatArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }
    jint written = writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData,
                                offsetInFloats * sizeof(float), sizeInFloats * sizeof(float),
                                isWriteBlocking == JNI_TRUE /* blocking */);
    env->ReleaseFloatArrayElements(javaAudioData, cAudioData, 0);

    if (written > 0) {
        written /= sizeof(float);
    }

    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_native_frame_count(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for frameCount()");
        return (jint)AUDIO_JAVA_ERROR;
    }

    return lpTrack->frameCount();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env,  jobject thiz,
        jint sampleRateInHz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setSampleRate()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus(lpTrack->setSampleRate(sampleRateInHz));
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getSampleRate()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint) lpTrack->getSampleRate();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env,  jobject thiz,
        jint markerPos) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setMarkerPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setMarkerPosition(markerPos) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t markerPos = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getMarkerPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getMarkerPosition(&markerPos);
    return (jint)markerPos;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env,  jobject thiz,
        jint period) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setPositionUpdatePeriod(period) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t period = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getPositionUpdatePeriod(&period);
    return (jint)period;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_position(JNIEnv *env,  jobject thiz,
        jint position) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setPosition(position) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_position(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t position = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPosition()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    lpTrack->getPosition(&position);
    return (jint)position;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_latency(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for latency()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)lpTrack->latency();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_timestamp(JNIEnv *env,  jobject thiz, jlongArray jTimestamp) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    AudioTimestamp timestamp;
    status_t status = lpTrack->getTimestamp(timestamp);
    if (status == OK) {
        jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
        if (nTimestamp == NULL) {
            ALOGE("Unable to get array for getTimestamp()");
            return (jint)AUDIO_JAVA_ERROR;
        }
        nTimestamp[0] = (jlong) timestamp.mPosition;
        nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
        env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
    }
    return (jint) nativeToJavaStatus(status);
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env,  jobject thiz,
        jint loopStart, jint loopEnd, jint loopCount) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setLoop()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->setLoop(loopStart, loopEnd, loopCount) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_reload(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for reload()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->reload() );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env,  jobject thiz,
        jint javaStreamType) {
    uint32_t afSamplingRate;
    // convert the stream type from Java to native value
    // FIXME: code duplication with android_media_AudioTrack_setup()
    audio_stream_type_t nativeStreamType;
    switch (javaStreamType) {
    case AUDIO_STREAM_VOICE_CALL:
    case AUDIO_STREAM_SYSTEM:
    case AUDIO_STREAM_RING:
    case AUDIO_STREAM_MUSIC:
    case AUDIO_STREAM_ALARM:
    case AUDIO_STREAM_NOTIFICATION:
    case AUDIO_STREAM_BLUETOOTH_SCO:
    case AUDIO_STREAM_DTMF:
        nativeStreamType = (audio_stream_type_t) javaStreamType;
        break;
    default:
        nativeStreamType = AUDIO_STREAM_DEFAULT;
        break;
    }

    status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType);
    if (status != NO_ERROR) {
        ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d "
              "in AudioTrack JNI", status, nativeStreamType);
        return DEFAULT_OUTPUT_SAMPLE_RATE;
    } else {
        return afSamplingRate;
    }
}


// ----------------------------------------------------------------------------
// returns the minimum required size for the successful creation of a streaming AudioTrack
// returns -1 if there was an error querying the hardware.
static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env,  jobject thiz,
    jint sampleRateInHertz, jint channelCount, jint audioFormat) {

    size_t frameCount;
    const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT,
            sampleRateInHertz);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::getMinFrameCount() for sample rate %d failed with status %d",
                sampleRateInHertz, status);
        return -1;
    }
    const audio_format_t format = audioFormatToNative(audioFormat);
    if (audio_is_linear_pcm(format)) {
        const size_t bytesPerSample = audio_bytes_per_sample(format);
        return frameCount * channelCount * bytesPerSample;
    } else {
        return frameCount;
    }
}

// ----------------------------------------------------------------------------
static jint
android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL ) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()");
        return -1;
    }

    status_t status = lpTrack->setAuxEffectSendLevel(level);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::setAuxEffectSendLevel() for level %g failed with status %d",
                level, status);
    }
    return (jint) status;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env,  jobject thiz,
        jint effectId) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for attachAuxEffect()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    return nativeToJavaStatus( lpTrack->attachAuxEffect(effectId) );
}

// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
    // name,              signature,     funcPtr
    {"native_start",         "()V",      (void *)android_media_AudioTrack_start},
    {"native_stop",          "()V",      (void *)android_media_AudioTrack_stop},
    {"native_pause",         "()V",      (void *)android_media_AudioTrack_pause},
    {"native_flush",         "()V",      (void *)android_media_AudioTrack_flush},
    {"native_setup",     "(Ljava/lang/Object;Ljava/lang/Object;IIIII[I)I",
                                         (void *)android_media_AudioTrack_setup},
    {"native_finalize",      "()V",      (void *)android_media_AudioTrack_finalize},
    {"native_release",       "()V",      (void *)android_media_AudioTrack_release},
    {"native_write_byte",    "([BIIIZ)I",(void *)android_media_AudioTrack_write_byte},
    {"native_write_native_bytes",
                             "(Ljava/lang/Object;IIIZ)I",
                                         (void *)android_media_AudioTrack_write_native_bytes},
    {"native_write_short",   "([SIII)I", (void *)android_media_AudioTrack_write_short},
    {"native_write_float",   "([FIIIZ)I",(void *)android_media_AudioTrack_write_float},
    {"native_setVolume",     "(FF)V",    (void *)android_media_AudioTrack_set_volume},
    {"native_get_native_frame_count",
                             "()I",      (void *)android_media_AudioTrack_get_native_frame_count},
    {"native_set_playback_rate",
                             "(I)I",     (void *)android_media_AudioTrack_set_playback_rate},
    {"native_get_playback_rate",
                             "()I",      (void *)android_media_AudioTrack_get_playback_rate},
    {"native_set_marker_pos","(I)I",     (void *)android_media_AudioTrack_set_marker_pos},
    {"native_get_marker_pos","()I",      (void *)android_media_AudioTrack_get_marker_pos},
    {"native_set_pos_update_period",
                             "(I)I",     (void *)android_media_AudioTrack_set_pos_update_period},
    {"native_get_pos_update_period",
                             "()I",      (void *)android_media_AudioTrack_get_pos_update_period},
    {"native_set_position",  "(I)I",     (void *)android_media_AudioTrack_set_position},
    {"native_get_position",  "()I",      (void *)android_media_AudioTrack_get_position},
    {"native_get_latency",   "()I",      (void *)android_media_AudioTrack_get_latency},
    {"native_get_timestamp", "([J)I",    (void *)android_media_AudioTrack_get_timestamp},
    {"native_set_loop",      "(III)I",   (void *)android_media_AudioTrack_set_loop},
    {"native_reload_static", "()I",      (void *)android_media_AudioTrack_reload},
    {"native_get_output_sample_rate",
                             "(I)I",      (void *)android_media_AudioTrack_get_output_sample_rate},
    {"native_get_min_buff_size",
                             "(III)I",   (void *)android_media_AudioTrack_get_min_buff_size},
    {"native_setAuxEffectSendLevel",
                             "(F)I",     (void *)android_media_AudioTrack_setAuxEffectSendLevel},
    {"native_attachAuxEffect",
                             "(I)I",     (void *)android_media_AudioTrack_attachAuxEffect},
};


// field names found in android/media/AudioTrack.java
#define JAVA_POSTEVENT_CALLBACK_NAME                    "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME            "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME                         "mJniData"
#define JAVA_STREAMTYPE_FIELD_NAME                      "mStreamType"

// ----------------------------------------------------------------------------
// preconditions:
//    theClass is valid
bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className,
                             const char* constName, int* constVal) {
    jfieldID javaConst = NULL;
    javaConst = pEnv->GetStaticFieldID(theClass, constName, "I");
    if (javaConst != NULL) {
        *constVal = pEnv->GetStaticIntField(theClass, javaConst);
        return true;
    } else {
        ALOGE("Can't find %s.%s", className, constName);
        return false;
    }
}


// ----------------------------------------------------------------------------
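// Caches the AudioTrack/AudioAttributes method and field IDs used above and registers
// the native methods with the runtime; the *OrDie helpers abort if anything is missing.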
int register_android_media_AudioTrack(JNIEnv *env)
{
    javaAudioTrackFields.nativeTrackInJavaObj = NULL;
    javaAudioTrackFields.postNativeEventInJava = NULL;

    // Get the AudioTrack class
    jclass audioTrackClass = FindClassOrDie(env, kClassPathName);

    // Get the postEvent method
    javaAudioTrackFields.postNativeEventInJava = GetStaticMethodIDOrDie(env,
            audioTrackClass, JAVA_POSTEVENT_CALLBACK_NAME,
            "(Ljava/lang/Object;IIILjava/lang/Object;)V");

    // Get the variable fields
    //      nativeTrackInJavaObj
    javaAudioTrackFields.nativeTrackInJavaObj = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "J");
    //      jniData
    javaAudioTrackFields.jniData = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_JNIDATA_FIELD_NAME, "J");
    //      fieldStreamType
    javaAudioTrackFields.fieldStreamType = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_STREAMTYPE_FIELD_NAME, "I");

    // Get the AudioAttributes class and fields
    jclass audioAttrClass = FindClassOrDie(env, kAudioAttributesClassPathName);
    javaAudioAttrFields.fieldUsage = GetFieldIDOrDie(env, audioAttrClass, "mUsage", "I");
    javaAudioAttrFields.fieldContentType = GetFieldIDOrDie(env,
            audioAttrClass, "mContentType", "I");
    javaAudioAttrFields.fieldFlags = GetFieldIDOrDie(env, audioAttrClass, "mFlags", "I");
    javaAudioAttrFields.fieldFormattedTags = GetFieldIDOrDie(env,
            audioAttrClass, "mFormattedTags", "Ljava/lang/String;");

    return RegisterMethodsOrDie(env, kClassPathName, gMethods, NELEM(gMethods));
}


// ----------------------------------------------------------------------------