/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0

#define LOG_TAG "AudioTrack-JNI"

#include <jni.h>
#include <JNIHelp.h>
#include <android_runtime/AndroidRuntime.h>

#include <utils/Log.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>

#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>

#include <system/audio.h>

// ----------------------------------------------------------------------------

using namespace android;

// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/AudioTrack";

struct fields_t {
    // these fields provide access from C++ to the...
    jmethodID postNativeEventInJava; //... event post callback method
    jfieldID  nativeTrackInJavaObj;  // stores in Java the native AudioTrack object
    jfieldID  jniData;  // stores in Java additional resources used by the native AudioTrack
};
static fields_t javaAudioTrackFields;

struct audiotrack_callback_cookie {
    jclass      audioTrack_class;
    jobject     audioTrack_ref;
    bool        busy;
    Condition   cond;
};

// keep these values in sync with AudioTrack.java
#define MODE_STATIC 0
#define MODE_STREAM 1
// keep these values in sync with AudioFormat.java
#define ENCODING_PCM_16BIT 2
#define ENCODING_PCM_8BIT  3

// ----------------------------------------------------------------------------
class AudioTrackJniStorage {
    public:
        sp<MemoryHeapBase>         mMemHeap;
        sp<MemoryBase>             mMemBase;
        audiotrack_callback_cookie mCallbackData;
        audio_stream_type_t        mStreamType;

    AudioTrackJniStorage() {
        mCallbackData.audioTrack_class = 0;
        mCallbackData.audioTrack_ref = 0;
        mStreamType = AUDIO_STREAM_DEFAULT;
    }

    ~AudioTrackJniStorage() {
        mMemBase.clear();
        mMemHeap.clear();
    }

    bool allocSharedMem(int sizeInBytes) {
        mMemHeap = new MemoryHeapBase(sizeInBytes, 0, "AudioTrack Heap Base");
        if (mMemHeap->getHeapID() < 0) {
            return false;
        }
        mMemBase = new MemoryBase(mMemHeap, 0, sizeInBytes);
        return true;
    }
};

static Mutex sLock;
static SortedVector <audiotrack_callback_cookie *> sAudioTrackCallBackCookies;

// ----------------------------------------------------------------------------
#define DEFAULT_OUTPUT_SAMPLE_RATE   44100

#define AUDIOTRACK_SUCCESS                         0
#define AUDIOTRACK_ERROR                           -1
#define AUDIOTRACK_ERROR_BAD_VALUE                 -2
#define AUDIOTRACK_ERROR_INVALID_OPERATION         -3
#define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM         -16
#define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK  -17
#define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT       -18
#define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE   -19
#define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED    -20


jint android_media_translateErrorCode(int code) {
    switch (code) {
    case NO_ERROR:
        return AUDIOTRACK_SUCCESS;
    case BAD_VALUE:
        return AUDIOTRACK_ERROR_BAD_VALUE;
    case INVALID_OPERATION:
        return AUDIOTRACK_ERROR_INVALID_OPERATION;
    default:
        return AUDIOTRACK_ERROR;
    }
}


// ----------------------------------------------------------------------------
static void audioCallback(int event, void* user, void *info) {

    audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user;
    {
        Mutex::Autolock l(sLock);
        if (sAudioTrackCallBackCookies.indexOf(callbackInfo) < 0) {
            return;
        }
        callbackInfo->busy = true;
    }

    switch (event) {
    case AudioTrack::EVENT_MARKER: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;

    case AudioTrack::EVENT_NEW_POS: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;
    }

    {
        Mutex::Autolock l(sLock);
        callbackInfo->busy = false;
        callbackInfo->cond.broadcast();
    }
}


// ----------------------------------------------------------------------------
static sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz)
{
    Mutex::Autolock l(sLock);
    AudioTrack* const at =
            (AudioTrack*)env->GetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    return sp<AudioTrack>(at);
}

static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTrack>& at)
{
    Mutex::Autolock l(sLock);
    sp<AudioTrack> old =
            (AudioTrack*)env->GetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    if (at.get()) {
        at->incStrong((void*)setAudioTrack);
    }
    if (old != 0) {
        old->decStrong((void*)setAudioTrack);
    }
    env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (int)at.get());
    return old;
}

// ----------------------------------------------------------------------------
static int
android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
        jint streamType, jint sampleRateInHertz, jint javaChannelMask,
        jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession)
{
    ALOGV("sampleRate=%d, audioFormat(from Java)=%d, channel mask=%x, buffSize=%d",
        sampleRateInHertz, audioFormat, javaChannelMask, buffSizeInBytes);
    uint32_t afSampleRate;
    size_t afFrameCount;

    if (AudioSystem::getOutputFrameCount(&afFrameCount, (audio_stream_type_t) streamType) != NO_ERROR) {
        ALOGE("Error creating AudioTrack: Could not get AudioSystem frame count.");
        return AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM;
    }
    if (AudioSystem::getOutputSamplingRate(&afSampleRate, (audio_stream_type_t) streamType) != NO_ERROR) {
        ALOGE("Error creating AudioTrack: Could not get AudioSystem sampling rate.");
        return AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM;
    }

    // Java channel masks don't map directly to the native definition, but it's a simple shift
    // to skip the two deprecated channel configurations "default" and "mono".
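    // Illustration of the shift (constant values as assumed from AudioFormat.java and
    // system/audio.h of this vintage): the Java masks reserve the two low bits for the
    // deprecated constants, so e.g.
    //   CHANNEL_OUT_FRONT_LEFT (Java 0x4) >> 2 == AUDIO_CHANNEL_OUT_FRONT_LEFT (native 0x1)
    //   CHANNEL_OUT_STEREO     (Java 0xC) >> 2 == AUDIO_CHANNEL_OUT_STEREO     (native 0x3)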
    uint32_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2;

    if (!audio_is_output_channel(nativeChannelMask)) {
        ALOGE("Error creating AudioTrack: invalid channel mask %#x.", javaChannelMask);
        return AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK;
    }

    int nbChannels = popcount(nativeChannelMask);

    // check the stream type
    audio_stream_type_t atStreamType;
    switch (streamType) {
    case AUDIO_STREAM_VOICE_CALL:
    case AUDIO_STREAM_SYSTEM:
    case AUDIO_STREAM_RING:
    case AUDIO_STREAM_MUSIC:
    case AUDIO_STREAM_ALARM:
    case AUDIO_STREAM_NOTIFICATION:
    case AUDIO_STREAM_BLUETOOTH_SCO:
    case AUDIO_STREAM_DTMF:
        atStreamType = (audio_stream_type_t) streamType;
        break;
    default:
        ALOGE("Error creating AudioTrack: unknown stream type.");
        return AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE;
    }

    // check the format.
    // This function was called from Java, so we compare the format against the Java constants
    if ((audioFormat != ENCODING_PCM_16BIT) && (audioFormat != ENCODING_PCM_8BIT)) {
        ALOGE("Error creating AudioTrack: unsupported audio format.");
        return AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT;
    }

    // for the moment, 8-bit PCM in MODE_STATIC is not supported natively by the AudioTrack C++
    // class, so we declare everything as 16-bit PCM; the 8->16 bit conversion for MODE_STATIC
    // is handled in android_media_AudioTrack_native_write_byte()
    if ((audioFormat == ENCODING_PCM_8BIT)
        && (memoryMode == MODE_STATIC)) {
        ALOGV("android_media_AudioTrack_native_setup(): requesting MODE_STATIC for 8bit "
            "buff size of %dbytes, switching to 16bit, buff size of %dbytes",
            buffSizeInBytes, 2*buffSizeInBytes);
        audioFormat = ENCODING_PCM_16BIT;
        // we will need twice the memory to store the data
        buffSizeInBytes *= 2;
    }

    // compute the frame count
    int bytesPerSample = audioFormat == ENCODING_PCM_16BIT ? 2 : 1;
    audio_format_t format = audioFormat == ENCODING_PCM_16BIT ?
            AUDIO_FORMAT_PCM_16_BIT : AUDIO_FORMAT_PCM_8_BIT;
    int frameCount = buffSizeInBytes / (nbChannels * bytesPerSample);

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        ALOGE("Can't find %s when setting up callback.", kClassPathName);
        return AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
    }

    if (jSession == NULL) {
        ALOGE("Error creating AudioTrack: invalid session ID pointer");
        return AUDIOTRACK_ERROR;
    }

    jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        return AUDIOTRACK_ERROR;
    }
    int sessionId = nSession[0];
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    // create the native AudioTrack object
    sp<AudioTrack> lpTrack = new AudioTrack();

    // initialize the callback information:
    // this data will be passed with every AudioTrack callback
    AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage();
    lpJniStorage->mStreamType = atStreamType;
    lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
    // we use a weak reference so the AudioTrack object can be garbage collected.
    lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
    lpJniStorage->mCallbackData.busy = false;

    // initialize the native AudioTrack object
    switch (memoryMode) {
    case MODE_STREAM:

        lpTrack->set(
            atStreamType,// stream type
            sampleRateInHertz,
            format,// word length, PCM
            nativeChannelMask,
            frameCount,
            AUDIO_OUTPUT_FLAG_NONE,
            audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
            0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
            0,// shared mem
            true,// thread can call Java
            sessionId);// audio session ID
        break;

    case MODE_STATIC:
        // AudioTrack is using shared memory

        if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) {
            ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
            goto native_init_failure;
        }

        lpTrack->set(
            atStreamType,// stream type
            sampleRateInHertz,
            format,// word length, PCM
            nativeChannelMask,
            frameCount,
            AUDIO_OUTPUT_FLAG_NONE,
            audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
            0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
            lpJniStorage->mMemBase,// shared mem
            true,// thread can call Java
            sessionId);// audio session ID
        break;

    default:
        ALOGE("Unknown mode %d", memoryMode);
        goto native_init_failure;
    }

    if (lpTrack->initCheck() != NO_ERROR) {
        ALOGE("Error initializing AudioTrack");
        goto native_init_failure;
    }

    nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        goto native_init_failure;
    }
    // read the audio session ID back from AudioTrack in case we create a new session
    nSession[0] = lpTrack->getSessionId();
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    { // scope for the lock
        Mutex::Autolock l(sLock);
        sAudioTrackCallBackCookies.add(&lpJniStorage->mCallbackData);
    }
    // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
    // of the Java object (in mNativeTrackInJavaObj)
    setAudioTrack(env, thiz, lpTrack);

    // save the JNI resources so we can free them later
    //ALOGV("storing lpJniStorage: %x\n", (int)lpJniStorage);
    env->SetIntField(thiz, javaAudioTrackFields.jniData, (int)lpJniStorage);

    return AUDIOTRACK_SUCCESS;

    // failures:
native_init_failure:
    if (nSession != NULL) {
        env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    }
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class);
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref);
    delete lpJniStorage;
    env->SetIntField(thiz, javaAudioTrackFields.jniData, 0);

    return AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_start(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for start()");
        return;
    }

    lpTrack->start();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_stop(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for stop()");
        return;
    }

    lpTrack->stop();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for pause()");
        return;
    }

    lpTrack->pause();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_flush(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for flush()");
        return;
    }

    lpTrack->flush();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setVolume()");
        return;
    }

    lpTrack->setVolume(leftVol, rightVol);
}

// ----------------------------------------------------------------------------

#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
static void android_media_AudioTrack_native_release(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = setAudioTrack(env, thiz, 0);
    if (lpTrack == NULL) {
        return;
    }
    //ALOGV("deleting lpTrack: %x\n", (int)lpTrack);
    lpTrack->stop();

    // delete the JNI data
    AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetIntField(
        thiz, javaAudioTrackFields.jniData);
    // reset the native resources in the Java object so any attempt to access
    // them after a call to release fails.
    env->SetIntField(thiz, javaAudioTrackFields.jniData, 0);

    if (pJniStorage) {
        Mutex::Autolock l(sLock);
        audiotrack_callback_cookie *lpCookie = &pJniStorage->mCallbackData;
        //ALOGV("deleting pJniStorage: %x\n", (int)pJniStorage);
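        // audioCallback() marks this cookie busy (under sLock) while it posts an event to Java;
        // wait for any callback still in flight to finish before deleting the global references
        // it uses, bounded by CALLBACK_COND_WAIT_TIMEOUT_MS.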
        while (lpCookie->busy) {
            if (lpCookie->cond.waitRelative(sLock,
                                            milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
                                                    NO_ERROR) {
                break;
            }
        }
        sAudioTrackCallBackCookies.remove(lpCookie);
        // delete global refs created in native_setup
        env->DeleteGlobalRef(lpCookie->audioTrack_class);
        env->DeleteGlobalRef(lpCookie->audioTrack_ref);
        delete pJniStorage;
    }
}


// ----------------------------------------------------------------------------
static void android_media_AudioTrack_native_finalize(JNIEnv *env, jobject thiz) {
    //ALOGV("android_media_AudioTrack_native_finalize jobject: %x\n", (int)thiz);
    android_media_AudioTrack_native_release(env, thiz);
}

// ----------------------------------------------------------------------------
jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, jbyte* data,
                  jint offsetInBytes, jint sizeInBytes) {
    // give the data to the native AudioTrack object (the data starts at the offset)
    ssize_t written = 0;
    // regular write() or copy the data to the AudioTrack's shared memory?
    if (track->sharedBuffer() == 0) {
        written = track->write(data + offsetInBytes, sizeInBytes);
        // for compatibility with earlier behavior of write(), return 0 in this case
        if (written == (ssize_t) WOULD_BLOCK) {
            written = 0;
        }
    } else {
        if (audioFormat == ENCODING_PCM_16BIT) {
            // writing to shared memory, check for capacity
            if ((size_t)sizeInBytes > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size();
            }
            memcpy(track->sharedBuffer()->pointer(), data + offsetInBytes, sizeInBytes);
            written = sizeInBytes;
        } else if (audioFormat == ENCODING_PCM_8BIT) {
            // data contains 8bit data we need to expand to 16bit before copying
            // to the shared memory
            // writing to shared memory, check for capacity,
            // note that input data will occupy 2X the input space due to 8 to 16bit conversion
            if (((size_t)sizeInBytes)*2 > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size() / 2;
            }
            int count = sizeInBytes;
            int16_t *dst = (int16_t *)track->sharedBuffer()->pointer();
            const int8_t *src = (const int8_t *)(data + offsetInBytes);
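            // The loop below maps unsigned 8-bit samples to signed 16-bit by flipping the sign
            // bit and shifting, e.g. 0x00 -> 0x8000 (-32768), 0x80 (midpoint) -> 0x0000,
            // 0xFF -> 0x7F00 (+32512).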
            while (count--) {
                *dst++ = (int16_t)(*src++^0x80) << 8;
            }
            // even though we wrote 2*sizeInBytes, we only report sizeInBytes as written to hide
            // the 8bit mixer restriction from the user of this function
            written = sizeInBytes;
        }
    }
    return written;

}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_native_write_byte(JNIEnv *env, jobject thiz,
                                                       jbyteArray javaAudioData,
                                                       jint offsetInBytes, jint sizeInBytes,
                                                       jint javaAudioFormat) {
    //ALOGV("android_media_AudioTrack_native_write_byte(offset=%d, sizeInBytes=%d) called",
    //    offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    // get the pointer for the audio data from the java array
    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback from being called while in a critical section (in case of media server
    // process crash for instance)
    jbyte* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jbyte *)env->GetByteArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, cAudioData, offsetInBytes, sizeInBytes);

    env->ReleaseByteArrayElements(javaAudioData, cAudioData, 0);

    //ALOGV("write wrote %d (tried %d) bytes in the native AudioTrack with offset %d",
    //    (int)written, (int)(sizeInBytes), (int)offsetInBytes);
    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_native_write_short(JNIEnv *env, jobject thiz,
                                                        jshortArray javaAudioData,
                                                        jint offsetInShorts, jint sizeInShorts,
                                                        jint javaAudioFormat) {
    jint written = android_media_AudioTrack_native_write_byte(env, thiz,
                                                              (jbyteArray) javaAudioData,
                                                              offsetInShorts*2, sizeInShorts*2,
                                                              javaAudioFormat);
    if (written > 0) {
        written /= 2;
    }
    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_native_frame_count(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for frameCount()");
        return AUDIOTRACK_ERROR;
    }

    return lpTrack->frameCount();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env, jobject thiz,
        jint sampleRateInHz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setSampleRate()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode(lpTrack->setSampleRate(sampleRateInHz));
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getSampleRate()");
        return AUDIOTRACK_ERROR;
    }
    return (jint) lpTrack->getSampleRate();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz,
        jint markerPos) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setMarkerPosition()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setMarkerPosition(markerPos) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t markerPos = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getMarkerPosition()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getMarkerPosition(&markerPos);
    return (jint)markerPos;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env, jobject thiz,
        jint period) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setPositionUpdatePeriod(period) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t period = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getPositionUpdatePeriod(&period);
    return (jint)period;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz,
        jint position) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPosition()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setPosition(position) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_position(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t position = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPosition()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getPosition(&position);
    return (jint)position;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for latency()");
        return AUDIOTRACK_ERROR;
    }
    return (jint)lpTrack->latency();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz, jlongArray jTimestamp) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
        return AUDIOTRACK_ERROR;
    }
    AudioTimestamp timestamp;
    status_t status = lpTrack->getTimestamp(timestamp);
    if (status == OK) {
        jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
        if (nTimestamp == NULL) {
            ALOGE("Unable to get array for getTimestamp()");
            return AUDIOTRACK_ERROR;
        }
        nTimestamp[0] = (jlong) timestamp.mPosition;
        nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
        env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
    }
    return (jint) android_media_translateErrorCode(status);
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz,
        jint loopStart, jint loopEnd, jint loopCount) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setLoop()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setLoop(loopStart, loopEnd, loopCount) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_reload(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for reload()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->reload() );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobject thiz,
        jint javaStreamType) {
    uint32_t afSamplingRate;
    // convert the stream type from Java to native value
    // FIXME: code duplication with android_media_AudioTrack_native_setup()
    audio_stream_type_t nativeStreamType;
    switch (javaStreamType) {
    case AUDIO_STREAM_VOICE_CALL:
    case AUDIO_STREAM_SYSTEM:
    case AUDIO_STREAM_RING:
    case AUDIO_STREAM_MUSIC:
    case AUDIO_STREAM_ALARM:
    case AUDIO_STREAM_NOTIFICATION:
    case AUDIO_STREAM_BLUETOOTH_SCO:
    case AUDIO_STREAM_DTMF:
        nativeStreamType = (audio_stream_type_t) javaStreamType;
        break;
    default:
        nativeStreamType = AUDIO_STREAM_DEFAULT;
        break;
    }

    if (AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType) != NO_ERROR) {
        ALOGE("AudioSystem::getOutputSamplingRate() for stream type %d failed in AudioTrack JNI",
            nativeStreamType);
        return DEFAULT_OUTPUT_SAMPLE_RATE;
    } else {
        return afSamplingRate;
    }
}


// ----------------------------------------------------------------------------
// returns the minimum required size for the successful creation of a streaming AudioTrack
// returns -1 if there was an error querying the hardware.
static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz,
        jint sampleRateInHertz, jint nbChannels, jint audioFormat) {

    size_t frameCount = 0;
    if (AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT,
            sampleRateInHertz) != NO_ERROR) {
        return -1;
    }
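    // e.g. (hypothetical numbers) if getMinFrameCount() reports 1536 frames, a stereo 16-bit
    // stream needs at least 1536 * 2 channels * 2 bytes = 6144 bytes.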
    return frameCount * nbChannels * (audioFormat == ENCODING_PCM_16BIT ? 2 : 1);
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL ) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()");
        return;
    }

    lpTrack->setAuxEffectSendLevel(level);
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env, jobject thiz,
        jint effectId) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for attachAuxEffect()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->attachAuxEffect(effectId) );
}

// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
    // name,               signature,   funcPtr
    {"native_start",         "()V",      (void *)android_media_AudioTrack_start},
    {"native_stop",          "()V",      (void *)android_media_AudioTrack_stop},
    {"native_pause",         "()V",      (void *)android_media_AudioTrack_pause},
    {"native_flush",         "()V",      (void *)android_media_AudioTrack_flush},
    {"native_setup",         "(Ljava/lang/Object;IIIIII[I)I",
                                         (void *)android_media_AudioTrack_native_setup},
    {"native_finalize",      "()V",      (void *)android_media_AudioTrack_native_finalize},
    {"native_release",       "()V",      (void *)android_media_AudioTrack_native_release},
    {"native_write_byte",    "([BIII)I", (void *)android_media_AudioTrack_native_write_byte},
    {"native_write_short",   "([SIII)I", (void *)android_media_AudioTrack_native_write_short},
    {"native_setVolume",     "(FF)V",    (void *)android_media_AudioTrack_set_volume},
    {"native_get_native_frame_count",
                             "()I",      (void *)android_media_AudioTrack_get_native_frame_count},
    {"native_set_playback_rate",
                             "(I)I",     (void *)android_media_AudioTrack_set_playback_rate},
    {"native_get_playback_rate",
                             "()I",      (void *)android_media_AudioTrack_get_playback_rate},
    {"native_set_marker_pos","(I)I",     (void *)android_media_AudioTrack_set_marker_pos},
    {"native_get_marker_pos","()I",      (void *)android_media_AudioTrack_get_marker_pos},
    {"native_set_pos_update_period",
                             "(I)I",     (void *)android_media_AudioTrack_set_pos_update_period},
    {"native_get_pos_update_period",
                             "()I",      (void *)android_media_AudioTrack_get_pos_update_period},
    {"native_set_position",  "(I)I",     (void *)android_media_AudioTrack_set_position},
    {"native_get_position",  "()I",      (void *)android_media_AudioTrack_get_position},
    {"native_get_latency",   "()I",      (void *)android_media_AudioTrack_get_latency},
    {"native_get_timestamp", "([J)I",    (void *)android_media_AudioTrack_get_timestamp},
    {"native_set_loop",      "(III)I",   (void *)android_media_AudioTrack_set_loop},
    {"native_reload_static", "()I",      (void *)android_media_AudioTrack_reload},
    {"native_get_output_sample_rate",
                             "(I)I",     (void *)android_media_AudioTrack_get_output_sample_rate},
    {"native_get_min_buff_size",
                             "(III)I",   (void *)android_media_AudioTrack_get_min_buff_size},
    {"native_setAuxEffectSendLevel",
                             "(F)V",     (void *)android_media_AudioTrack_setAuxEffectSendLevel},
    {"native_attachAuxEffect",
                             "(I)I",     (void *)android_media_AudioTrack_attachAuxEffect},
};


// field names found in android/media/AudioTrack.java
#define JAVA_POSTEVENT_CALLBACK_NAME            "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME    "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME                 "mJniData"

// ----------------------------------------------------------------------------
// preconditions:
//    theClass is valid
bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className,
        const char* constName, int* constVal) {
    jfieldID javaConst = NULL;
    javaConst = pEnv->GetStaticFieldID(theClass, constName, "I");
    if (javaConst != NULL) {
        *constVal = pEnv->GetStaticIntField(theClass, javaConst);
        return true;
    } else {
        ALOGE("Can't find %s.%s", className, constName);
        return false;
    }
}


// ----------------------------------------------------------------------------
int register_android_media_AudioTrack(JNIEnv *env)
{
    javaAudioTrackFields.nativeTrackInJavaObj = NULL;
    javaAudioTrackFields.postNativeEventInJava = NULL;

    // Get the AudioTrack class
    jclass audioTrackClass = env->FindClass(kClassPathName);
    if (audioTrackClass == NULL) {
        ALOGE("Can't find %s", kClassPathName);
        return -1;
    }

    // Get the postEvent method
    javaAudioTrackFields.postNativeEventInJava = env->GetStaticMethodID(
            audioTrackClass,
            JAVA_POSTEVENT_CALLBACK_NAME, "(Ljava/lang/Object;IIILjava/lang/Object;)V");
    if (javaAudioTrackFields.postNativeEventInJava == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_POSTEVENT_CALLBACK_NAME);
        return -1;
    }

    // Get the field IDs
    // nativeTrackInJavaObj
    javaAudioTrackFields.nativeTrackInJavaObj = env->GetFieldID(
            audioTrackClass,
            JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "I");
    if (javaAudioTrackFields.nativeTrackInJavaObj == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME);
        return -1;
    }
    // jniData
    javaAudioTrackFields.jniData = env->GetFieldID(
            audioTrackClass,
            JAVA_JNIDATA_FIELD_NAME, "I");
    if (javaAudioTrackFields.jniData == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_JNIDATA_FIELD_NAME);
        return -1;
    }

    return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}


// ----------------------------------------------------------------------------