/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "StreamOutHAL"

#include "core/default/StreamOut.h"
#include "core/default/Util.h"

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_AUDIO

#include <memory>

#include <android/log.h>
#include <hardware/audio.h>
#include <utils/Trace.h>

namespace android {
namespace hardware {
namespace audio {
namespace CPP_VERSION {
namespace implementation {

namespace {

class WriteThread : public Thread {
  public:
    // WriteThread's lifespan never exceeds StreamOut's lifespan.
    WriteThread(std::atomic<bool>* stop, audio_stream_out_t* stream,
                StreamOut::CommandMQ* commandMQ, StreamOut::DataMQ* dataMQ,
                StreamOut::StatusMQ* statusMQ, EventFlag* efGroup)
        : Thread(false /*canCallJava*/),
          mStop(stop),
          mStream(stream),
          mCommandMQ(commandMQ),
          mDataMQ(dataMQ),
          mStatusMQ(statusMQ),
          mEfGroup(efGroup),
          mBuffer(nullptr) {}
    bool init() {
        mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
        return mBuffer != nullptr;
    }
    virtual ~WriteThread() {}

  private:
    std::atomic<bool>* mStop;
    audio_stream_out_t* mStream;
    StreamOut::CommandMQ* mCommandMQ;
    StreamOut::DataMQ* mDataMQ;
    StreamOut::StatusMQ* mStatusMQ;
    EventFlag* mEfGroup;
    std::unique_ptr<uint8_t[]> mBuffer;
    IStreamOut::WriteStatus mStatus;

    bool threadLoop() override;

    void doGetLatency();
    void doGetPresentationPosition();
    void doWrite();
};

void WriteThread::doWrite() {
    const size_t availToRead = mDataMQ->availableToRead();
    mStatus.retval = Result::OK;
    mStatus.reply.written = 0;
    if (mDataMQ->read(&mBuffer[0], availToRead)) {
        ssize_t writeResult = mStream->write(mStream, &mBuffer[0], availToRead);
        if (writeResult >= 0) {
            mStatus.reply.written = writeResult;
        } else {
            mStatus.retval = Stream::analyzeStatus("write", writeResult);
        }
    }
}

void WriteThread::doGetPresentationPosition() {
    mStatus.retval =
        StreamOut::getPresentationPositionImpl(mStream, &mStatus.reply.presentationPosition.frames,
                                               &mStatus.reply.presentationPosition.timeStamp);
}

void WriteThread::doGetLatency() {
    mStatus.retval = Result::OK;
    mStatus.reply.latencyMs = mStream->get_latency(mStream);
}

bool WriteThread::threadLoop() {
    // This implementation doesn't return control back to the Thread until it decides to stop,
    // as the Thread uses mutexes, and this can lead to priority inversion.
    while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
        uint32_t efState = 0;
        mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);
        if (!(efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY))) {
            continue;  // Nothing to do.
        }
        if (!mCommandMQ->read(&mStatus.replyTo)) {
            continue;  // Nothing to do.
        }
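        // Execute the command requested by the client; the outcome is reported
        // back through the status queue once the switch below completes.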
        switch (mStatus.replyTo) {
            case IStreamOut::WriteCommand::WRITE:
                doWrite();
                break;
            case IStreamOut::WriteCommand::GET_PRESENTATION_POSITION:
                doGetPresentationPosition();
                break;
            case IStreamOut::WriteCommand::GET_LATENCY:
                doGetLatency();
                break;
            default:
                ALOGE("Unknown write thread command code %d", mStatus.replyTo);
                mStatus.retval = Result::NOT_SUPPORTED;
                break;
        }
        if (!mStatusMQ->write(&mStatus)) {
            ALOGE("status message queue write failed");
        }
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
    }

    return false;
}

}  // namespace

StreamOut::StreamOut(const sp<Device>& device, audio_stream_out_t* stream)
    : mIsClosed(false),
      mDevice(device),
      mStream(stream),
      mStreamCommon(new Stream(&stream->common)),
      mStreamMmap(new StreamMmap<audio_stream_out_t>(stream)),
      mEfGroup(nullptr),
      mStopWriteThread(false) {}

StreamOut::~StreamOut() {
    ATRACE_CALL();
    close();
    if (mWriteThread.get()) {
        ATRACE_NAME("mWriteThread->join");
        status_t status = mWriteThread->join();
        ALOGE_IF(status, "write thread exit error: %s", strerror(-status));
    }
    if (mEfGroup) {
        status_t status = EventFlag::deleteEventFlag(&mEfGroup);
        ALOGE_IF(status, "write MQ event flag deletion error: %s", strerror(-status));
    }
    mCallback.clear();
    mDevice->closeOutputStream(mStream);
    // Closing the output stream in the HAL waits for the callback to finish,
    // and joins the callback thread. Thus it is guaranteed that the callback
    // thread will not be accessing our object anymore.
    mStream = nullptr;
}

// Methods from ::android::hardware::audio::CPP_VERSION::IStream follow.
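// Most of the IStream methods below delegate to the common Stream helper (mStreamCommon),
// which wraps the audio_stream_t part (&stream->common) of the legacy HAL stream.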
Return<uint64_t> StreamOut::getFrameSize() {
    return audio_stream_out_frame_size(mStream);
}

Return<uint64_t> StreamOut::getFrameCount() {
    return mStreamCommon->getFrameCount();
}

Return<uint64_t> StreamOut::getBufferSize() {
    return mStreamCommon->getBufferSize();
}

Return<uint32_t> StreamOut::getSampleRate() {
    return mStreamCommon->getSampleRate();
}

#if MAJOR_VERSION == 2
Return<void> StreamOut::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(_hidl_cb);
}
#endif

Return<void> StreamOut::getSupportedChannelMasks(AudioFormat format,
                                                 getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(format, _hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(AudioFormat format,
                                                getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(format, _hidl_cb);
}

Return<Result> StreamOut::setSampleRate(uint32_t sampleRateHz) {
    return mStreamCommon->setSampleRate(sampleRateHz);
}

Return<AudioChannelBitfield> StreamOut::getChannelMask() {
    return mStreamCommon->getChannelMask();
}

Return<Result> StreamOut::setChannelMask(AudioChannelBitfield mask) {
    return mStreamCommon->setChannelMask(mask);
}

Return<AudioFormat> StreamOut::getFormat() {
    return mStreamCommon->getFormat();
}

Return<void> StreamOut::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
    return mStreamCommon->getSupportedFormats(_hidl_cb);
}

Return<Result> StreamOut::setFormat(AudioFormat format) {
    return mStreamCommon->setFormat(format);
}

Return<void> StreamOut::getAudioProperties(getAudioProperties_cb _hidl_cb) {
    return mStreamCommon->getAudioProperties(_hidl_cb);
}

Return<Result> StreamOut::addEffect(uint64_t effectId) {
    return mStreamCommon->addEffect(effectId);
}

Return<Result> StreamOut::removeEffect(uint64_t effectId) {
    return mStreamCommon->removeEffect(effectId);
}

Return<Result> StreamOut::standby() {
    return mStreamCommon->standby();
}

Return<Result> StreamOut::setHwAvSync(uint32_t hwAvSync) {
    return mStreamCommon->setHwAvSync(hwAvSync);
}

#if MAJOR_VERSION == 2
Return<Result> StreamOut::setConnectedState(const DeviceAddress& address, bool connected) {
    return mStreamCommon->setConnectedState(address, connected);
}

Return<AudioDevice> StreamOut::getDevice() {
    return mStreamCommon->getDevice();
}

Return<Result> StreamOut::setDevice(const DeviceAddress& address) {
    return mStreamCommon->setDevice(address);
}

Return<void> StreamOut::getParameters(const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(parameters);
}

Return<void> StreamOut::debugDump(const hidl_handle& fd) {
    return mStreamCommon->debugDump(fd);
}
#elif MAJOR_VERSION >= 4
Return<void> StreamOut::getDevices(getDevices_cb _hidl_cb) {
    return mStreamCommon->getDevices(_hidl_cb);
}

Return<Result> StreamOut::setDevices(const hidl_vec<DeviceAddress>& devices) {
    return mStreamCommon->setDevices(devices);
}

Return<void> StreamOut::getParameters(const hidl_vec<ParameterValue>& context,
                                      const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(context, keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& context,
                                        const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(context, parameters);
}
#endif

Return<Result> StreamOut::close() {
    if (mIsClosed) return Result::INVALID_STATE;
    mIsClosed = true;
    if (mWriteThread.get()) {
        mStopWriteThread.store(true, std::memory_order_release);
    }
    if (mEfGroup) {
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
    }
    return Result::OK;
}

// Methods from ::android::hardware::audio::CPP_VERSION::IStreamOut follow.
Return<uint32_t> StreamOut::getLatency() {
    return mStream->get_latency(mStream);
}

Return<Result> StreamOut::setVolume(float left, float right) {
    if (mStream->set_volume == NULL) {
        return Result::NOT_SUPPORTED;
    }
    if (!isGainNormalized(left) || !isGainNormalized(right)) {
        ALOGW("Can not set a stream output volume {%f, %f} outside [0,1]", left, right);
        return Result::INVALID_ARGUMENTS;
    }
    return Stream::analyzeStatus("set_volume", mStream->set_volume(mStream, left, right));
}

Return<void> StreamOut::prepareForWriting(uint32_t frameSize, uint32_t framesCount,
                                          prepareForWriting_cb _hidl_cb) {
    status_t status;
    ThreadInfo threadInfo = {0, 0};

    // Wrap the _hidl_cb to return an error
    auto sendError = [&threadInfo, &_hidl_cb](Result result) {
        _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), StatusMQ::Descriptor(),
                 threadInfo);
    };

    // Create message queues.
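    // The data MQ carries the audio frames; the command and status MQs each hold a single
    // element and form the request/reply channel between the client and the write thread.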
    if (mDataMQ) {
        ALOGE("the client attempts to call prepareForWriting twice");
        sendError(Result::INVALID_STATE);
        return Void();
    }
    std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));

    // Check frameSize and framesCount
    if (frameSize == 0 || framesCount == 0) {
        ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, framesCount);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
        ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
              Stream::MAX_BUFFER_SIZE);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    std::unique_ptr<DataMQ> tempDataMQ(new DataMQ(frameSize * framesCount, true /* EventFlag */));

    std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
    if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || !tempStatusMQ->isValid()) {
        ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
        ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
        ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    EventFlag* tempRawEfGroup{};
    status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &tempRawEfGroup);
    std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
        tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
    if (status != OK || !tempElfGroup) {
        ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    // Create and launch the thread.
    auto tempWriteThread =
        std::make_unique<WriteThread>(&mStopWriteThread, mStream, tempCommandMQ.get(),
                                      tempDataMQ.get(), tempStatusMQ.get(), tempElfGroup.get());
    if (!tempWriteThread->init()) {
        ALOGW("failed to initialize writer thread");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    status = tempWriteThread->run("writer", PRIORITY_URGENT_AUDIO);
    if (status != OK) {
        ALOGW("failed to start writer thread: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    mCommandMQ = std::move(tempCommandMQ);
    mDataMQ = std::move(tempDataMQ);
    mStatusMQ = std::move(tempStatusMQ);
    mWriteThread = tempWriteThread.release();
    mEfGroup = tempElfGroup.release();
    threadInfo.pid = getpid();
    threadInfo.tid = mWriteThread->getTid();
    _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), *mStatusMQ->getDesc(),
             threadInfo);
    return Void();
}

Return<void> StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) {
    uint32_t halDspFrames;
    Result retval = Stream::analyzeStatus("get_render_position",
                                          mStream->get_render_position(mStream, &halDspFrames));
    _hidl_cb(retval, halDspFrames);
    return Void();
}

Return<void> StreamOut::getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) {
    Result retval(Result::NOT_SUPPORTED);
    int64_t timestampUs = 0;
    if (mStream->get_next_write_timestamp != NULL) {
        retval = Stream::analyzeStatus("get_next_write_timestamp",
                                       mStream->get_next_write_timestamp(mStream, &timestampUs));
    }
    _hidl_cb(retval, timestampUs);
    return Void();
}
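
// Non-blocking write support: the client registers an IStreamOutCallback, and the legacy HAL
// reports write/drain readiness and errors through StreamOut::asyncCallback below.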
Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    // Safe to pass 'this' because it is guaranteed that the callback thread
    // is joined prior to exit from StreamOut's destructor.
    int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
    if (result == 0) {
        mCallback = callback;
    }
    return Stream::analyzeStatus("set_callback", result);
}

Return<Result> StreamOut::clearCallback() {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    mCallback.clear();
    return Result::OK;
}

// static
int StreamOut::asyncCallback(stream_callback_event_t event, void*, void* cookie) {
    // It is guaranteed that the callback thread is joined prior
    // to exiting from StreamOut's destructor. Must *not* use sp<StreamOut>
    // here because it can make this code the last owner of StreamOut,
    // and an attempt to run the destructor on the callback thread
    // will cause a deadlock in the legacy HAL code.
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    // It is safe to hold an sp<> to the callback here because the reference
    // held in the StreamOut instance can be cleared concurrently. It does not
    // matter on which thread IStreamOutCallback's destructor runs.
    sp<IStreamOutCallback> callback = self->mCallback;
    if (callback.get() == nullptr) return 0;
    ALOGV("asyncCallback() event %d", event);
    switch (event) {
        case STREAM_CBK_EVENT_WRITE_READY:
            callback->onWriteReady();
            break;
        case STREAM_CBK_EVENT_DRAIN_READY:
            callback->onDrainReady();
            break;
        case STREAM_CBK_EVENT_ERROR:
            callback->onError();
            break;
        default:
            ALOGW("asyncCallback() unknown event %d", event);
            break;
    }
    return 0;
}

Return<void> StreamOut::supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) {
    _hidl_cb(mStream->pause != NULL, mStream->resume != NULL);
    return Void();
}

Return<Result> StreamOut::pause() {
    return mStream->pause != NULL ? Stream::analyzeStatus("pause", mStream->pause(mStream))
                                  : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::resume() {
    return mStream->resume != NULL ? Stream::analyzeStatus("resume", mStream->resume(mStream))
                                   : Result::NOT_SUPPORTED;
}

Return<bool> StreamOut::supportsDrain() {
    return mStream->drain != NULL;
}

Return<Result> StreamOut::drain(AudioDrain type) {
    return mStream->drain != NULL
               ? Stream::analyzeStatus(
                     "drain", mStream->drain(mStream, static_cast<audio_drain_type_t>(type)))
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::flush() {
    return mStream->flush != NULL ? Stream::analyzeStatus("flush", mStream->flush(mStream))
                                  : Result::NOT_SUPPORTED;
}
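
// getPresentationPositionImpl is shared between the HIDL method below and the FMQ write
// thread (see WriteThread::doGetPresentationPosition), hence the static helper.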
// static
Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, uint64_t* frames,
                                              TimeSpec* timeStamp) {
    // Don't logspam on EINVAL--it's normal for get_presentation_position
    // to return it sometimes. EAGAIN may be returned by A2DP audio HAL
    // implementation. ENODATA can also be reported while the writer keeps
    // querying the position after the stream has been stopped.
    static const std::vector<int> ignoredErrors{EINVAL, EAGAIN, ENODATA};
    Result retval(Result::NOT_SUPPORTED);
    if (stream->get_presentation_position == NULL) return retval;
    struct timespec halTimeStamp;
    retval = Stream::analyzeStatus("get_presentation_position",
                                   stream->get_presentation_position(stream, frames, &halTimeStamp),
                                   ignoredErrors);
    if (retval == Result::OK) {
        timeStamp->tvSec = halTimeStamp.tv_sec;
        timeStamp->tvNSec = halTimeStamp.tv_nsec;
    }
    return retval;
}

Return<void> StreamOut::getPresentationPosition(getPresentationPosition_cb _hidl_cb) {
    uint64_t frames = 0;
    TimeSpec timeStamp = {0, 0};
    Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp);
    _hidl_cb(retval, frames, timeStamp);
    return Void();
}

Return<Result> StreamOut::start() {
    return mStreamMmap->start();
}

Return<Result> StreamOut::stop() {
    return mStreamMmap->stop();
}

Return<void> StreamOut::createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) {
    return mStreamMmap->createMmapBuffer(minSizeFrames, audio_stream_out_frame_size(mStream),
                                         _hidl_cb);
}

Return<void> StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) {
    return mStreamMmap->getMmapPosition(_hidl_cb);
}

Return<void> StreamOut::debug(const hidl_handle& fd, const hidl_vec<hidl_string>& options) {
    return mStreamCommon->debug(fd, options);
}

#if MAJOR_VERSION >= 4
Return<void> StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
    if (mStream->update_source_metadata == nullptr) {
        return Void();  // not supported by the HAL
    }
    std::vector<playback_track_metadata> halTracks;
    halTracks.reserve(sourceMetadata.tracks.size());
    for (auto& metadata : sourceMetadata.tracks) {
        halTracks.push_back({
            .usage = static_cast<audio_usage_t>(metadata.usage),
            .content_type = static_cast<audio_content_type_t>(metadata.contentType),
            .gain = metadata.gain,
        });
    }
    const source_metadata_t halMetadata = {
        .track_count = halTracks.size(),
        .tracks = halTracks.data(),
    };
    mStream->update_source_metadata(mStream, &halMetadata);
    return Void();
}

Return<Result> StreamOut::selectPresentation(int32_t /*presentationId*/, int32_t /*programId*/) {
    return Result::NOT_SUPPORTED;  // TODO: propagate to legacy
}
#endif

}  // namespace implementation
}  // namespace CPP_VERSION
}  // namespace audio
}  // namespace hardware
}  // namespace android