/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "StreamOutHAL"
//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_AUDIO

#include <memory>

#include <android/log.h>
#include <hardware/audio.h>
#include <utils/Trace.h>

#include "StreamOut.h"
#include "Util.h"

namespace android {
namespace hardware {
namespace audio {
namespace V2_0 {
namespace implementation {

using ::android::hardware::audio::common::V2_0::ThreadInfo;

namespace {

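// Services the client's write requests on a dedicated thread so audio data
// does not have to travel through HIDL/Binder calls. The client and this
// thread communicate over a set of fast message queues: the client puts audio
// data into the data MQ and a command into the command MQ, then signals
// NOT_EMPTY on the event flag; the thread executes the command against the
// HAL stream, posts the outcome to the status MQ, and signals NOT_FULL.
//
// From the server's point of view, the expected client sequence is roughly
// (illustrative sketch only, not actual client code):
//   dataMQ.write(audioData, numBytes);
//   commandMQ.write(WriteCommand::WRITE);
//   efGroup.wake(NOT_EMPTY);
//   efGroup.wait(NOT_FULL, &state);
//   statusMQ.read(&writeStatus);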
class WriteThread : public Thread {
   public:
    // WriteThread's lifespan never exceeds StreamOut's lifespan.
    WriteThread(std::atomic<bool>* stop, audio_stream_out_t* stream,
                StreamOut::CommandMQ* commandMQ, StreamOut::DataMQ* dataMQ,
                StreamOut::StatusMQ* statusMQ, EventFlag* efGroup)
        : Thread(false /*canCallJava*/),
          mStop(stop),
          mStream(stream),
          mCommandMQ(commandMQ),
          mDataMQ(dataMQ),
          mStatusMQ(statusMQ),
          mEfGroup(efGroup),
          mBuffer(nullptr) {}
    bool init() {
        mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
        return mBuffer != nullptr;
    }
    virtual ~WriteThread() {}

   private:
    std::atomic<bool>* mStop;
    audio_stream_out_t* mStream;
    StreamOut::CommandMQ* mCommandMQ;
    StreamOut::DataMQ* mDataMQ;
    StreamOut::StatusMQ* mStatusMQ;
    EventFlag* mEfGroup;
    std::unique_ptr<uint8_t[]> mBuffer;
    IStreamOut::WriteStatus mStatus;

    bool threadLoop() override;

    void doGetLatency();
    void doGetPresentationPosition();
    void doWrite();
};

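// Drains all bytes currently available in the data MQ into the local buffer
// and hands them to the HAL stream's write(). On success the number of bytes
// actually written is reported back to the client; on error the HAL status is
// translated into a Result code.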
void WriteThread::doWrite() {
    const size_t availToRead = mDataMQ->availableToRead();
    mStatus.retval = Result::OK;
    mStatus.reply.written = 0;
    if (mDataMQ->read(&mBuffer[0], availToRead)) {
        ssize_t writeResult = mStream->write(mStream, &mBuffer[0], availToRead);
        if (writeResult >= 0) {
            mStatus.reply.written = writeResult;
        } else {
            mStatus.retval = Stream::analyzeStatus("write", writeResult);
        }
    }
}

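// Queries the HAL for the presentation position (frame count plus timestamp)
// through the same helper that backs the synchronous
// getPresentationPosition() method.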
void WriteThread::doGetPresentationPosition() {
    mStatus.retval = StreamOut::getPresentationPositionImpl(
        mStream, &mStatus.reply.presentationPosition.frames,
        &mStatus.reply.presentationPosition.timeStamp);
}

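// Reports the HAL stream's current latency in milliseconds.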
void WriteThread::doGetLatency() {
    mStatus.retval = Result::OK;
    mStatus.reply.latencyMs = mStream->get_latency(mStream);
}

bool WriteThread::threadLoop() {
    // This implementation doesn't return control back to the Thread until it
    // decides to stop, as the Thread uses mutexes, and this can lead to
    // priority inversion.
    while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
        uint32_t efState = 0;
        mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY),
                       &efState);
        if (!(efState &
              static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY))) {
            continue;  // Nothing to do.
        }
        if (!mCommandMQ->read(&mStatus.replyTo)) {
            continue;  // Nothing to do.
        }
        switch (mStatus.replyTo) {
            case IStreamOut::WriteCommand::WRITE:
                doWrite();
                break;
            case IStreamOut::WriteCommand::GET_PRESENTATION_POSITION:
                doGetPresentationPosition();
                break;
            case IStreamOut::WriteCommand::GET_LATENCY:
                doGetLatency();
                break;
            default:
                ALOGE("Unknown write thread command code %d", mStatus.replyTo);
                mStatus.retval = Result::NOT_SUPPORTED;
                break;
        }
        if (!mStatusMQ->write(&mStatus)) {
            ALOGE("status message queue write failed");
        }
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
    }

    return false;
}

}  // namespace

StreamOut::StreamOut(const sp<Device>& device, audio_stream_out_t* stream)
    : mIsClosed(false),
      mDevice(device),
      mStream(stream),
      mStreamCommon(new Stream(&stream->common)),
      mStreamMmap(new StreamMmap<audio_stream_out_t>(stream)),
      mEfGroup(nullptr),
      mStopWriteThread(false) {}

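// Tears everything down in order: close() signals the write thread to stop,
// the thread is joined, the MQ event flag is destroyed, and finally the HAL
// stream is handed back to the device.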
StreamOut::~StreamOut() {
    ATRACE_CALL();
    close();
    if (mWriteThread.get()) {
        ATRACE_NAME("mWriteThread->join");
        status_t status = mWriteThread->join();
        ALOGE_IF(status, "write thread exit error: %s", strerror(-status));
    }
    if (mEfGroup) {
        status_t status = EventFlag::deleteEventFlag(&mEfGroup);
        ALOGE_IF(status, "write MQ event flag deletion error: %s",
                 strerror(-status));
    }
    mCallback.clear();
    mDevice->closeOutputStream(mStream);
    mStream = nullptr;
}

// Methods from ::android::hardware::audio::V2_0::IStream follow.
Return<uint64_t> StreamOut::getFrameSize() {
    return audio_stream_out_frame_size(mStream);
}

Return<uint64_t> StreamOut::getFrameCount() {
    return mStreamCommon->getFrameCount();
}

Return<uint64_t> StreamOut::getBufferSize() {
    return mStreamCommon->getBufferSize();
}

Return<uint32_t> StreamOut::getSampleRate() {
    return mStreamCommon->getSampleRate();
}

Return<void> StreamOut::getSupportedSampleRates(
    getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(_hidl_cb);
}

Return<Result> StreamOut::setSampleRate(uint32_t sampleRateHz) {
    return mStreamCommon->setSampleRate(sampleRateHz);
}

Return<AudioChannelMask> StreamOut::getChannelMask() {
    return mStreamCommon->getChannelMask();
}

Return<void> StreamOut::getSupportedChannelMasks(
    getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
}

Return<Result> StreamOut::setChannelMask(AudioChannelMask mask) {
    return mStreamCommon->setChannelMask(mask);
}

Return<AudioFormat> StreamOut::getFormat() {
    return mStreamCommon->getFormat();
}

Return<void> StreamOut::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
    return mStreamCommon->getSupportedFormats(_hidl_cb);
}

Return<Result> StreamOut::setFormat(AudioFormat format) {
    return mStreamCommon->setFormat(format);
}

Return<void> StreamOut::getAudioProperties(getAudioProperties_cb _hidl_cb) {
    return mStreamCommon->getAudioProperties(_hidl_cb);
}

Return<Result> StreamOut::addEffect(uint64_t effectId) {
    return mStreamCommon->addEffect(effectId);
}

Return<Result> StreamOut::removeEffect(uint64_t effectId) {
    return mStreamCommon->removeEffect(effectId);
}

Return<Result> StreamOut::standby() {
    return mStreamCommon->standby();
}

Return<AudioDevice> StreamOut::getDevice() {
    return mStreamCommon->getDevice();
}

Return<Result> StreamOut::setDevice(const DeviceAddress& address) {
    return mStreamCommon->setDevice(address);
}

Return<Result> StreamOut::setConnectedState(const DeviceAddress& address,
                                            bool connected) {
    return mStreamCommon->setConnectedState(address, connected);
}

Return<Result> StreamOut::setHwAvSync(uint32_t hwAvSync) {
    return mStreamCommon->setHwAvSync(hwAvSync);
}

Return<void> StreamOut::getParameters(const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(
    const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(parameters);
}

Return<void> StreamOut::debugDump(const hidl_handle& fd) {
    return mStreamCommon->debugDump(fd);
}

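// close() only marks the stream as closed and asks the write thread to stop;
// joining the thread and releasing the HAL stream are left to the destructor.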
Return<Result> StreamOut::close() {
    if (mIsClosed) return Result::INVALID_STATE;
    mIsClosed = true;
    if (mWriteThread.get()) {
        mStopWriteThread.store(true, std::memory_order_release);
    }
    if (mEfGroup) {
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
    }
    return Result::OK;
}

// Methods from ::android::hardware::audio::V2_0::IStreamOut follow.
Return<uint32_t> StreamOut::getLatency() {
    return mStream->get_latency(mStream);
}

Return<Result> StreamOut::setVolume(float left, float right) {
    if (mStream->set_volume == NULL) {
        return Result::NOT_SUPPORTED;
    }
    if (!isGainNormalized(left) || !isGainNormalized(right)) {
        ALOGW("Cannot set a stream output volume {%f, %f} outside [0,1]", left,
              right);
        return Result::INVALID_ARGUMENTS;
    }
    return Stream::analyzeStatus("set_volume",
                                 mStream->set_volume(mStream, left, right));
}

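// Sets up the fast message queues and the write thread that service
// asynchronous writes from the client. Everything is created into temporaries
// first and committed to the member fields only after every step has
// succeeded, so a failed attempt leaves the stream unchanged.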
Return<void> StreamOut::prepareForWriting(uint32_t frameSize,
                                          uint32_t framesCount,
                                          prepareForWriting_cb _hidl_cb) {
    status_t status;
    ThreadInfo threadInfo = {0, 0};

    // Wrap the _hidl_cb to return an error
    auto sendError = [this, &threadInfo, &_hidl_cb](Result result) {
        _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(),
                 StatusMQ::Descriptor(), threadInfo);
    };

    // Create message queues.
    if (mDataMQ) {
        ALOGE("the client attempts to call prepareForWriting twice");
        sendError(Result::INVALID_STATE);
        return Void();
    }
    std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));

    // Check frameSize and framesCount
    if (frameSize == 0 || framesCount == 0) {
        ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize,
              framesCount);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
        ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
              Stream::MAX_BUFFER_SIZE);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    std::unique_ptr<DataMQ> tempDataMQ(
        new DataMQ(frameSize * framesCount, true /* EventFlag */));

    std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
    if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() ||
        !tempStatusMQ->isValid()) {
        ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
        ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
        ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
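    // Create an event flag on the data MQ so the client and the write thread
    // can signal each other about queue state.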
    EventFlag* tempRawEfGroup{};
    status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(),
                                        &tempRawEfGroup);
    std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
        tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
    if (status != OK || !tempElfGroup) {
        ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    // Create and launch the thread.
    auto tempWriteThread = std::make_unique<WriteThread>(
        &mStopWriteThread, mStream, tempCommandMQ.get(), tempDataMQ.get(),
        tempStatusMQ.get(), tempElfGroup.get());
    if (!tempWriteThread->init()) {
        ALOGW("failed to initialize writer thread: buffer allocation failed");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    status = tempWriteThread->run("writer", PRIORITY_URGENT_AUDIO);
    if (status != OK) {
        ALOGW("failed to start writer thread: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

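    // Everything is in place: transfer ownership to the member fields and
    // report the MQ descriptors and the writer thread identity to the client.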
    mCommandMQ = std::move(tempCommandMQ);
    mDataMQ = std::move(tempDataMQ);
    mStatusMQ = std::move(tempStatusMQ);
    mWriteThread = tempWriteThread.release();
    mEfGroup = tempElfGroup.release();
    threadInfo.pid = getpid();
    threadInfo.tid = mWriteThread->getTid();
    _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(),
             *mStatusMQ->getDesc(), threadInfo);
    return Void();
}

Return<void> StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) {
    uint32_t halDspFrames;
    Result retval = Stream::analyzeStatus(
        "get_render_position",
        mStream->get_render_position(mStream, &halDspFrames));
    _hidl_cb(retval, halDspFrames);
    return Void();
}

Return<void> StreamOut::getNextWriteTimestamp(
    getNextWriteTimestamp_cb _hidl_cb) {
    Result retval(Result::NOT_SUPPORTED);
    int64_t timestampUs = 0;
    if (mStream->get_next_write_timestamp != NULL) {
        retval = Stream::analyzeStatus(
            "get_next_write_timestamp",
            mStream->get_next_write_timestamp(mStream, &timestampUs));
    }
    _hidl_cb(retval, timestampUs);
    return Void();
}

Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
    if (result == 0) {
        mCallback = callback;
    }
    return Stream::analyzeStatus("set_callback", result);
}

Return<Result> StreamOut::clearCallback() {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    mCallback.clear();
    return Result::OK;
}

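// Bridges asynchronous events from the HAL stream back to the client's
// IStreamOutCallback. The cookie is the StreamOut instance itself; promoting
// a weak pointer guards against the stream being destroyed while a callback
// is in flight.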
// static
int StreamOut::asyncCallback(stream_callback_event_t event, void*,
                             void* cookie) {
    wp<StreamOut> weakSelf(reinterpret_cast<StreamOut*>(cookie));
    sp<StreamOut> self = weakSelf.promote();
    if (self == nullptr || self->mCallback == nullptr) return 0;
    ALOGV("asyncCallback() event %d", event);
    switch (event) {
        case STREAM_CBK_EVENT_WRITE_READY:
            self->mCallback->onWriteReady();
            break;
        case STREAM_CBK_EVENT_DRAIN_READY:
            self->mCallback->onDrainReady();
            break;
        case STREAM_CBK_EVENT_ERROR:
            self->mCallback->onError();
            break;
        default:
            ALOGW("asyncCallback() unknown event %d", event);
            break;
    }
    return 0;
}

Return<void> StreamOut::supportsPauseAndResume(
    supportsPauseAndResume_cb _hidl_cb) {
    _hidl_cb(mStream->pause != NULL, mStream->resume != NULL);
    return Void();
}

Return<Result> StreamOut::pause() {
    return mStream->pause != NULL
               ? Stream::analyzeStatus("pause", mStream->pause(mStream))
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::resume() {
    return mStream->resume != NULL
               ? Stream::analyzeStatus("resume", mStream->resume(mStream))
               : Result::NOT_SUPPORTED;
}

Return<bool> StreamOut::supportsDrain() {
    return mStream->drain != NULL;
}

Return<Result> StreamOut::drain(AudioDrain type) {
    return mStream->drain != NULL
               ? Stream::analyzeStatus(
                     "drain",
                     mStream->drain(mStream,
                                    static_cast<audio_drain_type_t>(type)))
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::flush() {
    return mStream->flush != NULL
               ? Stream::analyzeStatus("flush", mStream->flush(mStream))
               : Result::NOT_SUPPORTED;
}

// static
Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream,
                                              uint64_t* frames,
                                              TimeSpec* timeStamp) {
    // Don't logspam on EINVAL--it's normal for get_presentation_position
    // to return it sometimes. EAGAIN may be returned by the A2DP audio HAL
    // implementation. ENODATA can also be reported when the writer keeps
    // querying the position after the stream has been stopped.
    static const std::vector<int> ignoredErrors{EINVAL, EAGAIN, ENODATA};
    Result retval(Result::NOT_SUPPORTED);
    if (stream->get_presentation_position == NULL) return retval;
    struct timespec halTimeStamp;
    retval = Stream::analyzeStatus("get_presentation_position",
                                   stream->get_presentation_position(stream, frames, &halTimeStamp),
                                   ignoredErrors);
    if (retval == Result::OK) {
        timeStamp->tvSec = halTimeStamp.tv_sec;
        timeStamp->tvNSec = halTimeStamp.tv_nsec;
    }
    return retval;
}

Return<void> StreamOut::getPresentationPosition(
    getPresentationPosition_cb _hidl_cb) {
    uint64_t frames = 0;
    TimeSpec timeStamp = {0, 0};
    Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp);
    _hidl_cb(retval, frames, timeStamp);
    return Void();
}

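// start(), stop(), createMmapBuffer() and getMmapPosition() only apply to
// streams operating in MMAP mode and are delegated to the StreamMmap helper.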
Return<Result> StreamOut::start() {
    return mStreamMmap->start();
}

Return<Result> StreamOut::stop() {
    return mStreamMmap->stop();
}

Return<void> StreamOut::createMmapBuffer(int32_t minSizeFrames,
                                         createMmapBuffer_cb _hidl_cb) {
    return mStreamMmap->createMmapBuffer(
        minSizeFrames, audio_stream_out_frame_size(mStream), _hidl_cb);
}

Return<void> StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) {
    return mStreamMmap->getMmapPosition(_hidl_cb);
}

}  // namespace implementation
}  // namespace V2_0
}  // namespace audio
}  // namespace hardware
}  // namespace android