/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(WEB_AUDIO)

#include "modules/webaudio/ScriptProcessorNode.h"

#include "core/dom/Document.h"
#include "modules/webaudio/AudioBuffer.h"
#include "modules/webaudio/AudioContext.h"
#include "modules/webaudio/AudioNodeInput.h"
#include "modules/webaudio/AudioNodeOutput.h"
#include "modules/webaudio/AudioProcessingEvent.h"
#include "public/platform/Platform.h"
#include "wtf/Float32Array.h"
#include "wtf/MainThread.h"

#include <limits>

namespace WebCore {

static size_t chooseBufferSize()
{
    // Choose a buffer size based on the audio hardware buffer size. Arbitrarily make it a power of
    // two that is roughly 4 times the hardware buffer size.
    // FIXME: What is the best way to choose this?
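    // For example, a 128-frame hardware buffer gives 4 * 128 = 512, which is already a power of
    // two, so the callback buffer would be 512 frames (illustrative values only).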
    size_t hardwareBufferSize = blink::Platform::current()->audioHardwareBufferSize();
    size_t bufferSize = 1 << static_cast<unsigned>(log2(4 * hardwareBufferSize) + 0.5);

    if (bufferSize < 256)
        return 256;
    if (bufferSize > 16384)
        return 16384;

    return bufferSize;
}

PassRefPtrWillBeRawPtr<ScriptProcessorNode> ScriptProcessorNode::create(AudioContext* context, float sampleRate, size_t bufferSize, unsigned numberOfInputChannels, unsigned numberOfOutputChannels)
{
    // Check for valid buffer size.
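    // A bufferSize of 0 lets the implementation pick a good size; any other value must be one of
    // the power-of-two sizes below, otherwise creation fails.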
    switch (bufferSize) {
    case 0:
        bufferSize = chooseBufferSize();
        break;
    case 256:
    case 512:
    case 1024:
    case 2048:
    case 4096:
    case 8192:
    case 16384:
        break;
    default:
        return nullptr;
    }

    if (!numberOfInputChannels && !numberOfOutputChannels)
        return nullptr;

    if (numberOfInputChannels > AudioContext::maxNumberOfChannels())
        return nullptr;

    if (numberOfOutputChannels > AudioContext::maxNumberOfChannels())
        return nullptr;

    return adoptRefWillBeNoop(new ScriptProcessorNode(context, sampleRate, bufferSize, numberOfInputChannels, numberOfOutputChannels));
}

ScriptProcessorNode::ScriptProcessorNode(AudioContext* context, float sampleRate, size_t bufferSize, unsigned numberOfInputChannels, unsigned numberOfOutputChannels)
    : AudioNode(context, sampleRate)
    , m_doubleBufferIndex(0)
    , m_doubleBufferIndexForEvent(0)
    , m_bufferSize(bufferSize)
    , m_bufferReadWriteIndex(0)
    , m_numberOfInputChannels(numberOfInputChannels)
    , m_numberOfOutputChannels(numberOfOutputChannels)
    , m_internalInputBus(AudioBus::create(numberOfInputChannels, AudioNode::ProcessingSizeInFrames, false))
{
    ScriptWrappable::init(this);
    // Regardless of the allowed buffer sizes, we still need to process at the granularity of the AudioNode.
    if (m_bufferSize < AudioNode::ProcessingSizeInFrames)
        m_bufferSize = AudioNode::ProcessingSizeInFrames;

    ASSERT(numberOfInputChannels <= AudioContext::maxNumberOfChannels());

    addInput(adoptPtr(new AudioNodeInput(this)));
    addOutput(adoptPtr(new AudioNodeOutput(this, numberOfOutputChannels)));

    setNodeType(NodeTypeJavaScript);

    initialize();
}

ScriptProcessorNode::~ScriptProcessorNode()
{
    uninitialize();
}

void ScriptProcessorNode::initialize()
{
    if (isInitialized())
        return;

    float sampleRate = context()->sampleRate();

    // Create double buffers on both the input and output sides.
    // These AudioBuffers will be directly accessed in the main thread by JavaScript.
    for (unsigned i = 0; i < 2; ++i) {
        RefPtrWillBeRawPtr<AudioBuffer> inputBuffer = m_numberOfInputChannels ? AudioBuffer::create(m_numberOfInputChannels, bufferSize(), sampleRate) : nullptr;
        RefPtrWillBeRawPtr<AudioBuffer> outputBuffer = m_numberOfOutputChannels ? AudioBuffer::create(m_numberOfOutputChannels, bufferSize(), sampleRate) : nullptr;

        m_inputBuffers.append(inputBuffer);
        m_outputBuffers.append(outputBuffer);
    }

    AudioNode::initialize();
}

void ScriptProcessorNode::uninitialize()
{
    if (!isInitialized())
        return;

    m_inputBuffers.clear();
    m_outputBuffers.clear();

    AudioNode::uninitialize();
}

void ScriptProcessorNode::process(size_t framesToProcess)
{
    // Discussion about inputs and outputs:
    // As in other AudioNodes, ScriptProcessorNode uses an AudioBus for its input and output (see inputBus and outputBus below).
    // Additionally, there is double buffering for input and output which is exposed directly to JavaScript (see inputBuffer and outputBuffer below).
    // This node is the producer for inputBuffer and the consumer for outputBuffer.
    // The JavaScript code is the consumer of inputBuffer and the producer for outputBuffer.
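    // In other words, process() (running on the audio thread) fills inputBuffer from inputBus and
    // drains outputBuffer into outputBus, while the onaudioprocess handler (running on the main
    // thread) reads inputBuffer and fills outputBuffer.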

    // Get input and output busses.
    AudioBus* inputBus = this->input(0)->bus();
    AudioBus* outputBus = this->output(0)->bus();

    // Get input and output buffers. We double-buffer both the input and output sides.
    unsigned doubleBufferIndex = this->doubleBufferIndex();
    bool isDoubleBufferIndexGood = doubleBufferIndex < 2 && doubleBufferIndex < m_inputBuffers.size() && doubleBufferIndex < m_outputBuffers.size();
    ASSERT(isDoubleBufferIndexGood);
    if (!isDoubleBufferIndexGood)
        return;

    AudioBuffer* inputBuffer = m_inputBuffers[doubleBufferIndex].get();
    AudioBuffer* outputBuffer = m_outputBuffers[doubleBufferIndex].get();

    // Check the consistency of input and output buffers.
    unsigned numberOfInputChannels = m_internalInputBus->numberOfChannels();
    bool buffersAreGood = outputBuffer && bufferSize() == outputBuffer->length() && m_bufferReadWriteIndex + framesToProcess <= bufferSize();

    // If the number of input channels is zero, it's OK for inputBuffer to be null.
    if (m_internalInputBus->numberOfChannels())
        buffersAreGood = buffersAreGood && inputBuffer && bufferSize() == inputBuffer->length();

    ASSERT(buffersAreGood);
    if (!buffersAreGood)
        return;

    // We assume that bufferSize() is evenly divisible by framesToProcess - should always be true, but we should still check.
    bool isFramesToProcessGood = framesToProcess && bufferSize() >= framesToProcess && !(bufferSize() % framesToProcess);
    ASSERT(isFramesToProcessGood);
    if (!isFramesToProcessGood)
        return;

    unsigned numberOfOutputChannels = outputBus->numberOfChannels();

    bool channelsAreGood = (numberOfInputChannels == m_numberOfInputChannels) && (numberOfOutputChannels == m_numberOfOutputChannels);
    ASSERT(channelsAreGood);
    if (!channelsAreGood)
        return;

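    // Point the internal input bus at the current write position in inputBuffer, then copy this
    // quantum of incoming audio into it.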
    for (unsigned i = 0; i < numberOfInputChannels; i++)
        m_internalInputBus->setChannelMemory(i, inputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, framesToProcess);

    if (numberOfInputChannels)
        m_internalInputBus->copyFrom(*inputBus);

    // Copy from the output buffer to the output.
    for (unsigned i = 0; i < numberOfOutputChannels; ++i)
        memcpy(outputBus->channel(i)->mutableData(), outputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, sizeof(float) * framesToProcess);

    // Update the buffering index.
    m_bufferReadWriteIndex = (m_bufferReadWriteIndex + framesToProcess) % bufferSize();

    // m_bufferReadWriteIndex will wrap back around to 0 when the current input and output buffers are full.
    // When this happens, fire an event and swap buffers.
    if (!m_bufferReadWriteIndex) {
        // Avoid building up requests on the main thread to fire process events when they're not being handled.
        // This could be a problem if the main thread is very busy doing other things and is being held up handling previous requests.
        // The audio thread can't block on this lock, so we call tryLock() instead.
        MutexTryLocker tryLocker(m_processEventLock);
        if (!tryLocker.locked()) {
            // We're late in handling the previous request. The main thread must be very busy.
            // The best we can do is clear out the buffer ourselves here.
            outputBuffer->zero();
        } else {
            // Reference ourselves so we don't accidentally get deleted before fireProcessEvent() gets called.
            ref();

            // Fire the event on the main thread, not this one (which is the realtime audio thread).
            m_doubleBufferIndexForEvent = m_doubleBufferIndex;
            callOnMainThread(fireProcessEventDispatch, this);
        }

        swapBuffers();
    }
}

void ScriptProcessorNode::fireProcessEventDispatch(void* userData)
{
    ScriptProcessorNode* jsAudioNode = static_cast<ScriptProcessorNode*>(userData);
    ASSERT(jsAudioNode);
    if (!jsAudioNode)
        return;

    jsAudioNode->fireProcessEvent();

    // De-reference to match the ref() call in process().
    jsAudioNode->deref();
}

void ScriptProcessorNode::fireProcessEvent()
{
    ASSERT(isMainThread());

    bool isIndexGood = m_doubleBufferIndexForEvent < 2;
    ASSERT(isIndexGood);
    if (!isIndexGood)
        return;

    AudioBuffer* inputBuffer = m_inputBuffers[m_doubleBufferIndexForEvent].get();
    AudioBuffer* outputBuffer = m_outputBuffers[m_doubleBufferIndexForEvent].get();
    ASSERT(outputBuffer);
    if (!outputBuffer)
        return;

    // Avoid firing the event if the document has already gone away.
    if (context()->executionContext()) {
        // This synchronizes with process().
        MutexLocker processLocker(m_processEventLock);

        // Calculate the playbackTime based on the buffer size that is processed each time onaudioprocess is called.
        // Because of the double buffering, the outputBuffer passed to JS will start playing once the previous outputBuffer has been exhausted.
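        // For example, with a 4096-frame buffer at a 44.1 kHz sample rate, the buffer handed to the
        // handler starts playing about 4096 / 44100, or roughly 93 ms, after the current sample
        // frame (illustrative values only).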
        double playbackTime = (context()->currentSampleFrame() + m_bufferSize) / static_cast<double>(context()->sampleRate());

        // Call the JavaScript event handler which will do the audio processing.
        dispatchEvent(AudioProcessingEvent::create(inputBuffer, outputBuffer, playbackTime));
    }
}

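// Report unbounded tail and latency times, since the engine cannot know how much delay or tail the
// script callback introduces.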
double ScriptProcessorNode::tailTime() const
{
    return std::numeric_limits<double>::infinity();
}

double ScriptProcessorNode::latencyTime() const
{
    return std::numeric_limits<double>::infinity();
}

void ScriptProcessorNode::trace(Visitor* visitor)
{
    visitor->trace(m_inputBuffers);
    visitor->trace(m_outputBuffers);
    AudioNode::trace(visitor);
}

} // namespace WebCore

#endif // ENABLE(WEB_AUDIO)