/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AudioContext_h
#define AudioContext_h

#include "core/dom/ActiveDOMObject.h"
#include "core/events/EventListener.h"
#include "modules/EventTargetModules.h"
#include "modules/webaudio/AsyncAudioDecoder.h"
#include "modules/webaudio/AudioDestinationNode.h"
#include "platform/audio/AudioBus.h"
#include "platform/heap/Handle.h"
#include "wtf/HashSet.h"
#include "wtf/MainThread.h"
#include "wtf/OwnPtr.h"
#include "wtf/PassRefPtr.h"
#include "wtf/RefPtr.h"
#include "wtf/ThreadSafeRefCounted.h"
#include "wtf/Threading.h"
#include "wtf/Vector.h"
#include "wtf/text/AtomicStringHash.h"

namespace blink {

class AnalyserNode;
class AudioBuffer;
class AudioBufferCallback;
class AudioBufferSourceNode;
class AudioListener;
class AudioSummingJunction;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConvolverNode;
class DelayNode;
class Document;
class DynamicsCompressorNode;
class ExceptionState;
class GainNode;
class HTMLMediaElement;
class MediaElementAudioSourceNode;
class MediaStream;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class OscillatorNode;
class PannerNode;
class PeriodicWave;
class ScriptProcessorNode;
class WaveShaperNode;

// AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
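//
// A minimal usage sketch (illustrative only, not part of this header; "document"
// and "exceptionState" are assumed to be a Document and an ExceptionState
// available to a main-thread caller):
//
//     AudioContext* context = AudioContext::create(document, exceptionState);
//     if (context) {
//         GainNode* gain = context->createGain();
//         // Further nodes are created here on the main thread; the audio thread
//         // renders the resulting graph under the context's graph lock.
//     }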

class AudioContext : public RefCountedGarbageCollectedWillBeGarbageCollectedFinalized<AudioContext>, public ActiveDOMObject, public EventTargetWithInlineData {
    DEFINE_EVENT_TARGET_REFCOUNTING_WILL_BE_REMOVED(RefCountedGarbageCollected<AudioContext>);
    DEFINE_WRAPPERTYPEINFO();
    WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AudioContext);
public:
    // Create an AudioContext for rendering to the audio hardware.
    static AudioContext* create(Document&, ExceptionState&);

    virtual ~AudioContext();

    virtual void trace(Visitor*) OVERRIDE;

    bool isInitialized() const { return m_isInitialized; }
    bool isOfflineContext() { return m_isOfflineContext; }

    // Document notification
    virtual void stop() OVERRIDE FINAL;
    virtual bool hasPendingActivity() const OVERRIDE;

    AudioDestinationNode* destination() { return m_destinationNode.get(); }
    size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
    double currentTime() const { return m_destinationNode->currentTime(); }
    float sampleRate() const { return m_destinationNode->sampleRate(); }

    AudioBuffer* createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);

    // Asynchronous audio file data decoding.
    void decodeAudioData(ArrayBuffer*, AudioBufferCallback*, AudioBufferCallback*, ExceptionState&);

    AudioListener* listener() { return m_listener.get(); }

    // The AudioNode create methods are called on the main thread (from JavaScript); a usage sketch follows the declarations below.
    AudioBufferSourceNode* createBufferSource();
    MediaElementAudioSourceNode* createMediaElementSource(HTMLMediaElement*, ExceptionState&);
    MediaStreamAudioSourceNode* createMediaStreamSource(MediaStream*, ExceptionState&);
    MediaStreamAudioDestinationNode* createMediaStreamDestination();
    GainNode* createGain();
    BiquadFilterNode* createBiquadFilter();
    WaveShaperNode* createWaveShaper();
    DelayNode* createDelay(ExceptionState&);
    DelayNode* createDelay(double maxDelayTime, ExceptionState&);
    PannerNode* createPanner();
    ConvolverNode* createConvolver();
    DynamicsCompressorNode* createDynamicsCompressor();
    AnalyserNode* createAnalyser();
    ScriptProcessorNode* createScriptProcessor(ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
    ScriptProcessorNode* createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
    ChannelSplitterNode* createChannelSplitter(ExceptionState&);
    ChannelSplitterNode* createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
    ChannelMergerNode* createChannelMerger(ExceptionState&);
    ChannelMergerNode* createChannelMerger(size_t numberOfInputs, ExceptionState&);
    OscillatorNode* createOscillator();
    PeriodicWave* createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
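    //
    // A non-normative sketch of main-thread graph construction using the factory
    // methods above ("context" and "exceptionState" are assumed to be supplied by
    // the caller):
    //
    //     AudioBufferSourceNode* source = context->createBufferSource();
    //     BiquadFilterNode* filter = context->createBiquadFilter();
    //     GainNode* gain = context->createGain();
    //     DelayNode* delay = context->createDelay(1.0, exceptionState);
    //     // The nodes would then be wired together (via AudioNode::connect())
    //     // before rendering starts.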

    // When a source node has no more processing to do (has finished playing), it tells the context to dereference it.
    void notifyNodeFinishedProcessing(AudioNode*);

    // Called at the start of each render quantum.
    void handlePreRenderTasks();

    // Called at the end of each render quantum.
    void handlePostRenderTasks();

    // Called periodically at the end of each render quantum to dereference finished source nodes.
    void derefFinishedSourceNodes();

    void registerLiveAudioSummingJunction(AudioSummingJunction&);
    void registerLiveNode(AudioNode&);

    // AudioContext can pull node(s) at the end of each render quantum even when they are not connected to any downstream nodes.
    // These two methods are called by nodes that want to add/remove themselves to/from the automatic pull lists (see the sketch below).
    void addAutomaticPullNode(AudioNode*);
    void removeAutomaticPullNode(AudioNode*);

    // Called right before handlePostRenderTasks() to handle nodes which need to be pulled even when they are not connected to anything.
    void processAutomaticPullNodes(size_t framesToProcess);
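    //
    // A sketch of how a node might register itself for automatic pulling
    // (hypothetical code in an AudioNode subclass; it assumes the node can reach
    // its owning AudioContext via context()):
    //
    //     void MyAnalysisNode::initialize()
    //     {
    //         // Make sure we are pulled every render quantum even if nothing is
    //         // connected downstream of us.
    //         context()->addAutomaticPullNode(this);
    //     }
    //
    //     void MyAnalysisNode::uninitialize()
    //     {
    //         context()->removeAutomaticPullNode(this);
    //     }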

    // Keep track of AudioNodes that have their channel count mode changed. We process the changes
    // in the post-rendering phase.
    void addChangedChannelCountMode(AudioNode*);
    void removeChangedChannelCountMode(AudioNode*);
    void updateChangedChannelCountMode();

    // Keeps track of the number of connections made.
    void incrementConnectionCount()
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }

    unsigned connectionCount() const { return m_connectionCount; }

    //
    // Thread Safety and Graph Locking:
    //

    void setAudioThread(ThreadIdentifier thread) { m_audioThread = thread; } // FIXME: check either not initialized or the same
    ThreadIdentifier audioThread() const { return m_audioThread; }
    bool isAudioThread() const;

    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must call unlock(); false if it was previously acquired.
    void lock(bool& mustReleaseLock);

    // Returns true if we own the lock.
    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must call unlock(); false if it was previously acquired.
    bool tryLock(bool& mustReleaseLock);

    void unlock();

    // Returns true if this thread owns the context's lock.
    bool isGraphOwner() const;
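    //
    // A sketch of the tryLock() protocol as it is typically used from the audio
    // thread (illustrative only):
    //
    //     bool mustReleaseLock;
    //     if (tryLock(mustReleaseLock)) {
    //         // We own the graph lock here and may safely touch the rendering graph.
    //         if (mustReleaseLock)
    //             unlock();
    //     } else {
    //         // The lock could not be taken without blocking; defer the work
    //         // (see addDeferredBreakConnection()) and retry on a later render quantum.
    //     }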

    // Returns the maximum number of channels we can support.
    static unsigned maxNumberOfChannels() { return MaxNumberOfChannels; }

    class AutoLocker {
        STACK_ALLOCATED();
    public:
        explicit AutoLocker(AudioContext* context)
            : m_context(context)
        {
            ASSERT(context);
            context->lock(m_mustReleaseLock);
        }

        ~AutoLocker()
        {
            if (m_mustReleaseLock)
                m_context->unlock();
        }
    private:
        Member<AudioContext> m_context;
        bool m_mustReleaseLock;
    };
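    //
    // Example of the RAII helper above (a minimal sketch; "mutateGraph" is a
    // hypothetical main-thread helper, not part of this API):
    //
    //     void mutateGraph(AudioContext* context)
    //     {
    //         AudioContext::AutoLocker locker(context);
    //         // The graph lock is held for the rest of this scope and is released
    //         // automatically when "locker" is destroyed.
    //     }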

    // In AudioNode::breakConnection() and deref(), tryLock() is used before doing
    // the actual processing; if it fails, the affected nodes are recorded here.
    void addDeferredBreakConnection(AudioNode&);

    // In the audio thread at the start of each render cycle, we'll call this.
    void handleDeferredAudioNodeTasks();

    // Only accessed when the graph lock is held.
    void markSummingJunctionDirty(AudioSummingJunction*);
    // Only accessed when the graph lock is held. Must be called on the main thread.
    void removeMarkedSummingJunction(AudioSummingJunction*);
    void markAudioNodeOutputDirty(AudioNodeOutput*);
    void removeMarkedAudioNodeOutput(AudioNodeOutput*);
    void disposeOutputs(AudioNode&);

    // EventTarget
    virtual const AtomicString& interfaceName() const OVERRIDE FINAL;
    virtual ExecutionContext* executionContext() const OVERRIDE FINAL;

    DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);

    void startRendering();
    void fireCompletionEvent();

    static unsigned s_hardwareContextCount;

protected:
    explicit AudioContext(Document*);
    AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);

private:
    void initialize();
    void uninitialize();

    // ExecutionContext calls stop() twice; we'd like to schedule only one stop action for both calls.
    bool m_isStopScheduled;
    bool m_isCleared;
    void clear();

    // Set to true when the destination node has been initialized and is ready to process data.
    bool m_isInitialized;

    // The context itself keeps a reference to all source nodes.  The source nodes then reference all nodes they're connected to.
    // In turn, these nodes reference all nodes they're connected to.  All nodes are ultimately connected to the AudioDestinationNode.
    // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
    // uniquely connected to.  See the AudioNode::ref() and AudioNode::deref() methods for more details.
    void refNode(AudioNode*);
    void derefNode(AudioNode*);

    // When the context goes away, there might still be some sources which haven't finished playing.
    // Make sure to dereference them here.
    void derefUnfinishedSourceNodes();

    Member<AudioDestinationNode> m_destinationNode;
    Member<AudioListener> m_listener;

    // Only accessed in the audio thread.
    // Oilpan: Since items are added to the vector by the audio thread (not registered to Oilpan),
    // we cannot use a HeapVector.
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    Vector<AudioNode*> m_finishedNodes;

    // List of source nodes. This is either accessed when the graph lock is
    // held, or on the main thread when the audio thread has finished.
    // Oilpan: This Vector holds connection references. We must call
    // AudioNode::makeConnection() when we add an AudioNode to this, and must call
    // AudioNode::breakConnection() when we remove an AudioNode from this.
    HeapVector<Member<AudioNode> > m_referencedNodes;

    class AudioNodeDisposer {
    public:
        explicit AudioNodeDisposer(AudioNode& node) : m_node(node) { }
        ~AudioNodeDisposer();

    private:
        AudioNode& m_node;
    };
    HeapHashMap<WeakMember<AudioNode>, OwnPtr<AudioNodeDisposer> > m_liveNodes;

    class AudioSummingJunctionDisposer {
    public:
        explicit AudioSummingJunctionDisposer(AudioSummingJunction& junction) : m_junction(junction) { }
        ~AudioSummingJunctionDisposer();

    private:
        AudioSummingJunction& m_junction;
    };
    // The purpose of m_liveAudioSummingJunctions is to remove a dying
    // AudioSummingJunction from m_dirtySummingJunctions. However, we put all
    // AudioSummingJunction objects into m_liveAudioSummingJunctions to avoid
    // concurrent access to m_liveAudioSummingJunctions.
    HeapHashMap<WeakMember<AudioSummingJunction>, OwnPtr<AudioSummingJunctionDisposer> > m_liveAudioSummingJunctions;

    // These two HashSets must be accessed only when the graph lock is held.
    // Oilpan: Ideally these would be HeapHashSet<WeakMember<AudioNodeOutput>>,
    // but it is difficult to lock them correctly during GC.
    // Oilpan: Since items are added to these hash sets by the audio thread (not registered to Oilpan),
    // we cannot use HeapHashSets.
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    HashSet<AudioSummingJunction*> m_dirtySummingJunctions;
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
    void handleDirtyAudioSummingJunctions();
    void handleDirtyAudioNodeOutputs();

    // For the sake of thread safety, we maintain a separate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes.
    // It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum.
    // Oilpan: Since items are added to the vector/hash set by the audio thread (not registered to Oilpan),
    // we cannot use a HeapVector/HeapHashSet.
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    HashSet<AudioNode*> m_automaticPullNodes;
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    Vector<AudioNode*> m_renderingAutomaticPullNodes;
    // m_automaticPullNodesNeedUpdating keeps track of whether m_automaticPullNodes has been modified.
    bool m_automaticPullNodesNeedUpdating;
    void updateAutomaticPullNodes();

    unsigned m_connectionCount;

    // Graph locking.
    Mutex m_contextGraphMutex;
    volatile ThreadIdentifier m_audioThread;
    volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier

    // Only accessed in the audio thread.
    // Oilpan: Since items are added to these vectors by the audio thread (not registered to Oilpan),
    // we cannot use HeapVectors.
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    Vector<AudioNode*> m_deferredBreakConnectionList;

    Member<AudioBuffer> m_renderTarget;

    bool m_isOfflineContext;

    AsyncAudioDecoder m_audioDecoder;

    // Collection of nodes where the channel count mode has changed. We want the channel count mode
    // to change in the pre- or post-rendering phase so as not to disturb the running audio thread.
    GC_PLUGIN_IGNORE("http://crbug.com/404527")
    HashSet<AudioNode*> m_deferredCountModeChange;

    // 32 is assumed to be large enough for multi-channel audio. The limit is
    // somewhat arbitrary and could be increased if necessary.
    enum { MaxNumberOfChannels = 32 };
};

} // namespace blink

#endif // AudioContext_h