/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AudioContext_h
#define AudioContext_h

#include "ActiveDOMObject.h"
#include "AudioBus.h"
#include "AudioDestinationNode.h"
#include "EventListener.h"
#include "EventTarget.h"
#include "HRTFDatabaseLoader.h"
#include <wtf/HashSet.h>
#include <wtf/OwnPtr.h>
#include <wtf/PassRefPtr.h>
#include <wtf/RefCounted.h>
#include <wtf/RefPtr.h>
#include <wtf/Threading.h>
#include <wtf/Vector.h>
#include <wtf/text/AtomicStringHash.h>

namespace WebCore {
class ArrayBuffer;
class AudioBuffer;
class AudioBufferSourceNode;
class AudioChannelMerger;
class AudioChannelSplitter;
class AudioGainNode;
class AudioListener;
class AudioPannerNode;
class ConvolverNode;
class DelayNode;
class Document;
class HighPass2FilterNode;
class JavaScriptAudioNode;
class LowPass2FilterNode;
class RealtimeAnalyserNode;

// AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.

class AudioContext : public ActiveDOMObject, public RefCounted<AudioContext>, public EventTarget {
public:
    // Create an AudioContext for rendering to the audio hardware.
    static PassRefPtr<AudioContext> create(Document*);

    // Create an AudioContext for offline (non-realtime) rendering.
    static PassRefPtr<AudioContext> createOfflineContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, double sampleRate);
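
    // A minimal usage sketch (not part of this header): both factories are
    // expected to be called on the main thread with a live Document*, here
    // assumed to be 'document':
    //
    //     RefPtr<AudioContext> context = AudioContext::create(document);
    //     RefPtr<AudioContext> offlineContext =
    //         AudioContext::createOfflineContext(document, 2, 10 * 44100, 44100.0);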

    virtual ~AudioContext();

    bool isInitialized() const;

    bool isOfflineContext() { return m_isOfflineContext; }

    // Returns true when initialize() was called AND all asynchronous initialization has completed.
    bool isRunnable() const;

    // Document notification
    virtual void stop();

    Document* document() const; // ASSERTs if document no longer exists.
    bool hasDocument();

    AudioDestinationNode* destination() { return m_destinationNode.get(); }
    double currentTime() { return m_destinationNode->currentTime(); }
    double sampleRate() { return m_destinationNode->sampleRate(); }

    PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, double sampleRate);
    PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer* arrayBuffer, bool mixToMono);

    // Keep track of this buffer so we can release its memory after the context is shut down.
    void refBuffer(PassRefPtr<AudioBuffer> buffer);
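
    // A minimal usage sketch (assuming 'context' is a valid AudioContext*):
    //
    //     RefPtr<AudioBuffer> buffer = context->createBuffer(2, 1024, context->sampleRate());
    //     context->refBuffer(buffer);
    //
    // refBuffer() keeps the buffer alive until the context is shut down.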

    AudioListener* listener() { return m_listener.get(); }

    // The AudioNode create methods are called on the main thread (from JavaScript).
    PassRefPtr<AudioBufferSourceNode> createBufferSource();
    PassRefPtr<AudioGainNode> createGainNode();
    PassRefPtr<DelayNode> createDelayNode();
    PassRefPtr<LowPass2FilterNode> createLowPass2Filter();
    PassRefPtr<HighPass2FilterNode> createHighPass2Filter();
    PassRefPtr<AudioPannerNode> createPanner();
    PassRefPtr<ConvolverNode> createConvolver();
    PassRefPtr<RealtimeAnalyserNode> createAnalyser();
    PassRefPtr<JavaScriptAudioNode> createJavaScriptNode(size_t bufferSize);
    PassRefPtr<AudioChannelSplitter> createChannelSplitter();
    PassRefPtr<AudioChannelMerger> createChannelMerger();
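
    // A minimal usage sketch: building a small graph on the main thread.
    // Connections are made with AudioNode::connect() (declared in AudioNode.h,
    // not in this header), which is assumed here:
    //
    //     RefPtr<AudioBufferSourceNode> source = context->createBufferSource();
    //     RefPtr<AudioGainNode> gain = context->createGainNode();
    //     source->connect(gain.get());
    //     gain->connect(context->destination());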

    AudioBus* temporaryMonoBus() { return m_temporaryMonoBus.get(); }
    AudioBus* temporaryStereoBus() { return m_temporaryStereoBus.get(); }

    // When a source node has no more processing to do (has finished playing), it tells the context to dereference it.
    void notifyNodeFinishedProcessing(AudioNode*);

    // Called at the start of each render quantum.
    void handlePreRenderTasks();

    // Called at the end of each render quantum.
    void handlePostRenderTasks();

    // Called periodically at the end of each render quantum to dereference finished source nodes.
    void derefFinishedSourceNodes();
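
    // A rough sketch of how the audio thread is expected to drive these hooks
    // each render quantum (the actual call sites live in the rendering code,
    // e.g. AudioDestinationNode, not in this header):
    //
    //     context->handlePreRenderTasks();
    //     // ... pull audio through the graph for one render quantum ...
    //     context->handlePostRenderTasks();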

    // We reap all marked nodes at the end of each realtime render quantum in deleteMarkedNodes().
    void markForDeletion(AudioNode*);
    void deleteMarkedNodes();

    // Keeps track of the number of connections made.
    void incrementConnectionCount()
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }

    unsigned connectionCount() const { return m_connectionCount; }

    //
    // Thread Safety and Graph Locking:
    //

    void setAudioThread(ThreadIdentifier thread) { m_audioThread = thread; } // FIXME: check either not initialized or the same
    ThreadIdentifier audioThread() const { return m_audioThread; }
    bool isAudioThread() const;

    // Returns true only after the audio thread has been started and then shut down.
    bool isAudioThreadFinished() { return m_isAudioThreadFinished; }

    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    void lock(bool& mustReleaseLock);

    // Returns true if we own the lock.
    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    bool tryLock(bool& mustReleaseLock);

    void unlock();

    // Returns true if this thread owns the context's lock.
    bool isGraphOwner() const;
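
    // A minimal usage sketch of the non-blocking path, as it might be used on
    // the audio thread where blocking on the main thread must be avoided:
    //
    //     bool mustReleaseLock;
    //     if (context->tryLock(mustReleaseLock)) {
    //         // ... safe to touch the rendering graph here ...
    //         if (mustReleaseLock)
    //             context->unlock();
    //     }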

    class AutoLocker {
    public:
        AutoLocker(AudioContext* context)
            : m_context(context)
        {
            ASSERT(context);
            context->lock(m_mustReleaseLock);
        }

        ~AutoLocker()
        {
            if (m_mustReleaseLock)
                m_context->unlock();
        }
    private:
        AudioContext* m_context;
        bool m_mustReleaseLock;
    };
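
    // A minimal usage sketch of the blocking path via the RAII helper above,
    // e.g. from main-thread code:
    //
    //     {
    //         AudioContext::AutoLocker locker(context);
    //         // ... the graph lock is held for the rest of this scope ...
    //     }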

    // In AudioNode::deref() a tryLock() is used for calling finishDeref(), but if it fails the deref is deferred and tracked here.
    void addDeferredFinishDeref(AudioNode*, AudioNode::RefType);

    // In the audio thread at the start of each render cycle, we'll call handleDeferredFinishDerefs().
    void handleDeferredFinishDerefs();

    // Only accessed when the graph lock is held.
    void markAudioNodeInputDirty(AudioNodeInput*);
    void markAudioNodeOutputDirty(AudioNodeOutput*);

    // EventTarget
    virtual ScriptExecutionContext* scriptExecutionContext() const;
    virtual AudioContext* toAudioContext();
    virtual EventTargetData* eventTargetData() { return &m_eventTargetData; }
    virtual EventTargetData* ensureEventTargetData() { return &m_eventTargetData; }

    DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);

    // Reconcile ref/deref which are defined both in RefCounted and EventTarget.
    using RefCounted<AudioContext>::ref;
    using RefCounted<AudioContext>::deref;

    void startRendering();
    void fireCompletionEvent();
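
    // A minimal sketch of the offline flow (assuming 'offlineContext' was made
    // with createOfflineContext() and an oncomplete listener was registered via
    // the attribute event listener declared above):
    //
    //     offlineContext->startRendering();
    //     // ... when rendering finishes, fireCompletionEvent() dispatches the
    //     // 'complete' event to the registered listener ...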

private:
    AudioContext(Document*);
    AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, double sampleRate);
    void constructCommon();

    void lazyInitialize();
    void uninitialize();

    bool m_isInitialized;
    bool m_isAudioThreadFinished;
    bool m_isAudioThreadShutdown;

    Document* m_document;

    // The context itself keeps a reference to all source nodes.  The source nodes then reference all nodes they're connected to.
    // In turn, these nodes reference all nodes they're connected to.  All nodes are ultimately connected to the AudioDestinationNode.
    // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
    // uniquely connected to.  See the AudioNode::ref() and AudioNode::deref() methods for more details.
    void refNode(AudioNode*);
    void derefNode(AudioNode*);

    // When the context goes away, there might still be some sources which haven't finished playing.
    // Make sure to dereference them here.
    void derefUnfinishedSourceNodes();

    RefPtr<AudioDestinationNode> m_destinationNode;
    RefPtr<AudioListener> m_listener;

    // Only accessed in the main thread.
    Vector<RefPtr<AudioBuffer> > m_allocatedBuffers;

    // Only accessed in the audio thread.
    Vector<AudioNode*> m_finishedNodes;

    // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation
    // with an optional argument for refType.  We need to use the special refType: RefTypeConnection.
    // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished.
    Vector<AudioNode*> m_referencedNodes;

    // Accumulate nodes which need to be deleted at the end of a render cycle (in realtime thread) here.
    Vector<AudioNode*> m_nodesToDelete;

    // Only accessed when the graph lock is held.
    HashSet<AudioNodeInput*> m_dirtyAudioNodeInputs;
    HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
    void handleDirtyAudioNodeInputs();
    void handleDirtyAudioNodeOutputs();

    OwnPtr<AudioBus> m_temporaryMonoBus;
    OwnPtr<AudioBus> m_temporaryStereoBus;

    unsigned m_connectionCount;

    // Graph locking.
    Mutex m_contextGraphMutex;
    volatile ThreadIdentifier m_audioThread;
    volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier

    // Deferred de-referencing.
    struct RefInfo {
        RefInfo(AudioNode* node, AudioNode::RefType refType)
            : m_node(node)
            , m_refType(refType)
        {
        }
        AudioNode* m_node;
        AudioNode::RefType m_refType;
    };

    // Only accessed in the audio thread.
    Vector<RefInfo> m_deferredFinishDerefList;

    // HRTF Database loader
    RefPtr<HRTFDatabaseLoader> m_hrtfDatabaseLoader;

    // EventTarget
    virtual void refEventTarget() { ref(); }
    virtual void derefEventTarget() { deref(); }
    EventTargetData m_eventTargetData;

    RefPtr<AudioBuffer> m_renderTarget;

    bool m_isOfflineContext;
};

} // namespace WebCore

#endif // AudioContext_h