// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_UNBOUND_QUEUE_INL_H_
#define V8_UNBOUND_QUEUE_INL_H_

#include "unbound-queue.h"

#include "atomicops.h"

namespace v8 {
namespace internal {

// Singly linked list node. Nodes are appended by the producer and are
// reclaimed by the producer once the consumer has moved past them.
template<typename Record>
struct UnboundQueue<Record>::Node: public Malloced {
  explicit Node(const Record& value)
      : value(value), next(NULL) {
  }

  Record value;
  Node* next;
};


template<typename Record>
UnboundQueue<Record>::UnboundQueue() {
  // Start with a single sentinel node; divider_ and last_ both point at
  // it, so the queue is initially empty.
  first_ = new Node(Record());
  divider_ = last_ = reinterpret_cast<AtomicWord>(first_);
}


template<typename Record>
UnboundQueue<Record>::~UnboundQueue() {
  while (first_ != NULL) DeleteFirst();
}


template<typename Record>
void UnboundQueue<Record>::DeleteFirst() {
  Node* tmp = first_;
  first_ = tmp->next;
  delete tmp;
}


// Consumer side: if there is an unconsumed node past divider_, copy its
// value out and advance divider_. The node itself is reclaimed later by
// the producer.
template<typename Record>
bool UnboundQueue<Record>::Dequeue(Record* rec) {
  if (divider_ == Acquire_Load(&last_)) return false;
  Node* next = reinterpret_cast<Node*>(divider_)->next;
  *rec = next->value;
  Release_Store(&divider_, reinterpret_cast<AtomicWord>(next));
  return true;
}


// Producer side: append a new node after last_, publish it, and then
// reclaim the nodes the consumer has already moved past (everything
// between first_ and divider_).
template<typename Record>
void UnboundQueue<Record>::Enqueue(const Record& rec) {
  Node*& next = reinterpret_cast<Node*>(last_)->next;
  next = new Node(rec);
  Release_Store(&last_, reinterpret_cast<AtomicWord>(next));

  while (first_ != reinterpret_cast<Node*>(Acquire_Load(&divider_))) {
    DeleteFirst();
  }
}


template<typename Record>
bool UnboundQueue<Record>::IsEmpty() const {
  return NoBarrier_Load(&divider_) == NoBarrier_Load(&last_);
}


// Consumer side: return a pointer to the next unconsumed value without
// dequeuing it, or NULL if the queue is empty.
template<typename Record>
Record* UnboundQueue<Record>::Peek() const {
  if (divider_ == Acquire_Load(&last_)) return NULL;
  Node* next = reinterpret_cast<Node*>(divider_)->next;
  return &next->value;
}

} }  // namespace v8::internal

#endif  // V8_UNBOUND_QUEUE_INL_H_