      1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #ifndef GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_
      6 #define GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_
      7 
      8 #include <queue>
      9 
     10 #include "base/atomic_ref_count.h"
     11 #include "base/atomicops.h"
     12 #include "base/callback.h"
     13 #include "base/memory/linked_ptr.h"
     14 #include "base/memory/ref_counted.h"
     15 #include "base/memory/scoped_ptr.h"
     16 #include "base/memory/shared_memory.h"
     17 #include "base/memory/weak_ptr.h"
     18 #include "gpu/command_buffer/common/command_buffer.h"
     19 #include "gpu/command_buffer/service/cmd_buffer_engine.h"
     20 #include "gpu/command_buffer/service/cmd_parser.h"
     21 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
     22 #include "gpu/gpu_export.h"
     23 
     24 namespace gfx {
     25 class GLFence;
     26 }
     27 
     28 namespace gpu {
     29 
// Thread-safe flag used to request that a GpuScheduler yield (be preempted).
// One side calls Set() to request preemption; the scheduler polls IsSet()
// (see GpuScheduler::PutChanged) and the requester calls Reset() to clear it.
// Lifetime is managed by reference counting; the destructor is private.
class PreemptionFlag
    : public base::RefCountedThreadSafe<PreemptionFlag> {
 public:
  PreemptionFlag() : flag_(0) {}

  // True if the underlying count is nonzero, i.e. at least one Set() has not
  // been cleared by a Reset().
  bool IsSet() { return !base::AtomicRefCountIsZero(&flag_); }
  // NOTE(review): Set() increments the count while Reset() stores 0, so
  // multiple Set() calls are collapsed by a single Reset() — confirm callers
  // rely on "latest Reset clears everything" rather than balanced pairs.
  void Set() { base::AtomicRefCountInc(&flag_); }
  // Clears the flag with no memory barrier; readers polling IsSet() may
  // briefly observe the stale value, which is acceptable for a polled hint.
  void Reset() { base::subtle::NoBarrier_Store(&flag_, 0); }

 private:
  base::AtomicRefCount flag_;

  // Private: instances are destroyed only via Release() on the ref count.
  ~PreemptionFlag() {}

  friend class base::RefCountedThreadSafe<PreemptionFlag>;
};
     46 
// This class schedules commands that have been flushed. They are received via
// a command buffer and forwarded to a command parser. TODO(apatrick): This
// class should not know about the decoder. Do not add additional dependencies
// on it.
class GPU_EXPORT GpuScheduler
    : NON_EXPORTED_BASE(public CommandBufferEngine),
      public base::SupportsWeakPtr<GpuScheduler> {
 public:
  // None of |command_buffer|, |handler| or |decoder| is owned by the
  // scheduler; see the member comments below for the lifetime expectations.
  GpuScheduler(CommandBuffer* command_buffer,
               AsyncAPIInterface* handler,
               gles2::GLES2Decoder* decoder);

  virtual ~GpuScheduler();

  // Processes commands made available by a change to the command buffer's put
  // offset. Exits early if |preemption_flag_| is non-NULL and set (see the
  // member comment below).
  void PutChanged();

  // Installs the flag consulted by PutChanged(); when the flag is set,
  // command processing stops early so other work can run. Passing NULL
  // disables preemption checks.
  void SetPreemptByFlag(scoped_refptr<PreemptionFlag> flag) {
    preemption_flag_ = flag;
  }

  // Sets whether commands should be processed by this scheduler. Setting to
  // false unschedules. Setting to true reschedules. Whether or not the
  // scheduler is currently scheduled is "reference counted". Every call with
  // false must eventually be paired by a call with true.
  void SetScheduled(bool is_scheduled);

  // Returns whether the scheduler is currently able to process more commands.
  bool IsScheduled();

  // Returns whether the scheduler needs to be polled again in the future
  // (e.g. fences in |unschedule_fences_| are still outstanding).
  bool HasMoreWork();

  // Invoked with true when rescheduled and false when descheduled.
  typedef base::Callback<void(bool /* scheduled */)> SchedulingChangedCallback;

  // Sets a callback that is invoked just before the scheduler is rescheduled
  // or descheduled. The callback is copied and retained by the scheduler.
  void SetSchedulingChangedCallback(const SchedulingChangedCallback& callback);

  // Implementation of CommandBufferEngine.
  virtual Buffer GetSharedMemoryBuffer(int32 shm_id) OVERRIDE;
  virtual void set_token(int32 token) OVERRIDE;
  virtual bool SetGetBuffer(int32 transfer_buffer_id) OVERRIDE;
  virtual bool SetGetOffset(int32 offset) OVERRIDE;
  virtual int32 GetGetOffset() OVERRIDE;

  // Sets a callback invoked after commands are processed. The callback is
  // copied and retained by the scheduler.
  void SetCommandProcessedCallback(const base::Closure& callback);

  // Defers |task| until the GPU has crossed an associated GL fence (see
  // UnscheduleFence below); deferred tasks are run from
  // PollUnscheduleFences().
  void DeferToFence(base::Closure task);

  // Polls the fences, invoking callbacks that were waiting to be triggered
  // by them and returns whether all fences were complete.
  bool PollUnscheduleFences();

  // Idle work is work that can wait until nothing else is pending.
  // NOTE(review): what qualifies as idle work is defined in the .cc —
  // presumably decoder housekeeping; confirm there.
  bool HasMoreIdleWork();
  void PerformIdleWork();

  // Returns the command parser created and owned by this scheduler.
  CommandParser* parser() const {
    return parser_.get();
  }

  // Returns whether command processing was interrupted because
  // |preemption_flag_| was set (see |was_preempted_|).
  bool IsPreempted();

 private:
  // Artificially reschedule if the scheduler is still unscheduled after a
  // timeout.
  void RescheduleTimeOut();

  // The GpuScheduler holds a weak reference to the CommandBuffer. The
  // CommandBuffer owns the GpuScheduler and holds a strong reference to it
  // through the ProcessCommands callback.
  CommandBuffer* command_buffer_;

  // The parser uses this to execute commands. Not owned.
  AsyncAPIInterface* handler_;

  // Does not own decoder. TODO(apatrick): The GpuScheduler shouldn't need a
  // pointer to the decoder, it is only used to initialize the CommandParser,
  // which could be an argument to the constructor, and to determine the
  // reason for context lost.
  gles2::GLES2Decoder* decoder_;

  // TODO(apatrick): The GpuScheduler currently creates and owns the parser.
  // This should be an argument to the constructor.
  scoped_ptr<CommandParser> parser_;

  // Greater than zero if this is waiting to be rescheduled before continuing.
  int unscheduled_count_;

  // The number of times this scheduler has been artificially rescheduled on
  // account of a timeout.
  int rescheduled_count_;

  // A factory for outstanding rescheduling tasks that is invalidated whenever
  // the scheduler is rescheduled.
  base::WeakPtrFactory<GpuScheduler> reschedule_task_factory_;

  // The GpuScheduler will unschedule itself in the event that further GL calls
  // are issued to it before all these fences have been crossed by the GPU.
  struct UnscheduleFence {
    // Takes ownership of |fence|; |task| runs once the fence is crossed.
    UnscheduleFence(gfx::GLFence* fence, base::Closure task);
    ~UnscheduleFence();

    scoped_ptr<gfx::GLFence> fence;
    // When the fence was issued; presumably used by the reschedule timeout —
    // confirm in the .cc.
    base::Time issue_time;
    base::Closure task;
  };
  // Pending fences, oldest first (queue order).
  std::queue<linked_ptr<UnscheduleFence> > unschedule_fences_;

  SchedulingChangedCallback scheduling_changed_callback_;
  base::Closure descheduled_callback_;
  base::Closure command_processed_callback_;

  // If non-NULL and |preemption_flag_->IsSet()|, exit PutChanged early.
  scoped_refptr<PreemptionFlag> preemption_flag_;
  bool was_preempted_;

  DISALLOW_COPY_AND_ASSIGN(GpuScheduler);
};
    165 
    166 }  // namespace gpu
    167 
    168 #endif  // GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_
    169