// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "optimizing-compiler-thread.h"

#include "v8.h"

#include "full-codegen.h"
#include "hydrogen.h"
#include "isolate.h"
#include "v8threads.h"

namespace v8 {
namespace internal {

OptimizingCompilerThread::~OptimizingCompilerThread() {
  ASSERT_EQ(0, input_queue_length_);
  DeleteArray(input_queue_);
  if (FLAG_concurrent_osr) {
#ifdef DEBUG
    for (int i = 0; i < osr_buffer_capacity_; i++) {
      CHECK_EQ(NULL, osr_buffer_[i]);
    }
#endif
    DeleteArray(osr_buffer_);
  }
}


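// Entry point of the background thread: waits on the input queue semaphore,
// honors STOP and FLUSH requests from the main thread via stop_thread_, and
// otherwise compiles the next job from the input queue.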
void OptimizingCompilerThread::Run() {
#ifdef DEBUG
  { LockGuard<Mutex> lock_guard(&thread_id_mutex_);
    thread_id_ = ThreadId::Current().ToInteger();
  }
#endif
  Isolate::SetIsolateThreadLocals(isolate_, NULL);
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ElapsedTimer total_timer;
  if (FLAG_trace_concurrent_recompilation) total_timer.Start();

  while (true) {
    input_queue_semaphore_.Wait();
    Logger::TimerEventScope timer(
        isolate_, Logger::TimerEventScope::v8_recompile_concurrent);

    if (FLAG_concurrent_recompilation_delay != 0) {
      OS::Sleep(FLAG_concurrent_recompilation_delay);
    }

    switch (static_cast<StopFlag>(Acquire_Load(&stop_thread_))) {
      case CONTINUE:
        break;
      case STOP:
        if (FLAG_trace_concurrent_recompilation) {
          time_spent_total_ = total_timer.Elapsed();
        }
        stop_semaphore_.Signal();
        return;
      case FLUSH:
        // The main thread is blocked, waiting for the stop semaphore.
        { AllowHandleDereference allow_handle_dereference;
          FlushInputQueue(true);
        }
        Release_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE));
        stop_semaphore_.Signal();
        // Return to start of consumer loop.
        continue;
    }

    ElapsedTimer compiling_timer;
    if (FLAG_trace_concurrent_recompilation) compiling_timer.Start();

    CompileNext();

    if (FLAG_trace_concurrent_recompilation) {
      time_spent_compiling_ += compiling_timer.Elapsed();
    }
  }
}


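// Removes and returns the job at the front of the circular input queue, or
// NULL if the queue is empty.  Guarded by input_queue_mutex_.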
RecompileJob* OptimizingCompilerThread::NextInput() {
  LockGuard<Mutex> access_input_queue_(&input_queue_mutex_);
  if (input_queue_length_ == 0) return NULL;
  RecompileJob* job = input_queue_[InputQueueIndex(0)];
  ASSERT_NE(NULL, job);
  input_queue_shift_ = InputQueueIndex(1);
  input_queue_length_--;
  return job;
}


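// Runs the optimization phase of the next queued job and hands the result
// to the main thread through the output queue.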
void OptimizingCompilerThread::CompileNext() {
  RecompileJob* job = NextInput();
  ASSERT_NE(NULL, job);

  // The function may have already been optimized by OSR.  Simply continue.
  RecompileJob::Status status = job->OptimizeGraph();
  USE(status);   // Prevent an unused-variable error in release mode.
  ASSERT(status != RecompileJob::FAILED);

  // Enqueue the finished job before requesting installation, so that the
  // main thread always finds it in the output queue when it handles the
  // install-code request.
  output_queue_.Enqueue(job);
  isolate_->stack_guard()->RequestInstallCode();
}


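// Deletes the job's CompilationInfo.  If restore_function_code is true,
// non-OSR functions get their unoptimized code back, and OSR jobs that are
// not yet waiting for install have their back-edge stack check removed.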
static void DisposeRecompileJob(RecompileJob* job,
                                bool restore_function_code) {
  // The recompile job is allocated in the CompilationInfo's zone.
  CompilationInfo* info = job->info();
  if (restore_function_code) {
    if (info->is_osr()) {
      if (!job->IsWaitingForInstall()) BackEdgeTable::RemoveStackCheck(info);
    } else {
      Handle<JSFunction> function = info->closure();
      function->ReplaceCode(function->shared()->code());
    }
  }
  delete info;
}


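// Drains the input queue, disposing every non-OSR job.  OSR jobs are kept
// alive here and disposed through the OSR buffer instead.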
void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
  RecompileJob* job;
  while ((job = NextInput())) {
    // This should not block, since we have one signal on the input queue
    // semaphore corresponding to each element in the input queue.
    input_queue_semaphore_.Wait();
    // OSR jobs are dealt with separately.
    if (!job->info()->is_osr()) {
      DisposeRecompileJob(job, restore_function_code);
    }
  }
}


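// Drains the output queue, disposing every non-OSR job that has not been
// installed yet.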
void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
  RecompileJob* job;
  while (output_queue_.Dequeue(&job)) {
    // OSR jobs are dealt with separately.
    if (!job->info()->is_osr()) {
      DisposeRecompileJob(job, restore_function_code);
    }
  }
}


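// Disposes every job currently held in the OSR buffer.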
void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
  for (int i = 0; i < osr_buffer_capacity_; i++) {
    if (osr_buffer_[i] != NULL) {
      DisposeRecompileJob(osr_buffer_[i], restore_function_code);
      osr_buffer_[i] = NULL;
    }
  }
}


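// Called on the main thread to discard pending work: asks the background
// thread to flush its input queue, waits for it to acknowledge, then flushes
// the output queue and OSR buffer while restoring function code.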
void OptimizingCompilerThread::Flush() {
  ASSERT(!IsOptimizerThread());
  Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH));
  if (FLAG_block_concurrent_recompilation) Unblock();
  input_queue_semaphore_.Signal();
  stop_semaphore_.Wait();
  FlushOutputQueue(true);
  if (FLAG_concurrent_osr) FlushOsrBuffer(true);
  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Flushed concurrent recompilation queues.\n");
  }
}


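// Called on the main thread to shut the background thread down.  Pending
// jobs are either finished and installed (when an artificial recompilation
// delay is configured) or discarded, and the thread is joined.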
void OptimizingCompilerThread::Stop() {
  ASSERT(!IsOptimizerThread());
  Release_Store(&stop_thread_, static_cast<AtomicWord>(STOP));
  if (FLAG_block_concurrent_recompilation) Unblock();
  input_queue_semaphore_.Signal();
  stop_semaphore_.Wait();

  if (FLAG_concurrent_recompilation_delay != 0) {
    // At this point the optimizing compiler thread's event loop has stopped.
    // There is no need for a mutex when reading input_queue_length_.
    while (input_queue_length_ > 0) CompileNext();
    InstallOptimizedFunctions();
  } else {
    FlushInputQueue(false);
    FlushOutputQueue(false);
  }

  if (FLAG_concurrent_osr) FlushOsrBuffer(false);

  if (FLAG_trace_concurrent_recompilation) {
    double percentage = time_spent_compiling_.PercentOf(time_spent_total_);
    PrintF("  ** Compiler thread did %.2f%% useful work\n", percentage);
  }

  if ((FLAG_trace_osr || FLAG_trace_concurrent_recompilation) &&
      FLAG_concurrent_osr) {
    PrintF("[COSR hit rate %d / %d]\n", osr_hits_, osr_attempts_);
  }

  Join();
}


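// Called on the main thread, e.g. after RequestInstallCode, to drain the
// output queue: regular jobs get their optimized code installed, while OSR
// jobs are marked as waiting for install until their OSR entry is hit.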
void OptimizingCompilerThread::InstallOptimizedFunctions() {
  ASSERT(!IsOptimizerThread());
  HandleScope handle_scope(isolate_);

  RecompileJob* job;
  while (output_queue_.Dequeue(&job)) {
    CompilationInfo* info = job->info();
    if (info->is_osr()) {
      if (FLAG_trace_osr) {
        PrintF("[COSR - ");
        info->closure()->PrintName();
        PrintF(" is ready for install and entry at AST id %d]\n",
               info->osr_ast_id().ToInt());
      }
      job->WaitForInstall();
      BackEdgeTable::RemoveStackCheck(info);
    } else {
      Compiler::InstallOptimizedCode(job);
    }
  }
}


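// Called on the main thread to enqueue a new job.  OSR jobs also go into
// the OSR buffer and jump to the front of the input queue; regular jobs
// are appended at the back.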
void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
  ASSERT(IsQueueAvailable());
  ASSERT(!IsOptimizerThread());
  CompilationInfo* info = job->info();
  if (info->is_osr()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Queueing ");
      info->closure()->PrintName();
      PrintF(" for concurrent on-stack replacement.\n");
    }
    osr_attempts_++;
    BackEdgeTable::AddStackCheck(info);
    AddToOsrBuffer(job);
    // Add job to the front of the input queue.
    LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
    ASSERT_LT(input_queue_length_, input_queue_capacity_);
    // Move input_queue_shift_ back by one slot so the job lands at the front.
    input_queue_shift_ = InputQueueIndex(input_queue_capacity_ - 1);
    input_queue_[InputQueueIndex(0)] = job;
    input_queue_length_++;
  } else {
    info->closure()->MarkInRecompileQueue();
    // Add job to the back of the input queue.
    LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
    ASSERT_LT(input_queue_length_, input_queue_capacity_);
    input_queue_[InputQueueIndex(input_queue_length_)] = job;
    input_queue_length_++;
  }
  if (FLAG_block_concurrent_recompilation) {
    blocked_jobs_++;
  } else {
    input_queue_semaphore_.Signal();
  }
}


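// Releases jobs held back by --block-concurrent-recompilation by signalling
// the input queue semaphore once per blocked job.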
void OptimizingCompilerThread::Unblock() {
  ASSERT(!IsOptimizerThread());
  while (blocked_jobs_ > 0) {
    input_queue_semaphore_.Signal();
    blocked_jobs_--;
  }
}


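// Returns a finished OSR job matching the given function and OSR entry,
// removing it from the OSR buffer, or NULL if none is ready for install.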
RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
    Handle<JSFunction> function, uint32_t osr_pc_offset) {
  ASSERT(!IsOptimizerThread());
  for (int i = 0; i < osr_buffer_capacity_; i++) {
    RecompileJob* current = osr_buffer_[i];
    if (current != NULL &&
        current->IsWaitingForInstall() &&
        current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
      osr_hits_++;
      osr_buffer_[i] = NULL;
      return current;
    }
  }
  return NULL;
}


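// Returns true while a matching OSR job is queued or compiling but is not
// yet ready for install.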
bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
                                              uint32_t osr_pc_offset) {
  ASSERT(!IsOptimizerThread());
  for (int i = 0; i < osr_buffer_capacity_; i++) {
    RecompileJob* current = osr_buffer_[i];
    if (current != NULL &&
        current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
      return !current->IsWaitingForInstall();
    }
  }
  return false;
}


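// Same as above, but matches on the function alone, ignoring the OSR entry.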
bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
  ASSERT(!IsOptimizerThread());
  for (int i = 0; i < osr_buffer_capacity_; i++) {
    RecompileJob* current = osr_buffer_[i];
    if (current != NULL && *current->info()->closure() == function) {
      return !current->IsWaitingForInstall();
    }
  }
  return false;
}


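// Inserts an OSR job into the circular OSR buffer, evicting and disposing
// a stale job (one already waiting for install) if necessary.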
void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
  ASSERT(!IsOptimizerThread());
  // Find the next slot that is empty or has a stale job.
  RecompileJob* stale = NULL;
  while (true) {
    stale = osr_buffer_[osr_buffer_cursor_];
    if (stale == NULL || stale->IsWaitingForInstall()) break;
    osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
  }

  // Add to found slot and dispose the evicted job.
  if (stale != NULL) {
    ASSERT(stale->IsWaitingForInstall());
    CompilationInfo* info = stale->info();
    if (FLAG_trace_osr) {
      PrintF("[COSR - Discarded ");
      info->closure()->PrintName();
      PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
    }
    DisposeRecompileJob(stale, false);
  }
  osr_buffer_[osr_buffer_cursor_] = job;
  osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
}


#ifdef DEBUG
bool OptimizingCompilerThread::IsOptimizerThread(Isolate* isolate) {
  return isolate->concurrent_recompilation_enabled() &&
         isolate->optimizing_compiler_thread()->IsOptimizerThread();
}


bool OptimizingCompilerThread::IsOptimizerThread() {
  LockGuard<Mutex> lock_guard(&thread_id_mutex_);
  return ThreadId::Current().ToInteger() == thread_id_;
}
#endif


} }  // namespace v8::internal