1 // Copyright 2008 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28 #include "v8.h" 29 30 #include "compilation-cache.h" 31 #include "serialize.h" 32 33 namespace v8 { 34 namespace internal { 35 36 37 // The number of generations for each sub cache. 38 // The number of ScriptGenerations is carefully chosen based on histograms. 
// See issue 458: http://code.google.com/p/v8/issues/detail?id=458
static const int kScriptGenerations = 5;
static const int kEvalGlobalGenerations = 2;
static const int kEvalContextualGenerations = 2;
static const int kRegExpGenerations = 2;

// Initial size of each compilation cache table allocated.
static const int kInitialCacheSize = 64;


// Wires the named sub-caches into the subcaches_ array so the generic
// operations (Clear, Iterate, IterateFunctions, MarkCompactPrologue) can
// loop over all of them uniformly.
CompilationCache::CompilationCache(Isolate* isolate)
    : isolate_(isolate),
      script_(isolate, kScriptGenerations),
      eval_global_(isolate, kEvalGlobalGenerations),
      eval_contextual_(isolate, kEvalContextualGenerations),
      reg_exp_(isolate, kRegExpGenerations),
      enabled_(true),
      eager_optimizing_set_(NULL) {
  CompilationSubCache* subcaches[kSubCacheCount] =
      {&script_, &eval_global_, &eval_contextual_, &reg_exp_};
  for (int i = 0; i < kSubCacheCount; ++i) {
    subcaches_[i] = subcaches[i];
  }
}


CompilationCache::~CompilationCache() {
  // The set is lazily created (see EagerOptimizingSet); deleting NULL is safe.
  delete eager_optimizing_set_;
  eager_optimizing_set_ = NULL;
}


// Allocates a fresh CompilationCacheTable of the given size.
// CALL_HEAP_FUNCTION wraps the raw allocation result into a Handle
// (handling allocation failure/retry per the macro's contract).
static Handle<CompilationCacheTable> AllocateTable(Isolate* isolate, int size) {
  CALL_HEAP_FUNCTION(isolate,
                     CompilationCacheTable::Allocate(size),
                     CompilationCacheTable);
}


// Returns the table for the requested generation, lazily allocating it on
// first use.  Unallocated generations are represented by the undefined
// sentinel in tables_.
Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
  ASSERT(generation < generations_);
  Handle<CompilationCacheTable> result;
  if (tables_[generation]->IsUndefined()) {
    result = AllocateTable(isolate(), kInitialCacheSize);
    tables_[generation] = *result;
  } else {
    CompilationCacheTable* table =
        CompilationCacheTable::cast(tables_[generation]);
    result = Handle<CompilationCacheTable>(table, isolate());
  }
  return result;
}

// Shifts every generation table one slot towards the old end, dropping the
// oldest generation and leaving the newest slot empty.
void CompilationSubCache::Age() {
  // Age the generations implicitly killing off the oldest.
  for (int i = generations_ - 1; i > 0; i--) {
    tables_[i] = tables_[i - 1];
  }

  // Set the first generation as unborn.
  tables_[0] = isolate()->heap()->undefined_value();
}


// Visits the elements of every allocated generation table.  Generations that
// still hold the undefined sentinel (compared as a raw pointer, hence the
// raw_unchecked accessor) have no table and are skipped.
void CompilationSubCache::IterateFunctions(ObjectVisitor* v) {
  Object* undefined = isolate()->heap()->raw_unchecked_undefined_value();
  for (int i = 0; i < generations_; i++) {
    if (tables_[i] != undefined) {
      reinterpret_cast<CompilationCacheTable*>(tables_[i])->IterateElements(v);
    }
  }
}


// Visits the table slots themselves (not their contents) so the GC can
// relocate the tables.
void CompilationSubCache::Iterate(ObjectVisitor* v) {
  v->VisitPointers(&tables_[0], &tables_[generations_]);
}


// Resets every generation back to the undefined sentinel, dropping all
// cached entries at once.
void CompilationSubCache::Clear() {
  MemsetPointer(tables_, isolate()->heap()->undefined_value(), generations_);
}


// Removes the given function from every generation of this sub-cache.
void CompilationSubCache::Remove(Handle<SharedFunctionInfo> function_info) {
  // Probe the script generation tables. Make sure not to leak handles
  // into the caller's handle scope.
  { HandleScope scope(isolate());
    for (int generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      table->Remove(*function_info);
    }
  }
}


CompilationCacheScript::CompilationCacheScript(Isolate* isolate,
                                               int generations)
    : CompilationSubCache(isolate, generations),
      script_histogram_(NULL),
      script_histogram_initialized_(false) { }


// We only re-use a cached function for some script source code if the
// script originates from the same place. This is to avoid issues
// when reporting errors, etc.
bool CompilationCacheScript::HasOrigin(
    Handle<SharedFunctionInfo> function_info,
    Handle<Object> name,
    int line_offset,
    int column_offset) {
  Handle<Script> script =
      Handle<Script>(Script::cast(function_info->script()), isolate());
  // If the script name isn't set, the boilerplate script should have
  // an undefined name to have the same origin.
  if (name.is_null()) {
    return script->name()->IsUndefined();
  }
  // Do the fast bailout checks first.
  if (line_offset != script->line_offset()->value()) return false;
  if (column_offset != script->column_offset()->value()) return false;
  // Check that both names are strings. If not, no match.
  if (!name->IsString() || !script->name()->IsString()) return false;
  // Compare the two name strings for equality.
  return String::cast(*name)->Equals(String::cast(script->name()));
}


// TODO(245): Need to allow identical code from different contexts to
// be cached in the same script generation. Currently the first use
// will be cached, but subsequent code from different source / line
// won't.
Handle<SharedFunctionInfo> CompilationCacheScript::Lookup(Handle<String> source,
                                                          Handle<Object> name,
                                                          int line_offset,
                                                          int column_offset) {
  // Raw pointer: deliberately kept outside the handle scope below so the
  // scope's handles can be discarded before the result is re-handled.
  // NOTE(review): this assumes nothing between the scope's end and the
  // Handle creation below can trigger a GC (e.g. the histogram calls) —
  // confirm before touching the ordering here.
  Object* result = NULL;
  int generation;

  // Probe the script generation tables. Make sure not to leak handles
  // into the caller's handle scope.
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      Handle<Object> probe(table->Lookup(*source), isolate());
      if (probe->IsSharedFunctionInfo()) {
        Handle<SharedFunctionInfo> function_info =
            Handle<SharedFunctionInfo>::cast(probe);
        // Break when we've found a suitable shared function info that
        // matches the origin.
        if (HasOrigin(function_info, name, line_offset, column_offset)) {
          result = *function_info;
          break;
        }
      }
    }
  }

  // Lazily create the histogram used to record which generation hits land in.
  if (!script_histogram_initialized_) {
    script_histogram_ = isolate()->stats_table()->CreateHistogram(
        "V8.ScriptCache",
        0,
        kScriptGenerations,
        kScriptGenerations + 1);
    script_histogram_initialized_ = true;
  }

  if (script_histogram_ != NULL) {
    // The level NUMBER_OF_SCRIPT_GENERATIONS is equivalent to a cache miss.
    isolate()->stats_table()->AddHistogramSample(script_histogram_, generation);
  }

  // Once outside the manacles of the handle scope, we need to recheck
  // to see if we actually found a cached script. If so, we return a
  // handle created in the caller's handle scope.
  if (result != NULL) {
    Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(result),
                                      isolate());
    ASSERT(HasOrigin(shared, name, line_offset, column_offset));
    // If the script was found in a later generation, we promote it to
    // the first generation to let it survive longer in the cache.
    if (generation != 0) Put(source, shared);
    isolate()->counters()->compilation_cache_hits()->Increment();
    return shared;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<SharedFunctionInfo>::null();
  }
}


// Raw insertion into the first-generation table; may return a failure that
// the CALL_HEAP_FUNCTION wrapper in TablePut retries.
MaybeObject* CompilationCacheScript::TryTablePut(
    Handle<String> source,
    Handle<SharedFunctionInfo> function_info) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->Put(*source, *function_info);
}


// Handle-based wrapper around TryTablePut.
Handle<CompilationCacheTable> CompilationCacheScript::TablePut(
    Handle<String> source,
    Handle<SharedFunctionInfo> function_info) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(source, function_info),
                     CompilationCacheTable);
}


// Stores the entry in the first generation, re-installing the table in case
// Put grew/reallocated it.
void CompilationCacheScript::Put(Handle<String> source,
                                 Handle<SharedFunctionInfo> function_info) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, function_info));
}


// Looks up an eval() result keyed by source, calling context, and strict
// mode.  Hits found in an older generation are promoted to generation 0.
Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
    Handle<String> source,
    Handle<Context> context,
    StrictModeFlag strict_mode) {
  // Make sure not to leak the table into the surrounding handle
  // scope. Otherwise, we risk keeping old tables around even after
  // having cleared the cache.
  Object* result = NULL;
  int generation;
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      result = table->LookupEval(*source, *context, strict_mode);
      if (result->IsSharedFunctionInfo()) {
        break;
      }
    }
  }
  if (result->IsSharedFunctionInfo()) {
    Handle<SharedFunctionInfo>
        function_info(SharedFunctionInfo::cast(result), isolate());
    // Promote hits from older generations so they survive longer.
    if (generation != 0) {
      Put(source, context, function_info);
    }
    isolate()->counters()->compilation_cache_hits()->Increment();
    return function_info;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<SharedFunctionInfo>::null();
  }
}


// Raw insertion; see CompilationCacheScript::TryTablePut.
MaybeObject* CompilationCacheEval::TryTablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->PutEval(*source, *context, *function_info);
}


// Handle-based wrapper around TryTablePut.
Handle<CompilationCacheTable> CompilationCacheEval::TablePut(
    Handle<String> source,
    Handle<Context> context,
    Handle<SharedFunctionInfo> function_info) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(source, context, function_info),
                     CompilationCacheTable);
}


// Stores the entry in the first generation, re-installing the table in case
// PutEval grew/reallocated it.
void CompilationCacheEval::Put(Handle<String> source,
                               Handle<Context> context,
                               Handle<SharedFunctionInfo> function_info) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, context, function_info));
}


// Looks up compiled RegExp data keyed by source and flags.  Hits found in an
// older generation are promoted to generation 0.
Handle<FixedArray> CompilationCacheRegExp::Lookup(Handle<String> source,
                                                  JSRegExp::Flags flags) {
  // Make sure not to leak the table into the surrounding handle
  // scope. Otherwise, we risk keeping old tables around even after
  // having cleared the cache.
  Object* result = NULL;
  int generation;
  { HandleScope scope(isolate());
    for (generation = 0; generation < generations(); generation++) {
      Handle<CompilationCacheTable> table = GetTable(generation);
      result = table->LookupRegExp(*source, flags);
      if (result->IsFixedArray()) {
        break;
      }
    }
  }
  if (result->IsFixedArray()) {
    Handle<FixedArray> data(FixedArray::cast(result), isolate());
    // Promote hits from older generations so they survive longer.
    if (generation != 0) {
      Put(source, flags, data);
    }
    isolate()->counters()->compilation_cache_hits()->Increment();
    return data;
  } else {
    isolate()->counters()->compilation_cache_misses()->Increment();
    return Handle<FixedArray>::null();
  }
}


// Raw insertion; see CompilationCacheScript::TryTablePut.
MaybeObject* CompilationCacheRegExp::TryTablePut(
    Handle<String> source,
    JSRegExp::Flags flags,
    Handle<FixedArray> data) {
  Handle<CompilationCacheTable> table = GetFirstTable();
  return table->PutRegExp(*source, flags, *data);
}


// Handle-based wrapper around TryTablePut.
Handle<CompilationCacheTable> CompilationCacheRegExp::TablePut(
    Handle<String> source,
    JSRegExp::Flags flags,
    Handle<FixedArray> data) {
  CALL_HEAP_FUNCTION(isolate(),
                     TryTablePut(source, flags, data),
                     CompilationCacheTable);
}


// Stores the entry in the first generation, re-installing the table in case
// PutRegExp grew/reallocated it.
void CompilationCacheRegExp::Put(Handle<String> source,
                                 JSRegExp::Flags flags,
                                 Handle<FixedArray> data) {
  HandleScope scope(isolate());
  SetFirstTable(TablePut(source, flags, data));
}


// Removes the function from the sub-caches that store SharedFunctionInfos.
// (The RegExp cache stores FixedArray data, not functions, so it is not
// probed here.)
void CompilationCache::Remove(Handle<SharedFunctionInfo> function_info) {
  if (!IsEnabled()) return;

  eval_global_.Remove(function_info);
  eval_contextual_.Remove(function_info);
  script_.Remove(function_info);
}


// Script lookup; returns a null handle on miss or when the cache is disabled.
Handle<SharedFunctionInfo> CompilationCache::LookupScript(Handle<String> source,
                                                          Handle<Object> name,
                                                          int line_offset,
                                                          int column_offset) {
  if (!IsEnabled()) {
    return Handle<SharedFunctionInfo>::null();
  }

  return script_.Lookup(source, name, line_offset, column_offset);
}


// Eval lookup; dispatches to the global or contextual sub-cache based on
// is_global.  Returns a null handle on miss or when the cache is disabled.
Handle<SharedFunctionInfo> CompilationCache::LookupEval(
    Handle<String> source,
    Handle<Context> context,
    bool is_global,
    StrictModeFlag strict_mode) {
  if (!IsEnabled()) {
    return Handle<SharedFunctionInfo>::null();
  }

  Handle<SharedFunctionInfo> result;
  if (is_global) {
    result = eval_global_.Lookup(source, context, strict_mode);
  } else {
    result = eval_contextual_.Lookup(source, context, strict_mode);
  }
  return result;
}


// RegExp lookup; returns a null handle on miss or when the cache is disabled.
Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
                                                  JSRegExp::Flags flags) {
  if (!IsEnabled()) {
    return Handle<FixedArray>::null();
  }

  return reg_exp_.Lookup(source, flags);
}


void CompilationCache::PutScript(Handle<String> source,
                                 Handle<SharedFunctionInfo> function_info) {
  if (!IsEnabled()) {
    return;
  }

  script_.Put(source, function_info);
}


void CompilationCache::PutEval(Handle<String> source,
                               Handle<Context> context,
                               bool is_global,
                               Handle<SharedFunctionInfo> function_info) {
  if (!IsEnabled()) {
    return;
  }

  HandleScope scope(isolate());
  if (is_global) {
    eval_global_.Put(source, context, function_info);
  } else {
    eval_contextual_.Put(source, context, function_info);
  }
}



void CompilationCache::PutRegExp(Handle<String> source,
                                 JSRegExp::Flags flags,
                                 Handle<FixedArray> data) {
  if (!IsEnabled()) {
    return;
  }

  reg_exp_.Put(source, flags, data);
}


// HashMap match function: the keys ARE the source hashes (see the
// reinterpret_cast below), so pointer equality is key equality.
static bool SourceHashCompare(void* key1, void* key2) {
  return key1 == key2;
}


// Lazily creates the set of functions marked for eager optimization.
HashMap* CompilationCache::EagerOptimizingSet() {
  if (eager_optimizing_set_ == NULL) {
    eager_optimizing_set_ = new HashMap(&SourceHashCompare);
  }
  return eager_optimizing_set_;
}


bool CompilationCache::ShouldOptimizeEagerly(Handle<JSFunction> function) {
  if (FLAG_opt_eagerly) return true;
  // The source hash doubles as both the key and the hash value.
  uint32_t hash = function->SourceHash();
  void* key = reinterpret_cast<void*>(hash);
  return EagerOptimizingSet()->Lookup(key, hash, false) != NULL;
}


void CompilationCache::MarkForEagerOptimizing(Handle<JSFunction> function) {
  uint32_t hash = function->SourceHash();
  void* key = reinterpret_cast<void*>(hash);
  // Lookup with insert=true adds the entry if not present.
  EagerOptimizingSet()->Lookup(key, hash, true);
}


void CompilationCache::MarkForLazyOptimizing(Handle<JSFunction> function) {
  uint32_t hash = function->SourceHash();
  void* key = reinterpret_cast<void*>(hash);
  EagerOptimizingSet()->Remove(key, hash);
}


void CompilationCache::ResetEagerOptimizingData() {
  HashMap* set = EagerOptimizingSet();
  if (set->occupancy() > 0) set->Clear();
}


void CompilationCache::Clear() {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Clear();
  }
}


void CompilationCache::Iterate(ObjectVisitor* v) {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Iterate(v);
  }
}


void CompilationCache::IterateFunctions(ObjectVisitor* v) {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->IterateFunctions(v);
  }
}


// Ages all sub-caches; named for its call site at the start of a
// mark-compact collection.
void CompilationCache::MarkCompactPrologue() {
  for (int i = 0; i < kSubCacheCount; i++) {
    subcaches_[i]->Age();
  }
}


void CompilationCache::Enable() {
  enabled_ = true;
}


// Disabling also clears the caches so no stale entries survive a later
// Enable().
void CompilationCache::Disable() {
  enabled_ = false;
  Clear();
}


} }  // namespace v8::internal