// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "allocation.h"
#include "ast.h"
#include "zone.h"

namespace v8 {
namespace internal {

class ScriptDataImpl;
class HydrogenCodeStub;

// ParseRestriction is used to restrict the set of valid statements in a
// unit of compilation.  Restriction violations cause a syntax error.
enum ParseRestriction {
  NO_PARSE_RESTRICTION,         // All expressions are allowed.
  ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
};

struct OffsetRange {
  OffsetRange(int from, int to) : from(from), to(to) {}
  int from;
  int to;
};

// CompilationInfo encapsulates some information known at compile time.  It
// is constructed based on the resources available at compile-time.
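//
// A minimal usage sketch, purely illustrative (the subclass and setters used
// here are declared in this file; 'closure' is assumed to be a
// Handle<JSFunction> obtained by the caller):
//
//   CompilationInfoWithZone info(closure);    // lazy compilation of a closure
//   info.MarkAsInLoop();                      // only valid for lazy compilation
//   if (info.IsOptimizable()) {
//     info.SetOptimizing(BailoutId::None());  // request optimized (non-OSR) code
//   }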
class CompilationInfo {
 public:
  CompilationInfo(Handle<JSFunction> closure, Zone* zone);
  virtual ~CompilationInfo();

  Isolate* isolate() const {
    return isolate_;
  }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  bool is_lazy() const { return IsLazy::decode(flags_); }
  bool is_eval() const { return IsEval::decode(flags_); }
  bool is_global() const { return IsGlobal::decode(flags_); }
  bool is_classic_mode() const { return language_mode() == CLASSIC_MODE; }
  bool is_extended_mode() const { return language_mode() == EXTENDED_MODE; }
  LanguageMode language_mode() const {
    return LanguageModeField::decode(flags_);
  }
  bool is_in_loop() const { return IsInLoop::decode(flags_); }
  FunctionLiteral* function() const { return function_; }
  Scope* scope() const { return scope_; }
  Scope* global_scope() const { return global_scope_; }
  Handle<Code> code() const { return code_; }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  Handle<Script> script() const { return script_; }
  HydrogenCodeStub* code_stub() const { return code_stub_; }
  v8::Extension* extension() const { return extension_; }
  ScriptDataImpl* pre_parse_data() const { return pre_parse_data_; }
  Handle<Context> context() const { return context_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  uint32_t osr_pc_offset() const { return osr_pc_offset_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_heap_slots() const;
  Code::Flags flags() const;

  void MarkAsEval() {
    ASSERT(!is_lazy());
    flags_ |= IsEval::encode(true);
  }
  void MarkAsGlobal() {
    ASSERT(!is_lazy());
    flags_ |= IsGlobal::encode(true);
  }
  void set_parameter_count(int parameter_count) {
    ASSERT(IsStub());
    parameter_count_ = parameter_count;
  }
  void SetLanguageMode(LanguageMode language_mode) {
    ASSERT(this->language_mode() == CLASSIC_MODE ||
           this->language_mode() == language_mode ||
           language_mode == EXTENDED_MODE);
    flags_ = LanguageModeField::update(flags_, language_mode);
  }
  void MarkAsInLoop() {
    ASSERT(is_lazy());
    flags_ |= IsInLoop::encode(true);
  }
  void MarkAsNative() {
    flags_ |= IsNative::encode(true);
  }

  bool is_native() const {
    return IsNative::decode(flags_);
  }

  bool is_calling() const {
    return is_deferred_calling() || is_non_deferred_calling();
  }

  void MarkAsDeferredCalling() {
    flags_ |= IsDeferredCalling::encode(true);
  }

  bool is_deferred_calling() const {
    return IsDeferredCalling::decode(flags_);
  }

  void MarkAsNonDeferredCalling() {
    flags_ |= IsNonDeferredCalling::encode(true);
  }

  bool is_non_deferred_calling() const {
    return IsNonDeferredCalling::decode(flags_);
  }

  void MarkAsSavesCallerDoubles() {
    flags_ |= SavesCallerDoubles::encode(true);
  }

  bool saves_caller_doubles() const {
    return SavesCallerDoubles::decode(flags_);
  }

  void MarkAsRequiresFrame() {
    flags_ |= RequiresFrame::encode(true);
  }

  bool requires_frame() const {
    return RequiresFrame::decode(flags_);
  }

  void SetParseRestriction(ParseRestriction restriction) {
    flags_ = ParseRestricitonField::update(flags_, restriction);
  }

  ParseRestriction parse_restriction() const {
    return ParseRestricitonField::decode(flags_);
  }

  void SetFunction(FunctionLiteral* literal) {
    ASSERT(function_ == NULL);
    function_ = literal;
  }
  void SetScope(Scope* scope) {
    ASSERT(scope_ == NULL);
    scope_ = scope;
  }
  void SetGlobalScope(Scope* global_scope) {
    ASSERT(global_scope_ == NULL);
    global_scope_ = global_scope;
  }
  void SetCode(Handle<Code> code) { code_ = code; }
  void SetExtension(v8::Extension* extension) {
    ASSERT(!is_lazy());
    extension_ = extension;
  }
  void SetPreParseData(ScriptDataImpl* pre_parse_data) {
    ASSERT(!is_lazy());
    pre_parse_data_ = pre_parse_data;
  }
  void SetContext(Handle<Context> context) {
    context_ = context;
  }
  void MarkCompilingForDebugging(Handle<Code> current_code) {
    ASSERT(mode_ != OPTIMIZE);
    ASSERT(current_code->kind() == Code::FUNCTION);
    flags_ |= IsCompilingForDebugging::encode(true);
    if (current_code->is_compiled_optimizable()) {
      EnableDeoptimizationSupport();
    } else {
      mode_ = CompilationInfo::NONOPT;
    }
  }
  bool IsCompilingForDebugging() {
    return IsCompilingForDebugging::decode(flags_);
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_global_object() const {
    return !closure().is_null() &&
        (closure()->context()->global_object() != NULL);
  }

  GlobalObject* global_object() const {
    return has_global_object() ? closure()->context()->global_object() : NULL;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsOptimizable() const { return mode_ == BASE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id) {
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
  }
  void DisableOptimization();

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return SupportsDeoptimization::decode(flags_);
  }
  void EnableDeoptimizationSupport() {
    ASSERT(IsOptimizable());
    flags_ |= SupportsDeoptimization::encode(true);
  }

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  // Reset code to the unoptimized version when optimization is aborted.
  void AbortOptimization() {
    SetCode(handle(shared_info()->code()));
  }

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    ASSERT(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  ZoneList<Handle<HeapObject> >* dependencies(
      DependentCode::DependencyGroup group) {
    if (dependencies_[group] == NULL) {
      dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
    }
    return dependencies_[group];
  }

  void CommitDependencies(Handle<Code> code);

  void RollbackDependencies();

  void SaveHandles() {
    SaveHandle(&closure_);
    SaveHandle(&shared_info_);
    SaveHandle(&context_);
    SaveHandle(&script_);
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }
  void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }

  int prologue_offset() const {
    ASSERT_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    ASSERT_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  // Adds offset range [from, to) where fp register does not point
  // to the current frame base. Used in CPU profiler to detect stack
  // samples where top frame is not set up.
  inline void AddNoFrameRange(int from, int to) {
    if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
  }

  List<OffsetRange>* ReleaseNoFrameRanges() {
    List<OffsetRange>* result = no_frame_ranges_;
    no_frame_ranges_ = NULL;
    return result;
  }

  Handle<Foreign> object_wrapper() {
    if (object_wrapper_.is_null()) {
      object_wrapper_ =
          isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
    }
    return object_wrapper_;
  }

  void AbortDueToDependencyChange() {
    ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    abort_due_to_dependency_ = true;
  }

  bool HasAbortedDueToDependencyChange() {
    ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    return abort_due_to_dependency_;
  }

  void set_osr_pc_offset(uint32_t pc_offset) {
    osr_pc_offset_ = pc_offset;
  }

  bool HasSameOsrEntry(Handle<JSFunction> function, uint32_t pc_offset) {
    return osr_pc_offset_ == pc_offset && function.is_identical_to(closure_);
  }

 protected:
  CompilationInfo(Handle<Script> script,
                  Zone* zone);
  CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                  Zone* zone);
  CompilationInfo(HydrogenCodeStub* stub,
                  Isolate* isolate,
                  Zone* zone);

 private:
  Isolate* isolate_;

  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  // NONOPT is generated by the full codegen and is not prepared for
  // recompilation/bailouts.  These functions are never recompiled.
  enum Mode {
    BASE,
    OPTIMIZE,
    NONOPT,
    STUB
  };

  void Initialize(Isolate* isolate, Mode mode, Zone* zone);

  void SetMode(Mode mode) {
    ASSERT(isolate()->use_crankshaft());
    mode_ = mode;
  }

  // Flags using template class BitField<type, start, length>.  All are
  // false by default.
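  //
  // For orientation, a sketch of how these flags are read and written (this
  // mirrors the accessors above; encode/decode/update are the BitField
  // operations already used throughout this class):
  //
  //   flags_ |= IsNative::encode(true);                           // set bit 6
  //   bool native = IsNative::decode(flags_);                     // read bit 6
  //   flags_ = LanguageModeField::update(flags_, EXTENDED_MODE);  // bits 4-5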
  //
  // Compilation is either eager or lazy.
  class IsLazy: public BitField<bool, 0, 1> {};
  // Flags that can be set for eager compilation.
  class IsEval: public BitField<bool, 1, 1> {};
  class IsGlobal: public BitField<bool, 2, 1> {};
  // Flags that can be set for lazy compilation.
  class IsInLoop: public BitField<bool, 3, 1> {};
  // Strict mode - used in eager compilation.
  class LanguageModeField: public BitField<LanguageMode, 4, 2> {};
  // Is this a function from our natives.
  class IsNative: public BitField<bool, 6, 1> {};
  // Is this code being compiled with support for deoptimization?
  class SupportsDeoptimization: public BitField<bool, 7, 1> {};
  // If compiling for debugging produce just full code matching the
  // initial mode setting.
  class IsCompilingForDebugging: public BitField<bool, 8, 1> {};
  // If the compiled code contains calls that require building a frame.
  class IsCalling: public BitField<bool, 9, 1> {};
  // If the compiled code contains calls in deferred code that require
  // building a frame.
  class IsDeferredCalling: public BitField<bool, 10, 1> {};
  // If the compiled code contains calls in non-deferred code that require
  // building a frame.
  class IsNonDeferredCalling: public BitField<bool, 11, 1> {};
  // If the compiled code saves double caller registers that it clobbers.
  class SavesCallerDoubles: public BitField<bool, 12, 1> {};
  // If the set of valid statements is restricted.
  class ParseRestricitonField: public BitField<ParseRestriction, 13, 1> {};
  // If the function requires a frame (for unspecified reasons).
  class RequiresFrame: public BitField<bool, 14, 1> {};

  unsigned flags_;

  // Fields filled in by the compilation pipeline.
  // AST filled in by the parser.
  FunctionLiteral* function_;
  // The scope of the function literal as a convenience.  Set to indicate
  // that scopes have been analyzed.
  Scope* scope_;
  // The global scope provided as a convenience.
  Scope* global_scope_;
  // For compiled stubs, the stub object.
  HydrogenCodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Possible initial inputs to the compilation process.
  Handle<JSFunction> closure_;
  Handle<SharedFunctionInfo> shared_info_;
  Handle<Script> script_;

  // Fields possibly needed for eager compilation, NULL by default.
  v8::Extension* extension_;
  ScriptDataImpl* pre_parse_data_;

  // The context of the caller for eval code, and the global context for a
  // global script.  Will be a null handle otherwise.
  Handle<Context> context_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;
  // The pc_offset corresponding to osr_ast_id_ in unoptimized code.
  // We can look this up in the back edge table, but cache it for quick access.
  uint32_t osr_pc_offset_;

  // Flag whether compilation needs to be aborted due to dependency change.
  bool abort_due_to_dependency_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];

  template<typename T>
  void SaveHandle(Handle<T>* object) {
    if (!object->is_null()) {
      Handle<T> handle(*(*object));
      *object = handle;
    }
  }

  BailoutReason bailout_reason_;

  int prologue_offset_;

  List<OffsetRange>* no_frame_ranges_;

  // A copy of shared_info()->opt_count() to avoid handle deref
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  Handle<Foreign> object_wrapper_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};


// Exactly like a CompilationInfo, except also creates and enters a
// Zone on construction and deallocates it on exit.
class CompilationInfoWithZone: public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<Script> script)
      : CompilationInfo(script, &zone_),
        zone_(script->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
      : CompilationInfo(shared_info, &zone_),
        zone_(shared_info->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<JSFunction> closure)
      : CompilationInfo(closure, &zone_),
        zone_(closure->GetIsolate()) {}
  CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
      : CompilationInfo(stub, isolate, &zone_),
        zone_(isolate) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when the destructor is
  // called when cast as a CompilationInfo.
  virtual ~CompilationInfoWithZone() {
    RollbackDependencies();
  }

 private:
  Zone zone_;
};


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that calls the three compilation phases in
// Crankshaft and keeps track of its state.  The three phases
// CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
// fail, bail-out to the full code generator or succeed.  Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
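//
// A sketch of the typical phase sequence, for illustration only (the actual
// driver logic lives in the compiler and the optimizing compiler thread):
//
//   RecompileJob* job = new(info->zone()) RecompileJob(info);
//   if (job->CreateGraph() == RecompileJob::SUCCEEDED &&
//       job->OptimizeGraph() == RecompileJob::SUCCEEDED &&
//       job->GenerateAndInstallCode() == RecompileJob::SUCCEEDED) {
//     // Optimized code was generated and installed.
//   } else if (job->last_status() == RecompileJob::BAILED_OUT) {
//     // Bail out to the full code generator.
//   }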
class RecompileJob: public ZoneObject {
 public:
  explicit RecompileJob(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        last_status_(FAILED),
        awaiting_install_(false) { }

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateAndInstallCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  MUST_USE_RESULT Status AbortOptimization() {
    info_->AbortOptimization();
    info_->shared_info()->DisableOptimization(info_->bailout_reason());
    return SetLastStatus(BAILED_OUT);
  }

  void WaitForInstall() {
    ASSERT(info_->is_osr());
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  TimeDelta time_taken_to_create_graph_;
  TimeDelta time_taken_to_optimize_;
  TimeDelta time_taken_to_codegen_;
  Status last_status_;
  bool awaiting_install_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
    Timer(RecompileJob* job, TimeDelta* location)
        : job_(job), location_(location) {
      ASSERT(location_ != NULL);
      timer_.Start();
    }

    ~Timer() {
      *location_ += timer_.Elapsed();
    }

    RecompileJob* job_;
    ElapsedTimer timer_;
    TimeDelta* location_;
  };
};


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function w/o
// parameters which then can be executed.  If the source code contains other
// functions, they will be compiled and allocated as part of the compilation
// of the source code.

// Please note this interface returns shared function infos.  This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context.

class Compiler : public AllStatic {
 public:
  // Call count before primitive functions trigger their own optimization.
  static const int kCallsUntilPrimitiveOpt = 200;

  // All routines return a SharedFunctionInfo.
  // If an error occurs an exception is raised and the return handle
  // contains NULL.

  // Compile a String source within a context.
  static Handle<SharedFunctionInfo> Compile(Handle<String> source,
                                            Handle<Object> script_name,
                                            int line_offset,
                                            int column_offset,
                                            bool is_shared_cross_origin,
                                            Handle<Context> context,
                                            v8::Extension* extension,
                                            ScriptDataImpl* pre_data,
                                            Handle<Object> script_data,
                                            NativesFlag is_natives_code);

  // Compile a String source within a context for Eval.
  static Handle<SharedFunctionInfo> CompileEval(Handle<String> source,
                                                Handle<Context> context,
                                                bool is_global,
                                                LanguageMode language_mode,
                                                ParseRestriction restriction,
                                                int scope_position);
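
  // For orientation, an illustrative sketch of the call sequence noted in the
  // class comment above (not itself part of this interface; 'source', 'name',
  // 'context' and 'isolate' are assumed to be supplied by the caller):
  //
  //   Handle<SharedFunctionInfo> shared =
  //       Compiler::Compile(source, name, 0, 0, false, context,
  //                         NULL, NULL, Handle<Object>(), NOT_NATIVES_CODE);
  //   if (!shared.is_null()) {
  //     Handle<JSFunction> fun =
  //         isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
  //                                                               context);
  //   }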

  // Compile from function info (used for lazy compilation).  Returns true on
  // success and false if the compilation resulted in a stack overflow.
  static bool CompileLazy(CompilationInfo* info);

  static bool RecompileConcurrent(Handle<JSFunction> function,
                                  uint32_t osr_pc_offset = 0);

  // Compile a shared function info object (the function is possibly lazily
  // compiled).
  static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
                                                      Handle<Script> script);

  // Set the function info for a newly compiled function.
  static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                              FunctionLiteral* lit,
                              bool is_toplevel,
                              Handle<Script> script);

  static Handle<Code> InstallOptimizedCode(RecompileJob* job);

#ifdef ENABLE_DEBUGGER_SUPPORT
  static bool MakeCodeForLiveEdit(CompilationInfo* info);
#endif

  static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                        CompilationInfo* info,
                                        Handle<SharedFunctionInfo> shared);
};


class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  unsigned info_zone_start_allocation_size_;
  ElapsedTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};


} }  // namespace v8::internal

#endif  // V8_COMPILER_H_