// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "src/allocation.h"
#include "src/ast.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

class ScriptData;
class HydrogenCodeStub;

// ParseRestriction is used to restrict the set of valid statements in a
// unit of compilation. Restriction violations cause a syntax error.
enum ParseRestriction {
  NO_PARSE_RESTRICTION,         // All expressions are allowed.
  ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
};

enum CachedDataMode {
  NO_CACHED_DATA,
  CONSUME_CACHED_DATA,
  PRODUCE_CACHED_DATA
};

struct OffsetRange {
  OffsetRange(int from, int to) : from(from), to(to) {}
  int from;
  int to;
};

// CompilationInfo encapsulates some information known at compile time. It
// is constructed based on the resources available at compile time.
class CompilationInfo {
 public:
  CompilationInfo(Handle<JSFunction> closure, Zone* zone);
  virtual ~CompilationInfo();

  Isolate* isolate() const {
    return isolate_;
  }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  bool is_lazy() const { return IsLazy::decode(flags_); }
  bool is_eval() const { return IsEval::decode(flags_); }
  bool is_global() const { return IsGlobal::decode(flags_); }
  StrictMode strict_mode() const { return StrictModeField::decode(flags_); }
  FunctionLiteral* function() const { return function_; }
  Scope* scope() const { return scope_; }
  Scope* global_scope() const { return global_scope_; }
  Handle<Code> code() const { return code_; }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  Handle<Script> script() const { return script_; }
  HydrogenCodeStub* code_stub() const { return code_stub_; }
  v8::Extension* extension() const { return extension_; }
  ScriptData** cached_data() const { return cached_data_; }
  CachedDataMode cached_data_mode() const {
    return cached_data_mode_;
  }
  Handle<Context> context() const { return context_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_heap_slots() const;
  Code::Flags flags() const;

  void MarkAsEval() {
    ASSERT(!is_lazy());
    flags_ |= IsEval::encode(true);
  }
  void MarkAsGlobal() {
    ASSERT(!is_lazy());
    flags_ |= IsGlobal::encode(true);
  }
  void set_parameter_count(int parameter_count) {
    ASSERT(IsStub());
    parameter_count_ = parameter_count;
  }

  void set_this_has_uses(bool has_uses) {
    this_has_uses_ = has_uses;
  }
  bool this_has_uses() {
    return this_has_uses_;
  }
  void SetStrictMode(StrictMode strict_mode) {
    ASSERT(this->strict_mode() == SLOPPY ||
           this->strict_mode() == strict_mode);
    flags_ = StrictModeField::update(flags_, strict_mode);
  }
  void MarkAsNative() {
    flags_ |= IsNative::encode(true);
  }

  bool is_native() const {
    return IsNative::decode(flags_);
  }

  bool is_calling() const {
    return is_deferred_calling() || is_non_deferred_calling();
  }

  void MarkAsDeferredCalling() {
    flags_ |= IsDeferredCalling::encode(true);
  }

  bool is_deferred_calling() const {
    return IsDeferredCalling::decode(flags_);
  }

  void MarkAsNonDeferredCalling() {
    flags_ |= IsNonDeferredCalling::encode(true);
  }

  bool is_non_deferred_calling() const {
    return IsNonDeferredCalling::decode(flags_);
  }

  void MarkAsSavesCallerDoubles() {
    flags_ |= SavesCallerDoubles::encode(true);
  }

  bool saves_caller_doubles() const {
    return SavesCallerDoubles::decode(flags_);
  }

  void MarkAsRequiresFrame() {
    flags_ |= RequiresFrame::encode(true);
  }

  bool requires_frame() const {
    return RequiresFrame::decode(flags_);
  }

  void MarkMustNotHaveEagerFrame() {
    flags_ |= MustNotHaveEagerFrame::encode(true);
  }

  bool GetMustNotHaveEagerFrame() const {
    return MustNotHaveEagerFrame::decode(flags_);
  }

  void MarkAsDebug() {
    flags_ |= IsDebug::encode(true);
  }

  bool is_debug() const {
    return IsDebug::decode(flags_);
  }

  bool IsCodePreAgingActive() const {
    return FLAG_optimize_for_size && FLAG_age_code && !is_debug();
  }

  void SetParseRestriction(ParseRestriction restriction) {
    flags_ = ParseRestrictionField::update(flags_, restriction);
  }

  ParseRestriction parse_restriction() const {
    return ParseRestrictionField::decode(flags_);
  }

  void SetFunction(FunctionLiteral* literal) {
    ASSERT(function_ == NULL);
    function_ = literal;
  }
  void PrepareForCompilation(Scope* scope);
  void SetGlobalScope(Scope* global_scope) {
    ASSERT(global_scope_ == NULL);
    global_scope_ = global_scope;
  }
  Handle<FixedArray> feedback_vector() const {
    return feedback_vector_;
  }
  void SetCode(Handle<Code> code) { code_ = code; }
  void SetExtension(v8::Extension* extension) {
    ASSERT(!is_lazy());
    extension_ = extension;
  }
  void SetCachedData(ScriptData** cached_data,
                     CachedDataMode cached_data_mode) {
    cached_data_mode_ = cached_data_mode;
    if (cached_data_mode == NO_CACHED_DATA) {
      cached_data_ = NULL;
    } else {
      ASSERT(!is_lazy());
      cached_data_ = cached_data;
    }
  }
  void SetContext(Handle<Context> context) {
    context_ = context;
  }

  void MarkCompilingForDebugging() {
    flags_ |= IsCompilingForDebugging::encode(true);
  }
  bool IsCompilingForDebugging() {
    return IsCompilingForDebugging::decode(flags_);
  }
  void MarkNonOptimizable() {
    SetMode(CompilationInfo::NONOPT);
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_global_object() const {
    return !closure().is_null() &&
        (closure()->context()->global_object() != NULL);
  }

  GlobalObject* global_object() const {
    return has_global_object() ? closure()->context()->global_object() : NULL;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsOptimizable() const { return mode_ == BASE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
    ASSERT(!shared_info_.is_null());
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
    unoptimized_code_ = unoptimized;
    optimization_id_ = isolate()->NextOptimizationId();
  }
  void DisableOptimization();

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return SupportsDeoptimization::decode(flags_);
  }
  void EnableDeoptimizationSupport() {
    ASSERT(IsOptimizable());
    flags_ |= SupportsDeoptimization::encode(true);
  }

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    ASSERT(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  ZoneList<Handle<HeapObject> >* dependencies(
      DependentCode::DependencyGroup group) {
    if (dependencies_[group] == NULL) {
      dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
    }
    return dependencies_[group];
  }

  void CommitDependencies(Handle<Code> code);

  void RollbackDependencies();

  void SaveHandles() {
    SaveHandle(&closure_);
    SaveHandle(&shared_info_);
    SaveHandle(&context_);
    SaveHandle(&script_);
    SaveHandle(&unoptimized_code_);
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }
  void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }

  int prologue_offset() const {
    ASSERT_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    ASSERT_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  // Adds offset range [from, to) where fp register does not point
  // to the current frame base. Used in CPU profiler to detect stack
  // samples where top frame is not set up.
  inline void AddNoFrameRange(int from, int to) {
    if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
  }

  List<OffsetRange>* ReleaseNoFrameRanges() {
    List<OffsetRange>* result = no_frame_ranges_;
    no_frame_ranges_ = NULL;
    return result;
  }

  Handle<Foreign> object_wrapper() {
    if (object_wrapper_.is_null()) {
      object_wrapper_ =
          isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
    }
    return object_wrapper_;
  }

  void AbortDueToDependencyChange() {
    ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    abort_due_to_dependency_ = true;
  }

  bool HasAbortedDueToDependencyChange() {
    ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    return abort_due_to_dependency_;
  }

  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
  }

  int optimization_id() const { return optimization_id_; }

 protected:
  CompilationInfo(Handle<Script> script,
                  Zone* zone);
  CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                  Zone* zone);
  CompilationInfo(HydrogenCodeStub* stub,
                  Isolate* isolate,
                  Zone* zone);

 private:
  Isolate* isolate_;

  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  // NONOPT is generated by the full codegen and is not prepared for
  // recompilation/bailouts. These functions are never recompiled.
  enum Mode {
    BASE,
    OPTIMIZE,
    NONOPT,
    STUB
  };

  void Initialize(Isolate* isolate, Mode mode, Zone* zone);

  void SetMode(Mode mode) {
    ASSERT(isolate()->use_crankshaft());
    mode_ = mode;
  }

  // Flags using template class BitField<type, start, length>. All are
  // false by default.
  //
  // Compilation is either eager or lazy.
  class IsLazy: public BitField<bool, 0, 1> {};
  // Flags that can be set for eager compilation.
  class IsEval: public BitField<bool, 1, 1> {};
  class IsGlobal: public BitField<bool, 2, 1> {};
  // If the function is being compiled for the debugger.
  class IsDebug: public BitField<bool, 3, 1> {};
  // Strict mode - used in eager compilation.
  class StrictModeField: public BitField<StrictMode, 4, 1> {};
  // Is this a function from our natives.
  class IsNative: public BitField<bool, 5, 1> {};
  // Is this code being compiled with support for deoptimization.
  class SupportsDeoptimization: public BitField<bool, 6, 1> {};
  // If compiling for debugging, produce just full code matching the
  // initial mode setting.
  class IsCompilingForDebugging: public BitField<bool, 7, 1> {};
  // If the compiled code contains calls that require building a frame.
  class IsCalling: public BitField<bool, 8, 1> {};
  // If the compiled code contains deferred calls that require building a
  // frame.
  class IsDeferredCalling: public BitField<bool, 9, 1> {};
  // If the compiled code contains non-deferred calls that require building
  // a frame.
  class IsNonDeferredCalling: public BitField<bool, 10, 1> {};
  // If the compiled code saves double caller registers that it clobbers.
  class SavesCallerDoubles: public BitField<bool, 11, 1> {};
  // If the set of valid statements is restricted.
  class ParseRestrictionField: public BitField<ParseRestriction, 12, 1> {};
  // If the function requires a frame (for unspecified reasons).
  class RequiresFrame: public BitField<bool, 13, 1> {};
  // If the function must not build a frame eagerly (for unspecified reasons).
  class MustNotHaveEagerFrame: public BitField<bool, 14, 1> {};

  unsigned flags_;

  // Fields filled in by the compilation pipeline.
  // AST filled in by the parser.
  FunctionLiteral* function_;
  // The scope of the function literal as a convenience. Set to indicate
  // that scopes have been analyzed.
  Scope* scope_;
  // The global scope provided as a convenience.
  Scope* global_scope_;
  // For compiled stubs, the stub object.
  HydrogenCodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Possible initial inputs to the compilation process.
  Handle<JSFunction> closure_;
  Handle<SharedFunctionInfo> shared_info_;
  Handle<Script> script_;

  // Fields possibly needed for eager compilation, NULL by default.
  v8::Extension* extension_;
  ScriptData** cached_data_;
  CachedDataMode cached_data_mode_;

  // The context of the caller for eval code, and the global context for a
  // global script. Will be a null handle otherwise.
  Handle<Context> context_;

  // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
  Handle<FixedArray> feedback_vector_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;
  // The unoptimized code we patched for OSR may not be the shared code
  // afterwards, since we may need to compile it again to include
  // deoptimization data. Keep track of which code we patched.
  Handle<Code> unoptimized_code_;

  // Flag whether compilation needs to be aborted due to a dependency change.
  bool abort_due_to_dependency_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];

  template<typename T>
  void SaveHandle(Handle<T>* object) {
    if (!object->is_null()) {
      Handle<T> handle(*(*object));
      *object = handle;
    }
  }

  BailoutReason bailout_reason_;

  int prologue_offset_;

  List<OffsetRange>* no_frame_ranges_;

  // A copy of shared_info()->opt_count() to avoid a handle dereference
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  bool this_has_uses_;

  Handle<Foreign> object_wrapper_;

  int optimization_id_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};


// Exactly like a CompilationInfo, except that it also creates and enters a
// Zone on construction and deallocates it on destruction.
class CompilationInfoWithZone: public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<Script> script)
      : CompilationInfo(script, &zone_),
        zone_(script->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
      : CompilationInfo(shared_info, &zone_),
        zone_(shared_info->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<JSFunction> closure)
      : CompilationInfo(closure, &zone_),
        zone_(closure->GetIsolate()) {}
  CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
      : CompilationInfo(stub, isolate, &zone_),
        zone_(isolate) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when the destructor is
  // called via a CompilationInfo pointer.
  virtual ~CompilationInfoWithZone() {
    RollbackDependencies();
  }

 private:
  Zone zone_;
};


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that calls the three compilation phases in
// Crankshaft and keeps track of its state. The three phases
// CreateGraph, OptimizeGraph and GenerateCode can either
// fail, bail out to the full code generator or succeed. Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
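//
// A rough sketch of the intended driving sequence (illustrative only, not
// taken from the actual callers; error handling is elided):
//
//   OptimizedCompileJob job(info);
//   if (job.CreateGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job.OptimizeGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job.GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
//     // Optimized code was produced and can be installed.
//   } else if (job.last_status() == OptimizedCompileJob::BAILED_OUT) {
//     // Fall back to the code produced by the full code generator.
//   }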
class OptimizedCompileJob: public ZoneObject {
 public:
  explicit OptimizedCompileJob(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        last_status_(FAILED),
        awaiting_install_(false) { }

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  MUST_USE_RESULT Status AbortOptimization(
      BailoutReason reason = kNoReason) {
    if (reason != kNoReason) info_->set_bailout_reason(reason);
    return SetLastStatus(BAILED_OUT);
  }

  MUST_USE_RESULT Status AbortAndDisableOptimization(
      BailoutReason reason = kNoReason) {
    if (reason != kNoReason) info_->set_bailout_reason(reason);
    // The reference to the shared function info does not change between
    // phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    info_->shared_info()->DisableOptimization(info_->bailout_reason());
    return SetLastStatus(BAILED_OUT);
  }

  void WaitForInstall() {
    ASSERT(info_->is_osr());
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  TimeDelta time_taken_to_create_graph_;
  TimeDelta time_taken_to_optimize_;
  TimeDelta time_taken_to_codegen_;
  Status last_status_;
  bool awaiting_install_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
    Timer(OptimizedCompileJob* job, TimeDelta* location)
        : job_(job), location_(location) {
      ASSERT(location_ != NULL);
      timer_.Start();
    }

    ~Timer() {
      *location_ += timer_.Elapsed();
    }

    OptimizedCompileJob* job_;
    ElapsedTimer timer_;
    TimeDelta* location_;
  };
};


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function
// without parameters which can then be executed. If the source code contains
// other functions, they will be compiled and allocated as part of the
// compilation of the source code.

// Please note this interface returns shared function infos. This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context; see the usage sketch before CompileScript()
// below.

class Compiler : public AllStatic {
 public:
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<JSFunction> function);
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<SharedFunctionInfo> shared);
  static bool EnsureCompiled(Handle<JSFunction> function,
                             ClearExceptionFlag flag);
  MUST_USE_RESULT static MaybeHandle<Code> GetCodeForDebugging(
      Handle<JSFunction> function);

  static void CompileForLiveEdit(Handle<Script> script);

  // Compile a String source within a context for eval.
  MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
      Handle<String> source,
      Handle<Context> context,
      StrictMode strict_mode,
      ParseRestriction restriction,
      int scope_position);

  // Compile a String source within a context.
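  //
  // A sketch of typical usage, assuming suitable 'source', 'script_name',
  // 'context' handles and the current 'isolate' are in scope (illustrative
  // only; the zero offsets and NULL arguments are placeholder values, and
  // error checking is elided):
  //
  //   Handle<SharedFunctionInfo> shared = Compiler::CompileScript(
  //       source, script_name, 0, 0, false, context, NULL, NULL,
  //       NO_CACHED_DATA, NOT_NATIVES_CODE);
  //   // The result still has to be turned into a real, context-bound
  //   // function:
  //   Handle<JSFunction> fun =
  //       isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
  //                                                             context);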
  static Handle<SharedFunctionInfo> CompileScript(
      Handle<String> source,
      Handle<Object> script_name,
      int line_offset,
      int column_offset,
      bool is_shared_cross_origin,
      Handle<Context> context,
      v8::Extension* extension,
      ScriptData** cached_data,
      CachedDataMode cached_data_mode,
      NativesFlag is_natives_code);

  // Create a shared function info object (the code may be lazily compiled).
  static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
                                                      Handle<Script> script);

  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };

  // Generate and return optimized code or start a concurrent optimization job.
  // In the latter case, return the InOptimizationQueue builtin. On failure,
  // return the empty handle.
  MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
      Handle<JSFunction> function,
      Handle<Code> current_code,
      ConcurrencyMode mode,
      BailoutId osr_ast_id = BailoutId::None());

  // Generate and return code from previously queued optimization job.
  // On failure, return the empty handle.
  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);

  static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                        CompilationInfo* info,
                                        Handle<SharedFunctionInfo> shared);
};


class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  unsigned info_zone_start_allocation_size_;
  ElapsedTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};


} }  // namespace v8::internal

#endif  // V8_COMPILER_H_