1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/crankshaft/lithium.h" 6 7 #include "src/ast/scopes.h" 8 9 #if V8_TARGET_ARCH_IA32 10 #include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT 11 #include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT 12 #elif V8_TARGET_ARCH_X64 13 #include "src/crankshaft/x64/lithium-x64.h" // NOLINT 14 #include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT 15 #elif V8_TARGET_ARCH_ARM 16 #include "src/crankshaft/arm/lithium-arm.h" // NOLINT 17 #include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT 18 #elif V8_TARGET_ARCH_PPC 19 #include "src/crankshaft/ppc/lithium-ppc.h" // NOLINT 20 #include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT 21 #elif V8_TARGET_ARCH_MIPS 22 #include "src/crankshaft/mips/lithium-mips.h" // NOLINT 23 #include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT 24 #elif V8_TARGET_ARCH_ARM64 25 #include "src/crankshaft/arm64/lithium-arm64.h" // NOLINT 26 #include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT 27 #elif V8_TARGET_ARCH_MIPS64 28 #include "src/crankshaft/mips64/lithium-mips64.h" // NOLINT 29 #include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT 30 #elif V8_TARGET_ARCH_X87 31 #include "src/crankshaft/x87/lithium-x87.h" // NOLINT 32 #include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT 33 #else 34 #error "Unknown architecture." 35 #endif 36 37 namespace v8 { 38 namespace internal { 39 40 41 void LOperand::PrintTo(StringStream* stream) { 42 LUnallocated* unalloc = NULL; 43 switch (kind()) { 44 case INVALID: 45 stream->Add("(0)"); 46 break; 47 case UNALLOCATED: 48 unalloc = LUnallocated::cast(this); 49 stream->Add("v%d", unalloc->virtual_register()); 50 if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) { 51 stream->Add("(=%dS)", unalloc->fixed_slot_index()); 52 break; 53 } 54 switch (unalloc->extended_policy()) { 55 case LUnallocated::NONE: 56 break; 57 case LUnallocated::FIXED_REGISTER: { 58 int reg_index = unalloc->fixed_register_index(); 59 if (reg_index < 0 || reg_index >= Register::kNumRegisters) { 60 stream->Add("(=invalid_reg#%d)", reg_index); 61 } else { 62 const char* register_name = 63 Register::from_code(reg_index).ToString(); 64 stream->Add("(=%s)", register_name); 65 } 66 break; 67 } 68 case LUnallocated::FIXED_DOUBLE_REGISTER: { 69 int reg_index = unalloc->fixed_register_index(); 70 if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) { 71 stream->Add("(=invalid_double_reg#%d)", reg_index); 72 } else { 73 const char* double_register_name = 74 DoubleRegister::from_code(reg_index).ToString(); 75 stream->Add("(=%s)", double_register_name); 76 } 77 break; 78 } 79 case LUnallocated::MUST_HAVE_REGISTER: 80 stream->Add("(R)"); 81 break; 82 case LUnallocated::MUST_HAVE_DOUBLE_REGISTER: 83 stream->Add("(D)"); 84 break; 85 case LUnallocated::WRITABLE_REGISTER: 86 stream->Add("(WR)"); 87 break; 88 case LUnallocated::SAME_AS_FIRST_INPUT: 89 stream->Add("(1)"); 90 break; 91 case LUnallocated::ANY: 92 stream->Add("(-)"); 93 break; 94 } 95 break; 96 case CONSTANT_OPERAND: 97 stream->Add("[constant:%d]", index()); 98 break; 99 case STACK_SLOT: 100 stream->Add("[stack:%d]", index()); 101 break; 102 case DOUBLE_STACK_SLOT: 103 stream->Add("[double_stack:%d]", index()); 104 break; 105 case REGISTER: { 106 int reg_index = index(); 107 if (reg_index < 0 || reg_index >= Register::kNumRegisters) { 108 stream->Add("(=invalid_reg#%d|R)", reg_index); 109 } else { 110 stream->Add("[%s|R]", Register::from_code(reg_index).ToString()); 111 } 112 break; 113 } 114 case DOUBLE_REGISTER: { 115 int reg_index = index(); 116 if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) { 117 stream->Add("(=invalid_double_reg#%d|R)", reg_index); 118 } else { 119 stream->Add("[%s|R]", DoubleRegister::from_code(reg_index).ToString()); 120 } 121 break; 122 } 123 } 124 } 125 126 127 template<LOperand::Kind kOperandKind, int kNumCachedOperands> 128 LSubKindOperand<kOperandKind, kNumCachedOperands>* 129 LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL; 130 131 132 template<LOperand::Kind kOperandKind, int kNumCachedOperands> 133 void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() { 134 if (cache) return; 135 cache = new LSubKindOperand[kNumCachedOperands]; 136 for (int i = 0; i < kNumCachedOperands; i++) { 137 cache[i].ConvertTo(kOperandKind, i); 138 } 139 } 140 141 142 template<LOperand::Kind kOperandKind, int kNumCachedOperands> 143 void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() { 144 delete[] cache; 145 cache = NULL; 146 } 147 148 149 void LOperand::SetUpCaches() { 150 #define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache(); 151 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP) 152 #undef LITHIUM_OPERAND_SETUP 153 } 154 155 156 void LOperand::TearDownCaches() { 157 #define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache(); 158 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN) 159 #undef LITHIUM_OPERAND_TEARDOWN 160 } 161 162 163 bool LParallelMove::IsRedundant() const { 164 for (int i = 0; i < move_operands_.length(); ++i) { 165 if (!move_operands_[i].IsRedundant()) return false; 166 } 167 return true; 168 } 169 170 171 void LParallelMove::PrintDataTo(StringStream* stream) const { 172 bool first = true; 173 for (int i = 0; i < move_operands_.length(); ++i) { 174 if (!move_operands_[i].IsEliminated()) { 175 LOperand* source = move_operands_[i].source(); 176 LOperand* destination = move_operands_[i].destination(); 177 if (!first) stream->Add(" "); 178 first = false; 179 if (source->Equals(destination)) { 180 destination->PrintTo(stream); 181 } else { 182 destination->PrintTo(stream); 183 stream->Add(" = "); 184 source->PrintTo(stream); 185 } 186 stream->Add(";"); 187 } 188 } 189 } 190 191 192 void LEnvironment::PrintTo(StringStream* stream) { 193 stream->Add("[id=%d|", ast_id().ToInt()); 194 if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) { 195 stream->Add("deopt_id=%d|", deoptimization_index()); 196 } 197 stream->Add("parameters=%d|", parameter_count()); 198 stream->Add("arguments_stack_height=%d|", arguments_stack_height()); 199 for (int i = 0; i < values_.length(); ++i) { 200 if (i != 0) stream->Add(";"); 201 if (values_[i] == NULL) { 202 stream->Add("[hole]"); 203 } else { 204 values_[i]->PrintTo(stream); 205 } 206 } 207 stream->Add("]"); 208 } 209 210 211 void LPointerMap::RecordPointer(LOperand* op, Zone* zone) { 212 // Do not record arguments as pointers. 213 if (op->IsStackSlot() && op->index() < 0) return; 214 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot()); 215 pointer_operands_.Add(op, zone); 216 } 217 218 219 void LPointerMap::RemovePointer(LOperand* op) { 220 // Do not record arguments as pointers. 221 if (op->IsStackSlot() && op->index() < 0) return; 222 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot()); 223 for (int i = 0; i < pointer_operands_.length(); ++i) { 224 if (pointer_operands_[i]->Equals(op)) { 225 pointer_operands_.Remove(i); 226 --i; 227 } 228 } 229 } 230 231 232 void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) { 233 // Do not record arguments as pointers. 234 if (op->IsStackSlot() && op->index() < 0) return; 235 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot()); 236 untagged_operands_.Add(op, zone); 237 } 238 239 240 void LPointerMap::PrintTo(StringStream* stream) { 241 stream->Add("{"); 242 for (int i = 0; i < pointer_operands_.length(); ++i) { 243 if (i != 0) stream->Add(";"); 244 pointer_operands_[i]->PrintTo(stream); 245 } 246 stream->Add("}"); 247 } 248 249 250 int StackSlotOffset(int index) { 251 if (index >= 0) { 252 // Local or spill slot. Skip the frame pointer, function, and 253 // context in the fixed part of the frame. 254 return -(index + 1) * kPointerSize - 255 StandardFrameConstants::kFixedFrameSizeFromFp; 256 } else { 257 // Incoming parameter. Skip the return address. 258 return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize; 259 } 260 } 261 262 263 LChunk::LChunk(CompilationInfo* info, HGraph* graph) 264 : spill_slot_count_(0), 265 info_(info), 266 graph_(graph), 267 instructions_(32, info->zone()), 268 pointer_maps_(8, info->zone()), 269 inlined_functions_(1, info->zone()), 270 deprecation_dependencies_(32, info->zone()), 271 stability_dependencies_(8, info->zone()) {} 272 273 274 LLabel* LChunk::GetLabel(int block_id) const { 275 HBasicBlock* block = graph_->blocks()->at(block_id); 276 int first_instruction = block->first_instruction_index(); 277 return LLabel::cast(instructions_[first_instruction]); 278 } 279 280 281 int LChunk::LookupDestination(int block_id) const { 282 LLabel* cur = GetLabel(block_id); 283 while (cur->replacement() != NULL) { 284 cur = cur->replacement(); 285 } 286 return cur->block_id(); 287 } 288 289 Label* LChunk::GetAssemblyLabel(int block_id) const { 290 LLabel* label = GetLabel(block_id); 291 DCHECK(!label->HasReplacement()); 292 return label->label(); 293 } 294 295 296 void LChunk::MarkEmptyBlocks() { 297 LPhase phase("L_Mark empty blocks", this); 298 for (int i = 0; i < graph()->blocks()->length(); ++i) { 299 HBasicBlock* block = graph()->blocks()->at(i); 300 int first = block->first_instruction_index(); 301 int last = block->last_instruction_index(); 302 LInstruction* first_instr = instructions()->at(first); 303 LInstruction* last_instr = instructions()->at(last); 304 305 LLabel* label = LLabel::cast(first_instr); 306 if (last_instr->IsGoto()) { 307 LGoto* goto_instr = LGoto::cast(last_instr); 308 if (label->IsRedundant() && 309 !label->is_loop_header()) { 310 bool can_eliminate = true; 311 for (int i = first + 1; i < last && can_eliminate; ++i) { 312 LInstruction* cur = instructions()->at(i); 313 if (cur->IsGap()) { 314 LGap* gap = LGap::cast(cur); 315 if (!gap->IsRedundant()) { 316 can_eliminate = false; 317 } 318 } else { 319 can_eliminate = false; 320 } 321 } 322 if (can_eliminate) { 323 label->set_replacement(GetLabel(goto_instr->block_id())); 324 } 325 } 326 } 327 } 328 } 329 330 331 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) { 332 LInstructionGap* gap = new (zone()) LInstructionGap(block); 333 gap->set_hydrogen_value(instr->hydrogen_value()); 334 int index = -1; 335 if (instr->IsControl()) { 336 instructions_.Add(gap, zone()); 337 index = instructions_.length(); 338 instructions_.Add(instr, zone()); 339 } else { 340 index = instructions_.length(); 341 instructions_.Add(instr, zone()); 342 instructions_.Add(gap, zone()); 343 } 344 if (instr->HasPointerMap()) { 345 pointer_maps_.Add(instr->pointer_map(), zone()); 346 instr->pointer_map()->set_lithium_position(index); 347 } 348 } 349 350 351 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) { 352 return LConstantOperand::Create(constant->id(), zone()); 353 } 354 355 356 int LChunk::GetParameterStackSlot(int index) const { 357 // The receiver is at index 0, the first parameter at index 1, so we 358 // shift all parameter indexes down by the number of parameters, and 359 // make sure they end up negative so they are distinguishable from 360 // spill slots. 361 int result = index - info()->num_parameters() - 1; 362 363 DCHECK(result < 0); 364 return result; 365 } 366 367 368 // A parameter relative to ebp in the arguments stub. 369 int LChunk::ParameterAt(int index) { 370 DCHECK(-1 <= index); // -1 is the receiver. 371 return (1 + info()->scope()->num_parameters() - index) * 372 kPointerSize; 373 } 374 375 376 LGap* LChunk::GetGapAt(int index) const { 377 return LGap::cast(instructions_[index]); 378 } 379 380 381 bool LChunk::IsGapAt(int index) const { 382 return instructions_[index]->IsGap(); 383 } 384 385 386 int LChunk::NearestGapPos(int index) const { 387 while (!IsGapAt(index)) index--; 388 return index; 389 } 390 391 392 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) { 393 GetGapAt(index)->GetOrCreateParallelMove( 394 LGap::START, zone())->AddMove(from, to, zone()); 395 } 396 397 398 HConstant* LChunk::LookupConstant(LConstantOperand* operand) const { 399 return HConstant::cast(graph_->LookupValue(operand->index())); 400 } 401 402 403 Representation LChunk::LookupLiteralRepresentation( 404 LConstantOperand* operand) const { 405 return graph_->LookupValue(operand->index())->representation(); 406 } 407 408 409 void LChunk::CommitDependencies(Handle<Code> code) const { 410 if (!code->is_optimized_code()) return; 411 HandleScope scope(isolate()); 412 413 for (Handle<Map> map : deprecation_dependencies_) { 414 DCHECK(!map->is_deprecated()); 415 DCHECK(map->CanBeDeprecated()); 416 Map::AddDependentCode(map, DependentCode::kTransitionGroup, code); 417 } 418 419 for (Handle<Map> map : stability_dependencies_) { 420 DCHECK(map->is_stable()); 421 DCHECK(map->CanTransition()); 422 Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code); 423 } 424 425 info_->dependencies()->Commit(code); 426 } 427 428 429 LChunk* LChunk::NewChunk(HGraph* graph) { 430 DisallowHandleAllocation no_handles; 431 DisallowHeapAllocation no_gc; 432 graph->DisallowAddingNewValues(); 433 int values = graph->GetMaximumValueID(); 434 CompilationInfo* info = graph->info(); 435 if (values > LUnallocated::kMaxVirtualRegisters) { 436 info->AbortOptimization(kNotEnoughVirtualRegistersForValues); 437 return NULL; 438 } 439 LAllocator allocator(values, graph); 440 LChunkBuilder builder(info, graph, &allocator); 441 LChunk* chunk = builder.Build(); 442 if (chunk == NULL) return NULL; 443 444 if (!allocator.Allocate(chunk)) { 445 info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc); 446 return NULL; 447 } 448 449 chunk->set_allocated_double_registers( 450 allocator.assigned_double_registers()); 451 452 return chunk; 453 } 454 455 456 Handle<Code> LChunk::Codegen() { 457 MacroAssembler assembler(info()->isolate(), NULL, 0, 458 CodeObjectRequired::kYes); 459 LOG_CODE_EVENT(info()->isolate(), 460 CodeStartLinePosInfoRecordEvent( 461 assembler.positions_recorder())); 462 // Code serializer only takes unoptimized code. 463 DCHECK(!info()->will_serialize()); 464 LCodeGen generator(this, &assembler, info()); 465 466 MarkEmptyBlocks(); 467 468 if (generator.GenerateCode()) { 469 generator.CheckEnvironmentUsage(); 470 CodeGenerator::MakeCodePrologue(info(), "optimized"); 471 Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, info()); 472 generator.FinishCode(code); 473 CommitDependencies(code); 474 code->set_is_crankshafted(true); 475 void* jit_handler_data = 476 assembler.positions_recorder()->DetachJITHandlerData(); 477 LOG_CODE_EVENT(info()->isolate(), 478 CodeEndLinePosInfoRecordEvent(*code, jit_handler_data)); 479 480 CodeGenerator::PrintCode(code, info()); 481 DCHECK(!(info()->isolate()->serializer_enabled() && 482 info()->GetMustNotHaveEagerFrame() && 483 generator.NeedsEagerFrame())); 484 return code; 485 } 486 assembler.AbortedCodeGeneration(); 487 return Handle<Code>::null(); 488 } 489 490 491 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) { 492 allocated_double_registers_ = allocated_registers; 493 BitVector* doubles = allocated_double_registers(); 494 BitVector::Iterator iterator(doubles); 495 while (!iterator.Done()) { 496 if (info()->saves_caller_doubles()) { 497 if (kDoubleSize == kPointerSize * 2) { 498 spill_slot_count_ += 2; 499 } else { 500 spill_slot_count_++; 501 } 502 } 503 iterator.Advance(); 504 } 505 } 506 507 508 void LChunkBuilderBase::Abort(BailoutReason reason) { 509 info()->AbortOptimization(reason); 510 status_ = ABORTED; 511 } 512 513 514 void LChunkBuilderBase::Retry(BailoutReason reason) { 515 info()->RetryOptimization(reason); 516 status_ = ABORTED; 517 } 518 519 520 LEnvironment* LChunkBuilderBase::CreateEnvironment( 521 HEnvironment* hydrogen_env, int* argument_index_accumulator, 522 ZoneList<HValue*>* objects_to_materialize) { 523 if (hydrogen_env == NULL) return NULL; 524 525 LEnvironment* outer = 526 CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator, 527 objects_to_materialize); 528 BailoutId ast_id = hydrogen_env->ast_id(); 529 DCHECK(!ast_id.IsNone() || 530 hydrogen_env->frame_type() != JS_FUNCTION); 531 532 int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION) 533 ? 0 534 : hydrogen_env->specials_count(); 535 536 int value_count = hydrogen_env->length() - omitted_count; 537 LEnvironment* result = 538 new(zone()) LEnvironment(hydrogen_env->closure(), 539 hydrogen_env->frame_type(), 540 ast_id, 541 hydrogen_env->parameter_count(), 542 argument_count_, 543 value_count, 544 outer, 545 hydrogen_env->entry(), 546 zone()); 547 int argument_index = *argument_index_accumulator; 548 549 // Store the environment description into the environment 550 // (with holes for nested objects) 551 for (int i = 0; i < hydrogen_env->length(); ++i) { 552 if (hydrogen_env->is_special_index(i) && 553 hydrogen_env->frame_type() != JS_FUNCTION) { 554 continue; 555 } 556 LOperand* op; 557 HValue* value = hydrogen_env->values()->at(i); 558 CHECK(!value->IsPushArguments()); // Do not deopt outgoing arguments 559 if (value->IsArgumentsObject() || value->IsCapturedObject()) { 560 op = LEnvironment::materialization_marker(); 561 } else { 562 op = UseAny(value); 563 } 564 result->AddValue(op, 565 value->representation(), 566 value->CheckFlag(HInstruction::kUint32)); 567 } 568 569 // Recursively store the nested objects into the environment 570 for (int i = 0; i < hydrogen_env->length(); ++i) { 571 if (hydrogen_env->is_special_index(i)) continue; 572 573 HValue* value = hydrogen_env->values()->at(i); 574 if (value->IsArgumentsObject() || value->IsCapturedObject()) { 575 AddObjectToMaterialize(value, objects_to_materialize, result); 576 } 577 } 578 579 if (hydrogen_env->frame_type() == JS_FUNCTION) { 580 *argument_index_accumulator = argument_index; 581 } 582 583 return result; 584 } 585 586 587 // Add an object to the supplied environment and object materialization list. 588 // 589 // Notes: 590 // 591 // We are building three lists here: 592 // 593 // 1. In the result->object_mapping_ list (added to by the 594 // LEnvironment::Add*Object methods), we store the lengths (number 595 // of fields) of the captured objects in depth-first traversal order, or 596 // in case of duplicated objects, we store the index to the duplicate object 597 // (with a tag to differentiate between captured and duplicated objects). 598 // 599 // 2. The object fields are stored in the result->values_ list 600 // (added to by the LEnvironment.AddValue method) sequentially as lists 601 // of fields with holes for nested objects (the holes will be expanded 602 // later by LCodegen::AddToTranslation according to the 603 // LEnvironment.object_mapping_ list). 604 // 605 // 3. The auxiliary objects_to_materialize array stores the hydrogen values 606 // in the same order as result->object_mapping_ list. This is used 607 // to detect duplicate values and calculate the corresponding object index. 608 void LChunkBuilderBase::AddObjectToMaterialize(HValue* value, 609 ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) { 610 int object_index = objects_to_materialize->length(); 611 // Store the hydrogen value into the de-duplication array 612 objects_to_materialize->Add(value, zone()); 613 // Find out whether we are storing a duplicated value 614 int previously_materialized_object = -1; 615 for (int prev = 0; prev < object_index; ++prev) { 616 if (objects_to_materialize->at(prev) == value) { 617 previously_materialized_object = prev; 618 break; 619 } 620 } 621 // Store the captured object length (or duplicated object index) 622 // into the environment. For duplicated objects, we stop here. 623 int length = value->OperandCount(); 624 bool is_arguments = value->IsArgumentsObject(); 625 if (previously_materialized_object >= 0) { 626 result->AddDuplicateObject(previously_materialized_object); 627 return; 628 } else { 629 result->AddNewObject(is_arguments ? length - 1 : length, is_arguments); 630 } 631 // Store the captured object's fields into the environment 632 for (int i = is_arguments ? 1 : 0; i < length; ++i) { 633 LOperand* op; 634 HValue* arg_value = value->OperandAt(i); 635 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) { 636 // Insert a hole for nested objects 637 op = LEnvironment::materialization_marker(); 638 } else { 639 DCHECK(!arg_value->IsPushArguments()); 640 // For ordinary values, tell the register allocator we need the value 641 // to be alive here 642 op = UseAny(arg_value); 643 } 644 result->AddValue(op, 645 arg_value->representation(), 646 arg_value->CheckFlag(HInstruction::kUint32)); 647 } 648 // Recursively store all the nested captured objects into the environment 649 for (int i = is_arguments ? 1 : 0; i < length; ++i) { 650 HValue* arg_value = value->OperandAt(i); 651 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) { 652 AddObjectToMaterialize(arg_value, objects_to_materialize, result); 653 } 654 } 655 } 656 657 658 LPhase::~LPhase() { 659 if (ShouldProduceTraceOutput()) { 660 isolate()->GetHTracer()->TraceLithium(name(), chunk_); 661 } 662 } 663 664 665 } // namespace internal 666 } // namespace v8 667