Lines Matching refs:Code
6 // * Redistributions of source code must retain the above copyright
33 #include "code-stubs.h"
75 // MANUAL indicates that the scope shouldn't actually generate code to set up
87 void LCodeGen::FinishCode(Handle<Code> code) {
89 code->set_stack_slots(GetStackSlotCount());
90 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
92 RegisterDependentCodeForEmbeddedMaps(code);
94 PopulateDeoptimizationData(code);
95 info()->CommitDependencies(code);
177 // Reserve space for the stack slots needed by the code.
268 // Don't emit code for basic blocks with a replacement.
335 LDeferredCode* code = deferred_[i];
337 int pos = instructions_->at(code->instruction_index())->position();
342 code->instruction_index(),
343 code->instr()->hydrogen_value()->id(),
344 code->instr()->Mnemonic());
345 __ bind(code->entry());
356 Comment(";;; Deferred code");
358 code->Generate();
366 __ jmp(code->exit());
370 // Deferred code is the last part of the instruction sequence. Mark
371 // the generated code as done unless we bailed out.
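
The matches around lines 335-371 show the deferred-code pass: slow paths queued during the main pass are emitted after the main instruction sequence, each bound at its entry label and finished with a jump back to its exit label. Below is a minimal standalone sketch of that pattern; DeferredEntry and the printf "assembly" are stand-ins invented for this sketch, not V8's LDeferredCode or Assembler.

    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for a deferred-code record: a name for the
    // instruction the slow path belongs to, plus a callback emitting it.
    struct DeferredEntry {
      std::string name;
      std::function<void()> generate;
    };

    int main() {
      std::vector<DeferredEntry> deferred;

      // Main pass: fast paths are emitted inline, slow paths are only queued.
      std::puts(";; ...main instruction sequence (fast paths, jumps to entries)...");
      deferred.push_back({"LTaggedToI#12", [] { std::puts("  ;; slow tagged->int32 conversion"); }});
      deferred.push_back({"LAllocate#34",  [] { std::puts("  ;; runtime allocation call"); }});

      // Deferred pass: runs after the main sequence, so deferred code ends up
      // as the last part of the instruction stream, as the comment says.
      std::puts(";;; Deferred code");
      for (const DeferredEntry& entry : deferred) {
        std::printf(";;; entry for %s\n", entry.name.c_str());  // __ bind(code->entry())
        entry.generate();                                       // code->Generate()
        std::puts(";;; jmp back to exit");                      // __ jmp(code->exit())
      }
      return 0;
    }
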
593 void LCodeGen::CallCodeGeneric(Handle<Code> code,
598 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code));
602 __ call(code, mode);
605 // Signal that we don't inline smi code before these stubs in the
606 // optimizing code generator.
607 if (code->kind() == Code::BINARY_OP_IC ||
608 code->kind() == Code::COMPARE_IC) {
614 void LCodeGen::CallCode(Handle<Code> code,
617 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0);
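
The comment at lines 605-608 describes a convention: after calling a BINARY_OP_IC or COMPARE_IC stub, the optimizing compiler emits a marker (such as the nop at line 1870 further down this listing) so the IC patching machinery knows no inlined smi fast path was generated. A rough sketch of the marker idea only; the constant and helper below are made up for illustration and are not V8's actual patching code.

    #include <cstdio>

    // Assumed marker for this sketch: 0x90 is the x86 nop. The point is only
    // that the patcher inspects what follows the IC call site.
    constexpr unsigned char kNoInlinedSmiCodeMarker = 0x90;

    bool HasInlinedSmiCode(unsigned char byte_after_call) {
      return byte_after_call != kNoInlinedSmiCodeMarker;
    }

    int main() {
      unsigned char after_call = kNoInlinedSmiCodeMarker;  // what the optimizing compiler emits
      std::printf("inlined smi code present: %s\n",
                  HasInlinedSmiCode(after_call) ? "yes" : "no");
      return 0;
    }
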
738 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
741 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
754 // this code is not yet in the depended code lists of the embedded maps.
758 maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
763 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
795 code->set_deoptimization_data(*data);
997 // Note: The code below even works when right contains kMinInt.
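
The note at line 997 flags kMinInt (INT32_MIN) as the value the division/modulo sequence has to survive. A quick standalone demonstration of why it is special: negating it wraps back to itself in 32-bit two's complement, and kMinInt / -1 overflows as well (idiv raises #DE on ia32), so optimized paths must special-case it or deoptimize. The snippet is illustrative only.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int32_t kMinInt = INT32_MIN;  // -2147483648
      // Negation wraps: -kMinInt is not representable, so in 32-bit two's
      // complement it comes back as kMinInt itself. Any "negate, then use the
      // positive value" sequence has to tolerate or special-case this.
      int32_t negated = static_cast<int32_t>(0u - static_cast<uint32_t>(kMinInt));
      std::printf("wrapped -kMinInt = %d\n", static_cast<int>(negated));
      // kMinInt / -1 is deliberately not evaluated here: it overflows and
      // traps, which is exactly why the generated code treats it specially.
      return 0;
    }
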
1703 Comment("Unreachable code.");
1870 __ nop(); // Signals no inlined code.
2300 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2393 // Faster code path to avoid two compares: subtract lower bound from the
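
Line 2393 refers to the classic range-check trick: subtract the lower bound and use a single unsigned comparison instead of two signed compares. A small self-contained illustration follows; the instance-type bounds are made up for the example.

    #include <cstdint>
    #include <cstdio>

    constexpr uint8_t kLower = 0x80;  // hypothetical first type in the range
    constexpr uint8_t kUpper = 0x87;  // hypothetical last type in the range

    bool InRangeTwoCompares(uint8_t type) {
      return type >= kLower && type <= kUpper;  // two compares / two branches
    }

    bool InRangeOneCompare(uint8_t type) {
      // After subtracting the lower bound, anything below the range wraps to
      // a large unsigned value, so one unsigned compare covers both ends.
      return static_cast<uint8_t>(type - kLower) <= (kUpper - kLower);
    }

    int main() {
      for (int t = 0; t < 256; ++t) {
        if (InRangeTwoCompares(static_cast<uint8_t>(t)) !=
            InRangeOneCompare(static_cast<uint8_t>(t))) {
          std::puts("mismatch");
          return 1;
        }
      }
      std::puts("both checks agree for all 8-bit values");
      return 0;
    }
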
2502 __ bind(deferred->map_check()); // Label for calculating code patching.
2510 // Check that the code size between patch label and patch sites is invariant.
2518 // before calling the deferred code.
2590 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2666 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2702 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
2802 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3072 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
3793 Handle<Code> ic =
3805 Handle<Code> ic =
3828 Handle<Code> ic =
3852 // No cell in ebx for construct type feedback in optimized code
4031 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4268 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
5170 // Use the fast case closure allocation code that allocates in new
5330 // instruction for patching the code here.
5421 // the deferred code.
5438 // code, but if there were none, record the entrypoint here.
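
Several of these sites (EnsureSpaceForLazyDeopt at line 598, and the comments around lines 5330, 5421 and 5438) deal with keeping enough bytes after the previous lazy-deopt point so that a call to the deoptimizer can later be patched in without clobbering it. Below is a toy sketch of that padding idea using a plain byte buffer; kPatchSize and ToyAssembler are assumptions for this sketch (the real size comes from Deoptimizer::patch_size()), not V8's Assembler API.

    #include <cstdio>
    #include <vector>

    constexpr int kPatchSize = 5;         // assumed size of the patched-in call
    constexpr unsigned char kNop = 0x90;  // x86 nop

    struct ToyAssembler {
      std::vector<unsigned char> buffer;
      int last_lazy_deopt_pc = 0;

      int pc_offset() const { return static_cast<int>(buffer.size()); }
      void nop() { buffer.push_back(kNop); }

      // Pad with nops until the current position is at least `space_needed`
      // bytes past the previous lazy-deopt point, then record a new one.
      void EnsureSpaceForLazyDeopt(int space_needed) {
        while (pc_offset() - last_lazy_deopt_pc < space_needed) nop();
        last_lazy_deopt_pc = pc_offset();
      }
    };

    int main() {
      ToyAssembler masm;
      masm.buffer.push_back(0xCC);  // pretend some code was already emitted
      masm.EnsureSpaceForLazyDeopt(kPatchSize);
      std::printf("padded to %d bytes before the lazy-deopt site\n", masm.pc_offset());
      return 0;
    }
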