
Lines Matching refs:Code

6 //     * Redistributions of source code must retain the above copyright
120 Object* code = heap->CreateCode(
122 Code::ComputeFlags(Code::STUB),
123 Handle<Code>())->ToObjectChecked();
124 CHECK(code->IsCode());
126 HeapObject* obj = HeapObject::cast(code);
131 CHECK_EQ(code, found);
136 Code::ComputeFlags(Code::STUB),
137 Handle<Code>())->ToObjectChecked();
142 CHECK(not_right != code);
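The fragments above come from a heap test that allocates a stub Code object and checks that Heap::FindCodeObject maps every inner address back to it. A minimal sketch of how that check might be assembled, reconstructed around the fragments above (the Assembler/CodeDesc scaffolding and the helper name are assumptions, not the file's exact text):

static void CheckFindCodeObject(Isolate* isolate) {  // hypothetical helper name
  // Assemble a trivial code object (a single nop) so there is something
  // for FindCodeObject to locate.
  Assembler assm(isolate, NULL, 0);
  assm.nop();
  CodeDesc desc;
  assm.GetCode(&desc);

  Heap* heap = isolate->heap();
  Object* code = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  CHECK(code->IsCode());

  // Every inner address of the object must resolve back to the same code.
  HeapObject* obj = HeapObject::cast(code);
  Address obj_addr = obj->address();
  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = heap->FindCodeObject(obj_addr + i);
    CHECK_EQ(code, found);
  }

  // A second, distinct code object must not resolve to the first one.
  Object* copy = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  HeapObject* obj_copy = HeapObject::cast(copy);
  Object* not_right =
      heap->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
  CHECK(not_right != code);
}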
1034 // If we do not flush code this test is invalid.
1049 // This compile will add the code to the compilation cache.
1061 // The code will survive at least two GCs.
1083 // If we do not flush code this test is invalid.
1098 // This compile will add the code to the compilation cache.
1110 // The code will survive at least two GCs.
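The repeated comments above belong to the code-flushing tests: a script is compiled once, which places its code in the compilation cache, and the test then asserts the function stays compiled across a couple of full GCs before any aging has happened. A rough sketch of that step, assuming the CcTest/CompileRun helpers and the raw_source/foo_name handles that such a test would set up beforehand:

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // Check that the function was actually compiled.
  Object* func_value = isolate->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled());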
1151 // If we do not flush code this test is invalid.
1171 // Perform one initial GC to enable code flushing.
1174 // This compile will add the code to the compilation cache.
1196 // Bump the code age so that flushing is triggered while the function
1200 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1201 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
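The two MakeOlder calls are the aging step: the test pretends several marking cycles have passed so the unoptimized code becomes old enough to be flushed. The surrounding loop typically looks roughly like this (the kAgingThreshold constant is an assumption about the test's local value):

  // Bump the code age so that flushing is triggered while the functions
  // are still code flushing candidates.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }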
1205 // code flushing candidates. Then kill one of the functions. Finally
1219 // If we do not flush code this test is invalid.
1235 // This compile will add the code to the compilation cache.
1247 // The code will survive at least two GCs.
1252 // Bump the code age so that flushing is triggered.
1255 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1259 // code flushing candidate.
1264 // is running so that incremental marking aborts and code flushing is
1272 // Force optimization now that code flushing is disabled.
1689 // optimized code.
1735 // optimized code.
1906 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
1909 // code below.
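This loop drives incremental marking by hand until the function's code object has been marked black, or marking stops, while discarding pending GC requests so no collection fires before the code under test runs. A sketch of the full loop, assuming the marking pointer comes from the heap's incremental_marking():

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests, otherwise we will get a GC when we
    // enter the code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }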
2033 // JS code running to trigger the interrupt, so we explicitly finalize
2041 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2084 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2742 f->shared()->code()->type_feedback_info())->type_feedback_cells());
2757 static Code* FindFirstIC(Code* code, Code::Kind kind) {
2762 for (RelocIterator it(code, mask); !it.done(); it.next()) {
2764 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
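FindFirstIC walks the relocation information of a code object and returns the first inline-cache stub of the requested kind; the ic_before/ic_after checks below use it to observe whether GC cleared a LOAD_IC. A sketch of the complete helper reconstructed around the fragments above (the exact relocation-mode mask is an assumption):

static Code* FindFirstIC(Code* code, Code::Kind kind) {
  // Visit all code-target relocation entries in the given code object.
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
    // Return the first target that is an inline cache of the requested kind.
    if (target->is_inline_cache_stub() && target->kind() == kind) {
      return target;
    }
  }
  return NULL;
}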
2787 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2793 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2819 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2827 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2860 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2868 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2984 // the pre-monomorphic stub. These code objects are on yet another page.
2994 // Third we fill up the last page of the code space so that it does not get
3006 // Fourth is the tricky part. Make sure the code containing the CallIC is
3016 Handle<Object> call_code(call->code(), isolate);
3037 // Perform one initial GC to enable code flushing.
3040 // Prepare several closures that are all eligible for code flushing
3042 // optimized code object is directly reachable through a handle so
3044 Handle<Code> code;
3073 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3076 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3080 // code flushing candidates. Then optimize one function. Finally
3081 // finish the GC to complete code flushing.
3086 // Unoptimized code is missing and the deoptimizer will go ballistic.
3099 // Perform one initial GC to enable code flushing.
3102 // Prepare an optimized closure that the optimized code map will get
3103 // populated. Then age the unoptimized code to trigger code flushing
3104 // but make sure the optimized code is unreachable.
3121 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3127 // Simulate incremental marking so that unoptimized code is flushed
3128 // even though it still is cached in the optimized code map.
3132 // Make a new closure that will get code installed from the code map.
3133 // Unoptimized code is missing and the deoptimizer will go ballistic.
3158 // Perform one initial GC to enable code flushing.
3161 // Prepare a shared function info eligible for code flushing for which
3162 // the unoptimized code will be replaced during optimization.
3179 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3185 // Prepare a shared function info eligible for code flushing that will
3200 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3206 // Simulate incremental marking and collect code flushing candidates.
3208 CHECK(shared1->code()->gc_metadata() != NULL);
3210 // Optimize function and make sure the unoptimized code is replaced.
3219 CHECK(shared1->code()->gc_metadata() == NULL);
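While the code flusher has a SharedFunctionInfo's code enqueued as a flushing candidate, it links the candidate through the code object's gc_metadata field, which is why the test checks that field before and after optimization replaces the unoptimized code. A heavily hedged sketch of that sequence (SimulateIncrementalMarking, the %OptimizeFunctionOnNextCall script, and the function name f are assumptions):

  // Simulate incremental marking so shared1's code is collected as a
  // code flushing candidate; candidates are linked via gc_metadata.
  SimulateIncrementalMarking();
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize the function so its unoptimized code is replaced, then finish
  // the GC; the replaced code must have been evicted from the candidate list.
  CompileRun("%OptimizeFunctionOnNextCall(f); f(1);");
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(shared1->code()->gc_metadata() == NULL);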
3319 // Perform one initial GC to enable code flushing.
3322 // Ensure the code ends up on an evacuation candidate.
3325 // Prepare an unoptimized function that is eligible for code flushing.
3342 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3349 // candidate for code flushing. The shared function info however will not be
3375 // Perform one initial GC to enable code flushing.
3378 // Ensure the code ends up on an evacuation candidate.
3381 // Prepare an unoptimized function that is eligible for code flushing.
3398 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3405 // candidate for code flushing. The shared function info however will not be
3410 // Now enable the debugger which in turn will disable code flushing.