/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "base/quasi_atomic.h"
#include "callee_save_frame.h"
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"

namespace art {
static constexpr bool kUseTlabFastPath = true;

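// Shared helper behind the object-allocation entrypoints generated below.
// Template parameters:
//   kInitialized   - the class is known to be initialized, so no
//                    initialization check is needed.
//   kFinalize      - the class has not been checked yet and may be
//                    finalizable (the "with checks" flavor).
//   kInstrumented  - instrumented entrypoints are installed (e.g. allocation
//                    tracking), so the inlined fast path is skipped.
//   allocator_type - the GC allocator used on the slow path.
// For the non-instrumented TLAB allocator, an initialized, non-finalizable
// class whose instance fits in the remaining TLAB space is allocated by
// bumping the thread-local buffer pointer and storing the class pointer;
// everything else falls back to the AllocObjectFromCode* helpers.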
template <bool kInitialized,
          bool kFinalize,
          bool kInstrumented,
          gc::AllocatorType allocator_type>
static ALWAYS_INLINE inline mirror::Object* artAllocObjectFromCode(
    mirror::Class* klass,
    Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK(klass != nullptr);
  if (kUseTlabFastPath && !kInstrumented && allocator_type == gc::kAllocatorTypeTLAB) {
    if (kInitialized || klass->IsInitialized()) {
      if (!kFinalize || !klass->IsFinalizable()) {
        size_t byte_count = klass->GetObjectSize();
        byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment);
        mirror::Object* obj;
        if (LIKELY(byte_count < self->TlabSize())) {
          obj = self->AllocTlab(byte_count);
          DCHECK(obj != nullptr) << "AllocTlab can't fail";
          obj->SetClass(klass);
          if (kUseBakerReadBarrier) {
            obj->AssertReadBarrierState();
          }
          QuasiAtomic::ThreadFenceForConstructor();
          return obj;
        }
      }
    }
  }
  if (kInitialized) {
    return AllocObjectFromCodeInitialized<kInstrumented>(klass, self, allocator_type);
  } else if (!kFinalize) {
    return AllocObjectFromCodeResolved<kInstrumented>(klass, self, allocator_type);
  } else {
    return AllocObjectFromCode<kInstrumented>(klass, self, allocator_type);
  }
}

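// Generates the extern "C" allocation entrypoints (object, array, and string
// variants) for one allocator type and one instrumentation setting. The
// instrumented flavor differs only in its name suffix and in the
// instrumented_bool it passes through as the kInstrumented template argument.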
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCodeWithChecks##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, true, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<true, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<instrumented_bool>(klass, component_count, self, \
                                                       allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \
                                                               offset, high, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \
                                                               offset, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \
    mirror::String* string, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \
                                                            handle_string, 0, allocator_type); \
}

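// Expands the above for both the instrumented and non-instrumented variants of
// a given allocator.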
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

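// Instantiate the C++ entrypoints for every supported allocator type.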
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)

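// Declares the art_quick_alloc_* stubs for one suffix (they are implemented
// elsewhere, typically in architecture-specific assembly) and defines
// SetQuickAllocEntryPoints##suffix, which points the QuickEntryPoints table at
// either the plain or the _instrumented stubs for that allocator.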
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix##_instrumented; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix##_instrumented; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix##_instrumented; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}

// Generate the entrypoint functions.
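// Note: the __APPLE__/__LP64__ guard below presumably tracks stub availability;
// the art_quick_* assembly entrypoints are not provided for 64-bit Mac host builds.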
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif

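// Current configuration recorded by the setters below; it is applied to a
// QuickEntryPoints table only when ResetQuickAllocEntryPoints() runs.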
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

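// These two setters only record the desired configuration; callers must follow
// up with ResetQuickAllocEntryPoints() for the change to take effect.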
void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}

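// Installs the entrypoints matching the recorded allocator and instrumentation
// state into *qpoints. For the RegionTLAB configuration, read-barrier-aware
// entrypoints are only needed while marking is in progress; otherwise the
// plain TLAB entrypoints suffice.
//
// A minimal usage sketch (hypothetical call site; the real callers live in the
// runtime/instrumentation code, and `qpoints` here is just an illustrative
// local):
//
//   QuickEntryPoints qpoints;
//   SetQuickAllocEntryPointsAllocator(gc::kAllocatorTypeRegionTLAB);
//   SetQuickAllocEntryPointsInstrumented(false);
//   ResetQuickAllocEntryPoints(&qpoints, /*is_marking=*/ true);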
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints, bool is_marking) {
#if !defined(__APPLE__) || !defined(__LP64__)
  switch (entry_points_allocator) {
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegion: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegionTLAB: {
      CHECK(kMovingCollector);
      if (is_marking) {
        SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
      } else {
        // Not marking means we need no read barriers and can just use the normal TLAB case.
        SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      }
      return;
    }
    default:
      break;
  }
#else
  UNUSED(qpoints);
  UNUSED(is_marking);
#endif
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

}  // namespace art