      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_
     18 #define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_
     19 
#include <jni.h>
#include <stdint.h>

#include <vector>

#include "base/macros.h"
#include "base/mutex.h"
#include "gc/allocator_type.h"
#include "invoke_type.h"
#include "jvalue.h"
     28 
     29 namespace art {
     30 
     31 namespace mirror {
     32   class Class;
     33   class Array;
     34   class ArtField;
     35   class ArtMethod;
     36   class Object;
     37   class String;
     38 }  // namespace mirror
     39 
     40 class ScopedObjectAccessAlreadyRunnable;
     41 class Thread;
     42 
// Resolves |type_idx| via |method|'s DexCache to the Class to instantiate for a new-instance.
// On the fast path returns the class; sets *slow_path when allocation must be deferred to the
// runtime slow path (NOTE(review): presumably for unresolved, inaccessible, or not-yet
// initialized classes — confirm in entrypoint_utils-inl.h).
// kAccessCheck: perform an access check when verification/compiler could not prove access.
template <const bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                                            mirror::ArtMethod* method,
                                                            Thread* self, bool* slow_path)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     48 
// Checks that |klass| is initialized before a fast-path instance allocation. Returns the class
// to allocate, or sets *slow_path to route the allocation through the runtime slow path
// (NOTE(review): presumably when class initialization still has to run — confirm in -inl.h).
// TODO: Fix no thread safety analysis when annotalysis is smarter.
ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                                               Thread* self, bool* slow_path)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     53 
     54 // Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
     55 // cannot be resolved, throw an error. If it can, use it to create an instance.
     56 // When verification/compiler hasn't been able to verify access, optionally perform an access
     57 // check.
     58 template <bool kAccessCheck, bool kInstrumented>
     59 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
     60                                                                 mirror::ArtMethod* method,
     61                                                                 Thread* self,
     62                                                                 gc::AllocatorType allocator_type)
     63     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     64 
     65 // Given the context of a calling Method and a resolved class, create an instance.
     66 template <bool kInstrumented>
     67 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
     68                                                                         mirror::ArtMethod* method,
     69                                                                         Thread* self,
     70                                                                         gc::AllocatorType allocator_type)
     71     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     72 
     73 // Given the context of a calling Method and an initialized class, create an instance.
     74 template <bool kInstrumented>
     75 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
     76                                                                            mirror::ArtMethod* method,
     77                                                                            Thread* self,
     78                                                                            gc::AllocatorType allocator_type)
     79     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     80 
     81 
// Resolves |type_idx| to the array Class for a new-array of |component_count| elements.
// Sets *slow_path when the fast-path allocation cannot proceed (NOTE(review): presumably for
// an unresolved class or an invalid |component_count| — confirm in entrypoint_utils-inl.h).
// kAccessCheck: perform an access check when verification/compiler could not prove access.
template <bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                                           mirror::ArtMethod* method,
                                                           int32_t component_count,
                                                           bool* slow_path)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
     88 
     89 // Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
     90 // it cannot be resolved, throw an error. If it can, use it to create an array.
     91 // When verification/compiler hasn't been able to verify access, optionally perform an access
     92 // check.
     93 template <bool kAccessCheck, bool kInstrumented>
     94 ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
     95                                                               mirror::ArtMethod* method,
     96                                                               int32_t component_count,
     97                                                               Thread* self,
     98                                                               gc::AllocatorType allocator_type)
     99     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    100 
// As AllocArrayFromCode above, but for use when the array class |klass| is already resolved.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                                      mirror::ArtMethod* method,
                                                                      int32_t component_count,
                                                                      Thread* self,
                                                                      gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    108 
// Slow-path array allocation: resolves |type_idx| relative to |method|, optionally performs
// an access check (|access_check| is a runtime flag here, not a template parameter), and
// allocates an array of |component_count| elements using |allocator_type|.
extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method,
                                                 int32_t component_count, Thread* self,
                                                 bool access_check,
                                                 gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Same as above (NOTE(review): presumably for use while allocation instrumentation is
// enabled — confirm with callers).
extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
                                                             mirror::ArtMethod* method,
                                                             int32_t component_count, Thread* self,
                                                             bool access_check,
                                                             gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    121 
// Type of find field operation for fast and slow case.
// Each enumerator encodes three properties of the access: instance vs. static field,
// object (reference) vs. primitive value, and read vs. write. Consumed by
// FindFieldFromCode and FindFieldFast below.
enum FindFieldType {
  InstanceObjectRead,
  InstanceObjectWrite,
  InstancePrimitiveRead,
  InstancePrimitiveWrite,
  StaticObjectRead,
  StaticObjectWrite,
  StaticPrimitiveRead,
  StaticPrimitiveWrite,
};
    133 
// Slow-path field resolution: resolves |field_idx| relative to |referrer|. Unlike
// FindFieldFast below, this path may initialize classes and throw exceptions.
// |expected_size| is the expected field size in bytes (NOTE(review): presumably validated
// against the resolved field — confirm in entrypoint_utils-inl.h).
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    138 
// Slow-path method resolution for an invoke of kind |type| (see invoke_type.h). Unlike
// FindMethodFast below, this path may throw. |this_object| and |referrer| are passed by
// pointer, so the callee may update them (NOTE(review): confirm exact contract in -inl.h).
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                                    mirror::Object** this_object,
                                                    mirror::ArtMethod** referrer, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    144 
    145 // Fast path field resolution that can't initialize classes or throw exceptions.
    146 static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
    147                                               mirror::ArtMethod* referrer,
    148                                               FindFieldType type, size_t expected_size)
    149     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    150 
    151 // Fast path method resolution that can't throw exceptions.
    152 static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
    153                                                 mirror::Object* this_object,
    154                                                 mirror::ArtMethod* referrer,
    155                                                 bool access_check, InvokeType type)
    156     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    157 
// Resolves |type_idx| relative to |referrer| to a Class, optionally verifying access
// (|verify_access|) and, when |can_run_clinit| allows, running class initialization
// (NOTE(review): behavior inferred from parameter names — confirm in entrypoint_utils-inl.h).
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    163 
// Throws a StackOverflowError on |self|.
extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    165 
// Resolves |string_idx| relative to |referrer| to a String (NOTE(review): presumably via the
// referrer's dex cache, as with the type/field resolvers above — confirm in -inl.h).
static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
                                                    uint32_t string_idx)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    169 
    170 // TODO: annotalysis disabled as monitor semantics are maintained in Java code.
    171 static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
    172     NO_THREAD_SAFETY_ANALYSIS;
    173 
// Checks the reference |o| on behalf of |self| (NOTE(review): presumably validates a
// reference result returned to managed code from native code — confirm with callers).
void CheckReferenceResult(mirror::Object* o, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    176 
// Checks |thread| for a pending suspend request (NOTE(review): presumably suspends the thread
// when one is pending — confirm in entrypoint_utils-inl.h).
static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    178 
// Invokes the invocation handler for a proxy method call. |shorty| describes the method's
// signature, |rcvr_jobj| is the receiver, |interface_art_method_jobj| identifies the interface
// method being invoked, and |args| holds the call's arguments. Returns the handler's result
// as a JValue.
JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty,
                                    jobject rcvr_jobj, jobject interface_art_method_jobj,
                                    std::vector<jvalue>& args)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
    183 
    184 // Entry point for deoptimization.
    185 extern "C" void art_quick_deoptimize();
    186 static inline uintptr_t GetQuickDeoptimizationEntryPoint() {
    187   return reinterpret_cast<uintptr_t>(art_quick_deoptimize);
    188 }
    189 
    190 // Return address of instrumentation stub.
    191 extern "C" void art_quick_instrumentation_entry(void*);
    192 static inline void* GetQuickInstrumentationEntryPoint() {
    193   return reinterpret_cast<void*>(art_quick_instrumentation_entry);
    194 }
    195 
    196 // The return_pc of instrumentation exit stub.
    197 extern "C" void art_quick_instrumentation_exit();
    198 static inline uintptr_t GetQuickInstrumentationExitPc() {
    199   return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit);
    200 }
    201 
    202 #if defined(ART_USE_PORTABLE_COMPILER)
    203 extern "C" void art_portable_to_interpreter_bridge(mirror::ArtMethod*);
    204 static inline const void* GetPortableToInterpreterBridge() {
    205   return reinterpret_cast<void*>(art_portable_to_interpreter_bridge);
    206 }
    207 
    208 static inline const void* GetPortableToQuickBridge() {
    209   // TODO: portable to quick bridge. Bug: 8196384
    210   return GetPortableToInterpreterBridge();
    211 }
    212 #endif
    213 
    214 extern "C" void art_quick_to_interpreter_bridge(mirror::ArtMethod*);
    215 static inline const void* GetQuickToInterpreterBridge() {
    216   return reinterpret_cast<void*>(art_quick_to_interpreter_bridge);
    217 }
    218 
    219 #if defined(ART_USE_PORTABLE_COMPILER)
    220 static inline const void* GetQuickToPortableBridge() {
    221   // TODO: quick to portable bridge. Bug: 8196384
    222   return GetQuickToInterpreterBridge();
    223 }
    224 
    225 extern "C" void art_portable_proxy_invoke_handler();
    226 static inline const void* GetPortableProxyInvokeHandler() {
    227   return reinterpret_cast<void*>(art_portable_proxy_invoke_handler);
    228 }
    229 #endif
    230 
    231 extern "C" void art_quick_proxy_invoke_handler();
    232 static inline const void* GetQuickProxyInvokeHandler() {
    233   return reinterpret_cast<void*>(art_quick_proxy_invoke_handler);
    234 }
    235 
    236 extern "C" void* art_jni_dlsym_lookup_stub(JNIEnv*, jobject);
    237 static inline void* GetJniDlsymLookupStub() {
    238   return reinterpret_cast<void*>(art_jni_dlsym_lookup_stub);
    239 }
    240 
// Converts the floating-point value |f| to INT_TYPE (NOTE(review): rounding/saturation
// semantics are defined by the implementation in entrypoint_utils-inl.h — confirm there).
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f);
    243 
    244 }  // namespace art
    245 
    246 #endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_
    247