// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/register-configuration.h"
#include "src/globals.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {

namespace {

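// REGISTER_COUNT expands each register in a list to "1 +", so applying it to
// an ALLOCATABLE_*_REGISTERS list and appending 0 yields the number of
// registers in the list as a compile-time constant.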
#define REGISTER_COUNT(R) 1 +
static const int kMaxAllocatableGeneralRegisterCount =
    ALLOCATABLE_GENERAL_REGISTERS(REGISTER_COUNT)0;
static const int kMaxAllocatableDoubleRegisterCount =
    ALLOCATABLE_DOUBLE_REGISTERS(REGISTER_COUNT)0;

static const int kAllocatableGeneralCodes[] = {
#define REGISTER_CODE(R) kRegCode_##R,
    ALLOCATABLE_GENERAL_REGISTERS(REGISTER_CODE)};
#undef REGISTER_CODE

#define REGISTER_CODE(R) kDoubleCode_##R,
static const int kAllocatableDoubleCodes[] = {
    ALLOCATABLE_DOUBLE_REGISTERS(REGISTER_CODE)};
#if V8_TARGET_ARCH_ARM
static const int kAllocatableNoVFP32DoubleCodes[] = {
    ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(REGISTER_CODE)};
#endif  // V8_TARGET_ARCH_ARM
#undef REGISTER_CODE

static const char* const kGeneralRegisterNames[] = {
#define REGISTER_NAME(R) #R,
    GENERAL_REGISTERS(REGISTER_NAME)
#undef REGISTER_NAME
};

static const char* const kFloatRegisterNames[] = {
#define REGISTER_NAME(R) #R,
    FLOAT_REGISTERS(REGISTER_NAME)
#undef REGISTER_NAME
};

static const char* const kDoubleRegisterNames[] = {
#define REGISTER_NAME(R) #R,
    DOUBLE_REGISTERS(REGISTER_NAME)
#undef REGISTER_NAME
};

static const char* const kSimd128RegisterNames[] = {
#define REGISTER_NAME(R) #R,
    SIMD128_REGISTERS(REGISTER_NAME)
#undef REGISTER_NAME
};

STATIC_ASSERT(RegisterConfiguration::kMaxGeneralRegisters >=
              Register::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              FloatRegister::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              DoubleRegister::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxFPRegisters >=
              Simd128Register::kNumRegisters);

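// On ARM, the number of allocatable double registers depends on whether the
// CPU provides 32 D-registers (the VFP32DREGS feature); every other target
// always exposes the full allocatable set.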
static int get_num_allocatable_double_registers() {
  return
#if V8_TARGET_ARCH_IA32
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_X64
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_ARM
      CpuFeatures::IsSupported(VFP32DREGS)
          ? kMaxAllocatableDoubleRegisterCount
          : (ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(REGISTER_COUNT) 0);
#elif V8_TARGET_ARCH_ARM64
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_MIPS
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_MIPS64
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_PPC
      kMaxAllocatableDoubleRegisterCount;
#elif V8_TARGET_ARCH_S390
      kMaxAllocatableDoubleRegisterCount;
#else
#error Unsupported target architecture.
#endif
}

static const int* get_allocatable_double_codes() {
  return
#if V8_TARGET_ARCH_ARM
      CpuFeatures::IsSupported(VFP32DREGS) ? kAllocatableDoubleCodes
                                           : kAllocatableNoVFP32DoubleCodes;
#else
      kAllocatableDoubleCodes;
#endif
}

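// The default configuration for the current target: the full set of
// allocatable registers defined by the architecture macros above.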
class ArchDefaultRegisterConfiguration : public RegisterConfiguration {
 public:
  ArchDefaultRegisterConfiguration()
      : RegisterConfiguration(
            Register::kNumRegisters, DoubleRegister::kNumRegisters,
            kMaxAllocatableGeneralRegisterCount,
            get_num_allocatable_double_registers(), kAllocatableGeneralCodes,
            get_allocatable_double_codes(),
            kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE,
            kGeneralRegisterNames, kFloatRegisterNames, kDoubleRegisterNames,
            kSimd128RegisterNames) {}
};

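// The configurations below are constructed lazily on first use; each
// initializer placement-news its configuration into storage managed by
// base::LazyInstance.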
struct RegisterConfigurationInitializer {
  static void Construct(void* config) {
    new (config) ArchDefaultRegisterConfiguration();
  }
};

static base::LazyInstance<ArchDefaultRegisterConfiguration,
                          RegisterConfigurationInitializer>::type
    kDefaultRegisterConfiguration = LAZY_INSTANCE_INITIALIZER;

// Allocatable registers with the masking register removed.
class ArchDefaultPoisoningRegisterConfiguration : public RegisterConfiguration {
 public:
  ArchDefaultPoisoningRegisterConfiguration()
      : RegisterConfiguration(
            Register::kNumRegisters, DoubleRegister::kNumRegisters,
            kMaxAllocatableGeneralRegisterCount - 1,
            get_num_allocatable_double_registers(),
            InitializeGeneralRegisterCodes(), get_allocatable_double_codes(),
            kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE,
            kGeneralRegisterNames, kFloatRegisterNames, kDoubleRegisterNames,
            kSimd128RegisterNames) {}

 private:
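  // Copies kAllocatableGeneralCodes into allocatable_general_codes_, dropping
  // the speculation poison register.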
  static const int* InitializeGeneralRegisterCodes() {
    int filtered_index = 0;
    for (int i = 0; i < kMaxAllocatableGeneralRegisterCount; ++i) {
      if (kAllocatableGeneralCodes[i] != kSpeculationPoisonRegister.code()) {
        allocatable_general_codes_[filtered_index] =
            kAllocatableGeneralCodes[i];
        filtered_index++;
      }
    }
    DCHECK_EQ(filtered_index, kMaxAllocatableGeneralRegisterCount - 1);
    return allocatable_general_codes_;
  }

  static int
      allocatable_general_codes_[kMaxAllocatableGeneralRegisterCount - 1];
};

int ArchDefaultPoisoningRegisterConfiguration::allocatable_general_codes_
    [kMaxAllocatableGeneralRegisterCount - 1];

struct PoisoningRegisterConfigurationInitializer {
  static void Construct(void* config) {
    new (config) ArchDefaultPoisoningRegisterConfiguration();
  }
};

static base::LazyInstance<ArchDefaultPoisoningRegisterConfiguration,
                          PoisoningRegisterConfigurationInitializer>::type
    kDefaultPoisoningRegisterConfiguration = LAZY_INSTANCE_INITIALIZER;

#if defined(V8_TARGET_ARCH_IA32) && defined(V8_EMBEDDED_BUILTINS)
// Allocatable registers with the root register removed.
// TODO(v8:6666): Once all builtins have been migrated, we could remove this
// configuration and remove kRootRegister from ALLOCATABLE_GENERAL_REGISTERS
// instead.
class ArchPreserveRootIA32RegisterConfiguration : public RegisterConfiguration {
 public:
  ArchPreserveRootIA32RegisterConfiguration()
      : RegisterConfiguration(
            Register::kNumRegisters, DoubleRegister::kNumRegisters,
            kMaxAllocatableGeneralRegisterCount - 1,
            get_num_allocatable_double_registers(),
            InitializeGeneralRegisterCodes(), get_allocatable_double_codes(),
            kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE,
            kGeneralRegisterNames, kFloatRegisterNames, kDoubleRegisterNames,
            kSimd128RegisterNames) {}

 private:
  static const int* InitializeGeneralRegisterCodes() {
    int filtered_index = 0;
    for (int i = 0; i < kMaxAllocatableGeneralRegisterCount; ++i) {
      if (kAllocatableGeneralCodes[i] != kRootRegister.code()) {
        allocatable_general_codes_[filtered_index] =
            kAllocatableGeneralCodes[i];
        filtered_index++;
      }
    }
    DCHECK_EQ(filtered_index, kMaxAllocatableGeneralRegisterCount - 1);
    return allocatable_general_codes_;
  }

  static int
      allocatable_general_codes_[kMaxAllocatableGeneralRegisterCount - 1];
};

int ArchPreserveRootIA32RegisterConfiguration::allocatable_general_codes_
    [kMaxAllocatableGeneralRegisterCount - 1];

struct PreserveRootIA32RegisterConfigurationInitializer {
  static void Construct(void* config) {
    new (config) ArchPreserveRootIA32RegisterConfiguration();
  }
};

static base::LazyInstance<ArchPreserveRootIA32RegisterConfiguration,
                          PreserveRootIA32RegisterConfigurationInitializer>::
    type kPreserveRootIA32RegisterConfiguration = LAZY_INSTANCE_INITIALIZER;
#endif  // defined(V8_TARGET_ARCH_IA32) && defined(V8_EMBEDDED_BUILTINS)

// RestrictedRegisterConfiguration uses a subset of the allocatable general
// registers that the architecture supports, which results in generated code
// that uses fewer registers. Currently it is only used by the RecordWrite
// code stub.
class RestrictedRegisterConfiguration : public RegisterConfiguration {
 public:
  RestrictedRegisterConfiguration(
      int num_allocatable_general_registers,
      std::unique_ptr<int[]> allocatable_general_register_codes,
      std::unique_ptr<char const* []> allocatable_general_register_names)
      : RegisterConfiguration(
            Register::kNumRegisters, DoubleRegister::kNumRegisters,
            num_allocatable_general_registers,
            get_num_allocatable_double_registers(),
            allocatable_general_register_codes.get(),
            get_allocatable_double_codes(),
            kSimpleFPAliasing ? AliasingKind::OVERLAP : AliasingKind::COMBINE,
            kGeneralRegisterNames, kFloatRegisterNames, kDoubleRegisterNames,
            kSimd128RegisterNames),
        allocatable_general_register_codes_(
            std::move(allocatable_general_register_codes)),
        allocatable_general_register_names_(
            std::move(allocatable_general_register_names)) {
    for (int i = 0; i < num_allocatable_general_registers; ++i) {
      DCHECK(
          IsAllocatableGeneralRegister(allocatable_general_register_codes_[i]));
    }
  }

  bool IsAllocatableGeneralRegister(int code) {
    for (int i = 0; i < kMaxAllocatableGeneralRegisterCount; ++i) {
      if (code == kAllocatableGeneralCodes[i]) {
        return true;
      }
    }
    return false;
  }

 private:
  std::unique_ptr<int[]> allocatable_general_register_codes_;
  std::unique_ptr<char const* []> allocatable_general_register_names_;
};

}  // namespace

const RegisterConfiguration* RegisterConfiguration::Default() {
  return &kDefaultRegisterConfiguration.Get();
}

const RegisterConfiguration* RegisterConfiguration::Poisoning() {
  return &kDefaultPoisoningRegisterConfiguration.Get();
}

#if defined(V8_TARGET_ARCH_IA32) && defined(V8_EMBEDDED_BUILTINS)
const RegisterConfiguration* RegisterConfiguration::PreserveRootIA32() {
  return &kPreserveRootIA32RegisterConfiguration.Get();
}
#endif  // defined(V8_TARGET_ARCH_IA32) && defined(V8_EMBEDDED_BUILTINS)

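// Returns a configuration whose allocatable general registers are restricted
// to the subset present in |registers|. The caller owns the returned object.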
const RegisterConfiguration* RegisterConfiguration::RestrictGeneralRegisters(
    RegList registers) {
  int num = NumRegs(registers);
  std::unique_ptr<int[]> codes{new int[num]};
  std::unique_ptr<char const* []> names { new char const*[num] };
  int counter = 0;
  for (int i = 0; i < Default()->num_allocatable_general_registers(); ++i) {
    auto reg = Register::from_code(Default()->GetAllocatableGeneralCode(i));
    if (reg.bit() & registers) {
      DCHECK(counter < num);
      codes[counter] = reg.code();
      names[counter] = Default()->GetGeneralRegisterName(i);
      counter++;
    }
  }

  return new RestrictedRegisterConfiguration(num, std::move(codes),
                                             std::move(names));
}
RegisterConfiguration::RegisterConfiguration(
    int num_general_registers, int num_double_registers,
    int num_allocatable_general_registers, int num_allocatable_double_registers,
    const int* allocatable_general_codes, const int* allocatable_double_codes,
    AliasingKind fp_aliasing_kind, const char* const* general_register_names,
    const char* const* float_register_names,
    const char* const* double_register_names,
    const char* const* simd128_register_names)
    : num_general_registers_(num_general_registers),
      num_float_registers_(0),
      num_double_registers_(num_double_registers),
      num_simd128_registers_(0),
      num_allocatable_general_registers_(num_allocatable_general_registers),
      num_allocatable_float_registers_(0),
      num_allocatable_double_registers_(num_allocatable_double_registers),
      num_allocatable_simd128_registers_(0),
      allocatable_general_codes_mask_(0),
      allocatable_float_codes_mask_(0),
      allocatable_double_codes_mask_(0),
      allocatable_simd128_codes_mask_(0),
      allocatable_general_codes_(allocatable_general_codes),
      allocatable_double_codes_(allocatable_double_codes),
      fp_aliasing_kind_(fp_aliasing_kind),
      general_register_names_(general_register_names),
      float_register_names_(float_register_names),
      double_register_names_(double_register_names),
      simd128_register_names_(simd128_register_names) {
  DCHECK_LE(num_general_registers_,
            RegisterConfiguration::kMaxGeneralRegisters);
  DCHECK_LE(num_double_registers_, RegisterConfiguration::kMaxFPRegisters);
  for (int i = 0; i < num_allocatable_general_registers_; ++i) {
    allocatable_general_codes_mask_ |= (1 << allocatable_general_codes_[i]);
  }
  for (int i = 0; i < num_allocatable_double_registers_; ++i) {
    allocatable_double_codes_mask_ |= (1 << allocatable_double_codes_[i]);
  }

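  // With COMBINE aliasing (e.g. on ARM), each double register overlaps two
  // consecutive float registers and each SIMD128 register overlaps two
  // consecutive double registers, so derive the float and SIMD128 sets from
  // the allocatable double codes.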
  if (fp_aliasing_kind_ == COMBINE) {
    num_float_registers_ = num_double_registers_ * 2 <= kMaxFPRegisters
                               ? num_double_registers_ * 2
                               : kMaxFPRegisters;
    num_allocatable_float_registers_ = 0;
    for (int i = 0; i < num_allocatable_double_registers_; i++) {
      int base_code = allocatable_double_codes_[i] * 2;
      if (base_code >= kMaxFPRegisters) continue;
      allocatable_float_codes_[num_allocatable_float_registers_++] = base_code;
      allocatable_float_codes_[num_allocatable_float_registers_++] =
          base_code + 1;
      allocatable_float_codes_mask_ |= (0x3 << base_code);
    }
    num_simd128_registers_ = num_double_registers_ / 2;
    num_allocatable_simd128_registers_ = 0;
    int last_simd128_code = allocatable_double_codes_[0] / 2;
    for (int i = 1; i < num_allocatable_double_registers_; i++) {
      int next_simd128_code = allocatable_double_codes_[i] / 2;
      // This scheme assumes allocatable_double_codes_ are strictly increasing.
      DCHECK_GE(next_simd128_code, last_simd128_code);
      if (last_simd128_code == next_simd128_code) {
        allocatable_simd128_codes_[num_allocatable_simd128_registers_++] =
            next_simd128_code;
        allocatable_simd128_codes_mask_ |= (0x1 << next_simd128_code);
      }
      last_simd128_code = next_simd128_code;
    }
  } else {
    DCHECK(fp_aliasing_kind_ == OVERLAP);
    num_float_registers_ = num_simd128_registers_ = num_double_registers_;
    num_allocatable_float_registers_ = num_allocatable_simd128_registers_ =
        num_allocatable_double_registers_;
    for (int i = 0; i < num_allocatable_float_registers_; ++i) {
      allocatable_float_codes_[i] = allocatable_simd128_codes_[i] =
          allocatable_double_codes_[i];
    }
    allocatable_float_codes_mask_ = allocatable_simd128_codes_mask_ =
        allocatable_double_codes_mask_;
  }
}

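// Codes at or above num_general_registers_ denote special registers, whose
// names are provided by the Assembler.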
const char* RegisterConfiguration::GetGeneralOrSpecialRegisterName(
    int code) const {
  if (code < num_general_registers_) return GetGeneralRegisterName(code);
  return Assembler::GetSpecialRegisterName(code);
}

// Assert that kFloat32, kFloat64, and kSimd128 are consecutive values.
STATIC_ASSERT(static_cast<int>(MachineRepresentation::kSimd128) ==
              static_cast<int>(MachineRepresentation::kFloat64) + 1);
STATIC_ASSERT(static_cast<int>(MachineRepresentation::kFloat64) ==
              static_cast<int>(MachineRepresentation::kFloat32) + 1);

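// Returns how many registers of |other_rep| alias register |index| of |rep|,
// writing the lowest aliasing index to |alias_base_index|. Only valid with
// COMBINE aliasing.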
int RegisterConfiguration::GetAliases(MachineRepresentation rep, int index,
                                      MachineRepresentation other_rep,
                                      int* alias_base_index) const {
  DCHECK(fp_aliasing_kind_ == COMBINE);
  DCHECK(IsFloatingPoint(rep) && IsFloatingPoint(other_rep));
  if (rep == other_rep) {
    *alias_base_index = index;
    return 1;
  }
  int rep_int = static_cast<int>(rep);
  int other_rep_int = static_cast<int>(other_rep);
  if (rep_int > other_rep_int) {
    int shift = rep_int - other_rep_int;
    int base_index = index << shift;
    if (base_index >= kMaxFPRegisters) {
      // Alias indices would be out of FP register range.
      return 0;
    }
    *alias_base_index = base_index;
    return 1 << shift;
  }
  int shift = other_rep_int - rep_int;
  *alias_base_index = index >> shift;
  return 1;
}

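// Returns true if register |index| of |rep| overlaps register |other_index|
// of |other_rep| under COMBINE aliasing.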
bool RegisterConfiguration::AreAliases(MachineRepresentation rep, int index,
                                       MachineRepresentation other_rep,
                                       int other_index) const {
  DCHECK(fp_aliasing_kind_ == COMBINE);
  DCHECK(IsFloatingPoint(rep) && IsFloatingPoint(other_rep));
  if (rep == other_rep) {
    return index == other_index;
  }
  int rep_int = static_cast<int>(rep);
  int other_rep_int = static_cast<int>(other_rep);
  if (rep_int > other_rep_int) {
    int shift = rep_int - other_rep_int;
    return index == other_index >> shift;
  }
  int shift = other_rep_int - rep_int;
  return index >> shift == other_index;
}

}  // namespace internal
}  // namespace v8