1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 2 // All Rights Reserved. 3 // 4 // Redistribution and use in source and binary forms, with or without 5 // modification, are permitted provided that the following conditions are 6 // met: 7 // 8 // - Redistributions of source code must retain the above copyright notice, 9 // this list of conditions and the following disclaimer. 10 // 11 // - Redistribution in binary form must reproduce the above copyright 12 // notice, this list of conditions and the following disclaimer in the 13 // documentation and/or other materials provided with the distribution. 14 // 15 // - Neither the name of Sun Microsystems or the names of contributors may 16 // be used to endorse or promote products derived from this software without 17 // specific prior written permission. 18 // 19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS 20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR 23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 31 // The original source code covered by the above license above has been 32 // modified significantly by Google Inc. 33 // Copyright 2012 the V8 project authors. All rights reserved. 
34 35 #include "src/assembler.h" 36 37 #include <cmath> 38 #include "src/api.h" 39 #include "src/base/lazy-instance.h" 40 #include "src/builtins.h" 41 #include "src/counters.h" 42 #include "src/cpu.h" 43 #include "src/cpu-profiler.h" 44 #include "src/debug.h" 45 #include "src/deoptimizer.h" 46 #include "src/execution.h" 47 #include "src/ic.h" 48 #include "src/isolate-inl.h" 49 #include "src/jsregexp.h" 50 #include "src/platform.h" 51 #include "src/regexp-macro-assembler.h" 52 #include "src/regexp-stack.h" 53 #include "src/runtime.h" 54 #include "src/serialize.h" 55 #include "src/store-buffer-inl.h" 56 #include "src/stub-cache.h" 57 #include "src/token.h" 58 59 #if V8_TARGET_ARCH_IA32 60 #include "src/ia32/assembler-ia32-inl.h" 61 #elif V8_TARGET_ARCH_X64 62 #include "src/x64/assembler-x64-inl.h" 63 #elif V8_TARGET_ARCH_ARM64 64 #include "src/arm64/assembler-arm64-inl.h" 65 #elif V8_TARGET_ARCH_ARM 66 #include "src/arm/assembler-arm-inl.h" 67 #elif V8_TARGET_ARCH_MIPS 68 #include "src/mips/assembler-mips-inl.h" 69 #elif V8_TARGET_ARCH_X87 70 #include "src/x87/assembler-x87-inl.h" 71 #else 72 #error "Unknown architecture." 73 #endif 74 75 // Include native regexp-macro-assembler. 76 #ifndef V8_INTERPRETED_REGEXP 77 #if V8_TARGET_ARCH_IA32 78 #include "src/ia32/regexp-macro-assembler-ia32.h" 79 #elif V8_TARGET_ARCH_X64 80 #include "src/x64/regexp-macro-assembler-x64.h" 81 #elif V8_TARGET_ARCH_ARM64 82 #include "src/arm64/regexp-macro-assembler-arm64.h" 83 #elif V8_TARGET_ARCH_ARM 84 #include "src/arm/regexp-macro-assembler-arm.h" 85 #elif V8_TARGET_ARCH_MIPS 86 #include "src/mips/regexp-macro-assembler-mips.h" 87 #elif V8_TARGET_ARCH_X87 88 #include "src/x87/regexp-macro-assembler-x87.h" 89 #else // Unknown architecture. 90 #error "Unknown architecture." 91 #endif // Target architecture. 
92 #endif // V8_INTERPRETED_REGEXP 93 94 namespace v8 { 95 namespace internal { 96 97 // ----------------------------------------------------------------------------- 98 // Common double constants. 99 100 struct DoubleConstant BASE_EMBEDDED { 101 double min_int; 102 double one_half; 103 double minus_one_half; 104 double minus_zero; 105 double zero; 106 double uint8_max_value; 107 double negative_infinity; 108 double canonical_non_hole_nan; 109 double the_hole_nan; 110 double uint32_bias; 111 }; 112 113 static DoubleConstant double_constants; 114 115 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; 116 117 static bool math_exp_data_initialized = false; 118 static Mutex* math_exp_data_mutex = NULL; 119 static double* math_exp_constants_array = NULL; 120 static double* math_exp_log_table_array = NULL; 121 122 // ----------------------------------------------------------------------------- 123 // Implementation of AssemblerBase 124 125 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size) 126 : isolate_(isolate), 127 jit_cookie_(0), 128 enabled_cpu_features_(0), 129 emit_debug_code_(FLAG_debug_code), 130 predictable_code_size_(false), 131 // We may use the assembler without an isolate. 132 serializer_enabled_(isolate && isolate->serializer_enabled()) { 133 if (FLAG_mask_constants_with_cookie && isolate != NULL) { 134 jit_cookie_ = isolate->random_number_generator()->NextInt(); 135 } 136 if (buffer == NULL) { 137 // Do our own buffer management. 138 if (buffer_size <= kMinimalBufferSize) { 139 buffer_size = kMinimalBufferSize; 140 if (isolate->assembler_spare_buffer() != NULL) { 141 buffer = isolate->assembler_spare_buffer(); 142 isolate->set_assembler_spare_buffer(NULL); 143 } 144 } 145 if (buffer == NULL) buffer = NewArray<byte>(buffer_size); 146 own_buffer_ = true; 147 } else { 148 // Use externally provided buffer instead. 
149 ASSERT(buffer_size > 0); 150 own_buffer_ = false; 151 } 152 buffer_ = static_cast<byte*>(buffer); 153 buffer_size_ = buffer_size; 154 155 pc_ = buffer_; 156 } 157 158 159 AssemblerBase::~AssemblerBase() { 160 if (own_buffer_) { 161 if (isolate() != NULL && 162 isolate()->assembler_spare_buffer() == NULL && 163 buffer_size_ == kMinimalBufferSize) { 164 isolate()->set_assembler_spare_buffer(buffer_); 165 } else { 166 DeleteArray(buffer_); 167 } 168 } 169 } 170 171 172 // ----------------------------------------------------------------------------- 173 // Implementation of PredictableCodeSizeScope 174 175 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler, 176 int expected_size) 177 : assembler_(assembler), 178 expected_size_(expected_size), 179 start_offset_(assembler->pc_offset()), 180 old_value_(assembler->predictable_code_size()) { 181 assembler_->set_predictable_code_size(true); 182 } 183 184 185 PredictableCodeSizeScope::~PredictableCodeSizeScope() { 186 // TODO(svenpanne) Remove the 'if' when everything works. 187 if (expected_size_ >= 0) { 188 CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_); 189 } 190 assembler_->set_predictable_code_size(old_value_); 191 } 192 193 194 // ----------------------------------------------------------------------------- 195 // Implementation of CpuFeatureScope 196 197 #ifdef DEBUG 198 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f) 199 : assembler_(assembler) { 200 ASSERT(CpuFeatures::IsSupported(f)); 201 old_enabled_ = assembler_->enabled_cpu_features(); 202 uint64_t mask = static_cast<uint64_t>(1) << f; 203 // TODO(svenpanne) This special case below doesn't belong here! 204 #if V8_TARGET_ARCH_ARM 205 // ARMv7 is implied by VFP3. 
206 if (f == VFP3) { 207 mask |= static_cast<uint64_t>(1) << ARMv7; 208 } 209 #endif 210 assembler_->set_enabled_cpu_features(old_enabled_ | mask); 211 } 212 213 214 CpuFeatureScope::~CpuFeatureScope() { 215 assembler_->set_enabled_cpu_features(old_enabled_); 216 } 217 #endif 218 219 220 bool CpuFeatures::initialized_ = false; 221 unsigned CpuFeatures::supported_ = 0; 222 unsigned CpuFeatures::cache_line_size_ = 0; 223 224 225 // ----------------------------------------------------------------------------- 226 // Implementation of Label 227 228 int Label::pos() const { 229 if (pos_ < 0) return -pos_ - 1; 230 if (pos_ > 0) return pos_ - 1; 231 UNREACHABLE(); 232 return 0; 233 } 234 235 236 // ----------------------------------------------------------------------------- 237 // Implementation of RelocInfoWriter and RelocIterator 238 // 239 // Relocation information is written backwards in memory, from high addresses 240 // towards low addresses, byte by byte. Therefore, in the encodings listed 241 // below, the first byte listed it at the highest address, and successive 242 // bytes in the record are at progressively lower addresses. 243 // 244 // Encoding 245 // 246 // The most common modes are given single-byte encodings. Also, it is 247 // easy to identify the type of reloc info and skip unwanted modes in 248 // an iteration. 249 // 250 // The encoding relies on the fact that there are fewer than 14 251 // different relocation modes using standard non-compact encoding. 252 // 253 // The first byte of a relocation record has a tag in its low 2 bits: 254 // Here are the record schemes, depending on the low tag and optional higher 255 // tags. 
256 // 257 // Low tag: 258 // 00: embedded_object: [6-bit pc delta] 00 259 // 260 // 01: code_target: [6-bit pc delta] 01 261 // 262 // 10: short_data_record: [6-bit pc delta] 10 followed by 263 // [6-bit data delta] [2-bit data type tag] 264 // 265 // 11: long_record [2-bit high tag][4 bit middle_tag] 11 266 // followed by variable data depending on type. 267 // 268 // 2-bit data type tags, used in short_data_record and data_jump long_record: 269 // code_target_with_id: 00 270 // position: 01 271 // statement_position: 10 272 // comment: 11 (not used in short_data_record) 273 // 274 // Long record format: 275 // 4-bit middle_tag: 276 // 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2 277 // (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM, 278 // and is between 0000 and 1100) 279 // The format is: 280 // 00 [4 bit middle_tag] 11 followed by 281 // 00 [6 bit pc delta] 282 // 283 // 1101: constant or veneer pool. Used only on ARM and ARM64 for now. 284 // The format is: [2-bit sub-type] 1101 11 285 // signed int (size of the pool). 286 // The 2-bit sub-types are: 287 // 00: constant pool 288 // 01: veneer pool 289 // 1110: long_data_record 290 // The format is: [2-bit data_type_tag] 1110 11 291 // signed intptr_t, lowest byte written first 292 // (except data_type code_target_with_id, which 293 // is followed by a signed int, not intptr_t.) 294 // 295 // 1111: long_pc_jump 296 // The format is: 297 // pc-jump: 00 1111 11, 298 // 00 [6 bits pc delta] 299 // or 300 // pc-jump (variable length): 301 // 01 1111 11, 302 // [7 bits data] 0 303 // ... 304 // [7 bits data] 1 305 // (Bits 6..31 of pc delta, with leading zeroes 306 // dropped, and last non-zero chunk tagged with 1.) 
#ifdef DEBUG
const int kMaxStandardNonCompactModes = 14;
#endif

// Bit layout of the compact reloc-info encoding (see the comment block above
// for the full record formats).
const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
const int kExtraTagBits = 4;
const int kLocatableTypeTagBits = 2;
const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;

// Low (2-bit) tags.
const int kEmbeddedObjectTag = 0;
const int kCodeTargetTag = 1;
const int kLocatableTag = 2;
const int kDefaultTag = 3;

const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;

// A pc delta that fits in kSmallPCDeltaBits needs no separate pc-jump record.
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;

// Variable-length pc-jump chunk layout: 7 data bits + 1 last-chunk tag bit.
const int kVariableLengthPCJumpTopTag = 1;
const int kChunkBits = 7;
const int kChunkMask = (1 << kChunkBits) - 1;
const int kLastChunkTagBits = 1;
const int kLastChunkTagMask = 1;
const int kLastChunkTag = 1;


const int kDataJumpExtraTag = kPCJumpExtraTag - 1;

// 2-bit data type tags, used in short_data_record and data_jump long_record.
const int kCodeWithIdTag = 0;
const int kNonstatementPositionTag = 1;
const int kStatementPositionTag = 2;
const int kCommentTag = 3;

const int kPoolExtraTag = kPCJumpExtraTag - 2;
const int kConstPoolTag = 0;
const int kVeneerPoolTag = 1;


// If pc_delta does not fit in kSmallPCDeltaBits, emits a variable-length
// pc-jump record for the high bits and returns the remaining low bits;
// otherwise returns pc_delta unchanged.  Note all writes go backwards via
// *--pos_.
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
  // Otherwise write a variable length PC jump for the bits that do
  // not fit in the kSmallPCDeltaBits bits.
  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
  WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
  ASSERT(pc_jump > 0);
  // Write kChunkBits size chunks of the pc_jump.
  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
    byte b = pc_jump & kChunkMask;
    *--pos_ = b << kLastChunkTagBits;
  }
  // Tag the last chunk so it can be identified.
  *pos_ = *pos_ | kLastChunkTag;
  // Return the remaining kSmallPCDeltaBits of the pc_delta.
  return pc_delta & kSmallPCDeltaMask;
}


// Emits a single-byte record: [6-bit pc delta][2-bit tag].
void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
  // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  *--pos_ = pc_delta << kTagBits | tag;
}


// Emits one byte: [6-bit signed data delta][2-bit data type tag].
void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
}


// Emits the long-record header byte: [2-bit top][4-bit extra][11 default tag].
void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
  *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
                             extra_tag << kTagBits |
                             kDefaultTag);
}


// Emits a long record header followed by one byte of pc delta.
void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  WriteExtraTag(extra_tag, 0);
  *--pos_ = pc_delta;
}


// Emits a data-jump record followed by a full little-endian int of data.
void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
  WriteExtraTag(kDataJumpExtraTag, top_tag);
  for (int i = 0; i < kIntSize; i++) {
    *--pos_ = static_cast<byte>(data_delta);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data_delta = data_delta >> kBitsPerByte;
  }
}


// Emits a pool record (const or veneer, chosen by pool_type) followed by a
// full little-endian int holding the pool size.
void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
  WriteExtraTag(kPoolExtraTag, pool_type);
  for (int i = 0; i < kIntSize; i++) {
    *--pos_ = static_cast<byte>(data);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data = data >> kBitsPerByte;
  }
}


// Emits a data-jump record followed by a full little-endian intptr_t of data.
void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
  WriteExtraTag(kDataJumpExtraTag, top_tag);
  for (int i = 0; i < kIntptrSize; i++) {
    *--pos_ = static_cast<byte>(data_delta);
    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
    data_delta = data_delta >> kBitsPerByte;
  }
}


// Serializes one RelocInfo entry, choosing the most compact encoding for its
// mode.  pc is delta-encoded (unsigned) against the previously written entry;
// ids and positions are delta-encoded (signed) against last_id_ /
// last_position_.
void RelocInfoWriter::Write(const RelocInfo* rinfo) {
#ifdef DEBUG
  byte* begin_pos = pos_;
#endif
  ASSERT(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
  ASSERT(rinfo->pc() - last_pc_ >= 0);
  ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
         <= kMaxStandardNonCompactModes);
  // Use unsigned delta-encoding for pc.
  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
  RelocInfo::Mode rmode = rinfo->rmode();

  // The two most common modes are given small tags, and usually fit in a byte.
  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
    WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
  } else if (rmode == RelocInfo::CODE_TARGET) {
    WriteTaggedPC(pc_delta, kCodeTargetTag);
    ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
    // Use signed delta-encoding for id.
    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
    int id_delta = static_cast<int>(rinfo->data()) - last_id_;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(id_delta, kSmallDataBits)) {
      WriteTaggedPC(pc_delta, kLocatableTag);
      WriteTaggedData(id_delta, kCodeWithIdTag);
    } else {
      // Otherwise, use costly encoding.
      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
      WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
    }
    last_id_ = static_cast<int>(rinfo->data());
  } else if (RelocInfo::IsPosition(rmode)) {
    // Use signed delta-encoding for position.
    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
    int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
    int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
                                                      : kStatementPositionTag;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(pos_delta, kSmallDataBits)) {
      WriteTaggedPC(pc_delta, kLocatableTag);
      WriteTaggedData(pos_delta, pos_type_tag);
    } else {
      // Otherwise, use costly encoding.
      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
      WriteExtraTaggedIntData(pos_delta, pos_type_tag);
    }
    last_position_ = static_cast<int>(rinfo->data());
  } else if (RelocInfo::IsComment(rmode)) {
    // Comments are normally not generated, so we use the costly encoding.
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedData(rinfo->data(), kCommentTag);
    ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
  } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
                             RelocInfo::IsConstPool(rmode) ? kConstPoolTag
                                                           : kVeneerPoolTag);
  } else {
    ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
    int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
    // For all other modes we simply use the mode as the extra tag.
    // None of these modes need a data component.
    ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
    WriteExtraTaggedPC(pc_delta, saved_mode);
  }
  last_pc_ = rinfo->pc();
#ifdef DEBUG
  ASSERT(begin_pos - pos_ <= kMaxSize);
#endif
}


// The Advance*/Read* helpers below are the mirror images of the Write*
// methods above: they consume the reloc stream backwards via *--pos_.

// Steps to the next record byte and extracts its low 2-bit tag.
inline int RelocIterator::AdvanceGetTag() {
  return *--pos_ & kTagMask;
}


// Extracts the 4-bit extra tag from the current long-record header byte.
inline int RelocIterator::GetExtraTag() {
  return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
}


// Extracts the 2-bit top tag from the current long-record header byte.
inline int RelocIterator::GetTopTag() {
  return *pos_ >> (kTagBits + kExtraTagBits);
}


// Applies the 6-bit pc delta embedded in the current tagged byte.
inline void RelocIterator::ReadTaggedPC() {
  rinfo_.pc_ += *pos_ >> kTagBits;
}


// Applies a full-byte pc delta (long record form).
inline void RelocIterator::AdvanceReadPC() {
  rinfo_.pc_ += *--pos_;
}


// Reads a little-endian int id delta and accumulates it into last_id_.
void RelocIterator::AdvanceReadId() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_id_ += x;
  rinfo_.data_ = last_id_;
}


// Reads a little-endian int of pool data (absolute, not delta-encoded).
void RelocIterator::AdvanceReadPoolData() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  rinfo_.data_ = x;
}


// Reads a little-endian int position delta and accumulates it into
// last_position_.
void RelocIterator::AdvanceReadPosition() {
  int x = 0;
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  }
  last_position_ += x;
  rinfo_.data_ = last_position_;
}


// Reads a full little-endian intptr_t of data (comment records).
void RelocIterator::AdvanceReadData() {
  intptr_t x = 0;
  for (int i = 0; i < kIntptrSize; i++) {
    x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
  }
  rinfo_.data_ = x;
}


void RelocIterator::AdvanceReadVariableLengthPCJump() {
  // Read the 32-kSmallPCDeltaBits most significant bits of the
  // pc jump in kChunkBits bit chunks and shift them into place.
  // Stop when the last chunk is encountered.
  uint32_t pc_jump = 0;
  for (int i = 0; i < kIntSize; i++) {
    byte pc_jump_part = *--pos_;
    pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
    if ((pc_jump_part & kLastChunkTagMask) == 1) break;
  }
  // The least significant kSmallPCDeltaBits bits will be added
  // later.
  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
}


// Extracts the 2-bit data type tag from the current short-data byte.
inline int RelocIterator::GetLocatableTypeTag() {
  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
}


// Decodes a 6-bit signed id delta from the current byte into last_id_.
inline void RelocIterator::ReadTaggedId() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
  last_id_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_id_;
}


// Decodes a 6-bit signed position delta from the current byte into
// last_position_.
inline void RelocIterator::ReadTaggedPosition() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
  last_position_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_position_;
}


// Maps a position data-type tag back to its RelocInfo mode.
static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
  ASSERT(tag == kNonstatementPositionTag ||
         tag == kStatementPositionTag);
  return (tag == kNonstatementPositionTag) ?
         RelocInfo::POSITION :
         RelocInfo::STATEMENT_POSITION;
}


void RelocIterator::next() {
  ASSERT(!done());
  // Basically, do the opposite of RelocInfoWriter::Write.
  // Reading of data is as far as possible avoided for unwanted modes,
  // but we must always update the pc.
  //
  // We exit this loop by returning when we find a mode we want.
  while (pos_ > end_) {
    int tag = AdvanceGetTag();
    if (tag == kEmbeddedObjectTag) {
      ReadTaggedPC();
      if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
    } else if (tag == kCodeTargetTag) {
      ReadTaggedPC();
      if (SetMode(RelocInfo::CODE_TARGET)) return;
    } else if (tag == kLocatableTag) {
      ReadTaggedPC();
      Advance();
      int locatable_tag = GetLocatableTypeTag();
      if (locatable_tag == kCodeWithIdTag) {
        if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
          ReadTaggedId();
          return;
        }
      } else {
        // Compact encoding is never used for comments,
        // so it must be a position.
        ASSERT(locatable_tag == kNonstatementPositionTag ||
               locatable_tag == kStatementPositionTag);
        if (mode_mask_ & RelocInfo::kPositionMask) {
          ReadTaggedPosition();
          if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
        }
      }
    } else {
      ASSERT(tag == kDefaultTag);
      int extra_tag = GetExtraTag();
      if (extra_tag == kPCJumpExtraTag) {
        if (GetTopTag() == kVariableLengthPCJumpTopTag) {
          AdvanceReadVariableLengthPCJump();
        } else {
          AdvanceReadPC();
        }
      } else if (extra_tag == kDataJumpExtraTag) {
        int locatable_tag = GetTopTag();
        if (locatable_tag == kCodeWithIdTag) {
          if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
            AdvanceReadId();
            return;
          }
          // Unwanted mode: still skip over the id payload.
          Advance(kIntSize);
        } else if (locatable_tag != kCommentTag) {
          ASSERT(locatable_tag == kNonstatementPositionTag ||
                 locatable_tag == kStatementPositionTag);
          if (mode_mask_ & RelocInfo::kPositionMask) {
            AdvanceReadPosition();
            if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
          } else {
            Advance(kIntSize);
          }
        } else {
          ASSERT(locatable_tag == kCommentTag);
          if (SetMode(RelocInfo::COMMENT)) {
            AdvanceReadData();
            return;
          }
          Advance(kIntptrSize);
        }
      } else if (extra_tag == kPoolExtraTag) {
        int pool_type = GetTopTag();
        ASSERT(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
        RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
          RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
        if (SetMode(rmode)) {
          AdvanceReadPoolData();
          return;
        }
        Advance(kIntSize);
      } else {
        // Standard non-compact mode: the extra tag IS the (rebased) mode.
        AdvanceReadPC();
        int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
        if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
      }
    }
  }
  // Past the end of the written stream: synthesize the code-age sequence
  // entry, if any, before declaring iteration done.
  if (code_age_sequence_ != NULL) {
    byte* old_code_age_sequence = code_age_sequence_;
    code_age_sequence_ = NULL;
    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
      rinfo_.data_ = 0;
      rinfo_.pc_ = old_code_age_sequence;
      return;
    }
  }
  done_ = true;
}


// Iterates the reloc info attached to a Code object, filtered by mode_mask.
RelocIterator::RelocIterator(Code* code, int mode_mask) {
  rinfo_.host_ = code;
  rinfo_.pc_ = code->instruction_start();
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = code->relocation_start() + code->relocation_size();
  end_ = code->relocation_start();
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  byte* sequence = code->FindCodeAgeSequence();
  // We get the isolate from the map, because at serialization time
  // the code pointer has been cloned and isn't really in heap space.
  Isolate* isolate = code->map()->GetIsolate();
  if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
    code_age_sequence_ = sequence;
  } else {
    code_age_sequence_ = NULL;
  }
  if (mode_mask_ == 0) pos_ = end_;
  next();
}


// Iterates the reloc info in a not-yet-committed CodeDesc buffer.
RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
  rinfo_.pc_ = desc.buffer;
  rinfo_.data_ = 0;
  // Relocation info is read backwards.
  pos_ = desc.buffer + desc.buffer_size;
  end_ = pos_ - desc.reloc_size;
  done_ = false;
  mode_mask_ = mode_mask;
  last_id_ = 0;
  last_position_ = 0;
  code_age_sequence_ = NULL;
  if (mode_mask_ == 0) pos_ = end_;
  next();
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo


#ifdef DEBUG
// Debug check: returns true if the CodeDesc contains any reloc entries that
// would need patching after code generation.
bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
  // Ensure there are no code targets or embedded objects present in the
  // deoptimization entries, they would require relocation after code
  // generation.
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::kApplyMask;
  RelocIterator it(desc, mode_mask);
  return !it.done();
}
#endif


#ifdef ENABLE_DISASSEMBLER
// Returns a human-readable name for a reloc mode, for disassembler output.
const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
  switch (rmode) {
    case RelocInfo::NONE32:
      return "no reloc 32";
    case RelocInfo::NONE64:
      return "no reloc 64";
    case RelocInfo::EMBEDDED_OBJECT:
      return "embedded object";
    case RelocInfo::CONSTRUCT_CALL:
      return "code target (js construct call)";
    case RelocInfo::DEBUG_BREAK:
      return "debug break";
    case RelocInfo::CODE_TARGET:
      return "code target";
    case RelocInfo::CODE_TARGET_WITH_ID:
      return "code target with id";
    case RelocInfo::CELL:
      return "property cell";
    case RelocInfo::RUNTIME_ENTRY:
      return "runtime entry";
    case RelocInfo::JS_RETURN:
      return "js return";
    case RelocInfo::COMMENT:
      return "comment";
    case RelocInfo::POSITION:
      return "position";
    case RelocInfo::STATEMENT_POSITION:
      return "statement position";
    case RelocInfo::EXTERNAL_REFERENCE:
      return "external reference";
    case RelocInfo::INTERNAL_REFERENCE:
      return "internal reference";
    case RelocInfo::CONST_POOL:
      return "constant pool";
    case RelocInfo::VENEER_POOL:
      return "veneer pool";
    case RelocInfo::DEBUG_BREAK_SLOT:
      return "debug break slot";
    case RelocInfo::CODE_AGE_SEQUENCE:
      return "code_age_sequence";
    case RelocInfo::NUMBER_OF_MODES:
      UNREACHABLE();
      return "number_of_modes";
  }
  return "unknown relocation type";
}


// Prints one reloc entry, with mode-specific detail, to |out|.
void RelocInfo::Print(Isolate* isolate, FILE* out) {
  PrintF(out, "%p  %s", pc_, RelocModeName(rmode_));
  if (IsComment(rmode_)) {
    PrintF(out, "  (%s)", reinterpret_cast<char*>(data_));
  } else if (rmode_ == EMBEDDED_OBJECT) {
    PrintF(out, "  (");
    target_object()->ShortPrint(out);
    PrintF(out, ")");
  } else if (rmode_ == EXTERNAL_REFERENCE) {
    ExternalReferenceEncoder ref_encoder(isolate);
    PrintF(out, " (%s)  (%p)",
           ref_encoder.NameOfAddress(target_reference()),
           target_reference());
  } else if (IsCodeTarget(rmode_)) {
    Code* code = Code::GetCodeFromTargetAddress(target_address());
    PrintF(out, " (%s)  (%p)", Code::Kind2String(code->kind()),
           target_address());
    if (rmode_ == CODE_TARGET_WITH_ID) {
      PrintF(out, " (id=%d)", static_cast<int>(data_));
    }
  } else if (IsPosition(rmode_)) {
    PrintF(out, "  (%" V8_PTR_PREFIX "d)", data());
  } else if (IsRuntimeEntry(rmode_) &&
             isolate->deoptimizer_data() != NULL) {
    // Deoptimization bailouts are stored as runtime entries.
841 int id = Deoptimizer::GetDeoptimizationId( 842 isolate, target_address(), Deoptimizer::EAGER); 843 if (id != Deoptimizer::kNotDeoptimizationEntry) { 844 PrintF(out, " (deoptimization bailout %d)", id); 845 } 846 } 847 848 PrintF(out, "\n"); 849 } 850 #endif // ENABLE_DISASSEMBLER 851 852 853 #ifdef VERIFY_HEAP 854 void RelocInfo::Verify(Isolate* isolate) { 855 switch (rmode_) { 856 case EMBEDDED_OBJECT: 857 Object::VerifyPointer(target_object()); 858 break; 859 case CELL: 860 Object::VerifyPointer(target_cell()); 861 break; 862 case DEBUG_BREAK: 863 case CONSTRUCT_CALL: 864 case CODE_TARGET_WITH_ID: 865 case CODE_TARGET: { 866 // convert inline target address to code object 867 Address addr = target_address(); 868 CHECK(addr != NULL); 869 // Check that we can find the right code object. 870 Code* code = Code::GetCodeFromTargetAddress(addr); 871 Object* found = isolate->FindCodeObject(addr); 872 CHECK(found->IsCode()); 873 CHECK(code->address() == HeapObject::cast(found)->address()); 874 break; 875 } 876 case RUNTIME_ENTRY: 877 case JS_RETURN: 878 case COMMENT: 879 case POSITION: 880 case STATEMENT_POSITION: 881 case EXTERNAL_REFERENCE: 882 case INTERNAL_REFERENCE: 883 case CONST_POOL: 884 case VENEER_POOL: 885 case DEBUG_BREAK_SLOT: 886 case NONE32: 887 case NONE64: 888 break; 889 case NUMBER_OF_MODES: 890 UNREACHABLE(); 891 break; 892 case CODE_AGE_SEQUENCE: 893 ASSERT(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode()); 894 break; 895 } 896 } 897 #endif // VERIFY_HEAP 898 899 900 // ----------------------------------------------------------------------------- 901 // Implementation of ExternalReference 902 903 void ExternalReference::SetUp() { 904 double_constants.min_int = kMinInt; 905 double_constants.one_half = 0.5; 906 double_constants.minus_one_half = -0.5; 907 double_constants.minus_zero = -0.0; 908 double_constants.uint8_max_value = 255; 909 double_constants.zero = 0.0; 910 double_constants.canonical_non_hole_nan = OS::nan_value(); 911 
double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64); 912 double_constants.negative_infinity = -V8_INFINITY; 913 double_constants.uint32_bias = 914 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1; 915 916 math_exp_data_mutex = new Mutex(); 917 } 918 919 920 void ExternalReference::InitializeMathExpData() { 921 // Early return? 922 if (math_exp_data_initialized) return; 923 924 LockGuard<Mutex> lock_guard(math_exp_data_mutex); 925 if (!math_exp_data_initialized) { 926 // If this is changed, generated code must be adapted too. 927 const int kTableSizeBits = 11; 928 const int kTableSize = 1 << kTableSizeBits; 929 const double kTableSizeDouble = static_cast<double>(kTableSize); 930 931 math_exp_constants_array = new double[9]; 932 // Input values smaller than this always return 0. 933 math_exp_constants_array[0] = -708.39641853226408; 934 // Input values larger than this always return +Infinity. 935 math_exp_constants_array[1] = 709.78271289338397; 936 math_exp_constants_array[2] = V8_INFINITY; 937 // The rest is black magic. Do not attempt to understand it. 
It is 938 // loosely based on the "expd" function published at: 939 // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html 940 const double constant3 = (1 << kTableSizeBits) / std::log(2.0); 941 math_exp_constants_array[3] = constant3; 942 math_exp_constants_array[4] = 943 static_cast<double>(static_cast<int64_t>(3) << 51); 944 math_exp_constants_array[5] = 1 / constant3; 945 math_exp_constants_array[6] = 3.0000000027955394; 946 math_exp_constants_array[7] = 0.16666666685227835; 947 math_exp_constants_array[8] = 1; 948 949 math_exp_log_table_array = new double[kTableSize]; 950 for (int i = 0; i < kTableSize; i++) { 951 double value = std::pow(2, i / kTableSizeDouble); 952 uint64_t bits = BitCast<uint64_t, double>(value); 953 bits &= (static_cast<uint64_t>(1) << 52) - 1; 954 double mantissa = BitCast<double, uint64_t>(bits); 955 math_exp_log_table_array[i] = mantissa; 956 } 957 958 math_exp_data_initialized = true; 959 } 960 } 961 962 963 void ExternalReference::TearDownMathExpData() { 964 delete[] math_exp_constants_array; 965 delete[] math_exp_log_table_array; 966 delete math_exp_data_mutex; 967 } 968 969 970 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate) 971 : address_(Redirect(isolate, Builtins::c_function_address(id))) {} 972 973 974 ExternalReference::ExternalReference( 975 ApiFunction* fun, 976 Type type = ExternalReference::BUILTIN_CALL, 977 Isolate* isolate = NULL) 978 : address_(Redirect(isolate, fun->address(), type)) {} 979 980 981 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate) 982 : address_(isolate->builtins()->builtin_address(name)) {} 983 984 985 ExternalReference::ExternalReference(Runtime::FunctionId id, 986 Isolate* isolate) 987 : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {} 988 989 990 ExternalReference::ExternalReference(const Runtime::Function* f, 991 Isolate* isolate) 992 : address_(Redirect(isolate, f->entry)) {} 993 994 995 
// Reference to the isolate itself, for passing to C functions called from
// generated code.
ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
  return ExternalReference(isolate);
}


ExternalReference::ExternalReference(const IC_Utility& ic_utility,
                                     Isolate* isolate)
    : address_(Redirect(isolate, ic_utility.address())) {}


ExternalReference::ExternalReference(StatsCounter* counter)
    : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}


ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
    : address_(isolate->get_address_from_id(id)) {}


ExternalReference::ExternalReference(const SCTableReference& table_ref)
    : address_(table_ref.address()) {}


// Redirected C entry point for the incremental-marking write barrier.
ExternalReference ExternalReference::
    incremental_marking_record_write_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
}


// Redirected C entry point invoked when the store buffer overflows.
ExternalReference ExternalReference::
    store_buffer_overflow_function(Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
}


ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(CPU::FlushICache)));
}


ExternalReference ExternalReference::delete_handle_scope_extensions(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(HandleScope::DeleteExtensions)));
}


ExternalReference ExternalReference::get_date_field_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
}


ExternalReference ExternalReference::get_make_code_young_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
}
ExternalReference ExternalReference::get_mark_code_as_executed_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
}


ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
  return ExternalReference(isolate->date_cache()->stamp_address());
}


ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
  return ExternalReference(isolate->stress_deopt_count_address());
}


ExternalReference ExternalReference::new_deoptimizer_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
}


ExternalReference ExternalReference::compute_output_frames_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
}


ExternalReference ExternalReference::log_enter_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
}


ExternalReference ExternalReference::log_leave_external_function(
    Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
}


ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
  return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
}


ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
    Isolate* isolate) {
  return ExternalReference(
      isolate->keyed_lookup_cache()->field_offsets_address());
}


ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->roots_array_start());
}


ExternalReference ExternalReference::allocation_sites_list_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->allocation_sites_list_address());
}


ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_jslimit());
}


ExternalReference ExternalReference::address_of_real_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
}


ExternalReference ExternalReference::address_of_regexp_stack_limit(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->limit_address());
}


ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceStart());
}


ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
  return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
}


ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
  return ExternalReference(reinterpret_cast<Address>(
      isolate->heap()->NewSpaceMask()));
}


ExternalReference ExternalReference::new_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::heap_always_allocate_scope_depth(
    Isolate* isolate) {
  Heap* heap = isolate->heap();
  return ExternalReference(heap->always_allocate_scope_depth_address());
}


ExternalReference ExternalReference::new_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldPointerSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldPointerSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::old_data_space_allocation_top_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldDataSpaceAllocationTopAddress());
}


ExternalReference ExternalReference::old_data_space_allocation_limit_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->heap()->OldDataSpaceAllocationLimitAddress());
}


ExternalReference ExternalReference::handle_scope_level_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_level_address(isolate));
}


ExternalReference ExternalReference::handle_scope_next_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_next_address(isolate));
}


ExternalReference ExternalReference::handle_scope_limit_address(
    Isolate* isolate) {
  return ExternalReference(HandleScope::current_limit_address(isolate));
}


ExternalReference ExternalReference::scheduled_exception_address(
    Isolate* isolate) {
  return ExternalReference(isolate->scheduled_exception_address());
}


ExternalReference ExternalReference::address_of_pending_message_obj(
    Isolate* isolate) {
  return ExternalReference(isolate->pending_message_obj_address());
}


ExternalReference ExternalReference::address_of_has_pending_message(
    Isolate* isolate) {
  return ExternalReference(isolate->has_pending_message_address());
}


ExternalReference ExternalReference::address_of_pending_message_script(
    Isolate* isolate) {
  return ExternalReference(isolate->pending_message_script_address());
}


// The following accessors expose the addresses of entries in the static
// double_constants struct, for use as immediates in generated code.

ExternalReference ExternalReference::address_of_min_int() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}


ExternalReference ExternalReference::address_of_one_half() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
}


ExternalReference ExternalReference::address_of_minus_one_half() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.minus_one_half));
}


ExternalReference ExternalReference::address_of_minus_zero() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.minus_zero));
}


ExternalReference ExternalReference::address_of_zero() {
  return ExternalReference(reinterpret_cast<void*>(&double_constants.zero));
}


ExternalReference ExternalReference::address_of_uint8_max_value() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.uint8_max_value));
}


ExternalReference ExternalReference::address_of_negative_infinity() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.negative_infinity));
}


ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
}


ExternalReference ExternalReference::address_of_the_hole_nan() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.the_hole_nan));
}


ExternalReference ExternalReference::address_of_uint32_bias() {
  return ExternalReference(
      reinterpret_cast<void*>(&double_constants.uint32_bias));
}


ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
  return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
}


// Profiling-aware thunk for invoking API function callbacks.
ExternalReference ExternalReference::invoke_function_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


// Profiling-aware thunk for invoking API accessor getter callbacks.
ExternalReference ExternalReference::invoke_accessor_getter_callback(
    Isolate* isolate) {
  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  return ExternalReference(&thunk_fun, thunk_type, isolate);
}


#ifndef V8_INTERPRETED_REGEXP

// Per-architecture stack-guard check called from generated regexp code.
ExternalReference ExternalReference::re_check_stack_guard_state(
    Isolate* isolate) {
  Address function;
#if V8_TARGET_ARCH_X64
  function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
#elif V8_TARGET_ARCH_IA32
  function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM64
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
#elif V8_TARGET_ARCH_ARM
  function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
#elif V8_TARGET_ARCH_MIPS
  function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
#elif V8_TARGET_ARCH_X87
  function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
#else
  UNREACHABLE();
#endif
  return ExternalReference(Redirect(isolate, function));
}


ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
  return ExternalReference(
      Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
}

ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
    Isolate* isolate) {
  return ExternalReference(Redirect(
      isolate,
      FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
}


ExternalReference ExternalReference::re_word_character_map() {
  return ExternalReference(
      NativeRegExpMacroAssembler::word_character_map_address());
}

ExternalReference ExternalReference::address_of_static_offsets_vector(
    Isolate* isolate) {
  return ExternalReference(
      reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
}

ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
    Isolate* isolate) {
  return ExternalReference(
      isolate->regexp_stack()->memory_address());
}

ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
    Isolate* isolate) {
  return ExternalReference(isolate->regexp_stack()->memory_size_address());
}

#endif  // V8_INTERPRETED_REGEXP


ExternalReference ExternalReference::math_log_double_function(
    Isolate* isolate) {
  // The cast selects the double overload of std::log.
  typedef double (*d2d)(double x);
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(static_cast<d2d>(std::log)),
                                    BUILTIN_FP_CALL));
}


ExternalReference ExternalReference::math_exp_constants(int constant_index) {
  ASSERT(math_exp_data_initialized);
  return ExternalReference(
      reinterpret_cast<void*>(math_exp_constants_array + constant_index));
}


ExternalReference ExternalReference::math_exp_log_table() {
  ASSERT(math_exp_data_initialized);
  return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
}


ExternalReference ExternalReference::page_flags(Page* page) {
  return ExternalReference(reinterpret_cast<Address>(page) +
                           MemoryChunk::kFlagsOffset);
}


ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
  return ExternalReference(entry);
}


ExternalReference ExternalReference::cpu_features() {
  ASSERT(CpuFeatures::initialized_);
  return ExternalReference(&CpuFeatures::supported_);
}


ExternalReference ExternalReference::debug_after_break_target_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->after_break_target_address());
}


ExternalReference
    ExternalReference::debug_restarter_frame_function_pointer_address(
        Isolate* isolate) {
  return ExternalReference(
      isolate->debug()->restarter_frame_function_pointer_address());
}


// Dispatches x^y to the cheapest applicable routine: integer exponent,
// square root, reciprocal square root, or the general double-double case.
double power_helper(double x, double y) {
  int y_int = static_cast<int>(y);
  if (y == y_int) {
    return power_double_int(x, y_int);  // Returns 1 if exponent is 0.
  }
  if (y == 0.5) {
    return (std::isinf(x)) ? V8_INFINITY
                           : fast_sqrt(x + 0.0);  // Convert -0 to +0.
  }
  if (y == -0.5) {
    return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0);  // Convert -0 to +0.
  }
  return power_double_double(x, y);
}


// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  unsigned n = (y < 0) ? -y : y;
  double p = 1;
  while (n != 0) {
    // Consume two exponent bits per iteration.
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}


double power_double_double(double x, double y) {
#if defined(__MINGW64_VERSION_MAJOR) && \
    (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)
  // MinGW64 has a custom implementation for pow. This handles certain
  // special cases that are different.
  if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
    double f;
    if (std::modf(y, &f) != 0.0) {
      return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
    }
  }

  if (x == 2.0) {
    int y_int = static_cast<int>(y);
    if (y == y_int) {
      return std::ldexp(1.0, y_int);
    }
  }
#endif

  // The checks for special cases can be dropped in ia32 because it has already
  // been done in generated code before bailing out here.
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return OS::nan_value();
  }
  return std::pow(x, y);
}


ExternalReference ExternalReference::power_double_double_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_double),
                                    BUILTIN_FP_FP_CALL));
}


ExternalReference ExternalReference::power_double_int_function(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(power_double_int),
                                    BUILTIN_FP_INT_CALL));
}


// Evaluates a comparison token against two doubles.  Note that EQ and
// EQ_STRICT coincide here because both operands are already numbers.
bool EvalComparison(Token::Value op, double op1, double op2) {
  ASSERT(Token::IsCompareOp(op));
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT: return (op1 == op2);
    case Token::NE: return (op1 != op2);
    case Token::LT: return (op1 < op2);
    case Token::GT: return (op1 > op2);
    case Token::LTE: return (op1 <= op2);
    case Token::GTE: return (op1 >= op2);
    default:
      UNREACHABLE();
      return false;
  }
}


ExternalReference ExternalReference::mod_two_doubles_operation(
    Isolate* isolate) {
  return ExternalReference(Redirect(isolate,
                                    FUNCTION_ADDR(modulo),
                                    BUILTIN_FP_FP_CALL));
}


ExternalReference ExternalReference::debug_break(Isolate* isolate) {
  return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
}
ExternalReference ExternalReference::debug_step_in_fp_address(
    Isolate* isolate) {
  return ExternalReference(isolate->debug()->step_in_fp_addr());
}


// Records a source position for the current assembler offset; the reloc
// info itself is emitted later by WriteRecordedPositions().
void PositionsRecorder::RecordPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  state_.current_position = pos;
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (gdbjit_lineinfo_ != NULL) {
    gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, false);
  }
#endif
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
                                                 assembler_->pc_offset(),
                                                 pos));
}


// Like RecordPosition, but for statement-level positions (the 'true' flag
// distinguishes them in the GDB JIT line info).
void PositionsRecorder::RecordStatementPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  state_.current_statement_position = pos;
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (gdbjit_lineinfo_ != NULL) {
    gdbjit_lineinfo_->SetPosition(assembler_->pc_offset(), pos, true);
  }
#endif
  LOG_CODE_EVENT(assembler_->isolate(),
                 CodeLinePosInfoAddStatementPositionEvent(
                     jit_handler_data_,
                     assembler_->pc_offset(),
                     pos));
}


// Emits reloc info for any recorded positions not yet written, de-duplicating
// against the last written values.  Returns true if anything was emitted.
bool PositionsRecorder::WriteRecordedPositions() {
  bool written = false;

  // Write the statement position if it is different from what was written last
  // time.
  if (state_.current_statement_position != state_.written_statement_position) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
                                state_.current_statement_position);
    state_.written_statement_position = state_.current_statement_position;
    written = true;
  }

  // Write the position if it is different from what was written last time and
  // also different from the written statement position.
  if (state_.current_position != state_.written_position &&
      state_.current_position != state_.written_statement_position) {
    EnsureSpace ensure_space(assembler_);
    assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
    state_.written_position = state_.current_position;
    written = true;
  }

  // Return whether something was written.
  return written;
}


// Computes the "magic" multiplier and shift for replacing a signed 32-bit
// division by the constant d with a multiply-and-shift sequence; the
// structure follows the magic-number derivation in "Hacker's Delight"
// (signed division by constants).  Requires |d| >= 2.
MultiplierAndShift::MultiplierAndShift(int32_t d) {
  ASSERT(d <= -2 || 2 <= d);
  const uint32_t two31 = 0x80000000;
  uint32_t ad = Abs(d);
  uint32_t t = two31 + (uint32_t(d) >> 31);
  uint32_t anc = t - 1 - t % ad;   // Absolute value of nc.
  int32_t p = 31;                  // Init. p.
  uint32_t q1 = two31 / anc;       // Init. q1 = 2**p/|nc|.
  uint32_t r1 = two31 - q1 * anc;  // Init. r1 = rem(2**p, |nc|).
  uint32_t q2 = two31 / ad;        // Init. q2 = 2**p/|d|.
  uint32_t r2 = two31 - q2 * ad;   // Init. r2 = rem(2**p, |d|).
  uint32_t delta;
  do {
    p++;
    q1 *= 2;          // Update q1 = 2**p/|nc|.
    r1 *= 2;          // Update r1 = rem(2**p, |nc|).
    if (r1 >= anc) {  // Must be an unsigned comparison here.
      q1++;
      r1 = r1 - anc;
    }
    q2 *= 2;          // Update q2 = 2**p/|d|.
    r2 *= 2;          // Update r2 = rem(2**p, |d|).
    if (r2 >= ad) {   // Must be an unsigned comparison here.
      q2++;
      r2 = r2 - ad;
    }
    delta = ad - r2;
  } while (q1 < delta || (q1 == delta && r1 == 0));
  int32_t mul = static_cast<int32_t>(q2 + 1);
  multiplier_ = (d < 0) ? -mul : mul;
  shift_ = p - 32;
}

} }  // namespace v8::internal