Home | History | Annotate | Download | only in src
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 //
     28 // Review notes:
     29 //
     30 // - The use of macros in these inline functions may seem superfluous
     31 // but it is absolutely needed to make sure gcc generates optimal
     32 // code. gcc is not happy when attempting to inline too deep.
     33 //
     34 
     35 #ifndef V8_OBJECTS_INL_H_
     36 #define V8_OBJECTS_INL_H_
     37 
     38 #include "elements.h"
     39 #include "objects.h"
     40 #include "contexts.h"
     41 #include "conversions-inl.h"
     42 #include "heap.h"
     43 #include "isolate.h"
     44 #include "property.h"
     45 #include "spaces.h"
     46 #include "store-buffer.h"
     47 #include "v8memory.h"
     48 #include "factory.h"
     49 #include "incremental-marking.h"
     50 #include "transitions-inl.h"
     51 
     52 namespace v8 {
     53 namespace internal {
     54 
// PropertyDetails is a bitfield packed into a Smi; unpack the raw bits.
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}


// Returns a copy of these details with the DeletedField bit set.
PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
     72 
     73 
// Defines Object::Is<type>() as an exact instance-type comparison against
// the object's map.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }


// Defines type::cast() as a reinterpretation that is type-checked only in
// slow-assert builds.
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


// Raw (untagged) int field accessors; no write barrier is emitted.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Tagged-pointer field accessors. The setter emits a conditional write
// barrier because the stored value may be a heap pointer the GC must see.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// No write barrier is emitted: a Smi is not a heap pointer.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
    118 
    119 
    120 #define BOOL_GETTER(holder, field, name, offset)           \
    121   bool holder::name() {                                    \
    122     return BooleanBit::get(field(), offset);               \
    123   }                                                        \
    124 
    125 
// Boolean bit accessors: read and write one bit (at |offset|) of the
// bitfield accessed via |field|()/set_|field|().
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
    133 
    134 
    135 bool Object::IsFixedArrayBase() {
    136   return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray();
    137 }
    138 
    139 
// External objects are not extensible, so the map check is enough.
// Compares against the heap's singleton external map.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}
    146 
    147 
    148 bool Object::IsAccessorInfo() {
    149   return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
    150 }
    151 
    152 
bool Object::IsSmi() {
  // A small integer is identified purely by its pointer tag bits.
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


// Faster heap-object check for values the caller knows are not failures:
// with failures excluded, any non-zero tag means heap object.
bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
    171 
    172 
bool Object::IsString() {
  // All string instance types are allocated below FIRST_NONSTRING_TYPE, so a
  // single range check suffices.
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}
    177 
    178 
    179 bool Object::IsName() {
    180   return IsString() || IsSymbol();
    181 }
    182 
    183 
    184 bool Object::IsUniqueName() {
    185   return IsInternalizedString() || IsSymbol();
    186 }
    187 
    188 
bool Object::IsSpecObject() {
  // Spec objects occupy the instance-type range at and above
  // FIRST_SPEC_OBJECT_TYPE.
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  // Both plain functions and function proxies count.
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Test the string bit and the not-internalized bit with one mask.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
    209 
    210 
// The Is*String() predicates below first establish that the receiver is a
// string, then dispatch on its StringShape (representation/encoding bits).

bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}
    266 
    267 
// If |representation| requires a double box, allocate a fresh HeapNumber to
// hold this value; otherwise the value can be stored as-is.
MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
                                           Representation representation) {
  if (!FLAG_track_double_fields) return this;
  if (!representation.IsDouble()) return this;
  // The uninitialized sentinel boxes as 0; everything else boxes its
  // numeric value.
  if (IsUninitialized()) {
    return heap->AllocateHeapNumber(0);
  }
  return heap->AllocateHeapNumber(Number());
}
    277 
    278 
// StringShape caches a string's instance type so that repeated
// representation/encoding queries need not re-read the map each time.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  ASSERT(valid());
  // Same combined mask as Object::IsInternalizedString().
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
    306 
    307 
// Encoding of the string's own (top-level) representation.
bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


// Encoding of the storage that actually backs this (flat) string: for an
// indirect (cons/sliced) string, look through to the underlying string.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


// True if the hint bit says the data is one-byte, or the representation
// itself already is.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
    357 
    358 
// Representation predicates: each masks the cached instance type with
// kStringRepresentationMask and compares against one representation tag.

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  // Indirect = cons or sliced (one level of indirection to the real data).
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined into one value.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
    398 
    399 
// These checks keep the constants here in sync with the copies published to
// embedders through v8::internal::Internals (include/v8.h).
STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
             Internals::kExternalAsciiRepresentationTag);

STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
    437 
// Random-access read of one character from the flattened string data.
uc32 FlatStringReader::Get(int index) {
  // NOTE(review): the upper bound is `<=` rather than `<`, so the assert
  // permits reading the one-past-the-end position — confirm callers rely on
  // probing index == length_ intentionally.
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
    446 
    447 
    448 bool Object::IsNumber() {
    449   return IsSmi() || IsHeapNumber();
    450 }
    451 
    452 
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


// Filler objects pad the heap: either tracked free space or a raw filler.
bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)


// Range check over the contiguous block of external-array instance types.
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
    485 
    486 
// A MaybeObject is either a real Object or a tagged Failure sentinel; these
// predicates classify which, and which kind of failure.
bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  // The exception failure is a singleton, so pointer identity suffices.
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


bool MaybeObject::IsUninitialized() {
  return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
}


// Checked downcast from MaybeObject to the Failure representation.
Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}
    523 
    524 
bool Object::IsJSReceiver() {
  // Receivers sit at the top of the instance-type range, so a single lower
  // bound suffices (asserted against LAST_TYPE).
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
    556 
    557 
    558 bool Object::IsJSWeakCollection() {
    559   return IsJSWeakMap() || IsJSWeakSet();
    560 }
    561 
    562 
// Descriptor and transition arrays are stored as plain fixed arrays, so a
// FixedArray check is the strongest test possible here.
bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}
    571 
    572 
bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}


bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array.  Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}
    616 
    617 
bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  // Contexts are identified by carrying one of the heap's dedicated
  // context maps; check each kind in turn.
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}


bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}
    644 
    645 
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization feeding the generic Is<T>() dispatch helper.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
    664 
    665 
// A string wrapper is a JSValue whose wrapped primitive is a string.
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


// Booleans are the two oddballs whose kind has the boolean bit pattern
// (kNotBooleanMask bits clear).
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
    684 
    685 
    686 bool Object::IsJSArrayBufferView() {
    687   return IsJSDataView() || IsJSTypedArray();
    688 }
    689 
    690 
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization feeding the generic Is<T>() dispatch helper.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
    697 
    698 
// All hash tables share the heap's singleton hash table map.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Any hash table other than the string table counts as a dictionary.
bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable() &&
      this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
}
    716 
    717 
bool Object::IsJSFunctionResultCache() {
  // Result caches are plain fixed arrays; the best available test is a
  // length-shape check (header plus whole entries).
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() {
  // Same idea as above: only the fixed length distinguishes the cache.
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
    748 }
    749 
    750 
    751 bool Object::IsCompilationCacheTable() {
    752   return IsHashTable();
    753 }
    754 
    755 
    756 bool Object::IsCodeCacheHashTable() {
    757   return IsHashTable();
    758 }
    759 
    760 
    761 bool Object::IsPolymorphicCodeCacheHashTable() {
    762   return IsHashTable();
    763 }
    764 
    765 
    766 bool Object::IsMapCache() {
    767   return IsHashTable();
    768 }
    769 
    770 
    771 bool Object::IsObjectHashTable() {
    772   return IsHashTable();
    773 }
    774 
    775 
    776 bool Object::IsPrimitive() {
    777   return IsOddball() || IsNumber() || IsString();
    778 }
    779 
    780 
bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  // Global proxies must always require access checks.
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  // Covers both the global object proper and the builtins object.
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}
    813 
    814 
// True for any instance type generated from STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Expands one Is<Name>() predicate per struct in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
    833 
    834 
// Oddball identity checks: each oddball singleton carries a distinct kind.

bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
    868 
    869 
// Numeric value of a Smi or HeapNumber receiver (asserted to be a number).
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


// Converts to a Smi if the value is exactly representable as one;
// otherwise returns the exception failure sentinel.
MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    // Round-trip check: the double must convert back losslessly.
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
    899 
    900 
// Returns element |index| of this object, dispatching through
// GetElementWithReceiver.  May run accessors and therefore allocate.
MaybeObject* Object::GetElement(Isolate* isolate, uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return GetElementWithReceiver(isolate, this, index);
}
    908 
    909 
    910 Object* Object::GetElementNoExceptionThrown(Isolate* isolate, uint32_t index) {
    911   MaybeObject* maybe = GetElementWithReceiver(isolate, this, index);
    912   ASSERT(!maybe->IsFailure());
    913   Object* result = NULL;  // Initialization to please compiler.
    914   maybe->ToObject(&result);
    915   return result;
    916 }
    917 
    918 
// Property lookup by name; discards the resulting attributes.
MaybeObject* Object::GetProperty(Name* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}
    923 
    924 
// Property lookup by name; reports the property's attributes through
// |attributes|.
MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}
    928 
    929 
// FIELD_ADDR turns a tagged HeapObject pointer plus a byte offset into the
// raw (untagged) address of the field.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Reads/writes a tagged-pointer field.  WRITE_FIELD performs no GC write
// barrier; callers storing heap objects must pair it with WRITE_BARRIER or
// CONDITIONAL_WRITE_BARRIER below.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Notifies the incremental marker of the store, and records the slot in the
// store buffer when the stored value lives in new space.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

// As WRITE_BARRIER, but honours a WriteBarrierMode so stores that are known
// not to need a barrier (mode != UPDATE_WRITE_BARRIER) can skip it.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
      object, HeapObject::RawField(object, offset), value);             \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }
    954 
// Raw double-field access.  On MIPS the field may not be 8-byte aligned,
// so the value is moved through two 32-bit loads/stores via a union instead
// of a direct (alignment-requiring) double access.
#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
    992 
    993 
// Raw typed-field accessors: reinterpret the field at |offset| inside the
// tagged object |p| as the named primitive type.  None of these apply a
// write barrier (the values are untagged primitives).
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
   1036 
// Returns the address of the tagged-pointer slot at |byte_offset| in |obj|.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}
   1040 
   1041 
// Decodes the integer payload from this tagged Smi pointer.
int Smi::value() {
  return Internals::SmiValue(this);
}
   1045 
   1046 
// Encodes |value| as a tagged Smi pointer.  |value| must be in Smi range.
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}
   1051 
   1052 
   1053 Smi* Smi::FromIntptr(intptr_t value) {
   1054   ASSERT(Smi::IsValid(value));
   1055   int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
   1056   return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
   1057 }
   1058 
   1059 
// Extracts the failure-type tag bits from this failure's payload.
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}
   1063 
   1064 
// True iff this failure encodes an internal error.
bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}
   1068 
   1069 
// True iff this failure encodes an out-of-memory exception.
bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}
   1073 
   1074 
// For RETRY_AFTER_GC failures, returns the space whose allocation failed
// (encoded above the type-tag bits).
AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}
   1080 
   1081 
// Canonical internal-error failure.
Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}
   1085 
   1086 
// Canonical pending-exception failure.
Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}
   1090 
   1091 
// Out-of-memory failure carrying an extra payload value.
Failure* Failure::OutOfMemoryException(intptr_t value) {
  return Construct(OUT_OF_MEMORY_EXCEPTION, value);
}
   1095 
   1096 
// Recovers the payload bits by stripping the failure tag from this
// pointer's representation.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}
   1101 
   1102 
// Retry-after-GC failure defaulting to new space.
Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}
   1106 
   1107 
// Retry-after-GC failure recording the space whose allocation failed.
Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}
   1112 
   1113 
// Packs |type| and |value| into a tagged Failure "pointer".  The result is
// never dereferenced; it is a bit pattern distinguished by kFailureTag.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  // Verify the payload survives the tag shift without losing high bits.
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  // Fill the unused bits with a pattern that's easy to recognize in crash
  // dumps.
  static const int kFailureMagicPattern = 0x0BAD0000;
  return reinterpret_cast<Failure*>(
      (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
}
   1124 
   1125 
// True iff |value| fits in the Smi payload range for this architecture.
// Cross-checked in debug builds against the kMinValue/kMaxValue bounds.
bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
   1131 
   1132 
// Wraps a map pointer as a MapWord (the first word of every HeapObject).
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}
   1136 
   1137 
// Interprets this map word as a map pointer (valid unless it holds a
// forwarding address during GC).
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}
   1141 
   1142 
// During scavenge, a forwarded object's map word holds its new address,
// which is distinguishable because it carries a Smi tag.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}
   1146 
   1147 
// Encodes |object|'s new location as a forwarding map word (the untagged
// address is Smi-tagged by construction).
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}
   1152 
   1153 
// Decodes the forwarded object's new location from this map word.
HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
   1158 
   1159 
#ifdef VERIFY_HEAP
// Heap-verifier helpers: check that the slot at |offset| holds a valid
// pointer / a Smi respectively.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
   1169 
   1170 
// Recovers the owning Heap from this object's address via its memory chunk.
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}
   1177 
   1178 
// Recovers the owning Isolate via the owning Heap.
Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}
   1182 
   1183 
// This object's map, read from the map word.
Map* HeapObject::map() {
  return map_word().ToMap();
}
   1187 
   1188 
// Installs |value| as this object's map and notifies the incremental
// marker of the store.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
   1197 
   1198 
   1199 // Unsafe accessor omitting write barrier.
   1200 void HeapObject::set_map_no_write_barrier(Map* value) {
   1201   set_map_word(MapWord::FromMap(value));
   1202 }
   1203 
   1204 
// Raw read of the first (map) word of this object.
MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}
   1208 
   1209 
// Raw write of the first (map) word of this object.
void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
   1215 
   1216 
// Converts an untagged heap address into a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}
   1221 
   1222 
// The untagged start address of this object.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
   1226 
   1227 
// Size of this object in bytes, as determined by its map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
   1231 
   1232 
// Visits all tagged-pointer slots in the byte range [start, end).
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}
   1237 
   1238 
// Visits the single tagged-pointer slot at |offset|.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
   1242 
   1243 
// The boxed double payload.
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}
   1247 
   1248 
// Stores the double payload (no barrier needed: untagged data).
void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}
   1252 
   1253 
// Unbiased IEEE-754 exponent of the stored double, extracted from the
// word holding the exponent bits.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}
   1258 
   1259 
// Non-zero iff the stored double is negative (sign bit set).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
   1263 
   1264 
// Generated getter/setter for the out-of-object properties backing store.
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
   1266 
   1267 
// Address of element slot 0 in this array's backing storage.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
   1271 
   1272 
   1273 bool FixedArray::ContainsOnlySmisOrHoles() {
   1274   Object* the_hole = GetHeap()->the_hole_value();
   1275   Object** current = GetFirstElementAddress();
   1276   for (int i = 0; i < length(); ++i) {
   1277     Object* candidate = *current++;
   1278     if (!candidate->IsSmi() && candidate != the_hole) return false;
   1279   }
   1280   return true;
   1281 }
   1282 
   1283 
// The elements backing store (FixedArray, FixedDoubleArray, dictionary, ...).
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
   1288 
   1289 
// Debug-only consistency check of the elements backing store; a no-op
// unless slow asserts are compiled in and enabled.
void JSObject::ValidateElements() {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = GetElementsAccessor();
    accessor->Validate(this);
  }
#endif
}
   1298 
   1299 
   1300 bool JSObject::ShouldTrackAllocationInfo() {
   1301   if (AllocationSite::CanTrack(map()->instance_type())) {
   1302     if (!IsJSArray()) {
   1303       return true;
   1304     }
   1305 
   1306     return AllocationSite::GetMode(GetElementsKind()) ==
   1307         TRACK_ALLOCATION_SITE;
   1308   }
   1309   return false;
   1310 }
   1311 
   1312 
// Resets all fields of a freshly allocated AllocationSite to their
// zero/initial state.  Smi stores need no write barrier; the dependent
// code field is initialized to the immortal empty fixed array, so the
// barrier is skipped there too.
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_memento_create_count(Smi::FromInt(0));
  set_memento_found_count(Smi::FromInt(0));
  set_pretenure_decision(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}
   1323 
   1324 
// Marks this site dead: records the kZombie decision and clears the
// heap-object fields so the site holds no strong references.
void AllocationSite::MarkZombie() {
  ASSERT(!IsZombie());
  set_pretenure_decision(Smi::FromInt(kZombie));
  // Clear all non-smi fields
  set_transition_info(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}
   1333 
   1334 
   1335 // Heuristic: We only need to create allocation site info if the boilerplate
   1336 // elements kind is the initial elements kind.
   1337 AllocationSiteMode AllocationSite::GetMode(
   1338     ElementsKind boilerplate_elements_kind) {
   1339   if (FLAG_track_allocation_sites &&
   1340       IsFastSmiElementsKind(boilerplate_elements_kind)) {
   1341     return TRACK_ALLOCATION_SITE;
   1342   }
   1343 
   1344   return DONT_TRACK_ALLOCATION_SITE;
   1345 }
   1346 
   1347 
   1348 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
   1349                                            ElementsKind to) {
   1350   if (FLAG_track_allocation_sites &&
   1351       IsFastSmiElementsKind(from) &&
   1352       IsMoreGeneralElementsKindTransition(from, to)) {
   1353     return TRACK_ALLOCATION_SITE;
   1354   }
   1355 
   1356   return DONT_TRACK_ALLOCATION_SITE;
   1357 }
   1358 
   1359 
   1360 inline bool AllocationSite::CanTrack(InstanceType type) {
   1361   if (FLAG_allocation_site_pretenuring) {
   1362     return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
   1363   }
   1364   return type == JS_ARRAY_TYPE;
   1365 }
   1366 
   1367 
   1368 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
   1369     Reason reason) {
   1370   switch (reason) {
   1371     case TENURING:
   1372       return DependentCode::kAllocationSiteTenuringChangedGroup;
   1373       break;
   1374     case TRANSITIONS:
   1375       return DependentCode::kAllocationSiteTransitionChangedGroup;
   1376       break;
   1377   }
   1378   UNREACHABLE();
   1379   return DependentCode::kAllocationSiteTransitionChangedGroup;
   1380 }
   1381 
   1382 
   1383 inline void AllocationSite::IncrementMementoFoundCount() {
   1384   int value = memento_found_count()->value();
   1385   set_memento_found_count(Smi::FromInt(value + 1));
   1386 }
   1387 
   1388 
   1389 inline void AllocationSite::IncrementMementoCreateCount() {
   1390   ASSERT(FLAG_allocation_site_pretenuring);
   1391   int value = memento_create_count()->value();
   1392   set_memento_create_count(Smi::FromInt(value + 1));
   1393 }
   1394 
   1395 
// Consumes the memento counters gathered since the last GC: once enough
// allocations were observed, decides tenure/don't-tenure from the
// found/created ratio.  Always resets the counters; returns whether a
// decision was made on this call.
inline bool AllocationSite::DigestPretenuringFeedback() {
  bool decision_made = false;
  if (!PretenuringDecisionMade()) {
    int create_count = memento_create_count()->value();
    // Wait for a minimum sample size before deciding.
    if (create_count >= kPretenureMinimumCreated) {
      int found_count = memento_found_count()->value();
      double ratio = static_cast<double>(found_count) / create_count;
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
               static_cast<void*>(this), create_count, found_count, ratio);
      }
      int result = ratio >= kPretenureRatio ? kTenure : kDontTenure;
      set_pretenure_decision(Smi::FromInt(result));
      decision_made = true;
      // TODO(mvstanton): if the decision represents a change, any dependent
      // code registered for pretenuring changes should be deopted.
    }
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(Smi::FromInt(0));
  set_memento_create_count(Smi::FromInt(0));
  return decision_made;
}
   1420 
   1421 
   1422 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
   1423   object->ValidateElements();
   1424   ElementsKind elements_kind = object->map()->elements_kind();
   1425   if (!IsFastObjectElementsKind(elements_kind)) {
   1426     if (IsFastHoleyElementsKind(elements_kind)) {
   1427       TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
   1428     } else {
   1429       TransitionElementsKind(object, FAST_ELEMENTS);
   1430     }
   1431   }
   1432 }
   1433 
   1434 
// Ensures this object's elements kind can hold each of the |count| values
// in |objects|, widening the kind (smi -> double -> object, plus holey
// variants) as needed and transitioning once at the end.
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
  ElementsKind current_kind = map()->elements_kind();
  ElementsKind target_kind = current_kind;
  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  bool is_holey = IsFastHoleyElementsKind(current_kind);
  // FAST_HOLEY_ELEMENTS is already the most general fast kind.
  if (current_kind == FAST_HOLEY_ELEMENTS) return this;
  Heap* heap = GetHeap();
  Object* the_hole = heap->the_hole_value();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (current == the_hole) {
      is_holey = true;
      target_kind = GetHoleyElementsKind(target_kind);
    } else if (!current->IsSmi()) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
        // Non-smi numbers widen a smi kind to the matching double kind.
        if (IsFastSmiElementsKind(target_kind)) {
          if (is_holey) {
            target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
          } else {
            target_kind = FAST_DOUBLE_ELEMENTS;
          }
        }
      } else if (is_holey) {
        // Holey object elements is maximal; no later value can widen it.
        target_kind = FAST_HOLEY_ELEMENTS;
        break;
      } else {
        target_kind = FAST_ELEMENTS;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}
   1473 
   1474 
// Variant taking a whole backing store.  FixedArray contents are delegated
// to the element-wise overload; a FixedDoubleArray source forces a double
// elements kind (holey if any source slot is the hole).
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
                                                uint32_t length,
                                                EnsureElementsMode mode) {
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
           elements->map() == GetHeap()->fixed_cow_array_map());
    // A plain FixedArray cannot be the source of copied doubles.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
    return EnsureCanContainElements(objects, length, mode);
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
    FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
    // Any hole in the source requires the holey double kind.
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
      }
    }
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
  }

  return this;
}
   1503 
   1504 
// Returns the map this object should use after transitioning to |to_kind|.
// Fast path: consult the native context's cache of initial JSArray maps;
// falls back to the slow transition search otherwise.
MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
                                                ElementsKind to_kind) {
  Map* current_map = map();
  ElementsKind from_kind = current_map->elements_kind();
  if (from_kind == to_kind) return current_map;

  Context* native_context = isolate->context()->native_context();
  Object* maybe_array_maps = native_context->js_array_maps();
  if (maybe_array_maps->IsFixedArray()) {
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
    // Cache hit only if this object currently uses the canonical array map
    // for its kind and a cached target map exists.
    if (array_maps->get(from_kind) == current_map) {
      Object* maybe_transitioned_map = array_maps->get(to_kind);
      if (maybe_transitioned_map->IsMap()) {
        return Map::cast(maybe_transitioned_map);
      }
    }
  }

  return GetElementsTransitionMapSlow(to_kind);
}
   1525 
   1526 
// Atomically (from the mutator's view) installs a new map (if non-NULL)
// and a new elements backing store, applying write barriers per |mode|.
// Debug asserts check that the backing store's type agrees with the map's
// elements kind.
void JSObject::set_map_and_elements(Map* new_map,
                                    FixedArrayBase* value,
                                    WriteBarrierMode mode) {
  ASSERT(value->HasValidElements());
  if (new_map != NULL) {
    if (mode == UPDATE_WRITE_BARRIER) {
      set_map(new_map);
    } else {
      ASSERT(mode == SKIP_WRITE_BARRIER);
      set_map_no_write_barrier(new_map);
    }
  }
  ASSERT((map()->has_fast_smi_or_object_elements() ||
          (value == GetHeap()->empty_fixed_array())) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
   1548 
   1549 
// Replaces the elements backing store, leaving the map unchanged.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  set_map_and_elements(NULL, value, mode);
}
   1553 
   1554 
// Points the properties field at the immortal empty fixed array; the
// target is in old space, so no write barrier is needed.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}
   1559 
   1560 
// Installs the appropriate canonical empty backing store for this map's
// elements kind.  Both candidates live outside new space, so the raw
// barrier-free store is safe.
void JSObject::initialize_elements() {
  if (map()->has_fast_smi_or_object_elements() ||
      map()->has_fast_double_elements()) {
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
  } else if (map()->has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
    ASSERT(!GetHeap()->InNewSpace(empty_array));
    WRITE_FIELD(this, kElementsOffset, empty_array);
  } else {
    UNREACHABLE();
  }
}
   1574 
   1575 
// Resets this object's elements to an empty state.  Observed objects get a
// fresh (empty) dictionary to preserve the dictionary-mode invariant; all
// others transition back to the initial fast elements kind with empty
// backing storage.  May fail on allocation (returns the failure).
MaybeObject* JSObject::ResetElements() {
  if (map()->is_observed()) {
    // Maintain invariant that observed elements are always in dictionary mode.
    SeededNumberDictionary* dictionary;
    MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
    if (!maybe->To(&dictionary)) return maybe;
    if (map() == GetHeap()->non_strict_arguments_elements_map()) {
      // Sloppy-arguments store their dictionary at index 1 of the wrapper.
      FixedArray::cast(elements())->set(1, dictionary);
    } else {
      set_elements(dictionary);
    }
    return this;
  }

  ElementsKind elements_kind = GetInitialFastElementsKind();
  if (!FLAG_smi_only_arrays) {
    elements_kind = FastSmiToObjectElementsKind(elements_kind);
  }
  MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
  Map* map;
  if (!maybe->To(&map)) return maybe;
  set_map(map);
  initialize_elements();

  return this;
}
   1602 
   1603 
   1604 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
   1605   DisallowHeapAllocation no_gc;
   1606   if (!map->HasTransitionArray()) return Handle<String>::null();
   1607   TransitionArray* transitions = map->transitions();
   1608   if (!transitions->IsSimpleTransition()) return Handle<String>::null();
   1609   int transition = TransitionArray::kSimpleTransitionIndex;
   1610   PropertyDetails details = transitions->GetTargetDetails(transition);
   1611   Name* name = transitions->GetKey(transition);
   1612   if (details.type() != FIELD) return Handle<String>::null();
   1613   if (details.attributes() != NONE) return Handle<String>::null();
   1614   if (!name->IsString()) return Handle<String>::null();
   1615   return Handle<String>(String::cast(name));
   1616 }
   1617 
   1618 
// Target map of the simple transition; only valid when
// ExpectedTransitionKey returned a non-null key.
Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
  ASSERT(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}
   1624 
   1625 
// Searches |map|'s transitions for one keyed by |key| that adds a plain
// (FIELD, NONE) property; returns its target map or a null handle.
Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}
   1637 
   1638 
// Generated accessors for the oddball's cached string/number conversions.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
   1641 
   1642 
// The oddball's kind tag (kTrue, kFalse, kTheHole, ...), stored as a Smi.
byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}
   1646 
   1647 
// Stores the kind tag; Smi store, so no write barrier is required.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}
   1651 
   1652 
// The cell's current value.
Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}
   1656 
   1657 
// Stores the cell's value without a write barrier.
void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}
   1663 
// Generated accessor for code depending on this property cell.
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
   1665 
// Raw (untyped) read of the cell's type field.
Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}
   1669 
   1670 
// Raw write of the cell's type field; no write barrier is applied.
void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
   1674 
   1675 
// Size in bytes of this object's fixed header (everything before internal
// fields / in-object properties), determined by instance type.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}
   1728 
   1729 
// Number of embedder internal fields: total pointer slots after the
// header, minus the in-object property slots.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}
   1737 
   1738 
// Byte offset of internal field |index| (fields start right after the
// header).
int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
   1743 
   1744 
// Reads internal field |index|.
Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}
   1752 
   1753 
   1754 void JSObject::SetInternalField(int index, Object* value) {
   1755   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1756   // Internal objects do follow immediately after the header, whereas in-object
   1757   // properties are at the end of the object. Therefore there is no need
   1758   // to adjust the index here.
   1759   int offset = GetHeaderSize() + (kPointerSize * index);
   1760   WRITE_FIELD(this, offset, value);
   1761   WRITE_BARRIER(GetHeap(), this, offset, value);
   1762 }
   1763 
   1764 
   1765 void JSObject::SetInternalField(int index, Smi* value) {
   1766   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1767   // Internal objects do follow immediately after the header, whereas in-object
   1768   // properties are at the end of the object. Therefore there is no need
   1769   // to adjust the index here.
   1770   int offset = GetHeaderSize() + (kPointerSize * index);
   1771   WRITE_FIELD(this, offset, value);
   1772 }
   1773 
   1774 
// Reads the fast property at |index| and lets the value allocate fresh
// storage appropriate for |representation| (presumably boxing for mutable
// doubles — see Object::AllocateNewStorageFor). Allocation can fail, hence
// the MaybeObject return.
MaybeObject* JSObject::FastPropertyAt(Representation representation,
                                      int index) {
  Object* raw_value = RawFastPropertyAt(index);
  return raw_value->AllocateNewStorageFor(GetHeap(), representation);
}
   1780 
   1781 
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // Negative adjusted index: the property lives in-object, counted
    // backwards from the end of the instance.
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    // Otherwise it lives in the out-of-object properties backing store.
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


// Writes a fast-case property; mirrors RawFastPropertyAt. The in-object
// branch needs an explicit write barrier; the properties-array branch relies
// on FixedArray::set to do it.
void JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
}
   1810 
   1811 
// Byte offset of in-object property |index|. In-object properties are stored
// at the end of the instance, so the adjusted index must be negative.
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


// Reads in-object property |index| (must be stored in-object, see above).
Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


// Writes in-object property |index|; |mode| lets callers skip the write
// barrier when they can prove it is unnecessary. Returns |value|.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
   1840 
   1841 
   1842 
// Initializes the body of a freshly allocated JSObject: the first
// pre_allocated_property_fields() slots get |pre_allocated_value|, the rest
// get |filler_value|. No write barriers are emitted, which is why both values
// must not live in new space (asserted below).
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  // Fill the remainder of the instance (up to instance_size) with the filler.
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
   1865 
   1866 
// True when properties are stored in a plain array (fast mode) rather than a
// dictionary; the map's dictionary bit must agree with the backing store.
bool JSObject::HasFastProperties() {
  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}
   1871 
   1872 
// Heuristic deciding whether this object should give up fast properties and
// switch to dictionary mode.
bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
  // Allow extra fast properties if the object has more than
  // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
  // very unlikely that the object is being used as a dictionary and there is a
  // good chance that allowing more map transitions will be worth it.
  Map* map = this->map();
  // As long as there are unused in-object fields, staying fast is free.
  if (map->unused_property_fields() != 0) return false;

  int inobject = map->inobject_properties();

  // Keyed stores hint at dictionary-like usage, so they get the lower
  // (soft) limit; named stores get the higher hard limit.
  int limit;
  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
    limit = Max(inobject, kMaxFastProperties);
  } else {
    limit = Max(inobject, kFastPropertiesSoftLimit);
  }
  return properties()->length() > limit;
}
   1891 
   1892 
// Fills every field of a freshly allocated Struct with undefined. Safe
// without write barriers: undefined is an immortal immovable root.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
   1899 
   1900 
   1901 bool Object::ToArrayIndex(uint32_t* index) {
   1902   if (IsSmi()) {
   1903     int value = Smi::cast(this)->value();
   1904     if (value < 0) return false;
   1905     *index = value;
   1906     return true;
   1907   }
   1908   if (IsHeapNumber()) {
   1909     double value = HeapNumber::cast(this)->value();
   1910     uint32_t uint_value = static_cast<uint32_t>(value);
   1911     if (value == static_cast<double>(uint_value)) {
   1912       *index = uint_value;
   1913       return true;
   1914     }
   1915   }
   1916   return false;
   1917 }
   1918 
   1919 
// True when this object is a String wrapper (JSValue) whose wrapped string
// has a character at position |index|.
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}
   1931 
   1932 
   1933 
// Debug-only sanity check (compiled in only with ENABLE_EXTRA_CHECKS): an
// object returned from an API callback must be one of the value types the
// VM knows how to handle; anything else aborts.
void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
  if (!(IsSmi() ||
        IsString() ||
        IsSpecObject() ||
        IsHeapNumber() ||
        IsUndefined() ||
        IsTrue() ||
        IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // ENABLE_EXTRA_CHECKS
}
   1948 
   1949 
// Checked downcast (debug-checked via ASSERT) to the common base of
// FixedArray, FixedDoubleArray and ConstantPoolArray.
FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray() ||
         object->IsConstantPoolArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}
   1955 
   1956 
// Reads element |index|.
Object* FixedArray::get(int index) {
  SLOW_ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


// True when element |index| holds the hole sentinel.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


// Smi store: no write barrier needed since a Smi is not a heap pointer.
// Copy-on-write arrays must never be mutated through this path.
void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// General store: writes the field, then notifies the GC via the write
// barrier (order matters for incremental marking).
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
   1984 
   1985 
// The hole in a FixedDoubleArray is encoded as one specific NaN bit pattern
// (kHoleNanInt64); compare bit patterns, not values, since NaN != NaN.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


// The hole sentinel, reinterpreted as a double for storing.
inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


// A canonical NaN guaranteed to be distinct from the hole bit pattern, used
// when a real NaN value must be stored without being mistaken for the hole.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
   2001 
   2002 
// Reads element |index| as a raw double; must not be called on a hole.
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}

// Reads element |index| as its raw 64-bit representation (valid even for
// the hole, unlike get_scalar).
int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}

// Returns element |index| boxed as a heap object; the hole maps to the
// the_hole oddball. Allocation of the number box can fail (MaybeObject).
MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}


// Stores |value| at |index|. Real NaNs are canonicalized so they cannot
// collide with the hole bit pattern.
void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


// Marks element |index| as a hole by writing the hole NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


// True when element |index| holds the hole NaN bit pattern.
bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}
   2049 
   2050 
// A ConstantPoolArray stores three contiguous sections, in this order:
// int64 entries, pointer entries, int32 entries. The two boundaries are
// stored as Smi fields; the int64 section always starts at 0.
SMI_ACCESSORS(ConstantPoolArray, first_ptr_index, kFirstPointerIndexOffset)
SMI_ACCESSORS(ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)


// The int64 section always starts at index 0.
int ConstantPoolArray::first_int64_index() {
  return 0;
}


// Size of the int64 section (it ends where the pointer section begins).
int ConstantPoolArray::count_of_int64_entries() {
  return first_ptr_index();
}


// Size of the pointer section.
int ConstantPoolArray::count_of_ptr_entries() {
  return first_int32_index() - first_ptr_index();
}


// Size of the int32 section (it runs to the end of the array).
int ConstantPoolArray::count_of_int32_entries() {
  return length() - first_int32_index();
}


// Initializes the section boundaries and total length from section sizes.
void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
                                       int number_of_ptr_entries,
                                       int number_of_int32_entries) {
  set_first_ptr_index(number_of_int64_entries);
  set_first_int32_index(number_of_int64_entries + number_of_ptr_entries);
  set_length(number_of_int64_entries + number_of_ptr_entries +
             number_of_int32_entries);
}
   2083 
   2084 
// Reads an entry from the int64 section.
int64_t ConstantPoolArray::get_int64_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= 0 && index < first_ptr_index());
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}

// Reads an int64-section entry reinterpreted as a double (same 8 bytes).
double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= 0 && index < first_ptr_index());
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}


// Reads an entry from the pointer section.
Object* ConstantPoolArray::get_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_ptr_index() && index < first_int32_index());
  return READ_FIELD(this, OffsetOfElementAt(index));
}


// Reads an entry from the int32 section.
int32_t ConstantPoolArray::get_int32_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int32_index() && index < length());
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}


// Writes a pointer-section entry; needs a write barrier since the value may
// be a heap object.
void ConstantPoolArray::set(int index, Object* value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_ptr_index() && index < first_int32_index());
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}


// Writes an int64-section entry (raw data, no barrier needed).
void ConstantPoolArray::set(int index, int64_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_ptr_index());
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}


// Writes an int64-section entry from a double (same 8 bytes).
void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_ptr_index());
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}


// Writes an int32-section entry (raw data, no barrier needed).
void ConstantPoolArray::set(int index, int32_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= this->first_int32_index() && index < length());
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}
   2141 
   2142 
// Returns whether stores into this object may skip the write barrier.
// Safe to skip only for new-space objects while incremental marking is off.
// The DisallowHeapAllocation argument documents that the answer is only
// valid while no GC can happen (an allocation could move this object).
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
   2150 
   2151 
// Store with a caller-provided write barrier mode (see GetWriteBarrierMode).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


// Store that bypasses the incremental-marking barrier but still records the
// old-to-new remembered-set entry when |value| is in new space. Only safe
// when the array is known to be unmarked (e.g. under a WhitenessWitness).
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


// Store with no barrier at all; asserted safe because |value| is not in new
// space (so no old-to-new pointer can be created).
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
   2185 
   2186 
// Stores the undefined oddball at |index|. No write barrier: undefined is an
// immortal immovable root (asserted not in new space).
void FixedArray::set_undefined(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
   2195 
   2196 
   2197 void FixedArray::set_null(int index) {
   2198   ASSERT(index >= 0 && index < this->length());
   2199   ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
   2200   WRITE_FIELD(this,
   2201               kHeaderSize + index * kPointerSize,
   2202               GetHeap()->null_value());
   2203 }
   2204 
   2205 
// Stores the hole oddball at |index|. No write barrier: the hole is an
// immortal immovable root (asserted not in new space).
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
   2214 
   2215 
// Raw pointer to the first double element (valid only while no GC moves
// this object).
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


// Raw pointer to the first element slot (valid only while no GC moves
// this object).
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
   2224 
   2225 
// An empty descriptor array is the canonical shared empty_descriptor_array,
// which is shorter than kFirstIndex (it has no header slots for counts).
bool DescriptorArray::IsEmpty() {
  ASSERT(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


// Writes the descriptor count into the array's header slot (raw field write;
// the count is a Smi, so no write barrier is needed).
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
   2237 
   2238 
   2239 // Perform a binary search in a fixed array. Low and high are entry indices. If
   2240 // there are three entries in this array it should be called with low=0 and
   2241 // high=2.
   2242 template<SearchMode search_mode, typename T>
   2243 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
   2244   uint32_t hash = name->Hash();
   2245   int limit = high;
   2246 
   2247   ASSERT(low <= high);
   2248 
   2249   while (low != high) {
   2250     int mid = (low + high) / 2;
   2251     Name* mid_name = array->GetSortedKey(mid);
   2252     uint32_t mid_hash = mid_name->Hash();
   2253 
   2254     if (mid_hash >= hash) {
   2255       high = mid;
   2256     } else {
   2257       low = mid + 1;
   2258     }
   2259   }
   2260 
   2261   for (; low <= limit; ++low) {
   2262     int sort_index = array->GetSortedKeyIndex(low);
   2263     Name* entry = array->GetKey(sort_index);
   2264     if (entry->Hash() != hash) break;
   2265     if (entry->Equals(name)) {
   2266       if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
   2267         return sort_index;
   2268       }
   2269       return T::kNotFound;
   2270     }
   2271   }
   2272 
   2273   return T::kNotFound;
   2274 }
   2275 
   2276 
   2277 // Perform a linear search in this fixed array. len is the number of entry
   2278 // indices that are valid.
   2279 template<SearchMode search_mode, typename T>
   2280 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
   2281   uint32_t hash = name->Hash();
   2282   if (search_mode == ALL_ENTRIES) {
   2283     for (int number = 0; number < len; number++) {
   2284       int sorted_index = array->GetSortedKeyIndex(number);
   2285       Name* entry = array->GetKey(sorted_index);
   2286       uint32_t current_hash = entry->Hash();
   2287       if (current_hash > hash) break;
   2288       if (current_hash == hash && entry->Equals(name)) return sorted_index;
   2289     }
   2290   } else {
   2291     ASSERT(len >= valid_entries);
   2292     for (int number = 0; number < valid_entries; number++) {
   2293       Name* entry = array->GetKey(number);
   2294       uint32_t current_hash = entry->Hash();
   2295       if (current_hash == hash && entry->Equals(name)) return number;
   2296     }
   2297   }
   2298   return T::kNotFound;
   2299 }
   2300 
   2301 
   2302 template<SearchMode search_mode, typename T>
   2303 int Search(T* array, Name* name, int valid_entries) {
   2304   if (search_mode == VALID_ENTRIES) {
   2305     SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
   2306   } else {
   2307     SLOW_ASSERT(array->IsSortedNoDuplicates());
   2308   }
   2309 
   2310   int nof = array->number_of_entries();
   2311   if (nof == 0) return T::kNotFound;
   2312 
   2313   // Fast case: do linear search for small arrays.
   2314   const int kMaxElementsForLinearSearch = 8;
   2315   if ((search_mode == ALL_ENTRIES &&
   2316        nof <= kMaxElementsForLinearSearch) ||
   2317       (search_mode == VALID_ENTRIES &&
   2318        valid_entries <= (kMaxElementsForLinearSearch * 3))) {
   2319     return LinearSearch<search_mode>(array, name, nof, valid_entries);
   2320   }
   2321 
   2322   // Slow case: perform binary search.
   2323   return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
   2324 }
   2325 
   2326 
// Finds |name| among the first |valid_descriptors| descriptors.
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}


// Like Search, but consults (and fills) the per-isolate descriptor lookup
// cache keyed on (map, name).
int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  // Cache miss: do the real search and remember the result (kNotFound is
  // cached too).
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}
   2346 
   2347 
// Details of the most recently added descriptor of this map.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


// Looks up |name| among this map's own descriptors and reports the result
// (found descriptor or NotFound) through |result|.
void Map::LookupDescriptor(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
}
   2361 
   2362 
// Looks up |name| among this map's transitions and reports the target map
// (or NotFound) through |result|.
void Map::LookupTransition(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  if (HasTransitionArray()) {
    TransitionArray* transition_array = transitions();
    int number = transition_array->Search(name);
    if (number != TransitionArray::kNotFound) {
      return result->TransitionResult(
          holder, transition_array->GetTarget(number));
    }
  }
  result->NotFound();
}
   2376 
   2377 
// Address of the key slot of descriptor |descriptor_number|.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToKeyIndex(descriptor_number)));
}


// First slot of a descriptor's (key, details, value) triple — same as the
// key slot.
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


// One-past-the-end slot of descriptor |descriptor_number - 1|'s triple,
// i.e. where descriptor |descriptor_number| would begin.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}
   2394 
   2395 
// The name (key) of descriptor |descriptor_number|, in storage order.
Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


// Storage index of the descriptor at hash-sorted position
// |descriptor_number|; the sorted order is kept in the details' pointer bits.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


// The key at hash-sorted position |descriptor_number|.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


// Updates the sorted-order pointer stored in descriptor
// |descriptor_index|'s details.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


// Rewrites the representation bits in descriptor |descriptor_index|'s
// details, preserving all other detail bits.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


// Sets every descriptor's representation to |representation|.
void DescriptorArray::InitializeRepresentations(Representation representation) {
  int length = number_of_descriptors();
  for (int i = 0; i < length; i++) {
    SetRepresentation(i, representation);
  }
}
   2433 
   2434 
// Address of the value slot of descriptor |descriptor_number|.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToValueIndex(descriptor_number)));
}


// The value of descriptor |descriptor_number| (meaning depends on its type:
// constant, callbacks object, etc.).
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


// The details word (type, attributes, representation, pointer) of
// descriptor |descriptor_number|, stored as a Smi.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


// The property type of descriptor |descriptor_number|.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


// The field index of a FIELD descriptor.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();
}


// The constant value of a constant descriptor (same slot as GetValue).
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


// The callbacks object of a CALLBACKS descriptor.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


// The native accessor descriptor of a CALLBACKS descriptor, unwrapped from
// its Foreign wrapper.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


// Copies descriptor |descriptor_number| out into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}
   2490 
   2491 
// Writes descriptor |descriptor_number| using barrier-free stores; only
// valid while a WhitenessWitness guarantees this array is unmarked (white),
// so incremental marking cannot miss the writes.
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               desc->GetValue());
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


// Writes descriptor |descriptor_number| with ordinary (write-barriered)
// stores.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), desc->GetKey());
  set(ToValueIndex(descriptor_number), desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
   2518 
   2519 
// Appends |desc| at the end and inserts it into the hash-sorted order with a
// backwards insertion scan. Witness variant: uses barrier-free stores (see
// Set with WhitenessWitness). NOTE(review): keep this in sync with the
// non-witness overload below — the bodies are intentionally identical apart
// from the Set call.
void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted positions with a larger hash one slot to the right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


// Appends |desc| at the end and inserts it into the hash-sorted order.
// Same algorithm as the witness overload above, but with ordinary
// (write-barriered) stores.
void DescriptorArray::Append(Descriptor* desc) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted positions with a larger hash one slot to the right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
   2557 
   2558 
   2559 void DescriptorArray::SwapSortedKeys(int first, int second) {
   2560   int first_key = GetSortedKeyIndex(first);
   2561   SetSortedKey(first, GetSortedKeyIndex(second));
   2562   SetSortedKey(second, first_key);
   2563 }
   2564 
   2565 
// RAII scope that suppresses incremental marking while descriptors are
// written; only valid for arrays that are still white (unmarked), as
// asserted below.
DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
}
   2571 
   2572 
// Re-enables incremental marking when the witness goes out of scope.
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
   2576 
   2577 
   2578 template<typename Shape, typename Key>
   2579 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
   2580   const int kMinCapacity = 32;
   2581   int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
   2582   if (capacity < kMinCapacity) {
   2583     capacity = kMinCapacity;  // Guarantee min capacity.
   2584   }
   2585   return capacity;
   2586 }
   2587 
   2588 
   2589 template<typename Shape, typename Key>
   2590 int HashTable<Shape, Key>::FindEntry(Key key) {
   2591   return FindEntry(GetIsolate(), key);
   2592 }
   2593 
   2594 
   2595 // Find entry for key otherwise return kNotFound.
   2596 template<typename Shape, typename Key>
   2597 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
   2598   uint32_t capacity = Capacity();
   2599   uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
   2600   uint32_t count = 1;
   2601   // EnsureCapacity will guarantee the hash table is never full.
   2602   while (true) {
   2603     Object* element = KeyAt(entry);
   2604     // Empty entry. Uses raw unchecked accessors because it is called by the
   2605     // string table during bootstrapping.
   2606     if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
   2607     if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
   2608         Shape::IsMatch(key, element)) return entry;
   2609     entry = NextProbe(entry, count++, capacity);
   2610   }
   2611   return kNotFound;
   2612 }
   2613 
   2614 
   2615 bool SeededNumberDictionary::requires_slow_elements() {
   2616   Object* max_index_object = get(kMaxNumberKeyIndex);
   2617   if (!max_index_object->IsSmi()) return false;
   2618   return 0 !=
   2619       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
   2620 }
   2621 
   2622 uint32_t SeededNumberDictionary::max_number_key() {
   2623   ASSERT(!requires_slow_elements());
   2624   Object* max_index_object = get(kMaxNumberKeyIndex);
   2625   if (!max_index_object->IsSmi()) return 0;
   2626   uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
   2627   return value >> kRequiresSlowElementsTagSize;
   2628 }
   2629 
// Flags the dictionary as requiring slow elements by overwriting the
// max-number-key slot with just the flag bit set (the recorded maximum
// key is discarded).
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
   2633 
   2634 
   2635 // ------------------------------------
   2636 // Cast operations
   2637 
   2638 
   2639 CAST_ACCESSOR(FixedArray)
   2640 CAST_ACCESSOR(FixedDoubleArray)
   2641 CAST_ACCESSOR(ConstantPoolArray)
   2642 CAST_ACCESSOR(DescriptorArray)
   2643 CAST_ACCESSOR(DeoptimizationInputData)
   2644 CAST_ACCESSOR(DeoptimizationOutputData)
   2645 CAST_ACCESSOR(DependentCode)
   2646 CAST_ACCESSOR(TypeFeedbackCells)
   2647 CAST_ACCESSOR(StringTable)
   2648 CAST_ACCESSOR(JSFunctionResultCache)
   2649 CAST_ACCESSOR(NormalizedMapCache)
   2650 CAST_ACCESSOR(ScopeInfo)
   2651 CAST_ACCESSOR(CompilationCacheTable)
   2652 CAST_ACCESSOR(CodeCacheHashTable)
   2653 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
   2654 CAST_ACCESSOR(MapCache)
   2655 CAST_ACCESSOR(String)
   2656 CAST_ACCESSOR(SeqString)
   2657 CAST_ACCESSOR(SeqOneByteString)
   2658 CAST_ACCESSOR(SeqTwoByteString)
   2659 CAST_ACCESSOR(SlicedString)
   2660 CAST_ACCESSOR(ConsString)
   2661 CAST_ACCESSOR(ExternalString)
   2662 CAST_ACCESSOR(ExternalAsciiString)
   2663 CAST_ACCESSOR(ExternalTwoByteString)
   2664 CAST_ACCESSOR(Symbol)
   2665 CAST_ACCESSOR(Name)
   2666 CAST_ACCESSOR(JSReceiver)
   2667 CAST_ACCESSOR(JSObject)
   2668 CAST_ACCESSOR(Smi)
   2669 CAST_ACCESSOR(HeapObject)
   2670 CAST_ACCESSOR(HeapNumber)
   2671 CAST_ACCESSOR(Oddball)
   2672 CAST_ACCESSOR(Cell)
   2673 CAST_ACCESSOR(PropertyCell)
   2674 CAST_ACCESSOR(SharedFunctionInfo)
   2675 CAST_ACCESSOR(Map)
   2676 CAST_ACCESSOR(JSFunction)
   2677 CAST_ACCESSOR(GlobalObject)
   2678 CAST_ACCESSOR(JSGlobalProxy)
   2679 CAST_ACCESSOR(JSGlobalObject)
   2680 CAST_ACCESSOR(JSBuiltinsObject)
   2681 CAST_ACCESSOR(Code)
   2682 CAST_ACCESSOR(JSArray)
   2683 CAST_ACCESSOR(JSArrayBuffer)
   2684 CAST_ACCESSOR(JSArrayBufferView)
   2685 CAST_ACCESSOR(JSTypedArray)
   2686 CAST_ACCESSOR(JSDataView)
   2687 CAST_ACCESSOR(JSRegExp)
   2688 CAST_ACCESSOR(JSProxy)
   2689 CAST_ACCESSOR(JSFunctionProxy)
   2690 CAST_ACCESSOR(JSSet)
   2691 CAST_ACCESSOR(JSMap)
   2692 CAST_ACCESSOR(JSWeakMap)
   2693 CAST_ACCESSOR(JSWeakSet)
   2694 CAST_ACCESSOR(Foreign)
   2695 CAST_ACCESSOR(ByteArray)
   2696 CAST_ACCESSOR(FreeSpace)
   2697 CAST_ACCESSOR(ExternalArray)
   2698 CAST_ACCESSOR(ExternalByteArray)
   2699 CAST_ACCESSOR(ExternalUnsignedByteArray)
   2700 CAST_ACCESSOR(ExternalShortArray)
   2701 CAST_ACCESSOR(ExternalUnsignedShortArray)
   2702 CAST_ACCESSOR(ExternalIntArray)
   2703 CAST_ACCESSOR(ExternalUnsignedIntArray)
   2704 CAST_ACCESSOR(ExternalFloatArray)
   2705 CAST_ACCESSOR(ExternalDoubleArray)
   2706 CAST_ACCESSOR(ExternalPixelArray)
   2707 CAST_ACCESSOR(Struct)
   2708 CAST_ACCESSOR(AccessorInfo)
   2709 
   2710 
   2711 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
   2712   STRUCT_LIST(MAKE_STRUCT_CAST)
   2713 #undef MAKE_STRUCT_CAST
   2714 
   2715 
// Checked downcast for hash tables: asserts the tag in debug builds,
// then reinterprets the pointer.
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
   2721 
   2722 
// Smi-encoded integer field accessors (getter/setter pairs generated by
// the SMI_ACCESSORS macro).
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
   2727 
   2728 
// Returns the raw 32-bit hash field (hash value plus flag bits).
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


// Stores the raw 32-bit hash field.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit hosts the slot spans a full pointer; zero the upper half
  // so the whole word has a deterministic value.
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
   2740 
   2741 
// Value-equality for names. Symbols and pairs of internalized strings
// compare by identity only (internalized strings are unique, symbols
// are always unique), so anything not caught by the pointer check is
// unequal; the remaining cases fall back to character comparison.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}
   2750 
   2751 
// Symbol fields: the description object, the flags Smi, and the
// is_private bit carved out of the flags.
ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
   2755 
   2756 
   2757 bool String::Equals(String* other) {
   2758   if (other == this) return true;
   2759   if (this->IsInternalizedString() && other->IsInternalizedString()) {
   2760     return false;
   2761   }
   2762   return SlowEquals(other);
   2763 }
   2764 
   2765 
// Attempts to flatten this string; may allocate, hence the MaybeObject
// return. Non-cons strings are returned unchanged and an already-flat
// cons string yields its first component without allocating.
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}


// Like TryFlatten, but swallows allocation failure: if flattening could
// not complete, the original (unflattened) string is returned instead.
String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (!flat->ToObject(&successfully_flattened)) return this;
  return String::cast(successfully_flattened);
}
   2780 
   2781 
// Returns the code unit at |index|, dispatching on the concrete string
// representation (sequential, cons, external, sliced) and encoding.
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
   2806 
   2807 
   2808 void String::Set(int index, uint16_t value) {
   2809   ASSERT(index >= 0 && index < length());
   2810   ASSERT(StringShape(this).IsSequential());
   2811 
   2812   return this->IsOneByteRepresentation()
   2813       ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
   2814       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
   2815 }
   2816 
   2817 
   2818 bool String::IsFlat() {
   2819   if (!StringShape(this).IsCons()) return true;
   2820   return ConsString::cast(this)->second()->length() == 0;
   2821 }
   2822 
   2823 
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  // Cons and sliced strings store their target at the same offset, so a
  // single read works for both indirect representations.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
   2833 
   2834 
// Walks the representation tree of |string| starting at |offset| and
// feeds leaf character data to |visitor|; |cons_op| decides how cons
// strings are handled (returning NULL aborts the traversal). |type| and
// |length| must match the string, as asserted below.
template<class Visitor, class ConsOp>
void String::Visit(
    String* string,
    unsigned offset,
    Visitor& visitor,
    ConsOp& cons_op,
    int32_t type,
    unsigned length) {
  ASSERT(length == static_cast<unsigned>(string->length()));
  ASSERT(offset <= length);
  unsigned slice_offset = offset;
  while (true) {
    ASSERT(type == string->map()->instance_type());

    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kSeqStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kExternalStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            ExternalAsciiString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kExternalStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      // Sliced strings are unwrapped in place: accumulate the slice
      // offset and continue with the parent string.
      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        type = string->map()->instance_type();
        continue;
      }

      // Cons strings are delegated to |cons_op|, which may substitute a
      // child (updating offset/type/length) or abort by returning NULL.
      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        string = cons_op.Operate(string, &offset, &type, &length);
        if (string == NULL) return;
        slice_offset = offset;
        ASSERT(length == static_cast<unsigned>(string->length()));
        continue;

      default:
        UNREACHABLE();
        return;
    }
  }
}
   2897 
   2898 
// TODO(dcarney): Remove this class after conversion to VisitFlat.
// Cons "operation" that stops traversal at the first cons string it
// meets and records it, so VisitFlat can hand back the unvisited tail.
class ConsStringCaptureOp {
 public:
  inline ConsStringCaptureOp() : cons_string_(NULL) {}
  inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
    cons_string_ = ConsString::cast(string);
    return NULL;
  }
  ConsString* cons_string_;  // First cons string encountered, or NULL.
};
   2909 
   2910 
// Visits the flat prefix of |string| starting at |offset|. Returns NULL
// if the whole string was visited, otherwise the ConsString at which
// traversal stopped.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              int offset,
                              int length,
                              int32_t type) {
  ASSERT(length >= 0 && length == string->length());
  ASSERT(offset >= 0 && offset <= length);
  ConsStringCaptureOp op;
  Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
  return op.cons_string_;
}
   2923 
   2924 
// Character access for sequential one-byte strings.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// The value must fit in one byte; wider values are a caller bug.
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}
   2936 
   2937 
// Raw pointers to the in-object character payload of sequential
// strings (located immediately after the header).
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}
   2956 
   2957 
// Character access for sequential two-byte strings.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object sizes derived from the current length; the instance_type
// parameter is unused here.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
   2978 
   2979 
// Parent and offset accessors for sliced strings.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


// The parent must itself be flat (sequential or external); slices of
// slices are not allowed, as asserted here.
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  ASSERT(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
   2993 
   2994 
// First/second component accessors for cons strings. The unchecked_*
// variants skip the String cast for callers (e.g. the GC) that may see
// partially initialized objects.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
   3025 
   3026 
// True for "short" external strings, identified by a bit in the
// instance type.
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
   3031 
   3032 
// Resource and character access for external one-byte strings. The
// resource owns the character data; a copy of its data pointer is
// cached in the object (except for short external strings).
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the cached data pointer from the resource; short external
// strings have no cache slot and are skipped.
void ExternalAsciiString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalAsciiString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}
   3063 
   3064 
// Resource and character access for external two-byte strings; mirrors
// the ExternalAsciiString accessors above.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the cached data pointer from the resource; short external
// strings have no cache slot and are skipped.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}


// Returns a pointer into the character data beginning at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
   3101 
   3102 
// Degenerate cons operation that always aborts traversal; used when the
// caller knows the string contains no cons strings.
String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
  return NULL;
}


unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
  return depth & kDepthMask;
}


// The frame stack is a fixed-size ring buffer indexed modulo kDepthMask;
// overflow is detected later via maximum_depth_ (see ContinueOperation).
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


void ConsStringIteratorOp::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIteratorOp::Pop() {
  ASSERT(depth_ > 0);
  ASSERT(depth_ <= maximum_depth_);
  depth_--;
}


bool ConsStringIteratorOp::HasMore() {
  return depth_ != 0;
}


void ConsStringIteratorOp::Reset() {
  depth_ = 0;
}
   3144 
   3145 
// Produces the next leaf string of an in-progress cons-string
// traversal, or NULL when traversal is complete. If the fixed-depth
// frame stack overflowed, the current position is re-found by searching
// from the root.
String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
                                                unsigned* length_out) {
  bool blew_stack = false;
  String* string = NextLeaf(&blew_stack, type_out, length_out);
  // String found.
  if (string != NULL) {
    // Verify output.
    ASSERT(*length_out == static_cast<unsigned>(string->length()));
    ASSERT(*type_out == string->map()->instance_type());
    return string;
  }
  // Traversal complete.
  if (!blew_stack) return NULL;
  // Restart search from root.
  unsigned offset_out;
  string = Search(&offset_out, type_out, length_out);
  // Verify output.
  ASSERT(string == NULL || offset_out == 0);
  ASSERT(string == NULL ||
         *length_out == static_cast<unsigned>(string->length()));
  ASSERT(string == NULL || *type_out == string->map()->instance_type());
  return string;
}
   3169 
   3170 
// Returns the next code unit. buffer8_ and buffer16_ alias the same
// cursor; is_one_byte_ selects which width to read.
uint16_t StringCharacterStream::GetNext() {
  ASSERT(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
  if (buffer8_ == end_) HasMore();
  ASSERT(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}
   3179 
   3180 
StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
                                             unsigned offset)
  : is_one_byte_(false),
    op_(op) {
  Reset(string, offset);
}


// (Re)positions the stream at |offset| within |string|. String::Visit
// calls back into the Visit*String methods below, which capture the
// first leaf segment's character buffer.
void StringCharacterStream::Reset(String* string, unsigned offset) {
  op_->Reset();
  buffer8_ = NULL;
  end_ = NULL;
  int32_t type = string->map()->instance_type();
  unsigned length = string->length();
  String::Visit(string, offset, *this, *op_, type, length);
}
   3198 
   3199 
// Returns true if more characters are available, advancing to the next
// leaf segment of the underlying cons-string traversal when the current
// buffer is exhausted.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  if (!op_->HasMore()) return false;
  unsigned length;
  int32_t type;
  String* string = op_->ContinueOperation(&type, &length);
  if (string == NULL) return false;
  ASSERT(!string->IsConsString());
  ASSERT(string->length() != 0);
  ConsStringNullOp null_op;
  String::Visit(string, 0, *this, null_op, type, length);
  ASSERT(buffer8_ != end_);
  return true;
}
   3214 
   3215 
// String::Visit callbacks: capture the leaf segment's buffer and set
// the encoding flag used by GetNext.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, unsigned length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, unsigned length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
   3230 
   3231 
// Shrinks the cache to zero entries without touching the backing store.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Overwrites all cached entries with the hole and resets the size and
// finger bookkeeping.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Size and finger index are stored as Smis in reserved slots.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
   3266 
   3267 
// Raw byte access into the array payload following the header.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads the |index|-th int-sized word from the payload.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Converts between the (tagged) object pointer and the address of the
// first payload byte.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
   3295 
   3296 
// External pixel arrays store clamped uint8 values in an off-heap
// buffer reached through the external pointer.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


uint8_t ExternalPixelArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  return ptr[index];
}


// Boxed variant: the value always fits in a Smi.
MaybeObject* ExternalPixelArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}


// The off-heap data pointer shared by all external array types.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
   3331 
   3332 
// Element accessors for the 8- and 16-bit external array types:
// get_scalar reads the raw value, get boxes it as a Smi (these widths
// always fit), and set stores the raw value.
int8_t ExternalByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
   3407 
   3408 
// Element accessors for the 32-bit integer and floating-point external
// array types. Unlike the narrower types above, get boxes values via
// GetHeap()->NumberFrom*, since they need not fit in a Smi (hence the
// MaybeObject return — boxing may fail to allocate).
int32_t ExternalIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalIntArray::get(int index) {
    return GetHeap()->NumberFromInt32(get_scalar(index));
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedIntArray::get(int index) {
    return GetHeap()->NumberFromUint32(get_scalar(index));
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalFloatArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


double ExternalDoubleArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalDoubleArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
   3483 
   3484 
// Returns the visitor id byte stored in the map (see set_visitor_id: 0-255).
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}
   3488 
   3489 
// Stores the visitor id; it must fit in the single byte reserved for it.
void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
   3494 
   3495 
// Instance size in bytes; stored as a byte in units of pointers
// (see set_instance_size for the inverse encoding).
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}
   3499 
   3500 
// Number of in-object property slots (byte-sized field).
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}
   3504 
   3505 
// Number of pre-allocated property fields (byte-sized field).
int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
   3509 
   3510 
// Computes this object's size in bytes.  Fixed-size objects answer directly
// from the map; variable-sized objects (signalled by kVariableSizeSentinel)
// are dispatched on instance type and measured from their own length fields.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  int instance_type = static_cast<int>(map->instance_type());
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE ||
      instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return ConstantPoolArray::SizeFor(
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_ptr_entries(),
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
  }
  // Everything else that is variable-sized must be code.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
   3548 
   3549 
// Stores the instance size; |value| must be pointer-aligned and, once
// divided by the pointer size, must fit in a byte.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}
   3556 
   3557 
// Stores the in-object property count (must fit in a byte).
void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}
   3562 
   3563 
// Stores the pre-allocated property field count (must fit in a byte).
void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
   3570 
   3571 
// Returns the instance type tag stored in the map.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}
   3575 
   3576 
// Stores the instance type tag (byte-sized field).
void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}
   3580 
   3581 
// Number of unused property fields (byte-sized field).
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}
   3585 
   3586 
// Stores the unused property field count.  Values above 255 are clamped to
// fit the byte field rather than asserted.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
   3590 
   3591 
// Raw accessor for the first byte of boolean map flags.
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}
   3595 
   3596 
// Raw setter for the first byte of boolean map flags.
void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}
   3600 
   3601 
// Raw accessor for the second byte of boolean map flags.
byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}
   3605 
   3606 
// Raw setter for the second byte of boolean map flags.
void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
   3610 
   3611 
   3612 void Map::set_non_instance_prototype(bool value) {
   3613   if (value) {
   3614     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
   3615   } else {
   3616     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
   3617   }
   3618 }
   3619 
   3620 
   3621 bool Map::has_non_instance_prototype() {
   3622   return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
   3623 }
   3624 
   3625 
// Sets the FunctionWithPrototype bit in bit_field3.
void Map::set_function_with_prototype(bool value) {
  set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
}
   3629 
   3630 
// Reads the FunctionWithPrototype bit from bit_field3.
bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field3());
}
   3634 
   3635 
   3636 void Map::set_is_access_check_needed(bool access_check_needed) {
   3637   if (access_check_needed) {
   3638     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
   3639   } else {
   3640     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
   3641   }
   3642 }
   3643 
   3644 
   3645 bool Map::is_access_check_needed() {
   3646   return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
   3647 }
   3648 
   3649 
   3650 void Map::set_is_extensible(bool value) {
   3651   if (value) {
   3652     set_bit_field2(bit_field2() | (1 << kIsExtensible));
   3653   } else {
   3654     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
   3655   }
   3656 }
   3657 
   3658 bool Map::is_extensible() {
   3659   return ((1 << kIsExtensible) & bit_field2()) != 0;
   3660 }
   3661 
   3662 
   3663 void Map::set_attached_to_shared_function_info(bool value) {
   3664   if (value) {
   3665     set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
   3666   } else {
   3667     set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
   3668   }
   3669 }
   3670 
   3671 bool Map::attached_to_shared_function_info() {
   3672   return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
   3673 }
   3674 
   3675 
// Sets the IsShared bit in bit_field3.
void Map::set_is_shared(bool value) {
  set_bit_field3(IsShared::update(bit_field3(), value));
}
   3679 
   3680 
// Reads the IsShared bit from bit_field3.
bool Map::is_shared() {
  return IsShared::decode(bit_field3());
}
   3684 
   3685 
// Sets the DictionaryMap bit.  Switching to dictionary mode also marks the
// map unstable (done first, before the bit_field3 write).
void Map::set_dictionary_map(bool value) {
  if (value) mark_unstable();
  set_bit_field3(DictionaryMap::update(bit_field3(), value));
}
   3690 
   3691 
// Reads the DictionaryMap bit from bit_field3.
bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
   3695 
   3696 
// Raw accessor for the packed Code flags word (decoded by the Extract*
// helpers below).  NOTE(review): placed amid Map methods; grouping it with
// the other Code accessors would aid readability.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
   3700 
   3701 
   3702 void Map::set_owns_descriptors(bool is_shared) {
   3703   set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
   3704 }
   3705 
   3706 
// Reads the OwnsDescriptors bit from bit_field3.
bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}
   3710 
   3711 
// One-way setter: marks the map as having an instance call handler.
void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}
   3715 
   3716 
// Reads the HasInstanceCallHandler bit from bit_field3.
bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}
   3720 
   3721 
// One-way setter: marks this map as deprecated.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}
   3725 
   3726 
// Reads the Deprecated bit; always false when field tracking is disabled.
bool Map::is_deprecated() {
  if (!FLAG_track_fields) return false;
  return Deprecated::decode(bit_field3());
}
   3731 
   3732 
// Sets the IsMigrationTarget bit in bit_field3.
void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}
   3736 
   3737 
// Reads the IsMigrationTarget bit; always false when field tracking is off.
bool Map::is_migration_target() {
  if (!FLAG_track_fields) return false;
  return IsMigrationTarget::decode(bit_field3());
}
   3742 
   3743 
// One-way setter: marks this map as frozen.
void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}
   3747 
   3748 
// Reads the IsFrozen bit from bit_field3.
bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}
   3752 
   3753 
// One-way setter: marks this map unstable (inverse sense of is_stable()).
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}
   3757 
   3758 
// A map is stable iff the IsUnstable bit is clear.
bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}
   3762 
   3763 
// True when the code cache slot holds anything other than the canonical
// empty fixed array.
bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
   3767 
   3768 
   3769 bool Map::CanBeDeprecated() {
   3770   int descriptor = LastAdded();
   3771   for (int i = 0; i <= descriptor; i++) {
   3772     PropertyDetails details = instance_descriptors()->GetDetails(i);
   3773     if (FLAG_track_fields && details.representation().IsNone()) {
   3774       return true;
   3775     }
   3776     if (FLAG_track_fields && details.representation().IsSmi()) {
   3777       return true;
   3778     }
   3779     if (FLAG_track_double_fields && details.representation().IsDouble()) {
   3780       return true;
   3781     }
   3782     if (FLAG_track_heap_object_fields &&
   3783         details.representation().IsHeapObject()) {
   3784       return true;
   3785     }
   3786     if (FLAG_track_fields && details.type() == CONSTANT) {
   3787       return true;
   3788     }
   3789   }
   3790   return false;
   3791 }
   3792 
   3793 
// Called when the layout of a leaf map changes: flips it to unstable and
// deoptimizes code that depended on the old layout via prototype checks.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}
   3802 
   3803 
// Map checks may be omitted only for stable maps and only when the
// corresponding optimization flag is on.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
   3807 
   3808 
// Per-group entry count, stored as a Smi in the header slots; an empty
// array means zero entries for every group.
int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}
   3813 
   3814 
// Stores the per-group entry count as a Smi in the group's header slot.
void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}
   3818 
   3819 
// True if payload slot |i| holds a Code object (as opposed to, e.g., a
// Foreign wrapping a CompilationInfo — see compilation_info_at).
bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}
   3823 
// Returns payload slot |i| as Code (checked cast).
Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}
   3827 
   3828 
// Returns payload slot |i| as a CompilationInfo pointer, which is stored
// boxed in a Foreign object.
CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}
   3833 
   3834 
// Stores |object| into payload slot |i|.
void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}
   3838 
   3839 
// Raw read of payload slot |i|.
Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}
   3843 
   3844 
// Address of payload slot |i| (for in-place updates by the GC).
Object** DependentCode::slot_at(int i) {
  return HeapObject::RawField(
      this, FixedArray::OffsetOfElementAt(kCodesStartIndex + i));
}
   3849 
   3850 
// Clears payload slot |i| by writing undefined.
void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}
   3854 
   3855 
// Copies payload slot |from| into payload slot |to|.
void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
   3859 
   3860 
// Makes room for one more entry in |group| by shifting the first element of
// every higher-numbered, non-empty group to that group's end slot.
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
   3869 
   3870 
// Stores the packed flags word; call-IC kinds must carry a valid argument
// count because it shares bits with the extended extra IC state.
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
   3879 
   3880 
// Decodes the kind from the packed flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
   3884 
   3885 
// Decodes the inline-cache state from the packed flags word.
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}
   3896 
   3897 
// Decodes the (narrow) extra IC state; only valid for IC stubs whose kind
// does not use the extended encoding, or for debug stubs.
ExtraICState Code::extra_ic_state() {
  ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
         || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}
   3903 
   3904 
// Decodes the extended extra IC state; only valid for kinds that use the
// extended encoding (which overlaps the argument-count bits).
ExtraICState Code::extended_extra_ic_state() {
  ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  ASSERT(needs_extended_extra_ic_state(kind()));
  return ExtractExtendedExtraICStateFromFlags(flags());
}
   3910 
   3911 
// Decodes the stub type from the packed flags word.
Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}
   3915 
   3916 
// Decodes the argument count; only meaningful for kinds that encode it
// (call stubs, STUB, handlers).
int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() ||
         kind() == STUB || is_handler());
  return ExtractArgumentsCountFromFlags(flags());
}
   3922 
   3923 
// For initialization only: raw write of the first kind-specific flags word,
// bypassing the typed BitField setters.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}
   3928 
   3929 
// For initialization only: raw write of the second kind-specific flags word.
void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
   3933 
   3934 
// Reads the IsCrankshafted bit from kind-specific flags word 2.
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
   3939 
   3940 
// Read-modify-write of the IsCrankshafted bit in kind-specific flags word 2.
inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
   3946 
   3947 
// Decodes the stub major key; only valid for kinds listed in has_major_key().
int Code::major_key() {
  ASSERT(has_major_key());
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
   3953 
   3954 
// Stores the stub major key (byte range) into kind-specific flags word 2.
void Code::set_major_key(int major) {
  ASSERT(has_major_key());
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = StubMajorKeyField::update(previous, major);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
   3962 
   3963 
   3964 bool Code::has_major_key() {
   3965   return kind() == STUB ||
   3966       kind() == HANDLER ||
   3967       kind() == BINARY_OP_IC ||
   3968       kind() == COMPARE_IC ||
   3969       kind() == COMPARE_NIL_IC ||
   3970       kind() == LOAD_IC ||
   3971       kind() == KEYED_LOAD_IC ||
   3972       kind() == STORE_IC ||
   3973       kind() == KEYED_STORE_IC ||
   3974       kind() == KEYED_CALL_IC ||
   3975       kind() == TO_BOOLEAN_IC;
   3976 }
   3977 
   3978 
// Full-code only: whether this function is eligible for optimization.
bool Code::optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}
   3983 
   3984 
// Full-code only: stores the optimizable flag as 0/1 in a byte field.
void Code::set_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
   3989 
   3990 
// Full-code only: reads the deoptimization-support bit of kFullCodeFlags.
bool Code::has_deoptimization_support() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
   3996 
   3997 
// Full-code only: read-modify-write of the deoptimization-support bit.
void Code::set_has_deoptimization_support(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
   4004 
   4005 
// Full-code only: reads the debug-break-slots bit of kFullCodeFlags.
bool Code::has_debug_break_slots() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
   4011 
   4012 
// Full-code only: read-modify-write of the debug-break-slots bit.
void Code::set_has_debug_break_slots(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
   4019 
   4020 
// Full-code only: reads the compiled-as-optimizable bit of kFullCodeFlags.
bool Code::is_compiled_optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}
   4026 
   4027 
// Full-code only: read-modify-write of the compiled-as-optimizable bit.
void Code::set_compiled_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
   4034 
   4035 
// Full-code only: maximum loop nesting level at which OSR is allowed.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}
   4040 
   4041 
// Full-code only: stores the OSR loop nesting level (bounded by
// kMaxLoopNestingMarker).
void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
   4047 
   4048 
// Full-code only: profiler tick counter (byte-sized field).
int Code::profiler_ticks() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}
   4053 
   4054 
   4055 void Code::set_profiler_ticks(int ticks) {
   4056   ASSERT_EQ(FUNCTION, kind());
   4057   ASSERT(ticks < 256);
   4058   WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
   4059 }
   4060 
   4061 
// Crankshafted code only: number of stack slots, from kind-specific flags 1.
unsigned Code::stack_slots() {
  ASSERT(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
   4067 
   4068 
   4069 void Code::set_stack_slots(unsigned slots) {
   4070   CHECK(slots <= (1 << kStackSlotsBitCount));
   4071   ASSERT(is_crankshafted());
   4072   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
   4073   int updated = StackSlotsField::update(previous, slots);
   4074   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
   4075 }
   4076 
   4077 
// Crankshafted code only: offset of the safepoint table.
unsigned Code::safepoint_table_offset() {
  ASSERT(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
   4083 
   4084 
   4085 void Code::set_safepoint_table_offset(unsigned offset) {
   4086   CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
   4087   ASSERT(is_crankshafted());
   4088   ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
   4089   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   4090   int updated = SafepointTableOffsetField::update(previous, offset);
   4091   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   4092 }
   4093 
   4094 
// Full-code only: offset of the back-edge table.
unsigned Code::back_edge_table_offset() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
   4100 
   4101 
// Full-code only: stores the int-aligned back-edge table offset.
void Code::set_back_edge_table_offset(unsigned offset) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
   4109 
   4110 
// Full-code only: whether back edges have been patched for OSR.
bool Code::back_edges_patched_for_osr() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgesPatchedForOSRField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
   4116 
   4117 
// Full-code only: read-modify-write of the back-edges-patched-for-OSR bit.
void Code::set_back_edges_patched_for_osr(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgesPatchedForOSRField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
   4124 
   4125 
   4126 
// Call stubs only: decodes the receiver check type from its byte field.
CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}
   4132 
   4133 
// Call stubs only: stores the receiver check type.
void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}
   4138 
   4139 
// ToBoolean stubs store their state in the extended extra IC state bits.
byte Code::to_boolean_state() {
  return extended_extra_ic_state();
}
   4143 
   4144 
// STUB kind only: reads the HasFunctionCache bit of kind-specific flags 1.
bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
   4150 
   4151 
// STUB kind only: read-modify-write of the HasFunctionCache bit.
void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
   4158 
   4159 
// Optimized code only: reads the MarkedForDeoptimization bit.
bool Code::marked_for_deoptimization() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
   4165 
   4166 
// Optimized code only: read-modify-write of the MarkedForDeoptimization bit.
void Code::set_marked_for_deoptimization(bool flag) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
   4173 
   4174 
// True for every kind enumerated in IC_KIND_LIST.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
   4184 
   4185 
// True for any of the keyed load/store/call stub kinds.
bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub() || is_keyed_call_stub();
}
   4189 
   4190 
// True when the IC state marks this code as a debugger stub.
bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}
   4194 
   4195 
// Packs kind, IC state, stub type, extra IC state, cache holder and (for
// kinds that do not use the extended extra state) the argument count into a
// single Flags word.
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               StubType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  ASSERT(argc <= Code::kMaxArguments);
  // Since the extended extra ic state overlaps with the argument count
  // for CALL_ICs, do checks to make sure that they don't interfere.
  // NOTE(review): the condition below is a tautology -- "| true" makes the
  // second operand always nonzero, so this ASSERT can never fire.  The
  // intended check was presumably that |extra_ic_state| fits in
  // ExtraICStateField for call ICs; confirm and fix separately.
  ASSERT((kind != Code::CALL_IC &&
          kind != Code::KEYED_CALL_IC) ||
         (ExtraICStateField::encode(extra_ic_state) | true));
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtendedExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  if (!Code::needs_extended_extra_ic_state(kind)) {
    bits |= (argc << kArgumentsCountShift);
  }
  return static_cast<Flags>(bits);
}
   4219 
   4220 
// Convenience wrapper around ComputeFlags with the MONOMORPHIC IC state.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          StubType type,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
   4228 
   4229 
// Decodes the kind bits from a packed Flags word.
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}
   4233 
   4234 
// Decodes the IC state bits from a packed Flags word.
InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}
   4238 
   4239 
// Decodes the narrow extra IC state bits from a packed Flags word.
ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}
   4243 
   4244 
   4245 ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
   4246     Flags flags) {
   4247   return ExtendedExtraICStateField::decode(flags);
   4248 }
   4249 
   4250 
// Decodes the stub type bits from a packed Flags word.
Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}
   4254 
   4255 
// Decodes the argument count bits from a packed Flags word.
int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
}
   4259 
   4260 
// Decodes the cache holder bits from a packed Flags word.
InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}
   4264 
   4265 
   4266 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
   4267   int bits = flags & ~TypeField::kMask;
   4268   return static_cast<Flags>(bits);
   4269 }
   4270 
   4271 
// Recovers the Code object from the address of its instruction start by
// subtracting the header size.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
   4281 
   4282 
// Dereferences a cell holding a code entry address and recovers the owning
// heap object by subtracting the code header size.
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
   4287 
   4288 
// Raw read of the map's prototype slot.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}
   4292 
   4293 
// Stores the prototype (null or a JSReceiver) with a conditional write
// barrier per |mode|.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
   4299 
   4300 
// If the descriptor is using the empty transition array, install a new empty
// transition array that will have place for an element transition.
// Returns the (possibly newly allocated) full transition array, the map
// itself when one is already installed, or an allocation failure.
static MaybeObject* EnsureHasTransitionArray(Map* map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions;
  if (!map->HasTransitionArray()) {
    // No transitions yet: allocate an empty array and carry over the
    // back pointer currently stored in the shared slot.
    maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  } else {
    return map;
  }
  map->set_transitions(transitions);
  return transitions;
}
   4319 
   4320 
// Installs |descriptors| and claims all of its entries as own descriptors.
void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}
   4326 
   4327 
// Generates Map::instance_descriptors()/set_instance_descriptors().
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
   4329 
   4330 
// Stores the 31 usable bits of |bits| as a Smi.
void Map::set_bit_field3(uint32_t bits) {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit.
  int value = bits << 1;
  WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
}
   4337 
   4338 
// Reads bit_field3 back out of its Smi slot (see set_bit_field3 for the
// sign-extension encoding).
uint32_t Map::bit_field3() {
  Object* value = READ_FIELD(this, kBitField3Offset);
  return Smi::cast(value)->value();
}
   4343 
   4344 
// Drops the transition array (zapping it in debug-ish builds) and leaves
// only the back pointer in the shared transitions/back-pointer slot.
void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
  Object* back_pointer = GetBackPointer();

  if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
  CONDITIONAL_WRITE_BARRIER(
      heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}
   4356 
   4357 
// Appends |desc| to the descriptor array and bumps the own-descriptor count;
// requires that this map currently owns exactly the array's descriptors.
void Map::AppendDescriptor(Descriptor* desc,
                           const DescriptorArray::WhitenessWitness& witness) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc, witness);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
   4366 
   4367 
// Returns the back pointer: either stored directly in the shared slot, or
// held inside the transition array occupying that slot.
// NOTE(review): the branch tests IsDescriptorArray() but casts to
// TransitionArray -- presumably both predicates hold for the array stored
// here; confirm against the Is* definitions before relying on it.
Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsDescriptorArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    ASSERT(object->IsMap() || object->IsUndefined());
    return object;
  }
}
   4377 
   4378 
// True when a transition array exists and records an elements transition.
bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}
   4382 
   4383 
// The shared slot holds either a TransitionArray or a bare back pointer.
bool Map::HasTransitionArray() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}
   4388 
   4389 
// Looks up the target map of the elements transition (keyed by the
// elements_transition_symbol); callers must know the transition exists.
Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}
   4394 
   4395 
   4396 bool Map::CanHaveMoreTransitions() {
   4397   if (!HasTransitionArray()) return true;
   4398   return FixedArray::SizeFor(transitions()->length() +
   4399                              TransitionArray::kTransitionSize)
   4400       <= Page::kMaxNonCodeHeapObjectSize;
   4401 }
   4402 
   4403 
// Adds a (key -> target) transition, allocating a fresh transition array if
// this map has none yet.  May return an allocation failure (MaybeObject).
MaybeObject* Map::AddTransition(Name* key,
                                Map* target,
                                SimpleTransitionFlag flag) {
  if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
  // The new array takes over storage of the current back pointer.
  return TransitionArray::NewWith(flag, key, target, GetBackPointer());
}
   4410 
   4411 
// Overwrites the target map of an existing transition entry.
void Map::SetTransition(int transition_index, Map* target) {
  transitions()->SetTarget(transition_index, target);
}
   4415 
   4416 
// Returns the target map of the transition at |transition_index|.
Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}
   4420 
   4421 
// Installs |transitioned_map| as this map's elements transition.  Follows the
// MaybeObject allocation protocol: propagates failure, otherwise installs the
// (possibly reallocated) transition array and returns it.
MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions = AddTransition(
      GetHeap()->elements_transition_symbol(),
      transitioned_map,
      FULL_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  set_transitions(transitions);
  return transitions;
}
   4432 
   4433 
   4434 FixedArray* Map::GetPrototypeTransitions() {
   4435   if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
   4436   if (!transitions()->HasPrototypeTransitions()) {
   4437     return GetHeap()->empty_fixed_array();
   4438   }
   4439   return transitions()->GetPrototypeTransitions();
   4440 }
   4441 
   4442 
// Installs |proto_transitions| as the prototype-transitions cache, creating a
// transition array first if needed.  Returns this map on success, or an
// allocation failure from EnsureHasTransitionArray.
MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
  MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
  if (allow_prototype->IsFailure()) return allow_prototype;
  // Preserve the logical entry count across the storage swap.
  int old_number_of_transitions = NumberOfProtoTransitions();
#ifdef DEBUG
  if (HasPrototypeTransitions()) {
    ASSERT(GetPrototypeTransitions() != proto_transitions);
    // Zap the replaced cache so stale references fail fast in debug builds.
    ZapPrototypeTransitions();
  }
#endif
  transitions()->SetPrototypeTransitions(proto_transitions);
  SetNumberOfProtoTransitions(old_number_of_transitions);
  return this;
}
   4457 
   4458 
// True if this map has a (non-empty) prototype-transitions cache.
bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}
   4462 
   4463 
   4464 TransitionArray* Map::transitions() {
   4465   ASSERT(HasTransitionArray());
   4466   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   4467   return TransitionArray::cast(object);
   4468 }
   4469 
   4470 
// Replaces this map's transition array with |transition_array|.
void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    // Every transition whose target still shares our descriptor array must
    // also appear, with the same target, in the replacement array.
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        ASSERT(new_target_index != TransitionArray::kNotFound);
        ASSERT(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}
   4497 
   4498 
// Initializes the back-pointer slot to undefined.  No write barrier is
// needed: the caller passes the heap's undefined value, an immortal object.
void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}
   4503 
   4504 
// Sets the back pointer, storing it either inside the transition array (when
// one exists) or directly in the overloaded slot.  Only map<->undefined
// flips are allowed (asserted): the back pointer is set exactly once.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    // The array's setter performs its own write barrier.
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}
   4518 
   4519 
   4520 // Can either be Smi (no transitions), normal transition array, or a transition
   4521 // array with the header overwritten as a Smi (thus iterating).
   4522 TransitionArray* Map::unchecked_transition_array() {
   4523   Object* object = *HeapObject::RawField(this,
   4524                                          Map::kTransitionsOrBackPointerOffset);
   4525   TransitionArray* transition_array = static_cast<TransitionArray*>(object);
   4526   return transition_array;
   4527 }
   4528 
   4529 
// GC-safe accessor for the prototype-transitions cache; uses the unchecked
// transition-array view so it works while headers are overwritten.
HeapObject* Map::UncheckedPrototypeTransitions() {
  ASSERT(HasTransitionArray());
  ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
  return unchecked_transition_array()->UncheckedPrototypeTransitions();
}
   4535 
   4536 
// Macro-generated field accessors.  ACCESSORS emits a getter plus a setter
// with a conditional write barrier; ACCESSORS_TO_SMI boxes ints as Smis;
// BOOL_ACCESSORS exposes one bit of a Smi field.

// Map.
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

// JSFunction.
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

// Global objects.
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)

// Accessor / interceptor / call-handler infos.
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

// Template infos.
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

// Allocation sites / mementos (pretenuring feedback).
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, memento_found_count, kMementoFoundCountOffset)
ACCESSORS_TO_SMI(AllocationSite, memento_create_count,
                 kMementoCreateCountOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_decision, kPretenureDecisionOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

// Script.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
   4645 
   4646 Script::CompilationType Script::compilation_type() {
   4647   return BooleanBit::get(flags(), kCompilationTypeBit) ?
   4648       COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
   4649 }
// Encodes the compilation type into the flags bit (EVAL -> bit set).
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
   4654 Script::CompilationState Script::compilation_state() {
   4655   return BooleanBit::get(flags(), kCompilationStateBit) ?
   4656       COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
   4657 }
// Encodes the compilation state into the flags bit (COMPILED -> bit set).
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
   4662 
   4663 
// Debugger bookkeeping objects (only compiled with debugger support).
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif

// SharedFunctionInfo object fields.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
                 kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)


// FunctionTemplateInfo flag bits.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)

// SharedFunctionInfo bits packed into start_position_and_type and
// compiler_hints.
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
   4723 
   4724 
#if V8_HOST_ARCH_32_BIT
// On 32-bit hosts each of these counters fits in a real Smi field.
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)

#else

// On 64-bit hosts two 32-bit ints share each pointer-sized slot.  The LO
// (pointer-aligned) half is stored shifted left by one with the heap-object
// tag bit cleared, so a heap iterator never mistakes it for a pointer.
// NOTE(review): the second ASSERT in the LO setter compares against
// 0x000000000 (nine digits) -- numerically still zero, but it reads like a
// typo for 0x00000000; worth confirming upstream.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x000000000);                  \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// The HI (unaligned) half is a plain int; no tagging tricks are needed for
// the half that is not pointer-aligned.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)

PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

#endif
   4796 
   4797 
// Number of constructions counted for in-object slack tracking (byte field).
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}
   4801 
   4802 
// Stores the construction count; must fit in one byte (asserted).
void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
   4807 
   4808 
// compiler_hints bit: whether live objects of this function may exist.
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)
   4813 
   4814 
// Slack tracking is in progress exactly while initial_map is attached
// (i.e. not yet reset to undefined).
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != GetHeap()->undefined_value();
}
   4818 
   4819 
// Getter only: the setter has extra side effects (see
// set_optimization_disabled below), so no BOOL_ACCESSORS here.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
   4824 
   4825 
// Records the optimization-disabled bit and mirrors it into the code object
// so the function stops being counted as optimizable.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
   4836 
   4837 
   4838 int SharedFunctionInfo::profiler_ticks() {
   4839   if (code()->kind() != Code::FUNCTION) return 0;
   4840   return code()->profiler_ticks();
   4841 }
   4842 
   4843 
// Decodes the language mode from two compiler_hints bits.  Extended mode
// implies strict mode (asserted), so the check order matters.
LanguageMode SharedFunctionInfo::language_mode() {
  int hints = compiler_hints();
  if (BooleanBit::get(hints, kExtendedModeFunction)) {
    ASSERT(BooleanBit::get(hints, kStrictModeFunction));
    return EXTENDED_MODE;
  }
  return BooleanBit::get(hints, kStrictModeFunction)
      ? STRICT_MODE : CLASSIC_MODE;
}
   4853 
   4854 
// Encodes the language mode into the two compiler_hints bits.
void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  // We only allow language mode transitions that go set the same language mode
  // again or go up in the chain:
  //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
  ASSERT(this->language_mode() == CLASSIC_MODE ||
         this->language_mode() == language_mode ||
         language_mode == EXTENDED_MODE);
  int hints = compiler_hints();
  // Strict bit covers both STRICT_MODE and EXTENDED_MODE; the extended bit
  // is set only for EXTENDED_MODE.
  hints = BooleanBit::set(
      hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
  hints = BooleanBit::set(
      hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
  set_compiler_hints(hints);
}
   4869 
   4870 
// Classic mode is the absence of the strict bit (extended implies strict).
bool SharedFunctionInfo::is_classic_mode() {
  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
}
   4874 
// Remaining single-bit compiler_hints flags.
BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
            kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
   4891 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
   4892 
// GC hook: detach the initial map before pointer visiting while slack
// tracking is still active.
void SharedFunctionInfo::BeforeVisitingPointers() {
  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
}
   4896 
   4897 
// Code cache fields.
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
   4902 
   4903 bool Script::HasValidSource() {
   4904   Object* src = this->source();
   4905   if (!src->IsString()) return true;
   4906   String* src_str = String::cast(src);
   4907   if (!StringShape(src_str).IsExternal()) return true;
   4908   if (src_str->IsOneByteRepresentation()) {
   4909     return ExternalAsciiString::cast(src)->resource() != NULL;
   4910   } else if (src_str->IsTwoByteRepresentation()) {
   4911     return ExternalTwoByteString::cast(src)->resource() != NULL;
   4912   }
   4913   return true;
   4914 }
   4915 
   4916 
// Marks this (builtin-only) function as never needing arguments adaption by
// storing the sentinel parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
   4921 
   4922 
// Extracts the start position from the packed start_position_and_type field.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}
   4926 
   4927 
// Stores the start position, preserving the type bits below the shift.
void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
   4932 
   4933 
// The unoptimized code object shared by all instances of this function.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}
   4937 
   4938 
// Installs a new code object.  Optimized code never goes here (asserted);
// it lives on JSFunction / the optimized code map instead.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
   4944 
   4945 
// Replaces the code object, first withdrawing this function from the code
// flusher if it was enqueued there (flagged via the GC metadata field).
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}
   4957 
   4958 
// The scope info describing this function's declarations and context slots.
ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
   4962 
   4963 
// Installs a new scope info with the usual conditional write barrier.
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
   4973 
   4974 
// Compiled means the code slot no longer holds the lazy-compile stub.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
   4979 
   4980 
// API functions carry a FunctionTemplateInfo in the function_data slot.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}
   4984 
   4985 
// Checked accessor for the API function template (see IsApiFunction).
FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}
   4990 
   4991 
// A builtin-function id is stored as a Smi in the function_data slot.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}
   4995 
   4996 
// Checked accessor for the builtin-function id (see HasBuiltinFunctionId).
BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
   5001 
   5002 
// IC age, packed into the counters field via ICAgeBits.
int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}
   5006 
   5007 
// Stores the IC age back into the packed counters field.
void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}
   5011 
   5012 
// Deoptimization count, packed into the counters field via DeoptCountBits.
int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}
   5016 
   5017 
// Stores the deopt count back into the packed counters field.
void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}
   5021 
   5022 
// Bumps the deopt count, wrapping at the bit-field capacity so the packed
// counter never overflows into neighboring fields.
void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}
   5029 
   5030 
// Number of attempts made to re-enable optimization (packed in counters).
int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}
   5034 
   5035 
// Stores the re-enable try count back into the packed counters field.
void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}
   5039 
   5040 
// Optimization count, packed with the bailout reason in one Smi field.
int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}
   5044 
   5045 
// Stores the optimization count, preserving the packed bailout reason.
void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}
   5050 
   5051 
   5052 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
   5053   BailoutReason reason = static_cast<BailoutReason>(
   5054       DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
   5055   return reason;
   5056 }
   5057 
   5058 
// Full-codegen code may or may not carry deopt support; only FUNCTION-kind
// code can have it at all.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
   5063 
   5064 
// Periodically re-enables optimization after it was disabled, with
// exponential backoff: only on try counts that are powers of two >= 16.
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
   5077 
   5078 
// A function is a builtin when its context's global object is the special
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}
   5082 
   5083 
// Adaption is skipped only when the shared info carries the sentinel count
// (see SharedFunctionInfo::DontAdaptArguments).
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}
   5088 
   5089 
// True when the installed code object is optimized code.
bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}
   5093 
   5094 
// True when running unoptimized code that is still eligible for optimization.
bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}
   5098 
   5099 
// Marked for recompilation == code slot holds the lazy-recompile builtin.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
   5103 
   5104 
// Marked for background recompilation == concurrent-recompile builtin.
bool JSFunction::IsMarkedForConcurrentRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kConcurrentRecompile);
}
   5109 
   5110 
// Currently queued for concurrent recompilation == in-recompile-queue
// builtin.
bool JSFunction::IsInRecompileQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInRecompileQueue);
}
   5115 
   5116 
// The code field stores the code's entry address, not a tagged pointer;
// recover the Code object from that entry address.
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


// Installs |value| as this function's code by writing its entry address,
// then records the write for the incremental marker.
void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


// Same as set_code() but skips the incremental-marking record; callers
// must guarantee the write barrier is unnecessary.
void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
   5139 
   5140 
// Swaps in new code for this function and keeps the native context's
// optimized-function list consistent with the optimized/unoptimized
// state transition.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  // Replacing optimized code with different optimized code: the old code
  // must be evicted from the optimized code map first.
  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(
      this->code(), "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
   5162 
   5163 
// The context the function was created in (its closure context).
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Sets the function's context; undefined is permitted as a transient
// value during function bootstrapping (see the ASSERT).
void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

// Generated accessors for the field that holds either the prototype or,
// once instantiated, the initial map (see initial_map()/has_initial_map()).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
   5177 
   5178 
// The initial map used for instances of this function; only valid when
// has_initial_map() is true (the shared field then holds a Map).
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


// The shared prototype_or_initial_map field holds a Map once the initial
// map has been created.
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// Before the initial map exists, the field holds the prototype object
// (or the hole when there is none yet).
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


// Non-JSObject prototypes are stored on the map instead (see prototype()).
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


// Whether this kind of function carries a "prototype" property at all.
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Uncompiled functions still point at the lazy-compile builtin stub.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
   5230 
   5231 
// The literals_or_bindings field is multiplexed: literals for ordinary
// functions, bindings for bound functions.  The ASSERTs enforce which
// interpretation is valid.
FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


// Number of literal entries; only meaningful for non-bound functions.
int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}
   5264 
   5265 
// Accessors for the per-id JavaScript builtin function and code slots
// stored directly in the builtins object at computed offsets.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// No write barrier: the ASSERT checks the code object is not in new
// space, so the barrier is unnecessary.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}
   5292 
   5293 
// Generated field accessors for proxy objects.
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


// Fills every field of a freshly allocated proxy with |value|.  No write
// barriers: the ASSERT guarantees the value needs none.
void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
   5306 
   5307 
// Generated field accessors for collection objects.
ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


// A Foreign wraps a raw machine address; stored as an intptr, converted
// to/from Address on access.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
   5322 
   5323 
// Generated field accessors for generator objects.
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)


// Checked downcast; the size ASSERT guards against casting an object
// with a different layout.
JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}


// Generated field accessors for module objects.
ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


// Checked downcast, as above.
JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}
   5348 
   5349 
// Generated accessor for the wrapped primitive of a JSValue.
ACCESSORS(JSValue, value, Object, kValueOffset)


// Checked downcast; the size ASSERT guards the layout assumption.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


// Generated accessors for JSDate: the time value plus cached, lazily
// recomputed date components (see the cache_stamp field).
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}


// Generated accessors for error/message objects.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
   5392 
   5393 
// Generated accessors for Code header fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)


// Clears the pointer fields of the Code header (used when the pointers
// would otherwise dangle), but preserves a Smi in the type-feedback slot.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  // Do not wipe out e.g. a minor key.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}
   5411 
   5412 
// The kTypeFeedbackInfoOffset slot is multiplexed by code kind:
// FUNCTION code stores type feedback info, OPTIMIZED_FUNCTION code
// stores the next-code link, and several stub/IC kinds store a Smi
// stub-info.  The ASSERT/CHECKs enforce which view is legal.
Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


Object* Code::next_code_link() {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  set_raw_type_feedback_info(value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


// Stub-info view: the slot holds a Smi for these code kinds.
int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC);
  return Smi::cast(raw_type_feedback_info())->value();
}


void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  set_raw_type_feedback_info(Smi::FromInt(value));
}
   5459 
   5460 
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// Instructions are stored inline, immediately after the Code header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to the heap's object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Raw read of the relocation info field without the Code::cast checks.
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// Execution entry point: the first instruction.
byte* Code::entry() {
  return instruction_start();
}


// Whether |inner_pointer| falls within this Code object.  Note the upper
// bound is inclusive (a return address may point one past the end).
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
   5503 
   5504 
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store is a raw external pointer stored as an intptr field.
void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


// |mode| is unused: the field holds a raw pointer, so no write barrier
// is ever needed.
void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


// Boolean flags packed into the Smi flag field.
bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}


// Weak list links used by the GC to track live array buffers and views.
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)
   5546 
   5547 
// Generated accessors for array-buffer views, typed arrays and regexps.
ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)


// The regexp's compilation state: undefined data means not compiled;
// otherwise the tag Smi is read from the data fixed array.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
   5563 
   5564 
   5565 int JSRegExp::CaptureCount() {
   5566   switch (TypeTag()) {
   5567     case ATOM:
   5568       return 0;
   5569     case IRREGEXP:
   5570       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
   5571     default:
   5572       UNREACHABLE();
   5573       return -1;
   5574   }
   5575 }
   5576 
   5577 
// Flags Smi stored in the compiled regexp's data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


// Source pattern string stored in the data array.
String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


// Raw access to implementation data; only valid once compiled.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
   5605 
   5606 
// The elements kind recorded on the map.  In debug builds, additionally
// verifies that the actual elements backing store is consistent with
// that kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
   5633 
   5634 
// ElementsAccessor implementing element operations for this object's
// current elements kind.
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


// Predicates over the object's elements kind; each delegates to the
// corresponding ElementsKind classifier.
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}


// Checked on the backing store itself rather than the elements kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
   5685 
   5686 
// Generates JSObject::HasExternal<name>Elements() predicates that test
// the backing store's instance type.  (No comments inside the macro:
// line continuations would splice them into the next line.)
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


// One predicate per external array element type.
EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
   5710 
   5711 
// Interceptor presence is recorded on the map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Ensures the elements array is writable: copy-on-write arrays are
// copied into a regular fixed array first.  Returns the (possibly new)
// elements, or an allocation failure via the MaybeObject protocol.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Already writable: not using the COW map.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


// Slow-mode property/element storage, only valid in the asserted modes.
NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
   5750 
   5751 
// The hash field carries a "not yet computed" marker bit; a cleared
// marker means the stored hash is valid.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


// Returns the name's hash, computing and caching it on first use.
uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}
   5769 
   5770 
// Incremental string hasher seeded with |seed|.  Strings short enough to
// be array indices start in array-index mode (see UpdateIndex()).
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  // A non-zero seed is only expected when hash randomization is on.
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Strings beyond the hash-calculation limit get a trivial (length-based)
// hash instead of a character-by-character one.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
   5784 
   5785 
   5786 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
   5787   running_hash += c;
   5788   running_hash += (running_hash << 10);
   5789   running_hash ^= (running_hash >> 6);
   5790   return running_hash;
   5791 }
   5792 
   5793 
   5794 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
   5795   running_hash += (running_hash << 3);
   5796   running_hash ^= (running_hash >> 11);
   5797   running_hash += (running_hash << 15);
   5798   if ((running_hash & String::kHashBitMask) == 0) {
   5799     return kZeroHash;
   5800   }
   5801   return running_hash;
   5802 }
   5803 
   5804 
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


// Tracks whether the string seen so far is still a valid array index,
// accumulating its numeric value.  Returns false (and leaves array-index
// mode) as soon as the string cannot be an index.
bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    // Leading zeros are only allowed for the single-character string "0".
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow check: 429496729 == 0xFFFFFFFF / 10, and the shifted |d|
  // term adjusts for the final digit so the index stays within 32 bits.
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
   5833 
   5834 
// Feeds |length| characters into the hasher.  While in array-index mode
// every character also updates the index accumulator; once that fails,
// the remainder is hashed by the plain loop.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        // The current character was already hashed; resume after it.
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


// One-shot hash of a sequential character buffer.  Over-long strings get
// a trivial hash (see has_trivial_hash()) without visiting characters.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
   5863 
   5864 
// Only strings can be array indices; delegate to the string version.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


// Fast rejection via the cached hash field's not-an-index bit; otherwise
// fall back to the slow parse.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
   5877 
   5878 
// Prototype and constructor are both stored on the map.
Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


// [[HasProperty]]: proxies go through their handler; ordinary receivers
// check that the property attribute is not ABSENT.
bool JSReceiver::HasProperty(Handle<JSReceiver> object,
                             Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetPropertyAttribute(*name) != ABSENT;
}


// Own-property variant of HasProperty().
// NOTE(review): for proxies this takes the same handler path as
// HasProperty — the local/own distinction is not visible here.
bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
                                  Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetLocalPropertyAttribute(*name) != ABSENT;
}


// Keys that parse as array indices are routed to the element path.
PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
  uint32_t index;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(index);
  }
  return GetPropertyAttributeWithReceiver(this, key);
}


// Element attribute lookup including the prototype chain (the trailing
// |true| argument below).
PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true);
}
   5925 
   5926 
// A global object is detached when its proxy no longer has it as
// prototype.
bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_receiver())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) {
  return GetPrototype() != global;
}
   5935 
   5936 
// Identity hash, dispatched to the proxy or plain-object implementation.
Handle<Object> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}


// Element existence checks; the boolean passed to
// GetElementAttributeWithReceiver selects whether the prototype chain is
// searched (true) or only own elements (false).
bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, true) != ABSENT;
}


bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, false) != ABSENT;
}


// Own-element attribute lookup (no prototype chain).
PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false);
}
   5978 
   5979 
   5980 bool AccessorInfo::all_can_read() {
   5981   return BooleanBit::get(flag(), kAllCanReadBit);
   5982 }
   5983 
   5984 
   5985 void AccessorInfo::set_all_can_read(bool value) {
   5986   set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
   5987 }
   5988 
   5989 
   5990 bool AccessorInfo::all_can_write() {
   5991   return BooleanBit::get(flag(), kAllCanWriteBit);
   5992 }
   5993 
   5994 
   5995 void AccessorInfo::set_all_can_write(bool value) {
   5996   set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
   5997 }
   5998 
   5999 
   6000 bool AccessorInfo::prohibits_overwriting() {
   6001   return BooleanBit::get(flag(), kProhibitsOverwritingBit);
   6002 }
   6003 
   6004 
   6005 void AccessorInfo::set_prohibits_overwriting(bool value) {
   6006   set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
   6007 }
   6008 
   6009 
   6010 PropertyAttributes AccessorInfo::property_attributes() {
   6011   return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
   6012 }
   6013 
   6014 
// Re-encodes |attributes| into the flag Smi, preserving the other flag bits.
void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}
   6018 
   6019 
   6020 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
   6021   Object* function_template = expected_receiver_type();
   6022   if (!function_template->IsFunctionTemplateInfo()) return true;
   6023   return FunctionTemplateInfo::cast(function_template)->IsTemplateFor(receiver);
   6024 }
   6025 
   6026 
   6027 void AccessorPair::set_access_flags(v8::AccessControl access_control) {
   6028   int current = access_flags()->value();
   6029   current = BooleanBit::set(current,
   6030                             kProhibitsOverwritingBit,
   6031                             access_control & PROHIBITS_OVERWRITING);
   6032   current = BooleanBit::set(current,
   6033                             kAllCanReadBit,
   6034                             access_control & ALL_CAN_READ);
   6035   current = BooleanBit::set(current,
   6036                             kAllCanWriteBit,
   6037                             access_control & ALL_CAN_WRITE);
   6038   set_access_flags(Smi::FromInt(current));
   6039 }
   6040 
   6041 
// True if this accessor pair is readable across access-check failures.
bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}
   6045 
   6046 
// True if this accessor pair is writable across access-check failures.
bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}
   6050 
   6051 
// True if the property backed by this accessor pair may not be overwritten.
bool AccessorPair::prohibits_overwriting() {
  return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
}
   6055 
   6056 
// Convenience overload: stores |key|/|value| with empty (zero) property
// details.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}
   6063 
   6064 
   6065 template<typename Shape, typename Key>
   6066 void Dictionary<Shape, Key>::SetEntry(int entry,
   6067                                       Object* key,
   6068                                       Object* value,
   6069                                       PropertyDetails details) {
   6070   ASSERT(!key->IsName() ||
   6071          details.IsDeleted() ||
   6072          details.dictionary_index() > 0);
   6073   int index = HashTable<Shape, Key>::EntryToIndex(entry);
   6074   DisallowHeapAllocation no_gc;
   6075   WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
   6076   FixedArray::set(index, key, mode);
   6077   FixedArray::set(index+1, value, mode);
   6078   FixedArray::set(index+2, details.AsSmi());
   6079 }
   6080 
   6081 
// Matches a uint32 key against a heap-number/Smi table entry by value.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}
   6086 
   6087 
// Integer hash with a fixed (zero) seed — unseeded table variant.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}
   6091 
   6092 
// Hashes an existing numeric table entry the same way Hash() hashes a key.
uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}
   6098 
// Integer hash mixed with the heap's hash seed.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}
   6102 
// Seeded hash of an existing numeric table entry; must agree with
// SeededHash() for equal values.
uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}
   6109 
// Boxes the uint32 key as a heap number/Smi for storage in the table.
// May fail (allocation) — hence the MaybeObject return.
MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}
   6113 
   6114 
   6115 bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
   6116   // We know that all entries in a hash table had their hash keys created.
   6117   // Use that knowledge to have fast failure.
   6118   if (key->Hash() != Name::cast(other)->Hash()) return false;
   6119   return key->Equals(Name::cast(other));
   6120 }
   6121 
   6122 
// Hash of a lookup key: the name's (possibly lazily computed) hash.
uint32_t NameDictionaryShape::Hash(Name* key) {
  return key->Hash();
}
   6126 
   6127 
// Hash of an existing table entry; the key argument is unused.
uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}
   6131 
   6132 
// Names are heap objects already; only unique names may be table keys.
MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  ASSERT(key->IsUniqueName());
  return key;
}
   6137 
   6138 
// Keys match by the SameValue algorithm (ES5 9.12), not pointer identity.
template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}
   6143 
   6144 
// Assumes the key's identity hash was already created (GetHash() is a Smi).
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  return Smi::cast(key->GetHash())->value();
}
   6149 
   6150 
// Hash of a stored entry: its previously-created identity hash.
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  return Smi::cast(other->GetHash())->value();
}
   6156 
   6157 
// Keys are arbitrary heap objects and are stored as-is; cannot fail.
template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
                                                       Object* key) {
  return key;
}
   6163 
   6164 
// Weak-table keys match by SameValue, mirroring ObjectHashTableShape.
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}
   6169 
   6170 
   6171 template <int entrysize>
   6172 uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
   6173   intptr_t hash = reinterpret_cast<intptr_t>(key);
   6174   return (uint32_t)(hash & 0xFFFFFFFF);
   6175 }
   6176 
   6177 
   6178 template <int entrysize>
   6179 uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
   6180                                                       Object* other) {
   6181   intptr_t hash = reinterpret_cast<intptr_t>(other);
   6182   return (uint32_t)(hash & 0xFFFFFFFF);
   6183 }
   6184 
   6185 
// Keys are stored directly; no boxing needed, cannot fail.
template <int entrysize>
MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
                                                    Object* key) {
  return key;
}
   6191 
   6192 
// Resets this map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
   6201 
   6202 
// Ensures the array's elements backing store can hold |required_size|
// entries, growing (or relocating to new-space) via Expand() as needed.
void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
   6219 
   6220 
// Smi overload of set_length: Smis are immediates, so no barrier is needed.
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
   6225 
   6226 
   6227 bool JSArray::AllowsSetElementsLength() {
   6228   bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
   6229   ASSERT(result == !HasExternalArrayElements());
   6230   return result;
   6231 }
   6232 
   6233 
// Replaces the array's backing store with |storage| and syncs the length.
// May fail (allocation during elements-kind transition) — MaybeObject.
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  // The storage's map must agree with the array's elements kind: a double
  // backing store pairs with fast-double kind, anything else with fast
  // object kind (or fast smi kind when storage holds only smis/holes).
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(GetElementsKind())) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(GetElementsKind()) ||
           (IsFastSmiElementsKind(GetElementsKind()) &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}
   6248 
   6249 
// Shallow copy; the canonical empty array is shared rather than copied.
MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}
   6254 
   6255 
// Shallow copy; the empty array is shared rather than copied.
MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}
   6260 
   6261 
// Shallow copy; the empty array is shared rather than copied.
MaybeObject* ConstantPoolArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyConstantPoolArray(this);
}
   6266 
   6267 
// Entries are (cell, ast-id) pairs; the ast id lives at odd slots.
void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
  set(1 + index * 2, Smi::FromInt(id.ToInt()));
}
   6271 
   6272 
// Reads the ast id stored at the odd slot of entry |index|.
TypeFeedbackId TypeFeedbackCells::AstId(int index) {
  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}
   6276 
   6277 
// The cell of entry |index| lives at the even slot of the pair.
void TypeFeedbackCells::SetCell(int index, Cell* cell) {
  set(index * 2, cell);
}
   6281 
   6282 
// Reads the cell stored at the even slot of entry |index|.
Cell* TypeFeedbackCells::GetCell(int index) {
  return Cell::cast(get(index * 2));
}
   6286 
   6287 
// The hole marks a feedback cell that has never recorded a type.
Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}
   6291 
   6292 
// Undefined marks a feedback cell that has seen too many types to track.
Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}
   6296 
   6297 
// Encodes a single observed array ElementsKind directly as a Smi sentinel.
Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}
   6302 
   6303 
// Raw (handle-free) variant of UninitializedSentinel, for GC-safe contexts.
Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->the_hole_value();
}
   6307 
   6308 
// Total IC count, unpacked from the bit field in storage slot 1.
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}
   6313 
   6314 
// Stores |count| into the packed bit field of storage slot 1, leaving the
// other bits (e.g. the type-change checksum) untouched.
void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  // decode(count) masks count down to the bits the field can hold.
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
   6321 
   6322 
// Count of ICs that collected type info, unpacked from storage slot 2.
int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}
   6327 
   6328 
// Adjusts the typed-IC counter by |delta| (may be negative).
void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get negative count here when the type-feedback info is
  // shared between two code objects. The can only happen when
  // the debugger made a shallow copy of code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    // Mask to the field width so an overflowing count wraps instead of
    // corrupting neighboring bits.
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
   6343 
   6344 
// Zeroes both packed storage slots (counters and checksums).
void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}
   6349 
   6350 
// Bumps the own-type-change checksum (modulo its bit width) in slot 1.
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
   6361 
   6362 
// Stores |checksum| (truncated to kTypeChangeChecksumBits) into slot 2.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
   6372 
   6373 
// Reads the own-type-change checksum from the packed slot 1.
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}
   6378 
   6379 
// Compares |checksum| (truncated to the field width) against the stored
// inlined checksum.
bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
   6385 
   6386 
// Macro-generated getter/setter pair for the type_feedback_cells field.
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


// Macro-generated Smi-encoded accessors for the aliased context slot index.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
   6392 
   6393 
   6394 Relocatable::Relocatable(Isolate* isolate) {
   6395   isolate_ = isolate;
   6396   prev_ = isolate->relocatable_top();
   6397   isolate->set_relocatable_top(this);
   6398 }
   6399 
   6400 
// Pops this object off the isolate's relocatable stack; destruction must be
// strictly LIFO, which the assert enforces.
Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
   6405 
   6406 
// A JSObject's body size is fully determined by its map's instance size.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
   6410 
   6411 
// Visits the raw external address slot of a Foreign object.
void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
   6416 
   6417 
// Static-visitor variant of ForeignIterateBody (no visitor instance).
template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
   6423 
   6424 
// Visits the external resource pointer slot of an ASCII external string.
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
   6430 
   6431 
// Static-visitor variant of the ASCII external-string body iterator.
template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
   6438 
   6439 
// Visits the external resource pointer slot of a two-byte external string.
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
   6445 
   6446 
// Static-visitor variant of the two-byte external-string body iterator.
template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
   6453 
   6454 
   6455 template<int start_offset, int end_offset, int size>
   6456 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
   6457     HeapObject* obj,
   6458     ObjectVisitor* v) {
   6459     v->VisitPointers(HeapObject::RawField(obj, start_offset),
   6460                      HeapObject::RawField(obj, end_offset));
   6461 }
   6462 
   6463 
// Visits pointers from start_offset up to the object's actual size, for
// objects whose length varies per instance.
template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}
   6471 
   6472 
   6473 #undef TYPE_CHECKER
   6474 #undef CAST_ACCESSOR
   6475 #undef INT_ACCESSORS
   6476 #undef ACCESSORS
   6477 #undef ACCESSORS_TO_SMI
   6478 #undef SMI_ACCESSORS
   6479 #undef BOOL_GETTER
   6480 #undef BOOL_ACCESSORS
   6481 #undef FIELD_ADDR
   6482 #undef READ_FIELD
   6483 #undef WRITE_FIELD
   6484 #undef WRITE_BARRIER
   6485 #undef CONDITIONAL_WRITE_BARRIER
   6486 #undef READ_DOUBLE_FIELD
   6487 #undef WRITE_DOUBLE_FIELD
   6488 #undef READ_INT_FIELD
   6489 #undef WRITE_INT_FIELD
   6490 #undef READ_INTPTR_FIELD
   6491 #undef WRITE_INTPTR_FIELD
   6492 #undef READ_UINT32_FIELD
   6493 #undef WRITE_UINT32_FIELD
   6494 #undef READ_SHORT_FIELD
   6495 #undef WRITE_SHORT_FIELD
   6496 #undef READ_BYTE_FIELD
   6497 #undef WRITE_BYTE_FIELD
   6498 
   6499 
   6500 } }  // namespace v8::internal
   6501 
   6502 #endif  // V8_OBJECTS_INL_H_
   6503