// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous,
// but it is absolutely necessary to make sure gcc generates optimal
// code; gcc is not happy when asked to inline too deeply.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include <cmath>  // For std::isnan, used by Object::IsNaN below.

#include "elements.h"
#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
#include "heap.h"
#include "isolate.h"
#include "property.h"
#include "spaces.h"
#include "store-buffer.h"
#include "v8memory.h"
#include "factory.h"
#include "incremental-marking.h"
#include "transitions-inl.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}

Smi* PropertyDetails::AsSmi() {
  // Ensure the upper two bits have the same value by sign-extending the
  // value. This is necessary to be able to use the 31st bit of the
  // property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
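

// For illustration, assuming 31-bit Smi payloads: if value_ has bit 30
// set but bit 31 clear, e.g. value_ == 0x40000000, then value_ << 1 is
// 0x80000000 and the arithmetic shift back right gives 0xC0000000, so
// bit 31 now mirrors bit 30 and the resulting Smi round-trips the low
// 31 bits of the property details.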


PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }


#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }
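

// For illustration, TYPE_CHECKER(Map, MAP_TYPE) expands to roughly
//
//   bool Object::IsMap() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
//   }
//
// and CAST_ACCESSOR(Map) to a Map::cast that asserts IsMap() before
// doing the reinterpret_cast.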


#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and setter that writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
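

// For illustration, SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
// expands to roughly
//
//   int FixedArrayBase::length() {
//     Object* value = READ_FIELD(this, kLengthOffset);
//     return Smi::cast(value)->value();
//   }
//   void FixedArrayBase::set_length(int value) {
//     WRITE_FIELD(this, kLengthOffset, Smi::FromInt(value));
//   }
//
// No write barrier is needed: a Smi is stored in the slot, never a heap
// pointer.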


#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }


bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}


bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
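

// For illustration: the single mask-and-compare folds two tests into
// one. kIsNotStringMask covers the bits that distinguish strings from
// non-strings and kIsNotInternalizedMask the bit that marks a
// non-internalized string, so the comparison only succeeds when the
// type is both a string (kStringTag) and internalized
// (kInternalizedTag).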


bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}


MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
                                           Representation representation,
                                           PretenureFlag tenure) {
  if (!FLAG_track_double_fields) return this;
  if (!representation.IsDouble()) return this;
  if (IsUninitialized()) {
    return heap->AllocateHeapNumber(0, tenure);
  }
  return heap->AllocateHeapNumber(Number(), tenure);
}


StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  ASSERT(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
             Internals::kExternalAsciiRepresentationTag);

STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)


bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


bool MaybeObject::IsUninitialized() {
  return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}


bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)


bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}


bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array.  Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsContext() {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}


bool Object::IsNativeContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


bool Object::IsStringTable() {
  return IsHashTable() &&
      this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
}


bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}


bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}


bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsObjectHashTable() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE


bool Object::IsUndefined() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsUninitialized() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}
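

// For illustration: ToSmi() succeeds only for integral doubles in Smi
// range, e.g. a HeapNumber holding 3.0 yields Smi::FromInt(3), while
// 3.5 (FastD2I truncates to 3, but 3.0 != 3.5) and 2^31 (not
// Smi::IsValid on 32-bit targets) both yield Failure::Exception().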


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return GetElementWithReceiver(this, index);
}


Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(Name* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}


#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
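
// For illustration: with kHeapObjectTag == 1, a HeapObject pointer p is
// the object's address plus one, so FIELD_ADDR(p, offset) yields the
// untagged address of the field; e.g. READ_FIELD(p, kMapOffset) reads
// the word at address(p) + 0.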

#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
      object, HeapObject::RawField(object, offset), value);             \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }
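
// For illustration: the barrier does two jobs. The RecordWrite call
// keeps the incremental marker's invariants intact when a marked object
// gains a new reference, and the store-buffer entry records a pointer
// into new space so the next scavenge can find it. Callers that know no
// barrier is needed (e.g. the stored value is a Smi) pass
// SKIP_WRITE_BARRIER through the mode parameter, which elides both.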

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)


Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
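

// For illustration, on a 32-bit target (kSmiTagSize == 1,
// kSmiShiftSize == 0, kSmiTag == 0): Smi::FromInt(5) is the word
// (5 << 1) | 0 == 0xA, and Smi::value() shifts it back down; the low
// tag bit being zero is what HAS_SMI_TAG tests.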


Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException(intptr_t value) {
  return Construct(OUT_OF_MEMORY_EXCEPTION, value);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  // Fill the unused bits with a pattern that's easy to recognize in crash
  // dumps.
  static const int kFailureMagicPattern = 0x0BAD0000;
  return reinterpret_cast<Failure*>(
      (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
}
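

// For illustration: a failure word is
//
//   ((value << kFailureTypeTagSize | type) << kFailureTagSize)
//       | kFailureTag | kFailureMagicPattern
//
// so with kFailureTag == 3 the low two bits distinguish failures from
// Smis (tag 0) and heap objects (tag 1), and type() and value() simply
// mask and shift the word back apart.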


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}


MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
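

// For illustration: heap objects are word-aligned, so a raw (untagged)
// address has its low tag bits clear and looks like a Smi. A map word
// that passes HAS_SMI_TAG therefore cannot be a map pointer (maps are
// tagged heap pointers) and must be a forwarding address installed
// during scavenge or compaction.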


#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be
    // on an evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke the write barrier, but there is no need
  // for one here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
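

// For illustration: for the IEEE-754 double 1.0 the biased exponent
// field is 0x3FF, so get_exponent() returns 0x3FF - kExponentBias == 0;
// for 2.0 it returns 1. get_sign() returns a non-zero value exactly
// when the sign bit is set, e.g. for -1.0 and -0.0.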


ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::ValidateElements() {
#if DEBUG
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = GetElementsAccessor();
    accessor->Validate(this);
  }
#endif
}


bool JSObject::ShouldTrackAllocationInfo() {
  if (AllocationSite::CanTrack(map()->instance_type())) {
    if (!IsJSArray()) {
      return true;
    }

    return AllocationSite::GetMode(GetElementsKind()) ==
        TRACK_ALLOCATION_SITE;
  }
  return false;
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_track_allocation_sites &&
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_track_allocation_sites &&
      IsFastSmiElementsKind(from) &&
      IsMoreGeneralElementsKindTransition(from, to)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}
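

// For illustration, with --track-allocation-sites enabled:
// GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS) returns
// TRACK_ALLOCATION_SITE because the transition leaves the initial
// (smi-only) kind, while GetMode(FAST_ELEMENTS, FAST_HOLEY_ELEMENTS)
// returns DONT_TRACK_ALLOCATION_SITE because the boilerplate has
// already left the smi-only kind.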
   1350 
   1351 
   1352 inline bool AllocationSite::CanTrack(InstanceType type) {
   1353   return type == JS_ARRAY_TYPE;
   1354 }
   1355 
   1356 
   1357 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
   1358   ValidateElements();
   1359   ElementsKind elements_kind = map()->elements_kind();
   1360   if (!IsFastObjectElementsKind(elements_kind)) {
   1361     if (IsFastHoleyElementsKind(elements_kind)) {
   1362       return TransitionElementsKind(FAST_HOLEY_ELEMENTS);
   1363     } else {
   1364       return TransitionElementsKind(FAST_ELEMENTS);
   1365     }
   1366   }
   1367   return this;
   1368 }
   1369 
   1370 
   1371 MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
   1372                                                 uint32_t count,
   1373                                                 EnsureElementsMode mode) {
   1374   ElementsKind current_kind = map()->elements_kind();
   1375   ElementsKind target_kind = current_kind;
   1376   ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
   1377   bool is_holey = IsFastHoleyElementsKind(current_kind);
   1378   if (current_kind == FAST_HOLEY_ELEMENTS) return this;
   1379   Heap* heap = GetHeap();
   1380   Object* the_hole = heap->the_hole_value();
   1381   for (uint32_t i = 0; i < count; ++i) {
   1382     Object* current = *objects++;
   1383     if (current == the_hole) {
   1384       is_holey = true;
   1385       target_kind = GetHoleyElementsKind(target_kind);
   1386     } else if (!current->IsSmi()) {
   1387       if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
   1388         if (IsFastSmiElementsKind(target_kind)) {
   1389           if (is_holey) {
   1390             target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
   1391           } else {
   1392             target_kind = FAST_DOUBLE_ELEMENTS;
   1393           }
   1394         }
   1395       } else if (is_holey) {
   1396         target_kind = FAST_HOLEY_ELEMENTS;
   1397         break;
   1398       } else {
   1399         target_kind = FAST_ELEMENTS;
   1400       }
   1401     }
   1402   }
   1403 
   1404   if (target_kind != current_kind) {
   1405     return TransitionElementsKind(target_kind);
   1406   }
   1407   return this;
   1408 }
   1409 
   1410 
   1411 MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
   1412                                                 uint32_t length,
   1413                                                 EnsureElementsMode mode) {
   1414   if (elements->map() != GetHeap()->fixed_double_array_map()) {
   1415     ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
   1416            elements->map() == GetHeap()->fixed_cow_array_map());
   1417     if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
   1418       mode = DONT_ALLOW_DOUBLE_ELEMENTS;
   1419     }
   1420     Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
   1421     return EnsureCanContainElements(objects, length, mode);
   1422   }
   1423 
   1424   ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
   1425   if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
   1426     return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
   1427   } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
   1428     FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
   1429     for (uint32_t i = 0; i < length; ++i) {
   1430       if (double_array->is_the_hole(i)) {
   1431         return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
   1432       }
   1433     }
   1434     return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
   1435   }
   1436 
   1437   return this;
   1438 }
   1439 
   1440 
   1441 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
   1442                                                 ElementsKind to_kind) {
   1443   Map* current_map = map();
   1444   ElementsKind from_kind = current_map->elements_kind();
   1445   if (from_kind == to_kind) return current_map;
   1446 
   1447   Context* native_context = isolate->context()->native_context();
   1448   Object* maybe_array_maps = native_context->js_array_maps();
   1449   if (maybe_array_maps->IsFixedArray()) {
   1450     FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
   1451     if (array_maps->get(from_kind) == current_map) {
   1452       Object* maybe_transitioned_map = array_maps->get(to_kind);
   1453       if (maybe_transitioned_map->IsMap()) {
   1454         return Map::cast(maybe_transitioned_map);
   1455       }
   1456     }
   1457   }
   1458 
   1459   return GetElementsTransitionMapSlow(to_kind);
   1460 }
   1461 
   1462 
   1463 void JSObject::set_map_and_elements(Map* new_map,
   1464                                     FixedArrayBase* value,
   1465                                     WriteBarrierMode mode) {
   1466   ASSERT(value->HasValidElements());
   1467   if (new_map != NULL) {
   1468     if (mode == UPDATE_WRITE_BARRIER) {
   1469       set_map(new_map);
   1470     } else {
   1471       ASSERT(mode == SKIP_WRITE_BARRIER);
   1472       set_map_no_write_barrier(new_map);
   1473     }
   1474   }
   1475   ASSERT((map()->has_fast_smi_or_object_elements() ||
   1476           (value == GetHeap()->empty_fixed_array())) ==
   1477          (value->map() == GetHeap()->fixed_array_map() ||
   1478           value->map() == GetHeap()->fixed_cow_array_map()));
   1479   ASSERT((value == GetHeap()->empty_fixed_array()) ||
   1480          (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
   1481   WRITE_FIELD(this, kElementsOffset, value);
   1482   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
   1483 }
   1484 
   1485 
   1486 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
   1487   set_map_and_elements(NULL, value, mode);
   1488 }
   1489 
   1490 
   1491 void JSObject::initialize_properties() {
   1492   ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
   1493   WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
   1494 }
   1495 
   1496 
   1497 void JSObject::initialize_elements() {
   1498   if (map()->has_fast_smi_or_object_elements() ||
   1499       map()->has_fast_double_elements()) {
   1500     ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
   1501     WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
   1502   } else if (map()->has_external_array_elements()) {
   1503     ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
   1504     ASSERT(!GetHeap()->InNewSpace(empty_array));
   1505     WRITE_FIELD(this, kElementsOffset, empty_array);
   1506   } else {
   1507     UNREACHABLE();
   1508   }
   1509 }
   1510 
   1511 
   1512 MaybeObject* JSObject::ResetElements() {
   1513   if (map()->is_observed()) {
   1514     // Maintain invariant that observed elements are always in dictionary mode.
   1515     SeededNumberDictionary* dictionary;
   1516     MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
   1517     if (!maybe->To(&dictionary)) return maybe;
   1518     if (map() == GetHeap()->non_strict_arguments_elements_map()) {
   1519       FixedArray::cast(elements())->set(1, dictionary);
   1520     } else {
   1521       set_elements(dictionary);
   1522     }
   1523     return this;
   1524   }
   1525 
   1526   ElementsKind elements_kind = GetInitialFastElementsKind();
   1527   if (!FLAG_smi_only_arrays) {
   1528     elements_kind = FastSmiToObjectElementsKind(elements_kind);
   1529   }
   1530   MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
   1531   Map* map;
   1532   if (!maybe->To(&map)) return maybe;
   1533   set_map(map);
   1534   initialize_elements();
   1535 
   1536   return this;
   1537 }
   1538 
   1539 
   1540 MaybeObject* JSObject::AllocateStorageForMap(Map* map) {
   1541   ASSERT(this->map()->inobject_properties() == map->inobject_properties());
   1542   ElementsKind obj_kind = this->map()->elements_kind();
   1543   ElementsKind map_kind = map->elements_kind();
   1544   if (map_kind != obj_kind) {
   1545     ElementsKind to_kind = map_kind;
   1546     if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
   1547         IsDictionaryElementsKind(obj_kind)) {
   1548       to_kind = obj_kind;
   1549     }
   1550     MaybeObject* maybe_obj =
   1551         IsDictionaryElementsKind(to_kind) ? NormalizeElements()
   1552                                           : TransitionElementsKind(to_kind);
   1553     if (maybe_obj->IsFailure()) return maybe_obj;
   1554     MaybeObject* maybe_map = map->AsElementsKind(to_kind);
   1555     if (!maybe_map->To(&map)) return maybe_map;
   1556   }
   1557   int total_size =
   1558       map->NumberOfOwnDescriptors() + map->unused_property_fields();
   1559   int out_of_object = total_size - map->inobject_properties();
   1560   if (out_of_object != properties()->length()) {
   1561     FixedArray* new_properties;
   1562     MaybeObject* maybe_properties = properties()->CopySize(out_of_object);
   1563     if (!maybe_properties->To(&new_properties)) return maybe_properties;
   1564     set_properties(new_properties);
   1565   }
   1566   set_map(map);
   1567   return this;
   1568 }
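
        // Worked example of the storage arithmetic above (illustrative numbers,
        // not from the source): a map with 10 own descriptors, 2 unused property
        // fields and 4 in-object properties gives total_size = 10 + 2 = 12, of
        // which out_of_object = 12 - 4 = 8 slots must live in the properties
        // array.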
   1569 
   1570 
   1571 MaybeObject* JSObject::MigrateInstance() {
   1572   // Converting any field to the most specific type (Representation::None)
   1573   // causes the GeneralizeFieldRepresentation algorithm to create the most
   1574   // general existing transition that matches the object, as needed here.
   1575   return GeneralizeFieldRepresentation(0, Representation::None());
   1576 }
   1577 
   1578 
   1579 MaybeObject* JSObject::TryMigrateInstance() {
   1580   Map* new_map = map()->CurrentMapForDeprecated();
   1581   if (new_map == NULL) return Smi::FromInt(0);
   1582   return MigrateToMap(new_map);
   1583 }
   1584 
   1585 
   1586 Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
   1587   DisallowHeapAllocation no_gc;
   1588   if (!map->HasTransitionArray()) return Handle<String>::null();
   1589   TransitionArray* transitions = map->transitions();
   1590   if (!transitions->IsSimpleTransition()) return Handle<String>::null();
   1591   int transition = TransitionArray::kSimpleTransitionIndex;
   1592   PropertyDetails details = transitions->GetTargetDetails(transition);
   1593   Name* name = transitions->GetKey(transition);
   1594   if (details.type() != FIELD) return Handle<String>::null();
   1595   if (details.attributes() != NONE) return Handle<String>::null();
   1596   if (!name->IsString()) return Handle<String>::null();
   1597   return Handle<String>(String::cast(name));
   1598 }
   1599 
   1600 
   1601 Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
   1602   ASSERT(!ExpectedTransitionKey(map).is_null());
   1603   return Handle<Map>(map->transitions()->GetTarget(
   1604       TransitionArray::kSimpleTransitionIndex));
   1605 }
   1606 
   1607 
   1608 Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
   1609   DisallowHeapAllocation no_allocation;
   1610   if (!map->HasTransitionArray()) return Handle<Map>::null();
   1611   TransitionArray* transitions = map->transitions();
   1612   int transition = transitions->Search(*key);
   1613   if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
   1614   PropertyDetails target_details = transitions->GetTargetDetails(transition);
   1615   if (target_details.type() != FIELD) return Handle<Map>::null();
   1616   if (target_details.attributes() != NONE) return Handle<Map>::null();
   1617   return Handle<Map>(transitions->GetTarget(transition));
   1618 }
   1619 
   1620 
   1621 int JSObject::LastAddedFieldIndex() {
   1622   Map* map = this->map();
   1623   int last_added = map->LastAdded();
   1624   return map->instance_descriptors()->GetFieldIndex(last_added);
   1625 }
   1626 
   1627 
   1628 ACCESSORS(Oddball, to_string, String, kToStringOffset)
   1629 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
   1630 
   1631 
   1632 byte Oddball::kind() {
   1633   return Smi::cast(READ_FIELD(this, kKindOffset))->value();
   1634 }
   1635 
   1636 
   1637 void Oddball::set_kind(byte value) {
   1638   WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
   1639 }
   1640 
   1641 
   1642 Object* Cell::value() {
   1643   return READ_FIELD(this, kValueOffset);
   1644 }
   1645 
   1646 
   1647 void Cell::set_value(Object* val, WriteBarrierMode ignored) {
   1648   // The write barrier is not used for cells.
   1649   ASSERT(!val->IsPropertyCell() && !val->IsCell());
   1650   WRITE_FIELD(this, kValueOffset, val);
   1651 }
   1652 
   1653 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
   1654 
   1655 Object* PropertyCell::type_raw() {
   1656   return READ_FIELD(this, kTypeOffset);
   1657 }
   1658 
   1659 
   1660 void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
   1661   WRITE_FIELD(this, kTypeOffset, val);
   1662 }
   1663 
   1664 
   1665 int JSObject::GetHeaderSize() {
   1666   InstanceType type = map()->instance_type();
   1667   // Check for the most common kind of JavaScript object before
   1668   // falling into the generic switch. This speeds up the internal
   1669   // field operations considerably on average.
   1670   if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
   1671   switch (type) {
   1672     case JS_GENERATOR_OBJECT_TYPE:
   1673       return JSGeneratorObject::kSize;
   1674     case JS_MODULE_TYPE:
   1675       return JSModule::kSize;
   1676     case JS_GLOBAL_PROXY_TYPE:
   1677       return JSGlobalProxy::kSize;
   1678     case JS_GLOBAL_OBJECT_TYPE:
   1679       return JSGlobalObject::kSize;
   1680     case JS_BUILTINS_OBJECT_TYPE:
   1681       return JSBuiltinsObject::kSize;
   1682     case JS_FUNCTION_TYPE:
   1683       return JSFunction::kSize;
   1684     case JS_VALUE_TYPE:
   1685       return JSValue::kSize;
   1686     case JS_DATE_TYPE:
   1687       return JSDate::kSize;
   1688     case JS_ARRAY_TYPE:
   1689       return JSArray::kSize;
   1690     case JS_ARRAY_BUFFER_TYPE:
   1691       return JSArrayBuffer::kSize;
   1692     case JS_TYPED_ARRAY_TYPE:
   1693       return JSTypedArray::kSize;
   1694     case JS_DATA_VIEW_TYPE:
   1695       return JSDataView::kSize;
   1696     case JS_SET_TYPE:
   1697       return JSSet::kSize;
   1698     case JS_MAP_TYPE:
   1699       return JSMap::kSize;
   1700     case JS_WEAK_MAP_TYPE:
   1701       return JSWeakMap::kSize;
   1702     case JS_WEAK_SET_TYPE:
   1703       return JSWeakSet::kSize;
   1704     case JS_REGEXP_TYPE:
   1705       return JSRegExp::kSize;
   1706     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
   1707       return JSObject::kHeaderSize;
   1708     case JS_MESSAGE_OBJECT_TYPE:
   1709       return JSMessageObject::kSize;
   1710     default:
   1711       UNREACHABLE();
   1712       return 0;
   1713   }
   1714 }
   1715 
   1716 
   1717 int JSObject::GetInternalFieldCount() {
   1718   ASSERT(1 << kPointerSizeLog2 == kPointerSize);
   1719   // Make sure to adjust for the number of in-object properties. These
   1720   // properties do contribute to the size, but are not internal fields.
   1721   return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
   1722          map()->inobject_properties();
   1723 }
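
        // Illustrative example (hypothetical values): for an object with
        // Size() == 48, GetHeaderSize() == 24, kPointerSize == 8 and one
        // in-object property, this yields (48 - 24) / 8 - 1 == 2 internal fields.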
   1724 
   1725 
   1726 int JSObject::GetInternalFieldOffset(int index) {
   1727   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1728   return GetHeaderSize() + (kPointerSize * index);
   1729 }
   1730 
   1731 
   1732 Object* JSObject::GetInternalField(int index) {
   1733   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1734   // Internal fields are laid out immediately after the header, whereas
   1735   // in-object properties are at the end of the object. Therefore there is
   1736   // no need to adjust the index here.
   1737   return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
   1738 }
   1739 
   1740 
   1741 void JSObject::SetInternalField(int index, Object* value) {
   1742   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1743   // Internal fields are laid out immediately after the header, whereas
   1744   // in-object properties are at the end of the object. Therefore there is
   1745   // no need to adjust the index here.
   1746   int offset = GetHeaderSize() + (kPointerSize * index);
   1747   WRITE_FIELD(this, offset, value);
   1748   WRITE_BARRIER(GetHeap(), this, offset, value);
   1749 }
   1750 
   1751 
   1752 void JSObject::SetInternalField(int index, Smi* value) {
   1753   ASSERT(index < GetInternalFieldCount() && index >= 0);
   1754   // Internal fields are laid out immediately after the header, whereas
   1755   // in-object properties are at the end of the object. Therefore there is
   1756   // no need to adjust the index here.
   1757   int offset = GetHeaderSize() + (kPointerSize * index);
   1758   WRITE_FIELD(this, offset, value);
   1759 }
   1760 
   1761 
   1762 MaybeObject* JSObject::FastPropertyAt(Representation representation,
   1763                                       int index) {
   1764   Object* raw_value = RawFastPropertyAt(index);
   1765   return raw_value->AllocateNewStorageFor(GetHeap(), representation);
   1766 }
   1767 
   1768 
   1769 // Access fast-case object properties at index. These routines are needed to
   1770 // correctly distinguish between properties stored in-object and properties
   1771 // stored in the properties array.
   1772 Object* JSObject::RawFastPropertyAt(int index) {
   1773   // Adjust for the number of properties stored in the object.
   1774   index -= map()->inobject_properties();
   1775   if (index < 0) {
   1776     int offset = map()->instance_size() + (index * kPointerSize);
   1777     return READ_FIELD(this, offset);
   1778   } else {
   1779     ASSERT(index < properties()->length());
   1780     return properties()->get(index);
   1781   }
   1782 }
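
        // Worked example (hypothetical layout): with two in-object properties,
        // index 0 becomes -2 and reads the in-object slot at offset
        // instance_size - 2 * kPointerSize, while index 2 becomes 0 and reads
        // properties()->get(0) from the out-of-object properties array.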
   1783 
   1784 
   1785 void JSObject::FastPropertyAtPut(int index, Object* value) {
   1786   // Adjust for the number of properties stored in the object.
   1787   index -= map()->inobject_properties();
   1788   if (index < 0) {
   1789     int offset = map()->instance_size() + (index * kPointerSize);
   1790     WRITE_FIELD(this, offset, value);
   1791     WRITE_BARRIER(GetHeap(), this, offset, value);
   1792   } else {
   1793     ASSERT(index < properties()->length());
   1794     properties()->set(index, value);
   1795   }
   1796 }
   1797 
   1798 
   1799 int JSObject::GetInObjectPropertyOffset(int index) {
   1800   // Adjust for the number of properties stored in the object.
   1801   index -= map()->inobject_properties();
   1802   ASSERT(index < 0);
   1803   return map()->instance_size() + (index * kPointerSize);
   1804 }
   1805 
   1806 
   1807 Object* JSObject::InObjectPropertyAt(int index) {
   1808   // Adjust for the number of properties stored in the object.
   1809   index -= map()->inobject_properties();
   1810   ASSERT(index < 0);
   1811   int offset = map()->instance_size() + (index * kPointerSize);
   1812   return READ_FIELD(this, offset);
   1813 }
   1814 
   1815 
   1816 Object* JSObject::InObjectPropertyAtPut(int index,
   1817                                         Object* value,
   1818                                         WriteBarrierMode mode) {
   1819   // Adjust for the number of properties stored in the object.
   1820   index -= map()->inobject_properties();
   1821   ASSERT(index < 0);
   1822   int offset = map()->instance_size() + (index * kPointerSize);
   1823   WRITE_FIELD(this, offset, value);
   1824   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
   1825   return value;
   1826 }
   1827 
   1828 
   1829 
   1830 void JSObject::InitializeBody(Map* map,
   1831                               Object* pre_allocated_value,
   1832                               Object* filler_value) {
   1833   ASSERT(!filler_value->IsHeapObject() ||
   1834          !GetHeap()->InNewSpace(filler_value));
   1835   ASSERT(!pre_allocated_value->IsHeapObject() ||
   1836          !GetHeap()->InNewSpace(pre_allocated_value));
   1837   int size = map->instance_size();
   1838   int offset = kHeaderSize;
   1839   if (filler_value != pre_allocated_value) {
   1840     int pre_allocated = map->pre_allocated_property_fields();
   1841     ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
   1842     for (int i = 0; i < pre_allocated; i++) {
   1843       WRITE_FIELD(this, offset, pre_allocated_value);
   1844       offset += kPointerSize;
   1845     }
   1846   }
   1847   while (offset < size) {
   1848     WRITE_FIELD(this, offset, filler_value);
   1849     offset += kPointerSize;
   1850   }
   1851 }
   1852 
   1853 
   1854 bool JSObject::HasFastProperties() {
   1855   ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
   1856   return !properties()->IsDictionary();
   1857 }
   1858 
   1859 
   1860 bool JSObject::TooManyFastProperties(int properties,
   1861                                      JSObject::StoreFromKeyed store_mode) {
   1862   // Allow extra fast properties if the object has more than
   1863   // kFastPropertiesSoftLimit in-object properties. When this is the case,
   1864   // it is very unlikely that the object is being used as a dictionary
   1865   // and there is a good chance that allowing more map transitions
   1866   // will be worth it.
   1867   int inobject = map()->inobject_properties();
   1868 
   1869   int limit;
   1870   if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
   1871     limit = Max(inobject, kMaxFastProperties);
   1872   } else {
   1873     limit = Max(inobject, kFastPropertiesSoftLimit);
   1874   }
   1875   return properties > limit;
   1876 }
   1877 
   1878 
   1879 void Struct::InitializeBody(int object_size) {
   1880   Object* value = GetHeap()->undefined_value();
   1881   for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
   1882     WRITE_FIELD(this, offset, value);
   1883   }
   1884 }
   1885 
   1886 
   1887 bool Object::ToArrayIndex(uint32_t* index) {
   1888   if (IsSmi()) {
   1889     int value = Smi::cast(this)->value();
   1890     if (value < 0) return false;
   1891     *index = value;
   1892     return true;
   1893   }
   1894   if (IsHeapNumber()) {
   1895     double value = HeapNumber::cast(this)->value();
   1896     uint32_t uint_value = static_cast<uint32_t>(value);
   1897     if (value == static_cast<double>(uint_value)) {
   1898       *index = uint_value;
   1899       return true;
   1900     }
   1901   }
   1902   return false;
   1903 }
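
        // Note: the conversions above accept only non-negative whole numbers
        // exactly representable as uint32_t; e.g. Smi 3 and HeapNumber 3.0 both
        // yield index 3, while -1 and 3.5 are rejected.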
   1904 
   1905 
   1906 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
   1907   if (!this->IsJSValue()) return false;
   1908 
   1909   JSValue* js_value = JSValue::cast(this);
   1910   if (!js_value->value()->IsString()) return false;
   1911 
   1912   String* str = String::cast(js_value->value());
   1913   if (index >= static_cast<uint32_t>(str->length())) return false;
   1914 
   1915   return true;
   1916 }
   1917 
   1918 
   1919 
   1920 void Object::VerifyApiCallResultType() {
   1921 #if ENABLE_EXTRA_CHECKS
   1922   if (!(IsSmi() ||
   1923         IsString() ||
   1924         IsSpecObject() ||
   1925         IsHeapNumber() ||
   1926         IsUndefined() ||
   1927         IsTrue() ||
   1928         IsFalse() ||
   1929         IsNull())) {
   1930     FATAL("API call returned invalid object");
   1931   }
   1932 #endif  // ENABLE_EXTRA_CHECKS
   1933 }
   1934 
   1935 
   1936 FixedArrayBase* FixedArrayBase::cast(Object* object) {
   1937   ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
   1938   return reinterpret_cast<FixedArrayBase*>(object);
   1939 }
   1940 
   1941 
   1942 Object* FixedArray::get(int index) {
   1943   ASSERT(index >= 0 && index < this->length());
   1944   return READ_FIELD(this, kHeaderSize + index * kPointerSize);
   1945 }
   1946 
   1947 
   1948 bool FixedArray::is_the_hole(int index) {
   1949   return get(index) == GetHeap()->the_hole_value();
   1950 }
   1951 
   1952 
   1953 void FixedArray::set(int index, Smi* value) {
   1954   ASSERT(map() != HEAP->fixed_cow_array_map());
   1955   ASSERT(index >= 0 && index < this->length());
   1956   ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
   1957   int offset = kHeaderSize + index * kPointerSize;
   1958   WRITE_FIELD(this, offset, value);
   1959 }
   1960 
   1961 
   1962 void FixedArray::set(int index, Object* value) {
   1963   ASSERT(map() != HEAP->fixed_cow_array_map());
   1964   ASSERT(index >= 0 && index < this->length());
   1965   int offset = kHeaderSize + index * kPointerSize;
   1966   WRITE_FIELD(this, offset, value);
   1967   WRITE_BARRIER(GetHeap(), this, offset, value);
   1968 }
   1969 
   1970 
   1971 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
   1972   return BitCast<uint64_t, double>(value) == kHoleNanInt64;
   1973 }
   1974 
   1975 
   1976 inline double FixedDoubleArray::hole_nan_as_double() {
   1977   return BitCast<double, uint64_t>(kHoleNanInt64);
   1978 }
   1979 
   1980 
   1981 inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
   1982   ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
   1983   ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
   1984   return OS::nan_value();
   1985 }
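
        // Encoding note: array holes are represented by the specific NaN bit
        // pattern kHoleNanInt64; set() below canonicalizes any incoming NaN so
        // that a user-visible NaN can never alias the hole.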
   1986 
   1987 
   1988 double FixedDoubleArray::get_scalar(int index) {
   1989   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   1990          map() != HEAP->fixed_array_map());
   1991   ASSERT(index >= 0 && index < this->length());
   1992   double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
   1993   ASSERT(!is_the_hole_nan(result));
   1994   return result;
   1995 }
   1996 
   1997 int64_t FixedDoubleArray::get_representation(int index) {
   1998   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   1999          map() != HEAP->fixed_array_map());
   2000   ASSERT(index >= 0 && index < this->length());
   2001   return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
   2002 }
   2003 
   2004 MaybeObject* FixedDoubleArray::get(int index) {
   2005   if (is_the_hole(index)) {
   2006     return GetHeap()->the_hole_value();
   2007   } else {
   2008     return GetHeap()->NumberFromDouble(get_scalar(index));
   2009   }
   2010 }
   2011 
   2012 
   2013 void FixedDoubleArray::set(int index, double value) {
   2014   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   2015          map() != HEAP->fixed_array_map());
   2016   int offset = kHeaderSize + index * kDoubleSize;
   2017   if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
   2018   WRITE_DOUBLE_FIELD(this, offset, value);
   2019 }
   2020 
   2021 
   2022 void FixedDoubleArray::set_the_hole(int index) {
   2023   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   2024          map() != HEAP->fixed_array_map());
   2025   int offset = kHeaderSize + index * kDoubleSize;
   2026   WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
   2027 }
   2028 
   2029 
   2030 bool FixedDoubleArray::is_the_hole(int index) {
   2031   int offset = kHeaderSize + index * kDoubleSize;
   2032   return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
   2033 }
   2034 
   2035 
   2036 WriteBarrierMode HeapObject::GetWriteBarrierMode(
   2037     const DisallowHeapAllocation& promise) {
   2038   Heap* heap = GetHeap();
   2039   if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
   2040   if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
   2041   return UPDATE_WRITE_BARRIER;
   2042 }
   2043 
   2044 
   2045 void FixedArray::set(int index,
   2046                      Object* value,
   2047                      WriteBarrierMode mode) {
   2048   ASSERT(map() != HEAP->fixed_cow_array_map());
   2049   ASSERT(index >= 0 && index < this->length());
   2050   int offset = kHeaderSize + index * kPointerSize;
   2051   WRITE_FIELD(this, offset, value);
   2052   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
   2053 }
   2054 
   2055 
   2056 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
   2057                                               int index,
   2058                                               Object* value) {
   2059   ASSERT(array->map() != HEAP->fixed_cow_array_map());
   2060   ASSERT(index >= 0 && index < array->length());
   2061   int offset = kHeaderSize + index * kPointerSize;
   2062   WRITE_FIELD(array, offset, value);
   2063   Heap* heap = array->GetHeap();
   2064   if (heap->InNewSpace(value)) {
   2065     heap->RecordWrite(array->address(), offset);
   2066   }
   2067 }
   2068 
   2069 
   2070 void FixedArray::NoWriteBarrierSet(FixedArray* array,
   2071                                    int index,
   2072                                    Object* value) {
   2073   ASSERT(array->map() != HEAP->fixed_cow_array_map());
   2074   ASSERT(index >= 0 && index < array->length());
   2075   ASSERT(!HEAP->InNewSpace(value));
   2076   WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
   2077 }
   2078 
   2079 
   2080 void FixedArray::set_undefined(int index) {
   2081   ASSERT(map() != HEAP->fixed_cow_array_map());
   2082   set_undefined(GetHeap(), index);
   2083 }
   2084 
   2085 
   2086 void FixedArray::set_undefined(Heap* heap, int index) {
   2087   ASSERT(index >= 0 && index < this->length());
   2088   ASSERT(!heap->InNewSpace(heap->undefined_value()));
   2089   WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
   2090               heap->undefined_value());
   2091 }
   2092 
   2093 
   2094 void FixedArray::set_null(int index) {
   2095   set_null(GetHeap(), index);
   2096 }
   2097 
   2098 
   2099 void FixedArray::set_null(Heap* heap, int index) {
   2100   ASSERT(index >= 0 && index < this->length());
   2101   ASSERT(!heap->InNewSpace(heap->null_value()));
   2102   WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
   2103 }
   2104 
   2105 
   2106 void FixedArray::set_the_hole(int index) {
   2107   ASSERT(map() != HEAP->fixed_cow_array_map());
   2108   ASSERT(index >= 0 && index < this->length());
   2109   ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
   2110   WRITE_FIELD(this,
   2111               kHeaderSize + index * kPointerSize,
   2112               GetHeap()->the_hole_value());
   2113 }
   2114 
   2115 
   2116 double* FixedDoubleArray::data_start() {
   2117   return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
   2118 }
   2119 
   2120 
   2121 Object** FixedArray::data_start() {
   2122   return HeapObject::RawField(this, kHeaderSize);
   2123 }
   2124 
   2125 
   2126 bool DescriptorArray::IsEmpty() {
   2127   ASSERT(length() >= kFirstIndex ||
   2128          this == HEAP->empty_descriptor_array());
   2129   return length() < kFirstIndex;
   2130 }
   2131 
   2132 
   2133 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
   2134   WRITE_FIELD(
   2135       this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
   2136 }
   2137 
   2138 
   2139 // Perform a binary search in a fixed array. low and high are entry indices;
   2140 // e.g. for an array with three entries, call this with low=0 and high=2.
   2141 // Returns the index of the matching entry, or T::kNotFound.
   2142 template<SearchMode search_mode, typename T>
   2143 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
   2144   uint32_t hash = name->Hash();
   2145   int limit = high;
   2146 
   2147   ASSERT(low <= high);
   2148 
   2149   while (low != high) {
   2150     int mid = (low + high) / 2;
   2151     Name* mid_name = array->GetSortedKey(mid);
   2152     uint32_t mid_hash = mid_name->Hash();
   2153 
   2154     if (mid_hash >= hash) {
   2155       high = mid;
   2156     } else {
   2157       low = mid + 1;
   2158     }
   2159   }
   2160 
   2161   for (; low <= limit; ++low) {
   2162     int sort_index = array->GetSortedKeyIndex(low);
   2163     Name* entry = array->GetKey(sort_index);
   2164     if (entry->Hash() != hash) break;
   2165     if (entry->Equals(name)) {
   2166       if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
   2167         return sort_index;
   2168       }
   2169       return T::kNotFound;
   2170     }
   2171   }
   2172 
   2173   return T::kNotFound;
   2174 }
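
        // Sketch of the algorithm above (illustrative): for sorted hashes
        // [3, 7, 7, 9] and a name hashing to 7, the binary phase narrows low to
        // index 1 (the first entry with hash >= 7), and the linear phase then
        // walks entries 1 and 2 until the key Equals(name).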
   2175 
   2176 
   2177 // Perform a linear search in this fixed array. len is the number of valid
   2178 // entry indices.
   2179 template<SearchMode search_mode, typename T>
   2180 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
   2181   uint32_t hash = name->Hash();
   2182   if (search_mode == ALL_ENTRIES) {
   2183     for (int number = 0; number < len; number++) {
   2184       int sorted_index = array->GetSortedKeyIndex(number);
   2185       Name* entry = array->GetKey(sorted_index);
   2186       uint32_t current_hash = entry->Hash();
   2187       if (current_hash > hash) break;
   2188       if (current_hash == hash && entry->Equals(name)) return sorted_index;
   2189     }
   2190   } else {
   2191     ASSERT(len >= valid_entries);
   2192     for (int number = 0; number < valid_entries; number++) {
   2193       Name* entry = array->GetKey(number);
   2194       uint32_t current_hash = entry->Hash();
   2195       if (current_hash == hash && entry->Equals(name)) return number;
   2196     }
   2197   }
   2198   return T::kNotFound;
   2199 }
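
        // Note: in VALID_ENTRIES mode the keys are scanned in unsorted order
        // over the first valid_entries slots, so there is no early exit on hash
        // order as in the ALL_ENTRIES branch above.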
   2200 
   2201 
   2202 template<SearchMode search_mode, typename T>
   2203 int Search(T* array, Name* name, int valid_entries) {
   2204   if (search_mode == VALID_ENTRIES) {
   2205     SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
   2206   } else {
   2207     SLOW_ASSERT(array->IsSortedNoDuplicates());
   2208   }
   2209 
   2210   int nof = array->number_of_entries();
   2211   if (nof == 0) return T::kNotFound;
   2212 
   2213   // Fast case: do linear search for small arrays.
   2214   const int kMaxElementsForLinearSearch = 8;
   2215   if ((search_mode == ALL_ENTRIES &&
   2216        nof <= kMaxElementsForLinearSearch) ||
   2217       (search_mode == VALID_ENTRIES &&
   2218        valid_entries <= (kMaxElementsForLinearSearch * 3))) {
   2219     return LinearSearch<search_mode>(array, name, nof, valid_entries);
   2220   }
   2221 
   2222   // Slow case: perform binary search.
   2223   return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
   2224 }
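
        // Usage sketch (hypothetical call site):
        //   int idx = Search<VALID_ENTRIES>(descriptors, name, nof);
        //   if (idx != DescriptorArray::kNotFound) { /* found at index idx */ }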
   2225 
   2226 
   2227 int DescriptorArray::Search(Name* name, int valid_descriptors) {
   2228   return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
   2229 }
   2230 
   2231 
   2232 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
   2233   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
   2234   if (number_of_own_descriptors == 0) return kNotFound;
   2235 
   2236   DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
   2237   int number = cache->Lookup(map, name);
   2238 
   2239   if (number == DescriptorLookupCache::kAbsent) {
   2240     number = Search(name, number_of_own_descriptors);
   2241     cache->Update(map, name, number);
   2242   }
   2243 
   2244   return number;
   2245 }
   2246 
   2247 
   2248 void Map::LookupDescriptor(JSObject* holder,
   2249                            Name* name,
   2250                            LookupResult* result) {
   2251   DescriptorArray* descriptors = this->instance_descriptors();
   2252   int number = descriptors->SearchWithCache(name, this);
   2253   if (number == DescriptorArray::kNotFound) return result->NotFound();
   2254   result->DescriptorResult(holder, descriptors->GetDetails(number), number);
   2255 }
   2256 
   2257 
   2258 void Map::LookupTransition(JSObject* holder,
   2259                            Name* name,
   2260                            LookupResult* result) {
   2261   if (HasTransitionArray()) {
   2262     TransitionArray* transition_array = transitions();
   2263     int number = transition_array->Search(name);
   2264     if (number != TransitionArray::kNotFound) {
   2265       return result->TransitionResult(holder, number);
   2266     }
   2267   }
   2268   result->NotFound();
   2269 }
   2270 
   2271 
   2272 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
   2273   ASSERT(descriptor_number < number_of_descriptors());
   2274   return HeapObject::RawField(
   2275       reinterpret_cast<HeapObject*>(this),
   2276       OffsetOfElementAt(ToKeyIndex(descriptor_number)));
   2277 }
   2278 
   2279 
   2280 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
   2281   return GetKeySlot(descriptor_number);
   2282 }
   2283 
   2284 
   2285 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
   2286   return GetValueSlot(descriptor_number - 1) + 1;
   2287 }
   2288 
   2289 
   2290 Name* DescriptorArray::GetKey(int descriptor_number) {
   2291   ASSERT(descriptor_number < number_of_descriptors());
   2292   return Name::cast(get(ToKeyIndex(descriptor_number)));
   2293 }
   2294 
   2295 
   2296 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
   2297   return GetDetails(descriptor_number).pointer();
   2298 }
   2299 
   2300 
   2301 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
   2302   return GetKey(GetSortedKeyIndex(descriptor_number));
   2303 }
   2304 
   2305 
   2306 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
   2307   PropertyDetails details = GetDetails(descriptor_index);
   2308   set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
   2309 }
   2310 
   2311 
   2312 void DescriptorArray::SetRepresentation(int descriptor_index,
   2313                                         Representation representation) {
   2314   ASSERT(!representation.IsNone());
   2315   PropertyDetails details = GetDetails(descriptor_index);
   2316   set(ToDetailsIndex(descriptor_index),
   2317       details.CopyWithRepresentation(representation).AsSmi());
   2318 }
   2319 
   2320 
   2321 void DescriptorArray::InitializeRepresentations(Representation representation) {
   2322   int length = number_of_descriptors();
   2323   for (int i = 0; i < length; i++) {
   2324     SetRepresentation(i, representation);
   2325   }
   2326 }
   2327 
   2328 
   2329 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
   2330   ASSERT(descriptor_number < number_of_descriptors());
   2331   return HeapObject::RawField(
   2332       reinterpret_cast<HeapObject*>(this),
   2333       OffsetOfElementAt(ToValueIndex(descriptor_number)));
   2334 }
   2335 
   2336 
   2337 Object* DescriptorArray::GetValue(int descriptor_number) {
   2338   ASSERT(descriptor_number < number_of_descriptors());
   2339   return get(ToValueIndex(descriptor_number));
   2340 }
   2341 
   2342 
   2343 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
   2344   ASSERT(descriptor_number < number_of_descriptors());
   2345   Object* details = get(ToDetailsIndex(descriptor_number));
   2346   return PropertyDetails(Smi::cast(details));
   2347 }
   2348 
   2349 
   2350 PropertyType DescriptorArray::GetType(int descriptor_number) {
   2351   return GetDetails(descriptor_number).type();
   2352 }
   2353 
   2354 
   2355 int DescriptorArray::GetFieldIndex(int descriptor_number) {
   2356   return GetDetails(descriptor_number).field_index();
   2357 }
   2358 
   2359 
   2360 Object* DescriptorArray::GetConstant(int descriptor_number) {
   2361   return GetValue(descriptor_number);
   2362 }
   2363 
   2364 
   2365 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
   2366   ASSERT(GetType(descriptor_number) == CALLBACKS);
   2367   return GetValue(descriptor_number);
   2368 }
   2369 
   2370 
   2371 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
   2372   ASSERT(GetType(descriptor_number) == CALLBACKS);
   2373   Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
   2374   return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
   2375 }
   2376 
   2377 
   2378 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
   2379   desc->Init(GetKey(descriptor_number),
   2380              GetValue(descriptor_number),
   2381              GetDetails(descriptor_number));
   2382 }
   2383 
   2384 
   2385 void DescriptorArray::Set(int descriptor_number,
   2386                           Descriptor* desc,
   2387                           const WhitenessWitness&) {
   2388   // Range check.
   2389   ASSERT(descriptor_number < number_of_descriptors());
   2390 
   2391   NoIncrementalWriteBarrierSet(this,
   2392                                ToKeyIndex(descriptor_number),
   2393                                desc->GetKey());
   2394   NoIncrementalWriteBarrierSet(this,
   2395                                ToValueIndex(descriptor_number),
   2396                                desc->GetValue());
   2397   NoIncrementalWriteBarrierSet(this,
   2398                                ToDetailsIndex(descriptor_number),
   2399                                desc->GetDetails().AsSmi());
   2400 }
   2401 
   2402 
   2403 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
   2404   // Range check.
   2405   ASSERT(descriptor_number < number_of_descriptors());
   2406 
   2407   set(ToKeyIndex(descriptor_number), desc->GetKey());
   2408   set(ToValueIndex(descriptor_number), desc->GetValue());
   2409   set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
   2410 }
   2411 
   2412 
   2413 void DescriptorArray::Append(Descriptor* desc,
   2414                              const WhitenessWitness& witness) {
   2415   int descriptor_number = number_of_descriptors();
   2416   SetNumberOfDescriptors(descriptor_number + 1);
   2417   Set(descriptor_number, desc, witness);
   2418 
   2419   uint32_t hash = desc->GetKey()->Hash();
   2420 
   2421   int insertion;
   2422 
   2423   for (insertion = descriptor_number; insertion > 0; --insertion) {
   2424     Name* key = GetSortedKey(insertion - 1);
   2425     if (key->Hash() <= hash) break;
   2426     SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
   2427   }
   2428 
   2429   SetSortedKey(insertion, descriptor_number);
   2430 }
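
        // Note on the insertion loop above: the new descriptor is appended at
        // the end and the sorted-key pointers are shifted up until hash order is
        // restored; the <= comparison keeps equal-hash keys in insertion order.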
   2431 
   2432 
   2433 void DescriptorArray::Append(Descriptor* desc) {
   2434   int descriptor_number = number_of_descriptors();
   2435   SetNumberOfDescriptors(descriptor_number + 1);
   2436   Set(descriptor_number, desc);
   2437 
   2438   uint32_t hash = desc->GetKey()->Hash();
   2439 
   2440   int insertion;
   2441 
   2442   for (insertion = descriptor_number; insertion > 0; --insertion) {
   2443     Name* key = GetSortedKey(insertion - 1);
   2444     if (key->Hash() <= hash) break;
   2445     SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
   2446   }
   2447 
   2448   SetSortedKey(insertion, descriptor_number);
   2449 }
   2450 
   2451 
   2452 void DescriptorArray::SwapSortedKeys(int first, int second) {
   2453   int first_key = GetSortedKeyIndex(first);
   2454   SetSortedKey(first, GetSortedKeyIndex(second));
   2455   SetSortedKey(second, first_key);
   2456 }
   2457 
   2458 
   2459 DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
   2460     : marking_(array->GetHeap()->incremental_marking()) {
   2461   marking_->EnterNoMarkingScope();
   2462   ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
   2463 }
   2464 
   2465 
   2466 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
   2467   marking_->LeaveNoMarkingScope();
   2468 }
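
        // Note: the witness enters a no-marking scope so the still white
        // (unmarked) array can be filled in without incremental-marking write
        // barriers; see the NoIncrementalWriteBarrierSet calls above.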
   2469 
   2470 
   2471 template<typename Shape, typename Key>
   2472 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
   2473   const int kMinCapacity = 32;
   2474   int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
   2475   if (capacity < kMinCapacity) {
   2476     capacity = kMinCapacity;  // Guarantee min capacity.
   2477   }
   2478   return capacity;
   2479 }
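
        // Example of the sizing rule above (illustrative): a request for 20
        // entries doubles to 40 and rounds up to a capacity of 64, while
        // requests of 16 or fewer all land on the 32-entry minimum.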
   2480 
   2481 
   2482 template<typename Shape, typename Key>
   2483 int HashTable<Shape, Key>::FindEntry(Key key) {
   2484   return FindEntry(GetIsolate(), key);
   2485 }
   2486 
   2487 
   2488 // Find entry for key otherwise return kNotFound.
   2489 template<typename Shape, typename Key>
   2490 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
   2491   uint32_t capacity = Capacity();
   2492   uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
   2493   uint32_t count = 1;
   2494   // EnsureCapacity will guarantee the hash table is never full.
   2495   while (true) {
   2496     Object* element = KeyAt(entry);
   2497     // Empty entry. Uses raw unchecked accessors because it is called by the
   2498     // string table during bootstrapping.
   2499     if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
   2500     if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
   2501         Shape::IsMatch(key, element)) return entry;
   2502     entry = NextProbe(entry, count++, capacity);
   2503   }
   2504   return kNotFound;
   2505 }
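
        // Note: deleted entries (the hole) are stepped over but do not stop the
        // probe sequence; only an undefined slot terminates it, and
        // EnsureCapacity keeps at least one such slot free so the loop ends.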
   2506 
   2507 
   2508 bool SeededNumberDictionary::requires_slow_elements() {
   2509   Object* max_index_object = get(kMaxNumberKeyIndex);
   2510   if (!max_index_object->IsSmi()) return false;
   2511   return 0 !=
   2512       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
   2513 }
   2514 
   2515 uint32_t SeededNumberDictionary::max_number_key() {
   2516   ASSERT(!requires_slow_elements());
   2517   Object* max_index_object = get(kMaxNumberKeyIndex);
   2518   if (!max_index_object->IsSmi()) return 0;
   2519   uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
   2520   return value >> kRequiresSlowElementsTagSize;
   2521 }
   2522 
   2523 void SeededNumberDictionary::set_requires_slow_elements() {
   2524   set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
   2525 }
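
        // Encoding note: the Smi at kMaxNumberKeyIndex stores the largest
        // number key shifted left by kRequiresSlowElementsTagSize, with the low
        // tag bits holding the requires-slow-elements flag tested above.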
   2526 
   2527 
   2528 // ------------------------------------
   2529 // Cast operations
   2530 
   2531 
   2532 CAST_ACCESSOR(FixedArray)
   2533 CAST_ACCESSOR(FixedDoubleArray)
   2534 CAST_ACCESSOR(DescriptorArray)
   2535 CAST_ACCESSOR(DeoptimizationInputData)
   2536 CAST_ACCESSOR(DeoptimizationOutputData)
   2537 CAST_ACCESSOR(DependentCode)
   2538 CAST_ACCESSOR(TypeFeedbackCells)
   2539 CAST_ACCESSOR(StringTable)
   2540 CAST_ACCESSOR(JSFunctionResultCache)
   2541 CAST_ACCESSOR(NormalizedMapCache)
   2542 CAST_ACCESSOR(ScopeInfo)
   2543 CAST_ACCESSOR(CompilationCacheTable)
   2544 CAST_ACCESSOR(CodeCacheHashTable)
   2545 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
   2546 CAST_ACCESSOR(MapCache)
   2547 CAST_ACCESSOR(String)
   2548 CAST_ACCESSOR(SeqString)
   2549 CAST_ACCESSOR(SeqOneByteString)
   2550 CAST_ACCESSOR(SeqTwoByteString)
   2551 CAST_ACCESSOR(SlicedString)
   2552 CAST_ACCESSOR(ConsString)
   2553 CAST_ACCESSOR(ExternalString)
   2554 CAST_ACCESSOR(ExternalAsciiString)
   2555 CAST_ACCESSOR(ExternalTwoByteString)
   2556 CAST_ACCESSOR(Symbol)
   2557 CAST_ACCESSOR(Name)
   2558 CAST_ACCESSOR(JSReceiver)
   2559 CAST_ACCESSOR(JSObject)
   2560 CAST_ACCESSOR(Smi)
   2561 CAST_ACCESSOR(HeapObject)
   2562 CAST_ACCESSOR(HeapNumber)
   2563 CAST_ACCESSOR(Oddball)
   2564 CAST_ACCESSOR(Cell)
   2565 CAST_ACCESSOR(PropertyCell)
   2566 CAST_ACCESSOR(SharedFunctionInfo)
   2567 CAST_ACCESSOR(Map)
   2568 CAST_ACCESSOR(JSFunction)
   2569 CAST_ACCESSOR(GlobalObject)
   2570 CAST_ACCESSOR(JSGlobalProxy)
   2571 CAST_ACCESSOR(JSGlobalObject)
   2572 CAST_ACCESSOR(JSBuiltinsObject)
   2573 CAST_ACCESSOR(Code)
   2574 CAST_ACCESSOR(JSArray)
   2575 CAST_ACCESSOR(JSArrayBuffer)
   2576 CAST_ACCESSOR(JSArrayBufferView)
   2577 CAST_ACCESSOR(JSTypedArray)
   2578 CAST_ACCESSOR(JSDataView)
   2579 CAST_ACCESSOR(JSRegExp)
   2580 CAST_ACCESSOR(JSProxy)
   2581 CAST_ACCESSOR(JSFunctionProxy)
   2582 CAST_ACCESSOR(JSSet)
   2583 CAST_ACCESSOR(JSMap)
   2584 CAST_ACCESSOR(JSWeakMap)
   2585 CAST_ACCESSOR(JSWeakSet)
   2586 CAST_ACCESSOR(Foreign)
   2587 CAST_ACCESSOR(ByteArray)
   2588 CAST_ACCESSOR(FreeSpace)
   2589 CAST_ACCESSOR(ExternalArray)
   2590 CAST_ACCESSOR(ExternalByteArray)
   2591 CAST_ACCESSOR(ExternalUnsignedByteArray)
   2592 CAST_ACCESSOR(ExternalShortArray)
   2593 CAST_ACCESSOR(ExternalUnsignedShortArray)
   2594 CAST_ACCESSOR(ExternalIntArray)
   2595 CAST_ACCESSOR(ExternalUnsignedIntArray)
   2596 CAST_ACCESSOR(ExternalFloatArray)
   2597 CAST_ACCESSOR(ExternalDoubleArray)
   2598 CAST_ACCESSOR(ExternalPixelArray)
   2599 CAST_ACCESSOR(Struct)
   2600 CAST_ACCESSOR(AccessorInfo)
   2601 
   2602 
   2603 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
   2604   STRUCT_LIST(MAKE_STRUCT_CAST)
   2605 #undef MAKE_STRUCT_CAST
   2606 
   2607 
   2608 template <typename Shape, typename Key>
   2609 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
   2610   ASSERT(obj->IsHashTable());
   2611   return reinterpret_cast<HashTable*>(obj);
   2612 }
   2613 
   2614 
   2615 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
   2616 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
   2617 
   2618 SMI_ACCESSORS(String, length, kLengthOffset)
   2619 
   2620 
   2621 uint32_t Name::hash_field() {
   2622   return READ_UINT32_FIELD(this, kHashFieldOffset);
   2623 }
   2624 
   2625 
   2626 void Name::set_hash_field(uint32_t value) {
   2627   WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
   2628 #if V8_HOST_ARCH_64_BIT
   2629   WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
   2630 #endif
   2631 }
   2632 
   2633 
   2634 bool Name::Equals(Name* other) {
   2635   if (other == this) return true;
   2636   if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
   2637       this->IsSymbol() || other->IsSymbol()) {
   2638     return false;
   2639   }
   2640   return String::cast(this)->SlowEquals(String::cast(other));
   2641 }
   2642 
   2643 
   2644 ACCESSORS(Symbol, name, Object, kNameOffset)
   2645 
   2646 
   2647 bool String::Equals(String* other) {
   2648   if (other == this) return true;
   2649   if (this->IsInternalizedString() && other->IsInternalizedString()) {
   2650     return false;
   2651   }
   2652   return SlowEquals(other);
   2653 }
   2654 
   2655 
   2656 MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
   2657   if (!StringShape(this).IsCons()) return this;
   2658   ConsString* cons = ConsString::cast(this);
   2659   if (cons->IsFlat()) return cons->first();
   2660   return SlowTryFlatten(pretenure);
   2661 }
   2662 
   2663 
   2664 String* String::TryFlattenGetString(PretenureFlag pretenure) {
   2665   MaybeObject* flat = TryFlatten(pretenure);
   2666   Object* successfully_flattened;
   2667   if (!flat->ToObject(&successfully_flattened)) return this;
   2668   return String::cast(successfully_flattened);
   2669 }
   2670 
   2671 
   2672 uint16_t String::Get(int index) {
   2673   ASSERT(index >= 0 && index < length());
   2674   switch (StringShape(this).full_representation_tag()) {
   2675     case kSeqStringTag | kOneByteStringTag:
   2676       return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
   2677     case kSeqStringTag | kTwoByteStringTag:
   2678       return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
   2679     case kConsStringTag | kOneByteStringTag:
   2680     case kConsStringTag | kTwoByteStringTag:
   2681       return ConsString::cast(this)->ConsStringGet(index);
   2682     case kExternalStringTag | kOneByteStringTag:
   2683       return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
   2684     case kExternalStringTag | kTwoByteStringTag:
   2685       return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
   2686     case kSlicedStringTag | kOneByteStringTag:
   2687     case kSlicedStringTag | kTwoByteStringTag:
   2688       return SlicedString::cast(this)->SlicedStringGet(index);
   2689     default:
   2690       break;
   2691   }
   2692 
   2693   UNREACHABLE();
   2694   return 0;
   2695 }
   2696 
   2697 
   2698 void String::Set(int index, uint16_t value) {
   2699   ASSERT(index >= 0 && index < length());
   2700   ASSERT(StringShape(this).IsSequential());
   2701 
   2702   return this->IsOneByteRepresentation()
   2703       ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
   2704       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
   2705 }
   2706 
   2707 
   2708 bool String::IsFlat() {
   2709   if (!StringShape(this).IsCons()) return true;
   2710   return ConsString::cast(this)->second()->length() == 0;
   2711 }
   2712 
   2713 
   2714 String* String::GetUnderlying() {
   2715   // Giving direct access to underlying string only makes sense if the
   2716   // wrapping string is already flattened.
   2717   ASSERT(this->IsFlat());
   2718   ASSERT(StringShape(this).IsIndirect());
   2719   STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
   2720   const int kUnderlyingOffset = SlicedString::kParentOffset;
   2721   return String::cast(READ_FIELD(this, kUnderlyingOffset));
   2722 }
   2723 
   2724 
   2725 template<class Visitor, class ConsOp>
   2726 void String::Visit(
   2727     String* string,
   2728     unsigned offset,
   2729     Visitor& visitor,
   2730     ConsOp& cons_op,
   2731     int32_t type,
   2732     unsigned length) {
   2733   ASSERT(length == static_cast<unsigned>(string->length()));
   2734   ASSERT(offset <= length);
   2735   unsigned slice_offset = offset;
   2736   while (true) {
   2737     ASSERT(type == string->map()->instance_type());
   2738 
   2739     switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
   2740       case kSeqStringTag | kOneByteStringTag:
   2741         visitor.VisitOneByteString(
   2742             SeqOneByteString::cast(string)->GetChars() + slice_offset,
   2743             length - offset);
   2744         return;
   2745 
   2746       case kSeqStringTag | kTwoByteStringTag:
   2747         visitor.VisitTwoByteString(
   2748             SeqTwoByteString::cast(string)->GetChars() + slice_offset,
   2749             length - offset);
   2750         return;
   2751 
   2752       case kExternalStringTag | kOneByteStringTag:
   2753         visitor.VisitOneByteString(
   2754             ExternalAsciiString::cast(string)->GetChars() + slice_offset,
   2755             length - offset);
   2756         return;
   2757 
   2758       case kExternalStringTag | kTwoByteStringTag:
   2759         visitor.VisitTwoByteString(
   2760             ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
   2761             length - offset);
   2762         return;
   2763 
   2764       case kSlicedStringTag | kOneByteStringTag:
   2765       case kSlicedStringTag | kTwoByteStringTag: {
   2766         SlicedString* slicedString = SlicedString::cast(string);
   2767         slice_offset += slicedString->offset();
   2768         string = slicedString->parent();
   2769         type = string->map()->instance_type();
   2770         continue;
   2771       }
   2772 
   2773       case kConsStringTag | kOneByteStringTag:
   2774       case kConsStringTag | kTwoByteStringTag:
   2775         string = cons_op.Operate(string, &offset, &type, &length);
   2776         if (string == NULL) return;
   2777         slice_offset = offset;
   2778         ASSERT(length == static_cast<unsigned>(string->length()));
   2779         continue;
   2780 
   2781       default:
   2782         UNREACHABLE();
   2783         return;
   2784     }
   2785   }
   2786 }
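
        // Dispatch summary for Visit above: sequential and external leaves are
        // handed to the visitor directly; sliced strings fold their offset into
        // slice_offset and loop on the parent; cons strings defer to cons_op,
        // which either yields the next string to visit or NULL to stop.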
   2787 
   2788 
   2789 // TODO(dcarney): Remove this class after conversion to VisitFlat.
   2790 class ConsStringCaptureOp {
   2791  public:
   2792   inline ConsStringCaptureOp() : cons_string_(NULL) {}
   2793   inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
   2794     cons_string_ = ConsString::cast(string);
   2795     return NULL;
   2796   }
   2797   ConsString* cons_string_;
   2798 };
   2799 
   2800 
   2801 template<class Visitor>
   2802 ConsString* String::VisitFlat(Visitor* visitor,
   2803                               String* string,
   2804                               int offset,
   2805                               int length,
   2806                               int32_t type) {
   2807   ASSERT(length >= 0 && length == string->length());
   2808   ASSERT(offset >= 0 && offset <= length);
   2809   ConsStringCaptureOp op;
   2810   Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
   2811   return op.cons_string_;
   2812 }
   2813 
   2814 
   2815 uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
   2816   ASSERT(index >= 0 && index < length());
   2817   return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
   2818 }
   2819 
   2820 
   2821 void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
   2822   ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
   2823   WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
   2824                    static_cast<byte>(value));
   2825 }
   2826 
   2827 
   2828 Address SeqOneByteString::GetCharsAddress() {
   2829   return FIELD_ADDR(this, kHeaderSize);
   2830 }
   2831 
   2832 
   2833 uint8_t* SeqOneByteString::GetChars() {
   2834   return reinterpret_cast<uint8_t*>(GetCharsAddress());
   2835 }
   2836 
   2837 
   2838 Address SeqTwoByteString::GetCharsAddress() {
   2839   return FIELD_ADDR(this, kHeaderSize);
   2840 }
   2841 
   2842 
   2843 uc16* SeqTwoByteString::GetChars() {
   2844   return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
   2845 }
   2846 
   2847 
   2848 uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
   2849   ASSERT(index >= 0 && index < length());
   2850   return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
   2851 }
   2852 
   2853 
   2854 void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
   2855   ASSERT(index >= 0 && index < length());
   2856   WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
   2857 }
   2858 
   2859 
   2860 int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
   2861   return SizeFor(length());
   2862 }
   2863 
   2864 
   2865 int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
   2866   return SizeFor(length());
   2867 }
   2868 
   2869 
   2870 String* SlicedString::parent() {
   2871   return String::cast(READ_FIELD(this, kParentOffset));
   2872 }
   2873 
   2874 
   2875 void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
   2876   ASSERT(parent->IsSeqString() || parent->IsExternalString());
   2877   WRITE_FIELD(this, kParentOffset, parent);
   2878   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
   2879 }
   2880 
   2881 
   2882 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
   2883 
   2884 
   2885 String* ConsString::first() {
   2886   return String::cast(READ_FIELD(this, kFirstOffset));
   2887 }
   2888 
   2889 
   2890 Object* ConsString::unchecked_first() {
   2891   return READ_FIELD(this, kFirstOffset);
   2892 }
   2893 
   2894 
   2895 void ConsString::set_first(String* value, WriteBarrierMode mode) {
   2896   WRITE_FIELD(this, kFirstOffset, value);
   2897   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
   2898 }
   2899 
   2900 
   2901 String* ConsString::second() {
   2902   return String::cast(READ_FIELD(this, kSecondOffset));
   2903 }
   2904 
   2905 
   2906 Object* ConsString::unchecked_second() {
   2907   return READ_FIELD(this, kSecondOffset);
   2908 }
   2909 
   2910 
   2911 void ConsString::set_second(String* value, WriteBarrierMode mode) {
   2912   WRITE_FIELD(this, kSecondOffset, value);
   2913   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
   2914 }
   2915 
   2916 
   2917 bool ExternalString::is_short() {
   2918   InstanceType type = map()->instance_type();
   2919   return (type & kShortExternalStringMask) == kShortExternalStringTag;
   2920 }
   2921 
   2922 
   2923 const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
   2924   return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
   2925 }
   2926 
   2927 
   2928 void ExternalAsciiString::update_data_cache() {
   2929   if (is_short()) return;
   2930   const char** data_field =
   2931       reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
   2932   *data_field = resource()->data();
   2933 }
   2934 
   2935 
   2936 void ExternalAsciiString::set_resource(
   2937     const ExternalAsciiString::Resource* resource) {
   2938   *reinterpret_cast<const Resource**>(
   2939       FIELD_ADDR(this, kResourceOffset)) = resource;
   2940   if (resource != NULL) update_data_cache();
   2941 }
   2942 
   2943 
   2944 const uint8_t* ExternalAsciiString::GetChars() {
   2945   return reinterpret_cast<const uint8_t*>(resource()->data());
   2946 }
   2947 
   2948 
   2949 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
   2950   ASSERT(index >= 0 && index < length());
   2951   return GetChars()[index];
   2952 }
   2953 
   2954 
   2955 const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
   2956   return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
   2957 }
   2958 
   2959 
   2960 void ExternalTwoByteString::update_data_cache() {
   2961   if (is_short()) return;
   2962   const uint16_t** data_field =
   2963       reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
   2964   *data_field = resource()->data();
   2965 }
   2966 
   2967 
   2968 void ExternalTwoByteString::set_resource(
   2969     const ExternalTwoByteString::Resource* resource) {
   2970   *reinterpret_cast<const Resource**>(
   2971       FIELD_ADDR(this, kResourceOffset)) = resource;
   2972   if (resource != NULL) update_data_cache();
   2973 }
   2974 
   2975 
   2976 const uint16_t* ExternalTwoByteString::GetChars() {
   2977   return resource()->data();
   2978 }
   2979 
   2980 
   2981 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
   2982   ASSERT(index >= 0 && index < length());
   2983   return GetChars()[index];
   2984 }
   2985 
   2986 
   2987 const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
   2988       unsigned start) {
   2989   return GetChars() + start;
   2990 }
   2991 
   2992 
   2993 String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
   2994   return NULL;
   2995 }
   2996 
   2997 
   2998 unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
   2999   return depth & kDepthMask;
   3000 }
   3001 
   3002 
   3003 void ConsStringIteratorOp::PushLeft(ConsString* string) {
   3004   frames_[depth_++ & kDepthMask] = string;
   3005 }
   3006 
   3007 
   3008 void ConsStringIteratorOp::PushRight(ConsString* string) {
   3009   // In-place update of the topmost frame.
   3010   frames_[(depth_-1) & kDepthMask] = string;
   3011 }
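
        // Note: frames_ is a fixed-size ring buffer indexed by
        // depth_ & kDepthMask; if a cons tree nests deeper than the buffer,
        // older frames are overwritten and ContinueOperation below restarts the
        // traversal from the root (the blew_stack path).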
   3012 
   3013 
   3014 void ConsStringIteratorOp::AdjustMaximumDepth() {
   3015   if (depth_ > maximum_depth_) maximum_depth_ = depth_;
   3016 }
   3017 
   3018 
   3019 void ConsStringIteratorOp::Pop() {
   3020   ASSERT(depth_ > 0);
   3021   ASSERT(depth_ <= maximum_depth_);
   3022   depth_--;
   3023 }
   3024 
   3025 
   3026 bool ConsStringIteratorOp::HasMore() {
   3027   return depth_ != 0;
   3028 }
   3029 
   3030 
   3031 void ConsStringIteratorOp::Reset() {
   3032   depth_ = 0;
   3033 }
   3034 
   3035 
   3036 String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
   3037                                                 unsigned* length_out) {
   3038   bool blew_stack = false;
   3039   String* string = NextLeaf(&blew_stack, type_out, length_out);
   3040   // String found.
   3041   if (string != NULL) {
   3042     // Verify output.
   3043     ASSERT(*length_out == static_cast<unsigned>(string->length()));
   3044     ASSERT(*type_out == string->map()->instance_type());
   3045     return string;
   3046   }
   3047   // Traversal complete.
   3048   if (!blew_stack) return NULL;
   3049   // Restart search from root.
   3050   unsigned offset_out;
   3051   string = Search(&offset_out, type_out, length_out);
   3052   // Verify output.
   3053   ASSERT(string == NULL || offset_out == 0);
   3054   ASSERT(string == NULL ||
   3055          *length_out == static_cast<unsigned>(string->length()));
   3056   ASSERT(string == NULL || *type_out == string->map()->instance_type());
   3057   return string;
   3058 }
   3059 
   3060 
   3061 uint16_t StringCharacterStream::GetNext() {
   3062   ASSERT(buffer8_ != NULL && end_ != NULL);
   3063   // Advance cursor if needed.
   3064   // TODO(dcarney): Ensure users of the API call HasMore first and avoid this.
   3065   if (buffer8_ == end_) HasMore();
   3066   ASSERT(buffer8_ < end_);
   3067   return is_one_byte_ ? *buffer8_++ : *buffer16_++;
   3068 }
   3069 
   3070 
   3071 StringCharacterStream::StringCharacterStream(String* string,
   3072                                              ConsStringIteratorOp* op,
   3073                                              unsigned offset)
   3074   : is_one_byte_(false),
   3075     op_(op) {
   3076   Reset(string, offset);
   3077 }
   3078 
   3079 
   3080 void StringCharacterStream::Reset(String* string, unsigned offset) {
   3081   op_->Reset();
   3082   buffer8_ = NULL;
   3083   end_ = NULL;
   3084   int32_t type = string->map()->instance_type();
   3085   unsigned length = string->length();
   3086   String::Visit(string, offset, *this, *op_, type, length);
   3087 }
   3088 
   3089 
   3090 bool StringCharacterStream::HasMore() {
   3091   if (buffer8_ != end_) return true;
   3092   if (!op_->HasMore()) return false;
   3093   unsigned length;
   3094   int32_t type;
   3095   String* string = op_->ContinueOperation(&type, &length);
   3096   if (string == NULL) return false;
   3097   ASSERT(!string->IsConsString());
   3098   ASSERT(string->length() != 0);
   3099   ConsStringNullOp null_op;
   3100   String::Visit(string, 0, *this, null_op, type, length);
   3101   ASSERT(buffer8_ != end_);
   3102   return true;
   3103 }
   3104 
   3105 
   3106 void StringCharacterStream::VisitOneByteString(
   3107     const uint8_t* chars, unsigned length) {
   3108   is_one_byte_ = true;
   3109   buffer8_ = chars;
   3110   end_ = chars + length;
   3111 }
   3112 
   3113 
   3114 void StringCharacterStream::VisitTwoByteString(
   3115     const uint16_t* chars, unsigned length) {
   3116   is_one_byte_ = false;
   3117   buffer16_ = chars;
   3118   end_ = reinterpret_cast<const uint8_t*>(chars + length);
   3119 }
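
// Typical usage of StringCharacterStream (illustrative sketch only;
// Process() is a placeholder, not a V8 function):
//
//   ConsStringIteratorOp op;
//   StringCharacterStream stream(string, &op, 0);
//   while (stream.HasMore()) {
//     Process(stream.GetNext());
//   }
//
// GetNext() must not be called once HasMore() has returned false.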
   3120 
   3121 
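// Layout of JSFunctionResultCache, as implied by the accessors below: it is a
// FixedArray whose kFingerIndex and kCacheSizeIndex slots hold Smis and whose
// cached entries start at kEntriesIndex. MakeZeroSize() empties the cache by
// pointing both bookkeeping slots at kEntriesIndex.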
   3122 void JSFunctionResultCache::MakeZeroSize() {
   3123   set_finger_index(kEntriesIndex);
   3124   set_size(kEntriesIndex);
   3125 }
   3126 
   3127 
   3128 void JSFunctionResultCache::Clear() {
   3129   int cache_size = size();
   3130   Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
   3131   MemsetPointer(entries_start,
   3132                 GetHeap()->the_hole_value(),
   3133                 cache_size - kEntriesIndex);
   3134   MakeZeroSize();
   3135 }
   3136 
   3137 
   3138 int JSFunctionResultCache::size() {
   3139   return Smi::cast(get(kCacheSizeIndex))->value();
   3140 }
   3141 
   3142 
   3143 void JSFunctionResultCache::set_size(int size) {
   3144   set(kCacheSizeIndex, Smi::FromInt(size));
   3145 }
   3146 
   3147 
   3148 int JSFunctionResultCache::finger_index() {
   3149   return Smi::cast(get(kFingerIndex))->value();
   3150 }
   3151 
   3152 
   3153 void JSFunctionResultCache::set_finger_index(int finger_index) {
   3154   set(kFingerIndex, Smi::FromInt(finger_index));
   3155 }
   3156 
   3157 
   3158 byte ByteArray::get(int index) {
   3159   ASSERT(index >= 0 && index < this->length());
   3160   return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
   3161 }
   3162 
   3163 
   3164 void ByteArray::set(int index, byte value) {
   3165   ASSERT(index >= 0 && index < this->length());
   3166   WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
   3167 }
   3168 
   3169 
   3170 int ByteArray::get_int(int index) {
   3171   ASSERT(index >= 0 && (index * kIntSize) < this->length());
   3172   return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
   3173 }
   3174 
   3175 
   3176 ByteArray* ByteArray::FromDataStartAddress(Address address) {
   3177   ASSERT_TAG_ALIGNED(address);
   3178   return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
   3179 }
   3180 
   3181 
   3182 Address ByteArray::GetDataStartAddress() {
   3183   return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
   3184 }
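
// The two conversions above are exact inverses: each adjusts the address by
// kHeaderSize and kHeapObjectTag in opposite directions, so for any byte
// array the following holds:
//
//   ByteArray::FromDataStartAddress(array->GetDataStartAddress()) == array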
   3185 
   3186 
   3187 uint8_t* ExternalPixelArray::external_pixel_pointer() {
   3188   return reinterpret_cast<uint8_t*>(external_pointer());
   3189 }
   3190 
   3191 
   3192 uint8_t ExternalPixelArray::get_scalar(int index) {
   3193   ASSERT((index >= 0) && (index < this->length()));
   3194   uint8_t* ptr = external_pixel_pointer();
   3195   return ptr[index];
   3196 }
   3197 
   3198 
   3199 MaybeObject* ExternalPixelArray::get(int index) {
   3200   return Smi::FromInt(static_cast<int>(get_scalar(index)));
   3201 }
   3202 
   3203 
   3204 void ExternalPixelArray::set(int index, uint8_t value) {
   3205   ASSERT((index >= 0) && (index < this->length()));
   3206   uint8_t* ptr = external_pixel_pointer();
   3207   ptr[index] = value;
   3208 }
   3209 
   3210 
   3211 void* ExternalArray::external_pointer() {
   3212   intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
   3213   return reinterpret_cast<void*>(ptr);
   3214 }
   3215 
   3216 
   3217 void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
   3218   intptr_t ptr = reinterpret_cast<intptr_t>(value);
   3219   WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
   3220 }
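
// The typed external arrays below (byte through double) follow the same
// pattern as ExternalPixelArray above: get_scalar() and set() index directly
// into the externally allocated backing store, with bounds enforced only by
// ASSERTs (i.e. only in debug builds), while get() boxes the scalar as a Smi
// or heap number. Illustrative sketch (hypothetical call site, not V8 code):
//
//   ExternalByteArray* array = ...;
//   for (int i = 0; i < array->length(); i++) {
//     int8_t v = array->get_scalar(i);  // raw read; caller keeps i in bounds
//     array->set(i, v + 1);
//   }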
   3221 
   3222 
   3223 int8_t ExternalByteArray::get_scalar(int index) {
   3224   ASSERT((index >= 0) && (index < this->length()));
   3225   int8_t* ptr = static_cast<int8_t*>(external_pointer());
   3226   return ptr[index];
   3227 }
   3228 
   3229 
   3230 MaybeObject* ExternalByteArray::get(int index) {
   3231   return Smi::FromInt(static_cast<int>(get_scalar(index)));
   3232 }
   3233 
   3234 
   3235 void ExternalByteArray::set(int index, int8_t value) {
   3236   ASSERT((index >= 0) && (index < this->length()));
   3237   int8_t* ptr = static_cast<int8_t*>(external_pointer());
   3238   ptr[index] = value;
   3239 }
   3240 
   3241 
   3242 uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
   3243   ASSERT((index >= 0) && (index < this->length()));
   3244   uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
   3245   return ptr[index];
   3246 }
   3247 
   3248 
   3249 MaybeObject* ExternalUnsignedByteArray::get(int index) {
   3250   return Smi::FromInt(static_cast<int>(get_scalar(index)));
   3251 }
   3252 
   3253 
   3254 void ExternalUnsignedByteArray::set(int index, uint8_t value) {
   3255   ASSERT((index >= 0) && (index < this->length()));
   3256   uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
   3257   ptr[index] = value;
   3258 }
   3259 
   3260 
   3261 int16_t ExternalShortArray::get_scalar(int index) {
   3262   ASSERT((index >= 0) && (index < this->length()));
   3263   int16_t* ptr = static_cast<int16_t*>(external_pointer());
   3264   return ptr[index];
   3265 }
   3266 
   3267 
   3268 MaybeObject* ExternalShortArray::get(int index) {
   3269   return Smi::FromInt(static_cast<int>(get_scalar(index)));
   3270 }
   3271 
   3272 
   3273 void ExternalShortArray::set(int index, int16_t value) {
   3274   ASSERT((index >= 0) && (index < this->length()));
   3275   int16_t* ptr = static_cast<int16_t*>(external_pointer());
   3276   ptr[index] = value;
   3277 }
   3278 
   3279 
   3280 uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
   3281   ASSERT((index >= 0) && (index < this->length()));
   3282   uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
   3283   return ptr[index];
   3284 }
   3285 
   3286 
   3287 MaybeObject* ExternalUnsignedShortArray::get(int index) {
   3288   return Smi::FromInt(static_cast<int>(get_scalar(index)));
   3289 }
   3290 
   3291 
   3292 void ExternalUnsignedShortArray::set(int index, uint16_t value) {
   3293   ASSERT((index >= 0) && (index < this->length()));
   3294   uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
   3295   ptr[index] = value;
   3296 }
   3297 
   3298 
   3299 int32_t ExternalIntArray::get_scalar(int index) {
   3300   ASSERT((index >= 0) && (index < this->length()));
   3301   int32_t* ptr = static_cast<int32_t*>(external_pointer());
   3302   return ptr[index];
   3303 }
   3304 
   3305 
   3306 MaybeObject* ExternalIntArray::get(int index) {
   3307   return GetHeap()->NumberFromInt32(get_scalar(index));
   3308 }
   3309 
   3310 
   3311 void ExternalIntArray::set(int index, int32_t value) {
   3312   ASSERT((index >= 0) && (index < this->length()));
   3313   int32_t* ptr = static_cast<int32_t*>(external_pointer());
   3314   ptr[index] = value;
   3315 }
   3316 
   3317 
   3318 uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
   3319   ASSERT((index >= 0) && (index < this->length()));
   3320   uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
   3321   return ptr[index];
   3322 }
   3323 
   3324 
   3325 MaybeObject* ExternalUnsignedIntArray::get(int index) {
   3326   return GetHeap()->NumberFromUint32(get_scalar(index));
   3327 }
   3328 
   3329 
   3330 void ExternalUnsignedIntArray::set(int index, uint32_t value) {
   3331   ASSERT((index >= 0) && (index < this->length()));
   3332   uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
   3333   ptr[index] = value;
   3334 }
   3335 
   3336 
   3337 float ExternalFloatArray::get_scalar(int index) {
   3338   ASSERT((index >= 0) && (index < this->length()));
   3339   float* ptr = static_cast<float*>(external_pointer());
   3340   return ptr[index];
   3341 }
   3342 
   3343 
   3344 MaybeObject* ExternalFloatArray::get(int index) {
   3345   return GetHeap()->NumberFromDouble(get_scalar(index));
   3346 }
   3347 
   3348 
   3349 void ExternalFloatArray::set(int index, float value) {
   3350   ASSERT((index >= 0) && (index < this->length()));
   3351   float* ptr = static_cast<float*>(external_pointer());
   3352   ptr[index] = value;
   3353 }
   3354 
   3355 
   3356 double ExternalDoubleArray::get_scalar(int index) {
   3357   ASSERT((index >= 0) && (index < this->length()));
   3358   double* ptr = static_cast<double*>(external_pointer());
   3359   return ptr[index];
   3360 }
   3361 
   3362 
   3363 MaybeObject* ExternalDoubleArray::get(int index) {
   3364   return GetHeap()->NumberFromDouble(get_scalar(index));
   3365 }
   3366 
   3367 
   3368 void ExternalDoubleArray::set(int index, double value) {
   3369   ASSERT((index >= 0) && (index < this->length()));
   3370   double* ptr = static_cast<double*>(external_pointer());
   3371   ptr[index] = value;
   3372 }
   3373 
   3374 
   3375 int Map::visitor_id() {
   3376   return READ_BYTE_FIELD(this, kVisitorIdOffset);
   3377 }
   3378 
   3379 
   3380 void Map::set_visitor_id(int id) {
   3381   ASSERT(0 <= id && id < 256);
   3382   WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
   3383 }
   3384 
   3385 
   3386 int Map::instance_size() {
   3387   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
   3388 }
   3389 
   3390 
   3391 int Map::inobject_properties() {
   3392   return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
   3393 }
   3394 
   3395 
   3396 int Map::pre_allocated_property_fields() {
   3397   return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
   3398 }
   3399 
   3400 
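// Most instance types have a statically known size stored directly in the
// map. Types whose instances vary in size (sequential strings, byte arrays,
// fixed arrays, free space and code) store kVariableSizeSentinel instead,
// and their actual size is computed here from a per-object length field.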
   3401 int HeapObject::SizeFromMap(Map* map) {
   3402   int instance_size = map->instance_size();
   3403   if (instance_size != kVariableSizeSentinel) return instance_size;
   3404   // Only inline the most frequent cases.
   3405   int instance_type = static_cast<int>(map->instance_type());
   3406   if (instance_type == FIXED_ARRAY_TYPE) {
   3407     return FixedArray::BodyDescriptor::SizeOf(map, this);
   3408   }
   3409   if (instance_type == ASCII_STRING_TYPE ||
   3410       instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
   3411     return SeqOneByteString::SizeFor(
   3412         reinterpret_cast<SeqOneByteString*>(this)->length());
   3413   }
   3414   if (instance_type == BYTE_ARRAY_TYPE) {
   3415     return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
   3416   }
   3417   if (instance_type == FREE_SPACE_TYPE) {
   3418     return reinterpret_cast<FreeSpace*>(this)->size();
   3419   }
   3420   if (instance_type == STRING_TYPE ||
   3421       instance_type == INTERNALIZED_STRING_TYPE) {
   3422     return SeqTwoByteString::SizeFor(
   3423         reinterpret_cast<SeqTwoByteString*>(this)->length());
   3424   }
   3425   if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
   3426     return FixedDoubleArray::SizeFor(
   3427         reinterpret_cast<FixedDoubleArray*>(this)->length());
   3428   }
   3429   ASSERT(instance_type == CODE_TYPE);
   3430   return reinterpret_cast<Code*>(this)->CodeSize();
   3431 }
   3432 
   3433 
   3434 void Map::set_instance_size(int value) {
   3435   ASSERT_EQ(0, value & (kPointerSize - 1));
   3436   value >>= kPointerSizeLog2;
   3437   ASSERT(0 <= value && value < 256);
   3438   WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
   3439 }
   3440 
   3441 
   3442 void Map::set_inobject_properties(int value) {
   3443   ASSERT(0 <= value && value < 256);
   3444   WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
   3445 }
   3446 
   3447 
   3448 void Map::set_pre_allocated_property_fields(int value) {
   3449   ASSERT(0 <= value && value < 256);
   3450   WRITE_BYTE_FIELD(this,
   3451                    kPreAllocatedPropertyFieldsOffset,
   3452                    static_cast<byte>(value));
   3453 }
   3454 
   3455 
   3456 InstanceType Map::instance_type() {
   3457   return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
   3458 }
   3459 
   3460 
   3461 void Map::set_instance_type(InstanceType value) {
   3462   WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
   3463 }
   3464 
   3465 
   3466 int Map::unused_property_fields() {
   3467   return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
   3468 }
   3469 
   3470 
   3471 void Map::set_unused_property_fields(int value) {
   3472   WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
   3473 }
   3474 
   3475 
   3476 byte Map::bit_field() {
   3477   return READ_BYTE_FIELD(this, kBitFieldOffset);
   3478 }
   3479 
   3480 
   3481 void Map::set_bit_field(byte value) {
   3482   WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
   3483 }
   3484 
   3485 
   3486 byte Map::bit_field2() {
   3487   return READ_BYTE_FIELD(this, kBitField2Offset);
   3488 }
   3489 
   3490 
   3491 void Map::set_bit_field2(byte value) {
   3492   WRITE_BYTE_FIELD(this, kBitField2Offset, value);
   3493 }
   3494 
   3495 
   3496 void Map::set_non_instance_prototype(bool value) {
   3497   if (value) {
   3498     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
   3499   } else {
   3500     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
   3501   }
   3502 }
   3503 
   3504 
   3505 bool Map::has_non_instance_prototype() {
   3506   return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
   3507 }
   3508 
   3509 
   3510 void Map::set_function_with_prototype(bool value) {
   3511   set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
   3512 }
   3513 
   3514 
   3515 bool Map::function_with_prototype() {
   3516   return FunctionWithPrototype::decode(bit_field3());
   3517 }
   3518 
   3519 
   3520 void Map::set_is_access_check_needed(bool access_check_needed) {
   3521   if (access_check_needed) {
   3522     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
   3523   } else {
   3524     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
   3525   }
   3526 }
   3527 
   3528 
   3529 bool Map::is_access_check_needed() {
   3530   return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
   3531 }
   3532 
   3533 
   3534 void Map::set_is_extensible(bool value) {
   3535   if (value) {
   3536     set_bit_field2(bit_field2() | (1 << kIsExtensible));
   3537   } else {
   3538     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
   3539   }
   3540 }
   3541 
   3542 bool Map::is_extensible() {
   3543   return ((1 << kIsExtensible) & bit_field2()) != 0;
   3544 }
   3545 
   3546 
   3547 void Map::set_attached_to_shared_function_info(bool value) {
   3548   if (value) {
   3549     set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
   3550   } else {
   3551     set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
   3552   }
   3553 }
   3554 
   3555 bool Map::attached_to_shared_function_info() {
   3556   return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
   3557 }
   3558 
   3559 
   3560 void Map::set_is_shared(bool value) {
   3561   set_bit_field3(IsShared::update(bit_field3(), value));
   3562 }
   3563 
   3564 
   3565 bool Map::is_shared() {
   3566   return IsShared::decode(bit_field3());
   3567 }
   3568 
   3569 
   3570 void Map::set_dictionary_map(bool value) {
   3571   if (value) mark_unstable();
   3572   set_bit_field3(DictionaryMap::update(bit_field3(), value));
   3573 }
   3574 
   3575 
   3576 bool Map::is_dictionary_map() {
   3577   return DictionaryMap::decode(bit_field3());
   3578 }
   3579 
   3580 
   3581 Code::Flags Code::flags() {
   3582   return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
   3583 }
   3584 
   3585 
   3586 void Map::set_owns_descriptors(bool owns_descriptors) {
   3587   set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
   3588 }
   3589 
   3590 
   3591 bool Map::owns_descriptors() {
   3592   return OwnsDescriptors::decode(bit_field3());
   3593 }
   3594 
   3595 
   3596 void Map::set_is_observed(bool is_observed) {
   3597   ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
   3598          instance_type() > LAST_JS_OBJECT_TYPE ||
   3599          has_slow_elements_kind() || has_external_array_elements());
   3600   set_bit_field3(IsObserved::update(bit_field3(), is_observed));
   3601 }
   3602 
   3603 
   3604 bool Map::is_observed() {
   3605   return IsObserved::decode(bit_field3());
   3606 }
   3607 
   3608 
   3609 void Map::deprecate() {
   3610   set_bit_field3(Deprecated::update(bit_field3(), true));
   3611 }
   3612 
   3613 
   3614 bool Map::is_deprecated() {
   3615   if (!FLAG_track_fields) return false;
   3616   return Deprecated::decode(bit_field3());
   3617 }
   3618 
   3619 
   3620 void Map::set_migration_target(bool value) {
   3621   set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
   3622 }
   3623 
   3624 
   3625 bool Map::is_migration_target() {
   3626   if (!FLAG_track_fields) return false;
   3627   return IsMigrationTarget::decode(bit_field3());
   3628 }
   3629 
   3630 
   3631 void Map::freeze() {
   3632   set_bit_field3(IsFrozen::update(bit_field3(), true));
   3633 }
   3634 
   3635 
   3636 bool Map::is_frozen() {
   3637   return IsFrozen::decode(bit_field3());
   3638 }
   3639 
   3640 
   3641 void Map::mark_unstable() {
   3642   set_bit_field3(IsUnstable::update(bit_field3(), true));
   3643 }
   3644 
   3645 
   3646 bool Map::is_stable() {
   3647   return !IsUnstable::decode(bit_field3());
   3648 }
   3649 
   3650 
   3651 bool Map::has_code_cache() {
   3652   return code_cache() != GetIsolate()->heap()->empty_fixed_array();
   3653 }
   3654 
   3655 
   3656 bool Map::CanBeDeprecated() {
   3657   int descriptor = LastAdded();
   3658   for (int i = 0; i <= descriptor; i++) {
   3659     PropertyDetails details = instance_descriptors()->GetDetails(i);
   3660     if (FLAG_track_fields && details.representation().IsNone()) {
   3661       return true;
   3662     }
   3663     if (FLAG_track_fields && details.representation().IsSmi()) {
   3664       return true;
   3665     }
   3666     if (FLAG_track_double_fields && details.representation().IsDouble()) {
   3667       return true;
   3668     }
   3669     if (FLAG_track_heap_object_fields &&
   3670         details.representation().IsHeapObject()) {
   3671       return true;
   3672     }
   3673     if (FLAG_track_fields && details.type() == CONSTANT) {
   3674       return true;
   3675     }
   3676   }
   3677   return false;
   3678 }
   3679 
   3680 
   3681 void Map::NotifyLeafMapLayoutChange() {
   3682   if (is_stable()) {
   3683     mark_unstable();
   3684     dependent_code()->DeoptimizeDependentCodeGroup(
   3685         GetIsolate(),
   3686         DependentCode::kPrototypeCheckGroup);
   3687   }
   3688 }
   3689 
   3690 
   3691 bool Map::CanOmitMapChecks() {
   3692   return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
   3693 }
   3694 
   3695 
   3696 int DependentCode::number_of_entries(DependencyGroup group) {
   3697   if (length() == 0) return 0;
   3698   return Smi::cast(get(group))->value();
   3699 }
   3700 
   3701 
   3702 void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
   3703   set(group, Smi::FromInt(value));
   3704 }
   3705 
   3706 
   3707 bool DependentCode::is_code_at(int i) {
   3708   return get(kCodesStartIndex + i)->IsCode();
   3709 }
   3710 
   3711 Code* DependentCode::code_at(int i) {
   3712   return Code::cast(get(kCodesStartIndex + i));
   3713 }
   3714 
   3715 
   3716 CompilationInfo* DependentCode::compilation_info_at(int i) {
   3717   return reinterpret_cast<CompilationInfo*>(
   3718       Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
   3719 }
   3720 
   3721 
   3722 void DependentCode::set_object_at(int i, Object* object) {
   3723   set(kCodesStartIndex + i, object);
   3724 }
   3725 
   3726 
   3727 Object* DependentCode::object_at(int i) {
   3728   return get(kCodesStartIndex + i);
   3729 }
   3730 
   3731 
   3732 Object** DependentCode::slot_at(int i) {
   3733   return HeapObject::RawField(
   3734       this, FixedArray::OffsetOfElementAt(kCodesStartIndex + i));
   3735 }
   3736 
   3737 
   3738 void DependentCode::clear_at(int i) {
   3739   set_undefined(kCodesStartIndex + i);
   3740 }
   3741 
   3742 
   3743 void DependentCode::copy(int from, int to) {
   3744   set(kCodesStartIndex + to, get(kCodesStartIndex + from));
   3745 }
   3746 
   3747 
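// DependentCode stores its entries grouped by DependencyGroup in one flat
// array; the group boundaries are tracked by GroupStartIndexes. ExtendGroup()
// opens one extra slot at the end of the given group by moving the first
// element of each later group to that group's end, shifting every later
// boundary right by one.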
   3748 void DependentCode::ExtendGroup(DependencyGroup group) {
   3749   GroupStartIndexes starts(this);
   3750   for (int g = kGroupCount - 1; g > group; g--) {
   3751     if (starts.at(g) < starts.at(g + 1)) {
   3752       copy(starts.at(g), starts.at(g + 1));
   3753     }
   3754   }
   3755 }
   3756 
   3757 
   3758 void Code::set_flags(Code::Flags flags) {
   3759   STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
   3760   // Make sure that all call stubs have an arguments count.
   3761   ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
   3762           ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
   3763          ExtractArgumentsCountFromFlags(flags) >= 0);
   3764   WRITE_INT_FIELD(this, kFlagsOffset, flags);
   3765 }
   3766 
   3767 
   3768 Code::Kind Code::kind() {
   3769   return ExtractKindFromFlags(flags());
   3770 }
   3771 
   3772 
   3773 InlineCacheState Code::ic_state() {
   3774   InlineCacheState result = ExtractICStateFromFlags(flags());
   3775   // Only allow uninitialized or debugger states for non-IC code
   3776   // objects. This is used in the debugger to determine whether or not
   3777   // a call to a code object has been replaced with a debug break call.
   3778   ASSERT(is_inline_cache_stub() ||
   3779          result == UNINITIALIZED ||
   3780          result == DEBUG_STUB);
   3781   return result;
   3782 }
   3783 
   3784 
   3785 Code::ExtraICState Code::extra_ic_state() {
   3786   ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
   3787          || ic_state() == DEBUG_STUB);
   3788   return ExtractExtraICStateFromFlags(flags());
   3789 }
   3790 
   3791 
   3792 Code::ExtraICState Code::extended_extra_ic_state() {
   3793   ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
   3794   ASSERT(needs_extended_extra_ic_state(kind()));
   3795   return ExtractExtendedExtraICStateFromFlags(flags());
   3796 }
   3797 
   3798 
   3799 Code::StubType Code::type() {
   3800   return ExtractTypeFromFlags(flags());
   3801 }
   3802 
   3803 
   3804 int Code::arguments_count() {
   3805   ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
   3806   return ExtractArgumentsCountFromFlags(flags());
   3807 }
   3808 
   3809 
   3810 inline bool Code::is_crankshafted() {
   3811   return IsCrankshaftedField::decode(
   3812       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
   3813 }
   3814 
   3815 
   3816 inline void Code::set_is_crankshafted(bool value) {
   3817   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   3818   int updated = IsCrankshaftedField::update(previous, value);
   3819   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   3820 }
   3821 
   3822 
   3823 int Code::major_key() {
   3824   ASSERT(kind() == STUB ||
   3825          kind() == BINARY_OP_IC ||
   3826          kind() == COMPARE_IC ||
   3827          kind() == COMPARE_NIL_IC ||
   3828          kind() == STORE_IC ||
   3829          kind() == LOAD_IC ||
   3830          kind() == KEYED_LOAD_IC ||
   3831          kind() == TO_BOOLEAN_IC);
   3832   return StubMajorKeyField::decode(
   3833       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
   3834 }
   3835 
   3836 
   3837 void Code::set_major_key(int major) {
   3838   ASSERT(kind() == STUB ||
   3839          kind() == BINARY_OP_IC ||
   3840          kind() == COMPARE_IC ||
   3841          kind() == COMPARE_NIL_IC ||
   3842          kind() == LOAD_IC ||
   3843          kind() == KEYED_LOAD_IC ||
   3844          kind() == STORE_IC ||
   3845          kind() == KEYED_STORE_IC ||
   3846          kind() == TO_BOOLEAN_IC);
   3847   ASSERT(0 <= major && major < 256);
   3848   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   3849   int updated = StubMajorKeyField::update(previous, major);
   3850   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   3851 }
   3852 
   3853 
   3854 bool Code::is_pregenerated() {
   3855   return (kind() == STUB && IsPregeneratedField::decode(flags()));
   3856 }
   3857 
   3858 
   3859 void Code::set_is_pregenerated(bool value) {
   3860   ASSERT(kind() == STUB);
   3861   Flags f = flags();
   3862   f = static_cast<Flags>(IsPregeneratedField::update(f, value));
   3863   set_flags(f);
   3864 }
   3865 
   3866 
   3867 bool Code::optimizable() {
   3868   ASSERT_EQ(FUNCTION, kind());
   3869   return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
   3870 }
   3871 
   3872 
   3873 void Code::set_optimizable(bool value) {
   3874   ASSERT_EQ(FUNCTION, kind());
   3875   WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
   3876 }
   3877 
   3878 
   3879 bool Code::has_deoptimization_support() {
   3880   ASSERT_EQ(FUNCTION, kind());
   3881   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3882   return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
   3883 }
   3884 
   3885 
   3886 void Code::set_has_deoptimization_support(bool value) {
   3887   ASSERT_EQ(FUNCTION, kind());
   3888   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3889   flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
   3890   WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
   3891 }
   3892 
   3893 
   3894 bool Code::has_debug_break_slots() {
   3895   ASSERT_EQ(FUNCTION, kind());
   3896   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3897   return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
   3898 }
   3899 
   3900 
   3901 void Code::set_has_debug_break_slots(bool value) {
   3902   ASSERT_EQ(FUNCTION, kind());
   3903   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3904   flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
   3905   WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
   3906 }
   3907 
   3908 
   3909 bool Code::is_compiled_optimizable() {
   3910   ASSERT_EQ(FUNCTION, kind());
   3911   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3912   return FullCodeFlagsIsCompiledOptimizable::decode(flags);
   3913 }
   3914 
   3915 
   3916 void Code::set_compiled_optimizable(bool value) {
   3917   ASSERT_EQ(FUNCTION, kind());
   3918   byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
   3919   flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
   3920   WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
   3921 }
   3922 
   3923 
   3924 int Code::allow_osr_at_loop_nesting_level() {
   3925   ASSERT_EQ(FUNCTION, kind());
   3926   return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
   3927 }
   3928 
   3929 
   3930 void Code::set_allow_osr_at_loop_nesting_level(int level) {
   3931   ASSERT_EQ(FUNCTION, kind());
   3932   ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
   3933   WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
   3934 }
   3935 
   3936 
   3937 int Code::profiler_ticks() {
   3938   ASSERT_EQ(FUNCTION, kind());
   3939   return READ_BYTE_FIELD(this, kProfilerTicksOffset);
   3940 }
   3941 
   3942 
   3943 void Code::set_profiler_ticks(int ticks) {
   3944   ASSERT_EQ(FUNCTION, kind());
   3945   ASSERT(ticks < 256);
   3946   WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
   3947 }
   3948 
   3949 
   3950 unsigned Code::stack_slots() {
   3951   ASSERT(is_crankshafted());
   3952   return StackSlotsField::decode(
   3953       READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
   3954 }
   3955 
   3956 
   3957 void Code::set_stack_slots(unsigned slots) {
   3958   CHECK(slots < (1 << kStackSlotsBitCount));
   3959   ASSERT(is_crankshafted());
   3960   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
   3961   int updated = StackSlotsField::update(previous, slots);
   3962   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
   3963 }
   3964 
   3965 
   3966 unsigned Code::safepoint_table_offset() {
   3967   ASSERT(is_crankshafted());
   3968   return SafepointTableOffsetField::decode(
   3969       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
   3970 }
   3971 
   3972 
   3973 void Code::set_safepoint_table_offset(unsigned offset) {
   3974   CHECK(offset < (1 << kSafepointTableOffsetBitCount));
   3975   ASSERT(is_crankshafted());
   3976   ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
   3977   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   3978   int updated = SafepointTableOffsetField::update(previous, offset);
   3979   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   3980 }
   3981 
   3982 
   3983 unsigned Code::back_edge_table_offset() {
   3984   ASSERT_EQ(FUNCTION, kind());
   3985   return BackEdgeTableOffsetField::decode(
   3986       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
   3987 }
   3988 
   3989 
   3990 void Code::set_back_edge_table_offset(unsigned offset) {
   3991   ASSERT_EQ(FUNCTION, kind());
   3992   ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
   3993   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   3994   int updated = BackEdgeTableOffsetField::update(previous, offset);
   3995   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   3996 }
   3997 
   3998 
   3999 bool Code::back_edges_patched_for_osr() {
   4000   ASSERT_EQ(FUNCTION, kind());
   4001   return BackEdgesPatchedForOSRField::decode(
   4002       READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
   4003 }
   4004 
   4005 
   4006 void Code::set_back_edges_patched_for_osr(bool value) {
   4007   ASSERT_EQ(FUNCTION, kind());
   4008   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
   4009   int updated = BackEdgesPatchedForOSRField::update(previous, value);
   4010   WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
   4011 }
   4012 
   4013 
   4014 
   4015 CheckType Code::check_type() {
   4016   ASSERT(is_call_stub() || is_keyed_call_stub());
   4017   byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
   4018   return static_cast<CheckType>(type);
   4019 }
   4020 
   4021 
   4022 void Code::set_check_type(CheckType value) {
   4023   ASSERT(is_call_stub() || is_keyed_call_stub());
   4024   WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
   4025 }
   4026 
   4027 
   4028 byte Code::to_boolean_state() {
   4029   return extended_extra_ic_state();
   4030 }
   4031 
   4032 
   4033 bool Code::has_function_cache() {
   4034   ASSERT(kind() == STUB);
   4035   return HasFunctionCacheField::decode(
   4036       READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
   4037 }
   4038 
   4039 
   4040 void Code::set_has_function_cache(bool flag) {
   4041   ASSERT(kind() == STUB);
   4042   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
   4043   int updated = HasFunctionCacheField::update(previous, flag);
   4044   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
   4045 }
   4046 
   4047 
   4048 bool Code::marked_for_deoptimization() {
   4049   ASSERT(kind() == OPTIMIZED_FUNCTION);
   4050   return MarkedForDeoptimizationField::decode(
   4051       READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
   4052 }
   4053 
   4054 
   4055 void Code::set_marked_for_deoptimization(bool flag) {
   4056   ASSERT(kind() == OPTIMIZED_FUNCTION);
   4057   int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
   4058   int updated = MarkedForDeoptimizationField::update(previous, flag);
   4059   WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
   4060 }
   4061 
   4062 
   4063 bool Code::is_inline_cache_stub() {
   4064   Kind kind = this->kind();
   4065   switch (kind) {
   4066 #define CASE(name) case name: return true;
   4067     IC_KIND_LIST(CASE)
   4068 #undef CASE
   4069     default: return false;
   4070   }
   4071 }
   4072 
   4073 
   4074 bool Code::is_debug_stub() {
   4075   return ic_state() == DEBUG_STUB;
   4076 }
   4077 
   4078 
   4079 Code::Flags Code::ComputeFlags(Kind kind,
   4080                                InlineCacheState ic_state,
   4081                                ExtraICState extra_ic_state,
   4082                                StubType type,
   4083                                int argc,
   4084                                InlineCacheHolderFlag holder) {
   4085   ASSERT(argc <= Code::kMaxArguments);
   4086   // Since the extended extra IC state overlaps with the argument count
   4087   // for CALL_ICs, do extra checks to make sure that they don't interfere.
   4088   ASSERT((kind != Code::CALL_IC &&
   4089           kind != Code::KEYED_CALL_IC) ||
   4090          ExtraICStateField::is_valid(extra_ic_state));
   4091   // Compute the bit mask.
   4092   unsigned int bits = KindField::encode(kind)
   4093       | ICStateField::encode(ic_state)
   4094       | TypeField::encode(type)
   4095       | ExtendedExtraICStateField::encode(extra_ic_state)
   4096       | CacheHolderField::encode(holder);
   4097   if (!Code::needs_extended_extra_ic_state(kind)) {
   4098     bits |= (argc << kArgumentsCountShift);
   4099   }
   4100   return static_cast<Flags>(bits);
   4101 }
   4102 
   4103 
   4104 Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
   4105                                           ExtraICState extra_ic_state,
   4106                                           StubType type,
   4107                                           int argc,
   4108                                           InlineCacheHolderFlag holder) {
   4109   return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
   4110 }
   4111 
   4112 
   4113 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
   4114   return KindField::decode(flags);
   4115 }
   4116 
   4117 
   4118 InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
   4119   return ICStateField::decode(flags);
   4120 }
   4121 
   4122 
   4123 Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
   4124   return ExtraICStateField::decode(flags);
   4125 }
   4126 
   4127 
   4128 Code::ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
   4129     Flags flags) {
   4130   return ExtendedExtraICStateField::decode(flags);
   4131 }
   4132 
   4133 
   4134 Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
   4135   return TypeField::decode(flags);
   4136 }
   4137 
   4138 
   4139 int Code::ExtractArgumentsCountFromFlags(Flags flags) {
   4140   return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
   4141 }
   4142 
   4143 
   4144 InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
   4145   return CacheHolderField::decode(flags);
   4146 }
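
// The Compute*/Extract* helpers above are designed to round-trip: for flags
// built by ComputeFlags(), each Extract* accessor returns the corresponding
// input, e.g.
//
//   Code::Flags f = Code::ComputeFlags(kind, ic_state, extra, type, argc,
//                                      holder);
//   ASSERT(Code::ExtractKindFromFlags(f) == kind);
//   ASSERT(Code::ExtractICStateFromFlags(f) == ic_state);
//   ASSERT(Code::ExtractTypeFromFlags(f) == type);
//
// with the caveat that the argument count and the extended extra IC state
// share bits, so only one of the two is meaningful for any given kind.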
   4147 
   4148 
   4149 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
   4150   int bits = flags & ~TypeField::kMask;
   4151   return static_cast<Flags>(bits);
   4152 }
   4153 
   4154 
   4155 Code* Code::GetCodeFromTargetAddress(Address address) {
   4156   HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
   4157   // GetCodeFromTargetAddress might be called when marking objects during
   4158   // mark-sweep. reinterpret_cast is therefore used instead of the more
   4159   // appropriate Code::cast, which does not work when the object's map is
   4160   // marked.
   4161   Code* result = reinterpret_cast<Code*>(code);
   4162   return result;
   4163 }
   4164 
   4165 
   4166 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
   4167   return HeapObject::
   4168       FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
   4169 }
   4170 
   4171 
   4172 Object* Map::prototype() {
   4173   return READ_FIELD(this, kPrototypeOffset);
   4174 }
   4175 
   4176 
   4177 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
   4178   ASSERT(value->IsNull() || value->IsJSReceiver());
   4179   WRITE_FIELD(this, kPrototypeOffset, value);
   4180   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
   4181 }
   4182 
   4183 
   4184 // If the map is using the empty transition array, install a new, empty
   4185 // transition array that has room for an element transition.
   4186 static MaybeObject* EnsureHasTransitionArray(Map* map) {
   4187   TransitionArray* transitions;
   4188   MaybeObject* maybe_transitions;
   4189   if (!map->HasTransitionArray()) {
   4190     maybe_transitions = TransitionArray::Allocate(0);
   4191     if (!maybe_transitions->To(&transitions)) return maybe_transitions;
   4192     transitions->set_back_pointer_storage(map->GetBackPointer());
   4193   } else if (!map->transitions()->IsFullTransitionArray()) {
   4194     maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
   4195     if (!maybe_transitions->To(&transitions)) return maybe_transitions;
   4196   } else {
   4197     return map;
   4198   }
   4199   map->set_transitions(transitions);
   4200   return transitions;
   4201 }
   4202 
   4203 
   4204 void Map::InitializeDescriptors(DescriptorArray* descriptors) {
   4205   int len = descriptors->number_of_descriptors();
   4206   set_instance_descriptors(descriptors);
   4207   SetNumberOfOwnDescriptors(len);
   4208 }
   4209 
   4210 
   4211 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
   4212 
   4213 
   4214 void Map::set_bit_field3(uint32_t bits) {
   4215   // Ensure the upper 2 bits have the same value by sign-extending the
   4216   // field. This is necessary to be able to use the 31st bit.
   4217   int value = bits << 1;
   4218   WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
   4219 }
   4220 
   4221 
   4222 uint32_t Map::bit_field3() {
   4223   Object* value = READ_FIELD(this, kBitField3Offset);
   4224   return Smi::cast(value)->value();
   4225 }
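
// A consequence of the sign extension above: only the low 31 bits of
// bit_field3 survive a round trip. The stored bit 31 always mirrors bit 30,
// which is exactly the condition for the value to be a valid Smi on 32-bit
// platforms. For example, storing bits == 0x40000000 reads back as
// 0xC0000000: bit 30 is preserved and bit 31 merely copies it.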
   4226 
   4227 
   4228 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
   4229   Object* back_pointer = GetBackPointer();
   4230 
   4231   if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
   4232     ZapTransitions();
   4233   }
   4234 
   4235   WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
   4236   CONDITIONAL_WRITE_BARRIER(
   4237       heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
   4238 }
   4239 
   4240 
   4241 void Map::AppendDescriptor(Descriptor* desc,
   4242                            const DescriptorArray::WhitenessWitness& witness) {
   4243   DescriptorArray* descriptors = instance_descriptors();
   4244   int number_of_own_descriptors = NumberOfOwnDescriptors();
   4245   ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
   4246   descriptors->Append(desc, witness);
   4247   SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
   4248 }
   4249 
   4250 
   4251 Object* Map::GetBackPointer() {
   4252   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   4253   if (object->IsDescriptorArray()) {
   4254     return TransitionArray::cast(object)->back_pointer_storage();
   4255   } else {
   4256     ASSERT(object->IsMap() || object->IsUndefined());
   4257     return object;
   4258   }
   4259 }
   4260 
   4261 
   4262 bool Map::HasElementsTransition() {
   4263   return HasTransitionArray() && transitions()->HasElementsTransition();
   4264 }
   4265 
   4266 
   4267 bool Map::HasTransitionArray() {
   4268   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   4269   return object->IsTransitionArray();
   4270 }
   4271 
   4272 
   4273 Map* Map::elements_transition_map() {
   4274   int index = transitions()->Search(GetHeap()->elements_transition_symbol());
   4275   return transitions()->GetTarget(index);
   4276 }
   4277 
   4278 
   4279 bool Map::CanHaveMoreTransitions() {
   4280   if (!HasTransitionArray()) return true;
   4281   return FixedArray::SizeFor(transitions()->length() +
   4282                              TransitionArray::kTransitionSize)
   4283       <= Page::kMaxNonCodeHeapObjectSize;
   4284 }
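
// In other words, a transition may only be added if the transition array,
// grown by one entry, still fits on a regular (non-large-object) page.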
   4285 
   4286 
   4287 MaybeObject* Map::AddTransition(Name* key,
   4288                                 Map* target,
   4289                                 SimpleTransitionFlag flag) {
   4290   if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
   4291   return TransitionArray::NewWith(flag, key, target, GetBackPointer());
   4292 }
   4293 
   4294 
   4295 void Map::SetTransition(int transition_index, Map* target) {
   4296   transitions()->SetTarget(transition_index, target);
   4297 }
   4298 
   4299 
   4300 Map* Map::GetTransition(int transition_index) {
   4301   return transitions()->GetTarget(transition_index);
   4302 }
   4303 
   4304 
   4305 MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
   4306   TransitionArray* transitions;
   4307   MaybeObject* maybe_transitions = AddTransition(
   4308       GetHeap()->elements_transition_symbol(),
   4309       transitioned_map,
   4310       FULL_TRANSITION);
   4311   if (!maybe_transitions->To(&transitions)) return maybe_transitions;
   4312   set_transitions(transitions);
   4313   return transitions;
   4314 }
   4315 
   4316 
   4317 FixedArray* Map::GetPrototypeTransitions() {
   4318   if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
   4319   if (!transitions()->HasPrototypeTransitions()) {
   4320     return GetHeap()->empty_fixed_array();
   4321   }
   4322   return transitions()->GetPrototypeTransitions();
   4323 }
   4324 
   4325 
   4326 MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
   4327   MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
   4328   if (allow_prototype->IsFailure()) return allow_prototype;
   4329   int old_number_of_transitions = NumberOfProtoTransitions();
   4330 #ifdef DEBUG
   4331   if (HasPrototypeTransitions()) {
   4332     ASSERT(GetPrototypeTransitions() != proto_transitions);
   4333     ZapPrototypeTransitions();
   4334   }
   4335 #endif
   4336   transitions()->SetPrototypeTransitions(proto_transitions);
   4337   SetNumberOfProtoTransitions(old_number_of_transitions);
   4338   return this;
   4339 }
   4340 
   4341 
   4342 bool Map::HasPrototypeTransitions() {
   4343   return HasTransitionArray() && transitions()->HasPrototypeTransitions();
   4344 }
   4345 
   4346 
   4347 TransitionArray* Map::transitions() {
   4348   ASSERT(HasTransitionArray());
   4349   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   4350   return TransitionArray::cast(object);
   4351 }
   4352 
   4353 
   4354 void Map::set_transitions(TransitionArray* transition_array,
   4355                           WriteBarrierMode mode) {
   4356   // Transition arrays are not shared. When one is replaced, it should not
   4357   // keep referenced objects alive, so we zap it.
   4358   // When another reference to the array exists somewhere (e.g. a handle),
   4359   // zapping turns a silent waste of memory into a source of crashes.
   4360   if (HasTransitionArray()) {
   4361     ASSERT(transitions() != transition_array);
   4362     ZapTransitions();
   4363   }
   4364 
   4365   WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
   4366   CONDITIONAL_WRITE_BARRIER(
   4367       GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
   4368 }
   4369 
   4370 
   4371 void Map::init_back_pointer(Object* undefined) {
   4372   ASSERT(undefined->IsUndefined());
   4373   WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
   4374 }
   4375 
   4376 
   4377 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
   4378   ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
   4379   ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
   4380          (value->IsMap() && GetBackPointer()->IsUndefined()));
   4381   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   4382   if (object->IsTransitionArray()) {
   4383     TransitionArray::cast(object)->set_back_pointer_storage(value);
   4384   } else {
   4385     WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
   4386     CONDITIONAL_WRITE_BARRIER(
   4387         GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
   4388   }
   4389 }
   4390 
   4391 
   4392 // Can either be Smi (no transitions), normal transition array, or a transition
   4393 // array with the header overwritten as a Smi (thus iterating).
   4394 TransitionArray* Map::unchecked_transition_array() {
   4395   Object* object = *HeapObject::RawField(this,
   4396                                          Map::kTransitionsOrBackPointerOffset);
   4397   TransitionArray* transition_array = static_cast<TransitionArray*>(object);
   4398   return transition_array;
   4399 }
   4400 
   4401 
   4402 HeapObject* Map::UncheckedPrototypeTransitions() {
   4403   ASSERT(HasTransitionArray());
   4404   ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
   4405   return unchecked_transition_array()->UncheckedPrototypeTransitions();
   4406 }
   4407 
   4408 
   4409 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
   4410 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
   4411 ACCESSORS(Map, constructor, Object, kConstructorOffset)
   4412 
   4413 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
   4414 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
   4415 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
   4416 
   4417 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
   4418 ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
   4419 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
   4420 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
   4421 
   4422 ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
   4423 
   4424 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
   4425 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
   4426 ACCESSORS(AccessorInfo, expected_receiver_type, Object,
   4427           kExpectedReceiverTypeOffset)
   4428 
   4429 ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
   4430           kSerializedDataOffset)
   4431 
   4432 ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
   4433           kDescriptorOffset)
   4434 
   4435 ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
   4436 ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
   4437 ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)
   4438 
   4439 ACCESSORS(Box, value, Object, kValueOffset)
   4440 
   4441 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
   4442 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
   4443 
   4444 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
   4445 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
   4446 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
   4447 
   4448 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
   4449 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
   4450 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
   4451 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
   4452 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
   4453 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
   4454 
   4455 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
   4456 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
   4457 
   4458 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
   4459 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
   4460 
   4461 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
   4462 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
   4463 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
   4464           kPropertyAccessorsOffset)
   4465 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
   4466           kPrototypeTemplateOffset)
   4467 ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
   4468 ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
   4469           kNamedPropertyHandlerOffset)
   4470 ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
   4471           kIndexedPropertyHandlerOffset)
   4472 ACCESSORS(FunctionTemplateInfo, instance_template, Object,
   4473           kInstanceTemplateOffset)
   4474 ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
   4475 ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
   4476 ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
   4477           kInstanceCallHandlerOffset)
   4478 ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
   4479           kAccessCheckInfoOffset)
   4480 ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
   4481 
   4482 ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
   4483 ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
   4484           kInternalFieldCountOffset)
   4485 
   4486 ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
   4487 ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
   4488 
   4489 ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
   4490 
   4491 ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
   4492 ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
   4493 ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
   4494 
   4495 ACCESSORS(Script, source, Object, kSourceOffset)
   4496 ACCESSORS(Script, name, Object, kNameOffset)
   4497 ACCESSORS(Script, id, Smi, kIdOffset)
   4498 ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
   4499 ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
   4500 ACCESSORS(Script, data, Object, kDataOffset)
   4501 ACCESSORS(Script, context_data, Object, kContextOffset)
   4502 ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
   4503 ACCESSORS_TO_SMI(Script, type, kTypeOffset)
   4504 ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
   4505 ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
   4506 ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
   4507                  kEvalFrominstructionsOffsetOffset)
   4508 ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
   4509 BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
   4510 
   4511 Script::CompilationType Script::compilation_type() {
   4512   return BooleanBit::get(flags(), kCompilationTypeBit) ?
   4513       COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
   4514 }
   4515 void Script::set_compilation_type(CompilationType type) {
   4516   set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
   4517       type == COMPILATION_TYPE_EVAL));
   4518 }
   4519 Script::CompilationState Script::compilation_state() {
   4520   return BooleanBit::get(flags(), kCompilationStateBit) ?
   4521       COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
   4522 }
   4523 void Script::set_compilation_state(CompilationState state) {
   4524   set_flags(BooleanBit::set(flags(), kCompilationStateBit,
   4525       state == COMPILATION_STATE_COMPILED));
   4526 }
   4527 
   4528 
   4529 #ifdef ENABLE_DEBUGGER_SUPPORT
   4530 ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
   4531 ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
   4532 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
   4533 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
   4534 
   4535 ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
   4536 ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
   4537 ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
   4538 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
   4539 #endif
   4540 
   4541 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
   4542 ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
   4543           kOptimizedCodeMapOffset)
   4544 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
   4545 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
   4546 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
   4547           kInstanceClassNameOffset)
   4548 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
   4549 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
   4550 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
   4551 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
   4552 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
   4553 
   4554 
   4555 SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
   4556 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
   4557                kHiddenPrototypeBit)
   4558 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
   4559 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
   4560                kNeedsAccessCheckBit)
   4561 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
   4562                kReadOnlyPrototypeBit)
   4563 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
   4564                kIsExpressionBit)
   4565 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
   4566                kIsTopLevelBit)
   4567 
   4568 BOOL_ACCESSORS(SharedFunctionInfo,
   4569                compiler_hints,
   4570                allows_lazy_compilation,
   4571                kAllowLazyCompilation)
   4572 BOOL_ACCESSORS(SharedFunctionInfo,
   4573                compiler_hints,
   4574                allows_lazy_compilation_without_context,
   4575                kAllowLazyCompilationWithoutContext)
   4576 BOOL_ACCESSORS(SharedFunctionInfo,
   4577                compiler_hints,
   4578                uses_arguments,
   4579                kUsesArguments)
   4580 BOOL_ACCESSORS(SharedFunctionInfo,
   4581                compiler_hints,
   4582                has_duplicate_parameters,
   4583                kHasDuplicateParameters)
   4584 
   4585 
   4586 #if V8_HOST_ARCH_32_BIT
   4587 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
   4588 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
   4589               kFormalParameterCountOffset)
   4590 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
   4591               kExpectedNofPropertiesOffset)
   4592 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
   4593 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
   4594               kStartPositionAndTypeOffset)
   4595 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
   4596 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
   4597               kFunctionTokenPositionOffset)
   4598 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
   4599               kCompilerHintsOffset)
   4600 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
   4601 SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
   4602 
   4603 #else
   4604 
   4605 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
   4606   STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
   4607   int holder::name() {                                            \
   4608     int value = READ_INT_FIELD(this, offset);                     \
   4609     ASSERT(kHeapObjectTag == 1);                                  \
   4610     ASSERT((value & kHeapObjectTag) == 0);                        \
   4611     return value >> 1;                                            \
   4612   }                                                               \
   4613   void holder::set_##name(int value) {                            \
   4614     ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x00000000);                   \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)
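
// Review note: on 64-bit targets two int fields share one pointer-sized
// slot.  The LO half is stored shifted left by one with the low bit kept
// clear, so the word still reads as a Smi-tagged value and the GC never
// mistakes it for a heap pointer.  A worked round trip (illustrative
// sketch only, not part of the original source):
//
//   set_name(42)  stores  (42 << 1) & ~kHeapObjectTag  ==  84
//   name()        loads   84 >> 1                      ==  42
//
// The ASSERT on the write side requires the top two bits to agree, i.e.
// the value must fit in 31 bits so the shift cannot change its sign.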


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, opt_count, kOptCountOffset)

PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

#endif


int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}


BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)


bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != GetHeap()->undefined_value();
}


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


int SharedFunctionInfo::profiler_ticks() {
  if (code()->kind() != Code::FUNCTION) return 0;
  return code()->profiler_ticks();
}


LanguageMode SharedFunctionInfo::language_mode() {
  int hints = compiler_hints();
  if (BooleanBit::get(hints, kExtendedModeFunction)) {
    ASSERT(BooleanBit::get(hints, kStrictModeFunction));
    return EXTENDED_MODE;
  }
  return BooleanBit::get(hints, kStrictModeFunction)
      ? STRICT_MODE : CLASSIC_MODE;
}


void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
  ASSERT(this->language_mode() == CLASSIC_MODE ||
         this->language_mode() == language_mode ||
         language_mode == EXTENDED_MODE);
  int hints = compiler_hints();
  hints = BooleanBit::set(
      hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
  hints = BooleanBit::set(
      hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
  set_compiler_hints(hints);
}
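
// Review note: the two compiler-hint bits encode the three language modes
// as follows (illustrative table, derived from the two functions above):
//
//   kStrictModeFunction   kExtendedModeFunction   language_mode()
//            0                      0             CLASSIC_MODE
//            1                      0             STRICT_MODE
//            1                      1             EXTENDED_MODE
//
// The (0, 1) combination is invalid, which is exactly what the ASSERT in
// language_mode() rules out.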


bool SharedFunctionInfo::is_classic_mode() {
  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
}

BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
            kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

void SharedFunctionInfo::BeforeVisitingPointers() {
  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
}


ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)

bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalAsciiString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}


void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
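
// Review note: start_position_and_type packs the boolean type bits
// (kIsExpressionBit, kIsTopLevelBit) into the low bits and the source
// position into the bits above kStartPositionShift.  A sketch of the
// update performed by set_start_position (illustrative only):
//
//   new_field = (start_position << kStartPositionShift)  // position bits
//             | (old_field & ~kStartPositionMask);       // keep type bits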


Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}


void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
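
// Review note: (((tries - 1) & tries) == 0) holds exactly when tries is a
// power of two, so together with tries >= 16 this retries optimization on
// attempts 16, 32, 64, ... -- an exponential backoff.  For example
// (illustrative only): tries == 32 gives 31 & 32 == 0 (reenable), while
// tries == 33 gives 32 & 33 == 32 (stay disabled).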


bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}


bool JSFunction::IsMarkedForInstallingRecompiledCode() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInstallRecompiledCode);
}


bool JSFunction::IsMarkedForParallelRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kParallelRecompile);
}


bool JSFunction::IsInRecompileQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInRecompileQueue);
}


Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)


JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}


ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)


// Type feedback slot: type_feedback_info for FUNCTIONs, stub_info for STUBs.
void Code::InitializeTypeFeedbackInfoNoWriteBarrier(Object* value) {
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}


Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC);
  Object* value = READ_FIELD(this, kTypeFeedbackInfoOffset);
  return Smi::cast(value)->value();
}


void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, Smi::FromInt(value));
}


Object* Code::code_to_deoptimize_link() {
  // Optimized code should not have type feedback.
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_FIELD(this, kTypeFeedbackInfoOffset);
}


void Code::set_code_to_deoptimize_link(Object* value) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}


Object** Code::code_to_deoptimize_link_slot() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return HeapObject::RawField(this, kTypeFeedbackInfoOffset);
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
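
// Review note: taken together, the accessors above imply this layout for
// the regexp's data FixedArray (summary only; the index constants
// themselves are declared with JSRegExp elsewhere):
//
//   kTagIndex      -> Smi encoding the JSRegExp::Type tag
//   kSourceIndex   -> String holding the pattern source
//   kFlagsIndex    -> Smi encoding the JSRegExp::Flags bits
//   kDataIndex...  -> implementation data, e.g. kIrregexpCaptureCountIndex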


ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
  ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
          (map == GetHeap()->fixed_array_map() ||
           map == GetHeap()->fixed_cow_array_map())) ||
         (IsFastDoubleElementsKind(kind) &&
          (fixed_array->IsFixedDoubleArray() ||
           fixed_array == GetHeap()->empty_fixed_array())) ||
         (kind == DICTIONARY_ELEMENTS &&
          fixed_array->IsFixedArray() &&
          fixed_array->IsDictionary()) ||
         (kind > DICTIONARY_ELEMENTS));
  ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
         (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
  return kind;
}


ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}


StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
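
// Review note: AddCharacterCore and GetHashCore together implement the
// Jenkins one-at-a-time hash.  A minimal sketch of hashing a buffer with
// them (illustrative only; it mirrors AddCharacters plus GetHashField
// without the array-index and trivial-hash bookkeeping):
//
//   uint32_t hash = seed;
//   for (int i = 0; i < length; i++) {
//     hash = StringHasher::AddCharacterCore(hash, chars[i]);
//   }
//   hash = StringHasher::GetHashCore(hash);
//
// GetHashCore substitutes kZeroHash when the masked result would be zero,
// so a computed hash field can never collide with the "not yet computed"
// state.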


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
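
// Review note: the magic constant above is floor((2^32 - 1) / 10) ==
// 429496729, and ((d + 2) >> 3) evaluates to 0 for digits 0..5 and to 1
// for digits 6..9.  The check therefore rejects exactly those prefixes
// for which array_index_ * 10 + d would overflow uint32_t, without doing
// a division per character.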


template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


bool JSReceiver::HasProperty(Name* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(Name* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
  uint32_t index;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(index);
  }
  return GetPropertyAttributeWithReceiver(this, key);
}


PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true);
}


// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true) != ABSENT;
}


bool JSReceiver::HasLocalElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false) != ABSENT;
}


PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index + 1, value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}
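
// Review note: as the three stores above show, each dictionary entry
// occupies three consecutive FixedArray slots starting at
// EntryToIndex(entry):
//
//   index + 0  -> key
//   index + 1  -> value
//   index + 2  -> PropertyDetails, encoded as a Smi
//
// The two-argument SetEntry overload fills the details slot with
// PropertyDetails(Smi::FromInt(0)), i.e. empty details.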


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}

bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
  // Every key stored in the table has already had its hash computed, so
  // compare hashes first for a cheap early mismatch.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Name* key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}


MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  return key;
}


template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
                                                       Object* key) {
  return key;
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
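
// Review note: the growth policy above leaves 12.5% slack, i.e. it
// allocates required_size + (required_size >> 3) elements.  For example
// (illustrative only), a request for 64 elements allocates a backing
// store of 72, so a run of small appends does not reallocate on every
// push.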


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(GetElementsKind())) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(GetElementsKind()) ||
           (IsFastSmiElementsKind(GetElementsKind()) &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
  set(1 + index * 2, Smi::FromInt(id.ToInt()));
}


TypeFeedbackId TypeFeedbackCells::AstId(int index) {
  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}


void TypeFeedbackCells::SetCell(int index, Cell* cell) {
  set(index * 2, cell);
}


Cell* TypeFeedbackCells::GetCell(int index) {
  return Cell::cast(get(index * 2));
}


Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}


Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}


Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}


Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->the_hole_value();
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // The count can go negative here when the type-feedback info is shared
  // between two code objects.  That only happens when the debugger makes a
  // shallow copy of a code object (see Heap::CopyCode).  Since we do not
  // optimize while the debugger is active, we can simply skip this counter
  // update in that case.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
   6121 
   6122 
   6123 int TypeFeedbackInfo::own_type_change_checksum() {
   6124   int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
   6125   return OwnTypeChangeChecksum::decode(value);
   6126 }
   6127 
   6128 
   6129 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
   6130   int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
   6131   int mask = (1 << kTypeChangeChecksumBits) - 1;
   6132   return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
   6133 }
   6134 
   6135 
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
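

// Illustrative sketch (MyReader is a hypothetical subclass): Relocatable
// instances form a LIFO chain rooted at the isolate, so stack-allocated
// subclasses are pushed and popped in strict nesting order:
//
//   {
//     MyReader a(isolate);  // relocatable_top() == &a
//     MyReader b(isolate);  // relocatable_top() == &b, b.prev_ == &a
//   }                       // ~b restores &a, then ~a restores the old top
//
// The chain lets raw pointers cached by the subclasses be updated after a
// moving garbage collection.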


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}
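

// These descriptors tell heap visitors which slice of an object contains
// tagged pointers. A hypothetical instantiation (kStartOffset, kEndOffset
// and kSize are placeholders, not real constants):
//
//   typedef FixedBodyDescriptor<kStartOffset, kEndOffset, kSize>
//       BodyDescriptor;
//
// FixedBodyDescriptor suits objects whose pointer range is known at compile
// time; FlexibleBodyDescriptor takes the actual object size at runtime for
// variable-sized objects.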


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD


} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_