// Home | History | Annotate | Download | only in src
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 //
     28 // Review notes:
     29 //
     30 // - The use of macros in these inline functions may seem superfluous
     31 // but it is absolutely needed to make sure gcc generates optimal
     32 // code. gcc is not happy when attempting to inline too deep.
     33 //
     34 
     35 #ifndef V8_OBJECTS_INL_H_
     36 #define V8_OBJECTS_INL_H_
     37 
     38 #include "elements.h"
     39 #include "objects.h"
     40 #include "contexts.h"
     41 #include "conversions-inl.h"
     42 #include "heap.h"
     43 #include "isolate.h"
     44 #include "property.h"
     45 #include "spaces.h"
     46 #include "store-buffer.h"
     47 #include "v8memory.h"
     48 #include "factory.h"
     49 #include "incremental-marking.h"
     50 
     51 namespace v8 {
     52 namespace internal {
     53 
// PropertyDetails packs property attributes into a bit-field that is stored
// as a tagged Smi inside descriptor arrays and dictionaries; these helpers
// convert between the two representations.
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


// Re-encodes the packed bit-field as a tagged Smi for storage.
Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


// Returns a copy of these details with the deleted bit set; used when a
// dictionary entry is removed without compacting the backing store.
PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
     68 
     69 
// Defines Object::Is<type>() as an exact instance-type comparison against
// the receiver's map.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }


// Defines type::cast() as a reinterpret_cast that is type-checked only in
// debug builds (via ASSERT).
#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


// Accessors for raw (untagged) int fields.
#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Accessors for tagged pointer fields; the setter performs a conditional
// write barrier because the stored value may be a heap pointer.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// No write barrier is performed here (Smis are immediates, not pointers
// into the heap).
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }


// Read-only view of a single bit inside an existing bit-field accessor.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Read/write access to a single bit inside an existing bit-field accessor.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
    129 
    130 
    131 bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
    132                                          ElementsKind to_kind) {
    133   if (to_kind == FAST_ELEMENTS) {
    134     return from_kind == FAST_SMI_ONLY_ELEMENTS ||
    135         from_kind == FAST_DOUBLE_ELEMENTS;
    136   } else {
    137     return to_kind == FAST_DOUBLE_ELEMENTS &&
    138         from_kind == FAST_SMI_ONLY_ELEMENTS;
    139   }
    140 }
    141 
    142 
bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray();
}


// Returns true if this object was constructed from a function template
// that has |expected| somewhere in its parent-template chain.
bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}
    165 
    166 
bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


// Like IsHeapObject(), but intended for values that could in principle be
// failures; the caller must guarantee this is not a Failure.
bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  // Any non-zero tag bit means "not a Smi", i.e. a heap object.
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}
    181 
    182 
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)


// String instance types all sort below FIRST_NONSTRING_TYPE, so a single
// comparison suffices.
bool Object::IsString() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


// "Spec objects" are the specification's object types: everything at or
// above FIRST_SPEC_OBJECT_TYPE.
bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


// A callable in the spec sense: a real function or a function proxy.
bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  STATIC_ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}
    215 
    216 
    217 bool Object::IsConsString() {
    218   if (!IsString()) return false;
    219   return StringShape(String::cast(this)).IsCons();
    220 }
    221 
    222 
    223 bool Object::IsSlicedString() {
    224   if (!IsString()) return false;
    225   return StringShape(String::cast(this)).IsSliced();
    226 }
    227 
    228 
    229 bool Object::IsSeqString() {
    230   if (!IsString()) return false;
    231   return StringShape(String::cast(this)).IsSequential();
    232 }
    233 
    234 
    235 bool Object::IsSeqAsciiString() {
    236   if (!IsString()) return false;
    237   return StringShape(String::cast(this)).IsSequential() &&
    238          String::cast(this)->IsAsciiRepresentation();
    239 }
    240 
    241 
    242 bool Object::IsSeqTwoByteString() {
    243   if (!IsString()) return false;
    244   return StringShape(String::cast(this)).IsSequential() &&
    245          String::cast(this)->IsTwoByteRepresentation();
    246 }
    247 
    248 
    249 bool Object::IsExternalString() {
    250   if (!IsString()) return false;
    251   return StringShape(String::cast(this)).IsExternal();
    252 }
    253 
    254 
    255 bool Object::IsExternalAsciiString() {
    256   if (!IsString()) return false;
    257   return StringShape(String::cast(this)).IsExternal() &&
    258          String::cast(this)->IsAsciiRepresentation();
    259 }
    260 
    261 
    262 bool Object::IsExternalTwoByteString() {
    263   if (!IsString()) return false;
    264   return StringShape(String::cast(this)).IsExternal() &&
    265          String::cast(this)->IsTwoByteRepresentation();
    266 }
    267 
    268 bool Object::HasValidElements() {
    269   // Dictionary is covered under FixedArray.
    270   return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
    271 }
    272 
// StringShape caches a string's instance type so repeated representation
// queries avoid reloading the map.  Every constructor asserts the type
// really belongs to a string.
StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsSymbol() {
  ASSERT(valid());
  // The symbol tag is non-zero, so a single bit test suffices.
  STATIC_ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}
    299 
    300 
    301 bool String::IsAsciiRepresentation() {
    302   uint32_t type = map()->instance_type();
    303   return (type & kStringEncodingMask) == kAsciiStringTag;
    304 }
    305 
    306 
    307 bool String::IsTwoByteRepresentation() {
    308   uint32_t type = map()->instance_type();
    309   return (type & kStringEncodingMask) == kTwoByteStringTag;
    310 }
    311 
    312 
    313 bool String::IsAsciiRepresentationUnderneath() {
    314   uint32_t type = map()->instance_type();
    315   STATIC_ASSERT(kIsIndirectStringTag != 0);
    316   STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
    317   ASSERT(IsFlat());
    318   switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    319     case kAsciiStringTag:
    320       return true;
    321     case kTwoByteStringTag:
    322       return false;
    323     default:  // Cons or sliced string.  Need to go deeper.
    324       return GetUnderlying()->IsAsciiRepresentation();
    325   }
    326 }
    327 
    328 
    329 bool String::IsTwoByteRepresentationUnderneath() {
    330   uint32_t type = map()->instance_type();
    331   STATIC_ASSERT(kIsIndirectStringTag != 0);
    332   STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
    333   ASSERT(IsFlat());
    334   switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    335     case kAsciiStringTag:
    336       return false;
    337     case kTwoByteStringTag:
    338       return true;
    339     default:  // Cons or sliced string.  Need to go deeper.
    340       return GetUnderlying()->IsTwoByteRepresentation();
    341   }
    342 }
    343 
    344 
    345 bool String::HasOnlyAsciiChars() {
    346   uint32_t type = map()->instance_type();
    347   return (type & kStringEncodingMask) == kAsciiStringTag ||
    348          (type & kAsciiDataHintMask) == kAsciiDataHintTag;
    349 }
    350 
    351 
// Representation predicates: mask off the representation bits of the
// cached instance type and compare against the corresponding tag.

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect strings (cons and sliced) refer to another string's storage.
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined; the STATIC_CHECK below keeps
// this in sync with the constant exposed through the public API.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);
    395 
    396 
// Combined representation + encoding tests built on
// full_representation_tag().

bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


// Keep in sync with the constant the public API uses for external
// two-byte strings.
STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);
    419 
    420 
// Returns the code unit at |index| from the flattened string data,
// dispatching on the cached encoding flag.
uc32 FlatStringReader::Get(int index) {
  // NOTE(review): the upper bound uses <=, which admits index == length_;
  // confirm whether a one-past-the-end read is intended here.
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}
    434 
    435 
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


// Fillers are the GC's dead-space placeholders: free-space blocks and the
// small one/two-word fillers.
bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)


// External arrays occupy a contiguous instance-type range, so two
// comparisons cover every element kind.
bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)
    468 
    469 
// Failure values are tagged immediates, so these predicates never
// dereference the pointer.

bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


// Checked cast from MaybeObject; only valid on failure-tagged values.
Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}
    501 
    502 
// JS receivers (and JS objects) occupy the top of the instance-type range,
// so a single lower-bound comparison suffices; the STATIC_ASSERTs guard
// the assumption that nothing sorts above them.
bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


// Proxies occupy their own instance-type sub-range.
bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
    532 
    533 
    534 bool Object::IsDescriptorArray() {
    535   return IsFixedArray();
    536 }
    537 
    538 
    539 bool Object::IsDeoptimizationInputData() {
    540   // Must be a fixed array.
    541   if (!IsFixedArray()) return false;
    542 
    543   // There's no sure way to detect the difference between a fixed array and
    544   // a deoptimization data array.  Since this is used for asserts we can
    545   // check that the length is zero or else the fixed size plus a multiple of
    546   // the entry size.
    547   int length = FixedArray::cast(this)->length();
    548   if (length == 0) return true;
    549 
    550   length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
    551   return length >= 0 &&
    552       length % DeoptimizationInputData::kDeoptEntrySize == 0;
    553 }
    554 
    555 
    556 bool Object::IsDeoptimizationOutputData() {
    557   if (!IsFixedArray()) return false;
    558   // There's actually no way to see the difference between a fixed array and
    559   // a deoptimization data array.  Since this is used for asserts we can check
    560   // that the length is plausible though.
    561   if (FixedArray::cast(this)->length() % 2 != 0) return false;
    562   return true;
    563 }
    564 
    565 
    566 bool Object::IsTypeFeedbackCells() {
    567   if (!IsFixedArray()) return false;
    568   // There's actually no way to see the difference between a fixed array and
    569   // a cache cells array.  Since this is used for asserts we can check that
    570   // the length is plausible though.
    571   if (FixedArray::cast(this)->length() % 2 != 0) return false;
    572   return true;
    573 }
    574 
    575 
// Contexts have no dedicated instance type; they are recognized by map
// identity against the context maps owned by the heap.
bool Object::IsContext() {
  if (Object::IsHeapObject()) {
    Map* map = HeapObject::cast(this)->map();
    Heap* heap = map->GetHeap();
    return (map == heap->function_context_map() ||
            map == heap->catch_context_map() ||
            map == heap->with_context_map() ||
            map == heap->global_context_map() ||
            map == heap->block_context_map());
  }
  return false;
}


bool Object::IsGlobalContext() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->global_context_map();
}


bool Object::IsScopeInfo() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization used by the templated Is<T>() helpers.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
    610 
    611 
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


// A JSValue whose wrapped primitive is a string (e.g. `new String("x")`).
bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


// Booleans are the oddballs whose kind has none of the non-boolean bits
// set.
bool Object::IsBoolean() {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization used by the templated Is<T>() helpers.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
    642 
    643 
// Hash tables are recognized by map identity.
bool Object::IsHashTable() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Any hash table other than the heap's symbol table counts as a
// dictionary.
bool Object::IsDictionary() {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this ==
         HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
}


// Heuristic check used by asserts: a result cache is a fixed array with at
// least the fixed header entries plus a whole number of cache entries.
bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<JSFunctionResultCache*>(this)->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
    679 }
    680 
    681 
// Heuristic check used by asserts: a normalized-map cache is a fixed array
// of exactly NormalizedMapCache::kEntries elements.
bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
  }
#endif
  return true;
}


// The cache types below all share the generic hash table map, so nothing
// more specific can be checked here.

bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}
    714 
    715 
// Primitives: oddballs (undefined, null, booleans, ...), numbers and
// strings.
bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  // Global proxies must always be access-checked; verify the invariant.
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


// Covers both the real global object and the builtins object.
bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


bool Object::IsUndetectableObject() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_access_check_needed();
}
    753 
    754 
// True for any of the struct-like types enumerated in STRUCT_LIST.
bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Generates one Is<Name>() exact-type predicate per struct type.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() {                                      \
    return Object::IsHeapObject()                                \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
    773 
    774 
    775 bool Object::IsUndefined() {
    776   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
    777 }
    778 
    779 
    780 bool Object::IsNull() {
    781   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
    782 }
    783 
    784 
    785 bool Object::IsTheHole() {
    786   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
    787 }
    788 
    789 
    790 bool Object::IsTrue() {
    791   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
    792 }
    793 
    794 
    795 bool Object::IsFalse() {
    796   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
    797 }
    798 
    799 
    800 bool Object::IsArgumentsMarker() {
    801   return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
    802 }
    803 
    804 
// Returns the numeric value, whether stored as an immediate Smi or a
// heap-allocated HeapNumber.  The caller must ensure IsNumber().
double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


bool Object::IsNaN() {
  return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
}


// Converts to a Smi when the value is a number exactly representable as
// one; otherwise returns Failure::Exception().
MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    // The int->double round trip rejects fractional and out-of-range
    // values.
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
    834 
    835 
MaybeObject* Object::GetElement(uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(HEAP->IsAllocationAllowed());
  return GetElementWithReceiver(this, index);
}


// Variant for callers that know the element access cannot fail; asserts
// no failure occurred instead of propagating it.
Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


// Property lookup starting at this object, discarding the attributes.
MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}
    863 
    864 
// Raw field access for tagged heap objects.  |p| is a tagged pointer
// (address + kHeapObjectTag), so the tag is subtracted to get the real
// address of the field at |offset|.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

// Reads/writes a tagged Object* slot.  WRITE_FIELD performs no write
// barrier; callers must pair it with WRITE_BARRIER/CONDITIONAL_WRITE_BARRIER
// when storing heap pointers.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// Unconditional write barrier: notifies the incremental marker of the
// store and records old-to-new pointers for the store buffer.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

// Same as WRITE_BARRIER but only when |mode| is UPDATE_WRITE_BARRIER;
// SKIP_WRITE_BARRIER callers take responsibility themselves.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
      object, HeapObject::RawField(object, offset), value);             \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    // Read the double as two 32-bit halves so only word alignment is needed.
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    // Store the double as two 32-bit halves; see read_double_field above.
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


// Typed raw accessors for untagged (non-Object*) fields.  None of these
// perform a write barrier: they must only be used for non-pointer data.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
    964 
    965 
// Returns the address of the Object* slot at |byte_offset| inside |obj|,
// suitable for passing to the incremental marker as a slot pointer.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}
    969 
    970 
// Extracts the integer payload of this tagged Smi.
int Smi::value() {
  return Internals::SmiValue(this);
}


// Encodes |value| as a tagged Smi pointer.  |value| must pass Smi::IsValid.
Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  // Shift the payload above the tag bits and OR in the Smi tag.
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


// Same as FromInt, but takes a pointer-sized integer payload.
Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
    990 
    991 
// A Failure is not a real heap object: its payload is encoded directly in
// the pointer bits (see Construct below).  type() recovers the Type enum
// stored in the low bits of the payload.
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


// For RETRY_AFTER_GC failures only: the space the failed allocation
// wanted, stored above the type tag.
AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


// Decodes the payload from the pointer bits by stripping the failure tag.
intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


// Packs (value, type) into a tagged pointer:
//   [ value | type ] << kFailureTagSize | kFailureTag.
// The ASSERT checks that the payload survives the shift, i.e. no high
// bits are lost.
Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}
   1052 
   1053 
// Returns true if |value| fits in a Smi payload on this architecture.
// The fast check is compared against the straightforward range check in
// debug builds.
bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as an tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done w/ unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}
   1079 
   1080 
// A MapWord is the first word of a heap object: normally the (tagged)
// pointer to its map, but during scavenge/compaction it can hold a
// forwarding address instead.
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A map pointer is heap-object-tagged; a forwarding address is stored
// untagged and therefore looks like a Smi.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


// Encodes |object|'s new location by stripping the heap object tag.
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
   1106 
   1107 
#ifdef DEBUG
// Debug-only checks that a field holds, respectively, a valid heap
// pointer or a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif
   1117 
   1118 
// Recovers the owning Heap from this object's address via its memory
// chunk header; also checks we are on the right isolate in debug builds.
Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  ASSERT(heap != NULL);
  ASSERT(heap->isolate() == Isolate::Current());
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}
   1136 
   1137 
// Stores |value| as this object's map and notifies the incremental marker.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
   1164 
   1165 
// Converts a raw (untagged) address into a tagged HeapObject pointer.
HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


// Inverse of FromAddress: the object's raw start address.
Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


// Visits the pointer slots in [start, end) byte offsets of this object.
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


// Visits the single pointer slot at byte |offset|.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
   1191 
   1192 
double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extracts the unbiased IEEE-754 exponent from the stored double.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Nonzero iff the stored double is negative (sign bit set).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


// Generated accessor pair for the JSObject properties backing store.
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
   1215 
   1216 
// Address of element 0; the elements form a contiguous Object* array.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
   1220 
   1221 
   1222 bool FixedArray::ContainsOnlySmisOrHoles() {
   1223   Object* the_hole = GetHeap()->the_hole_value();
   1224   Object** current = GetFirstElementAddress();
   1225   for (int i = 0; i < length(); ++i) {
   1226     Object* candidate = *current++;
   1227     if (!candidate->IsSmi() && candidate != the_hole) return false;
   1228   }
   1229   return true;
   1230 }
   1231 
   1232 
// Raw read of the elements backing store; no type check is performed here.
FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
   1237 
   1238 void JSObject::ValidateSmiOnlyElements() {
   1239 #if DEBUG
   1240   if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
   1241     Heap* heap = GetHeap();
   1242     // Don't use elements, since integrity checks will fail if there
   1243     // are filler pointers in the array.
   1244     FixedArray* fixed_array =
   1245         reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
   1246     Map* map = fixed_array->map();
   1247     // Arrays that have been shifted in place can't be verified.
   1248     if (map != heap->raw_unchecked_one_pointer_filler_map() &&
   1249         map != heap->raw_unchecked_two_pointer_filler_map() &&
   1250         map != heap->free_space_map()) {
   1251       for (int i = 0; i < fixed_array->length(); i++) {
   1252         Object* current = fixed_array->get(i);
   1253         ASSERT(current->IsSmi() || current->IsTheHole());
   1254       }
   1255     }
   1256   }
   1257 #endif
   1258 }
   1259 
   1260 
   1261 MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
   1262 #if DEBUG
   1263   ValidateSmiOnlyElements();
   1264 #endif
   1265   if ((map()->elements_kind() != FAST_ELEMENTS)) {
   1266     return TransitionElementsKind(FAST_ELEMENTS);
   1267   }
   1268   return this;
   1269 }
   1270 
   1271 
// Makes sure the |count| objects starting at |objects| may be stored in
// this object's elements, transitioning the elements kind if necessary:
// a heap number may promote SMI_ONLY to FAST_DOUBLE (when |mode| allows
// conversion), any other heap object forces FAST_ELEMENTS.
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
  ElementsKind current_kind = map()->elements_kind();
  ElementsKind target_kind = current_kind;
  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  // FAST_ELEMENTS already accepts everything.
  if (current_kind == FAST_ELEMENTS) return this;

  Heap* heap = GetHeap();
  Object* the_hole = heap->the_hole_value();
  Object* heap_number_map = heap->heap_number_map();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (!current->IsSmi() && current != the_hole) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
          HeapObject::cast(current)->map() == heap_number_map) {
        target_kind = FAST_DOUBLE_ELEMENTS;
      } else {
        // A non-number heap object: FAST_ELEMENTS is the most general
        // kind, so no need to scan further.
        target_kind = FAST_ELEMENTS;
        break;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}
   1301 
   1302 
// Variant taking a whole backing store.  For a FixedArray the element-wise
// overload above is used; for a FixedDoubleArray (only legal with
// ALLOW_COPIED_DOUBLE_ELEMENTS) a SMI_ONLY receiver transitions to
// FAST_DOUBLE_ELEMENTS.
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
                                                EnsureElementsMode mode) {
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
           elements->map() == GetHeap()->fixed_cow_array_map());
    // Copied doubles are only meaningful for FixedDoubleArray sources.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
    return EnsureCanContainElements(objects, elements->length(), mode);
  }

  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
  }

  return this;
}
   1322 
   1323 
   1324 MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
   1325                                                 ElementsKind to_kind) {
   1326   Map* current_map = map();
   1327   ElementsKind from_kind = current_map->elements_kind();
   1328 
   1329   if (from_kind == to_kind) return current_map;
   1330 
   1331   Context* global_context = isolate->context()->global_context();
   1332   if (current_map == global_context->smi_js_array_map()) {
   1333     if (to_kind == FAST_ELEMENTS) {
   1334       return global_context->object_js_array_map();
   1335     } else {
   1336       if (to_kind == FAST_DOUBLE_ELEMENTS) {
   1337         return global_context->double_js_array_map();
   1338       } else {
   1339         ASSERT(to_kind == DICTIONARY_ELEMENTS);
   1340       }
   1341     }
   1342   }
   1343   return GetElementsTransitionMapSlow(to_kind);
   1344 }
   1345 
   1346 
// Atomically (from the heap verifier's point of view) installs a new map
// and a new elements backing store.  Pass new_map == NULL to keep the
// current map.  The asserts check that the backing store type matches the
// elements kind recorded in the map.
void JSObject::set_map_and_elements(Map* new_map,
                                    FixedArrayBase* value,
                                    WriteBarrierMode mode) {
  ASSERT(value->HasValidElements());
#ifdef DEBUG
  ValidateSmiOnlyElements();
#endif
  if (new_map != NULL) {
    if (mode == UPDATE_WRITE_BARRIER) {
      set_map(new_map);
    } else {
      ASSERT(mode == SKIP_WRITE_BARRIER);
      set_map_no_write_barrier(new_map);
    }
  }
  // Fast (smi-only or object) kinds must use a plain/COW FixedArray;
  // double kinds must use a FixedDoubleArray.  The empty fixed array is
  // allowed for any kind.
  ASSERT((map()->has_fast_elements() ||
          map()->has_fast_smi_only_elements() ||
          (value == GetHeap()->empty_fixed_array())) ==
         (value->map() == GetHeap()->fixed_array_map() ||
          value->map() == GetHeap()->fixed_cow_array_map()));
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
   1372 
   1373 
// Replaces the elements backing store, keeping the current map.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  set_map_and_elements(NULL, value, mode);
}


// Installs the canonical empty fixed array as the properties store.  No
// write barrier needed: the empty fixed array lives in old space.
void JSObject::initialize_properties() {
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}


// Installs the canonical empty fixed array as the elements store; only
// valid for the fast elements kinds.
void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements() ||
         map()->has_fast_smi_only_elements() ||
         map()->has_fast_double_elements());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}
   1392 
   1393 
// Resets this object to an empty elements store of the default fast kind
// (smi-only when that feature is enabled).  May fail if the transition
// map allocation fails.
MaybeObject* JSObject::ResetElements() {
  Object* obj;
  ElementsKind elements_kind = FLAG_smi_only_arrays
      ? FAST_SMI_ONLY_ELEMENTS
      : FAST_ELEMENTS;
  MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
                                                    elements_kind);
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}
   1406 
   1407 
// Generated accessors for an Oddball's printable name and numeric value.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


// The oddball kind (undefined/null/true/false/...) is stored as a Smi.
byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


// Smi stores need no write barrier, so WRITE_FIELD alone is sufficient.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}
   1420 
   1421 
Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}
   1432 
   1433 
// Size of the fixed-layout header for this object's instance type, i.e.
// the offset at which internal fields begin.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
   1468 
   1469 
// Number of internal (embedder) fields.  They occupy the space between the
// header and the in-object properties.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


// Byte offset of internal field |index| from the start of the object.
int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: Smis are not heap pointers, so no write barrier is needed.
void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
   1513 
   1514 
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
// After adjusting by inobject_properties(), a negative index means an
// in-object slot (addressed backwards from instance_size()); a
// non-negative index selects a slot in the external properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}


// Write counterpart of FastPropertyAt; returns |value| for convenience.
Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


// Byte offset of an in-object property; |index| must refer to an
// in-object slot (negative after adjustment).
int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}
   1552 
   1553 
// Reads an in-object property; |index| must map to an in-object slot.
Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


// Writes an in-object property with a conditional write barrier; returns
// |value| for convenience.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
   1574 
   1575 
   1576 
// Initializes a freshly allocated JSObject's body: the pre-allocated
// property fields get |pre_allocated_value| and all remaining slots get
// |filler_value|.  Both values must not live in new space (so the raw
// WRITE_FIELD stores need no write barrier).
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  // Fill everything after the pre-allocated fields with the filler.
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
   1599 
   1600 
// Fast properties are stored in a plain FixedArray descriptor-backed
// layout; slow mode uses a dictionary.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}
   1614 
   1615 
   1616 void Struct::InitializeBody(int object_size) {
   1617   Object* value = GetHeap()->undefined_value();
   1618   for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
   1619     WRITE_FIELD(this, offset, value);
   1620   }
   1621 }
   1622 
   1623 
   1624 bool Object::ToArrayIndex(uint32_t* index) {
   1625   if (IsSmi()) {
   1626     int value = Smi::cast(this)->value();
   1627     if (value < 0) return false;
   1628     *index = value;
   1629     return true;
   1630   }
   1631   if (IsHeapNumber()) {
   1632     double value = HeapNumber::cast(this)->value();
   1633     uint32_t uint_value = static_cast<uint32_t>(value);
   1634     if (value == static_cast<double>(uint_value)) {
   1635       *index = uint_value;
   1636       return true;
   1637     }
   1638   }
   1639   return false;
   1640 }
   1641 
   1642 
   1643 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
   1644   if (!this->IsJSValue()) return false;
   1645 
   1646   JSValue* js_value = JSValue::cast(this);
   1647   if (!js_value->value()->IsString()) return false;
   1648 
   1649   String* str = String::cast(js_value->value());
   1650   if (index >= (uint32_t)str->length()) return false;
   1651 
   1652   return true;
   1653 }
   1654 
   1655 
// Checked downcast: the object must be a FixedArray or FixedDoubleArray.
FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
   1666 
   1667 
// Smi overload: no write barrier needed since Smis are not heap pointers.
// Copy-on-write arrays must never be mutated in place.
void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// General overload: stores |value| with a full write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
   1684 
   1685 
// Holes in a FixedDoubleArray are represented by one specific NaN bit
// pattern (kHoleNanInt64); comparisons are done on the raw bits since
// NaN != NaN as doubles.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


// The canonical NaN stored for real NaN values; must never collide with
// the hole NaN bit pattern, which the asserts verify.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
   1701 
   1702 
// Reads element |index| as a double; must not be called on a hole.
double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}

// Reads element |index| as its raw 64-bit representation (hole included).
int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map() &&
         map() != HEAP->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}

// Boxed read: returns the hole sentinel for holes, otherwise a Number
// (which may require allocating a HeapNumber, hence MaybeObject).
MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}
   1726 
   1727 
   1728 void FixedDoubleArray::set(int index, double value) {
   1729   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   1730          map() != HEAP->fixed_array_map());
   1731   int offset = kHeaderSize + index * kDoubleSize;
   1732   if (isnan(value)) value = canonical_not_the_hole_nan_as_double();
   1733   WRITE_DOUBLE_FIELD(this, offset, value);
   1734 }
   1735 
   1736 
   1737 void FixedDoubleArray::set_the_hole(int index) {
   1738   ASSERT(map() != HEAP->fixed_cow_array_map() &&
   1739          map() != HEAP->fixed_array_map());
   1740   int offset = kHeaderSize + index * kDoubleSize;
   1741   WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
   1742 }
   1743 
   1744 
   1745 bool FixedDoubleArray::is_the_hole(int index) {
   1746   int offset = kHeaderSize + index * kDoubleSize;
   1747   return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
   1748 }
   1749 
   1750 
   1751 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
   1752   Heap* heap = GetHeap();
   1753   if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
   1754   if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
   1755   return UPDATE_WRITE_BARRIER;
   1756 }
   1757 
   1758 
// Stores |value| at |index| and emits the conditional write barrier.  The
// field write precedes the barrier so the GC observes the new reference.
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}
   1768 
   1769 
// Raw store that skips the incremental-marking barrier but still records the
// slot when |value| is in new space, so the store buffer stays correct.
// Only safe when the caller knows marking cannot observe the object
// (see DescriptorArray::WhitenessWitness below).
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}
   1782 
   1783 
// Raw store with no barrier at all; asserts the value is not in new space so
// neither the store buffer nor incremental marking needs to know about it.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!HEAP->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
   1792 
   1793 
   1794 void FixedArray::set_undefined(int index) {
   1795   ASSERT(map() != HEAP->fixed_cow_array_map());
   1796   set_undefined(GetHeap(), index);
   1797 }
   1798 
   1799 
// Stores undefined without a write barrier; safe because the undefined value
// is asserted to never live in new space.
void FixedArray::set_undefined(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              heap->undefined_value());
}
   1806 
   1807 
   1808 void FixedArray::set_null(int index) {
   1809   set_null(GetHeap(), index);
   1810 }
   1811 
   1812 
// Stores null without a write barrier; safe because the null value is
// asserted to never live in new space.
void FixedArray::set_null(Heap* heap, int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!heap->InNewSpace(heap->null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
   1818 
   1819 
// Stores the hole without a write barrier; safe because the hole value is
// asserted to never live in new space.
void FixedArray::set_the_hole(int index) {
  ASSERT(map() != HEAP->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
   1828 
   1829 
// Stores a Smi without bounds checks or a write barrier (Smis are immediate
// values, so no barrier is ever needed).
void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}
   1835 
   1836 
// Stores an arbitrary object without bounds or COW-map checks; the caller
// supplies the heap and barrier mode explicitly.
void FixedArray::set_unchecked(Heap* heap,
                               int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
}
   1845 
   1846 
   1847 void FixedArray::set_null_unchecked(Heap* heap, int index) {
   1848   ASSERT(index >= 0 && index < this->length());
   1849   ASSERT(!HEAP->InNewSpace(heap->null_value()));
   1850   WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
   1851 }
   1852 
   1853 
// Returns a pointer to the first element slot (just past the header).
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
   1857 
   1858 
// An empty descriptor array is represented either by a Smi sentinel or by an
// array too short to hold any descriptors (length <= kFirstIndex).
bool DescriptorArray::IsEmpty() {
  ASSERT(this->IsSmi() ||
         this->length() > kFirstIndex ||
         this == HEAP->empty_descriptor_array());
  return this->IsSmi() || length() <= kFirstIndex;
}
   1865 
   1866 
   1867 int DescriptorArray::bit_field3_storage() {
   1868   Object* storage = READ_FIELD(this, kBitField3StorageOffset);
   1869   return Smi::cast(storage)->value();
   1870 }
   1871 
// Writes the Smi-encoded bit_field3 backing store; no barrier needed since
// Smis are immediate values.
void DescriptorArray::set_bit_field3_storage(int value) {
  ASSERT(!IsEmpty());
  WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}
   1876 
   1877 
// Swaps two elements using the barrier-skipping store; only valid under a
// WhitenessWitness (incremental marking paused).
void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
                                                    int first,
                                                    int second) {
  Object* tmp = array->get(first);
  NoIncrementalWriteBarrierSet(array, first, array->get(second));
  NoIncrementalWriteBarrierSet(array, second, tmp);
}
   1885 
   1886 
   1887 int DescriptorArray::Search(String* name) {
   1888   SLOW_ASSERT(IsSortedNoDuplicates());
   1889 
   1890   // Check for empty descriptor array.
   1891   int nof = number_of_descriptors();
   1892   if (nof == 0) return kNotFound;
   1893 
   1894   // Fast case: do linear search for small arrays.
   1895   const int kMaxElementsForLinearSearch = 8;
   1896   if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
   1897     return LinearSearch(name, nof);
   1898   }
   1899 
   1900   // Slow case: perform binary search.
   1901   return BinarySearch(name, 0, nof - 1);
   1902 }
   1903 
   1904 
   1905 int DescriptorArray::SearchWithCache(String* name) {
   1906   int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
   1907   if (number == DescriptorLookupCache::kAbsent) {
   1908     number = Search(name);
   1909     GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
   1910   }
   1911   return number;
   1912 }
   1913 
   1914 
// Returns the property name of the descriptor at |descriptor_number|.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}
   1919 
   1920 
// Returns the value slot of the descriptor, stored in the content array.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}
   1925 
   1926 
// Returns the Smi-encoded PropertyDetails of the descriptor.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}
   1931 
   1932 
// Decodes the property type from the descriptor's details word.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}
   1937 
   1938 
// Decodes the in-object field index from the descriptor's value slot.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}
   1942 
   1943 
// Returns the value slot cast to a JSFunction (CONSTANT_FUNCTION entries).
JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}
   1947 
   1948 
// Returns the raw callbacks object; only valid for CALLBACKS descriptors.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}
   1953 
   1954 
   1955 AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
   1956   ASSERT(GetType(descriptor_number) == CALLBACKS);
   1957   Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
   1958   return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
   1959 }
   1960 
   1961 
// True when the descriptor describes a real property (delegates to the
// shared IsPropertyDescriptor predicate via an Entry adapter).
bool DescriptorArray::IsProperty(int descriptor_number) {
  Entry entry(this, descriptor_number);
  return IsPropertyDescriptor(&entry);
}
   1966 
   1967 
// True when the descriptor only encodes a map transition and no property on
// the object itself.  A CALLBACKS entry counts as transition-only when both
// halves of its AccessorPair are maps (accessor transitions).
bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
  switch (GetType(descriptor_number)) {
    case MAP_TRANSITION:
    case CONSTANT_TRANSITION:
    case ELEMENTS_TRANSITION:
      return true;
    case CALLBACKS: {
      Object* value = GetValue(descriptor_number);
      if (!value->IsAccessorPair()) return false;
      AccessorPair* accessors = AccessorPair::cast(value);
      return accessors->getter()->IsMap() && accessors->setter()->IsMap();
    }
    case NORMAL:
    case FIELD:
    case CONSTANT_FUNCTION:
    case HANDLER:
    case INTERCEPTOR:
    case NULL_DESCRIPTOR:
      return false;
  }
  UNREACHABLE();  // Keep the compiler happy.
  return false;
}
   1991 
   1992 
// True for NULL_DESCRIPTOR placeholder entries.
bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}
   1996 
   1997 
// True when the descriptor's details carry the DontEnum attribute.
bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}
   2001 
   2002 
// Copies key, value and details of the descriptor into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             PropertyDetails(GetDetails(descriptor_number)));
}
   2008 
   2009 
// Writes key, value and details of |desc| into the array using the
// incremental-barrier-skipping store; the WhitenessWitness argument proves
// marking is paused while the (white) arrays are mutated.
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               desc->GetKey());
  FixedArray* content_array = GetContentArray();
  NoIncrementalWriteBarrierSet(content_array,
                               ToValueIndex(descriptor_number),
                               desc->GetValue());
  NoIncrementalWriteBarrierSet(content_array,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}
   2027 
   2028 
// Swaps two whole descriptors (key + value + details) using the
// barrier-skipping swap helper.
void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
    int first, int second) {
  NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  NoIncrementalWriteBarrierSwap(content_array,
                                ToValueIndex(first),
                                ToValueIndex(second));
  NoIncrementalWriteBarrierSwap(content_array,
                                ToDetailsIndex(first),
                                ToDetailsIndex(second));
}
   2040 
   2041 
// RAII witness: while alive, incremental marking is held in a no-marking
// scope, making the NoIncrementalWriteBarrier* stores above safe.  The
// asserts check the arrays are still white (unmarked) when non-empty.
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  if (array->number_of_descriptors() > 0) {
    ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
    ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
  }
}
   2050 
   2051 
// Re-enables incremental marking when the witness goes out of scope.
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
   2055 
   2056 
   2057 template<typename Shape, typename Key>
   2058 int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
   2059   const int kMinCapacity = 32;
   2060   int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
   2061   if (capacity < kMinCapacity) {
   2062     capacity = kMinCapacity;  // Guarantee min capacity.
   2063   }
   2064   return capacity;
   2065 }
   2066 
   2067 
// Convenience overload using the table's own isolate.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}
   2072 
   2073 
// Find entry for key otherwise return kNotFound.
// Open-addressing probe: undefined terminates the chain, the hole marks a
// slot to probe past (presumably a deletion marker — it is skipped, not
// matched), anything else is compared via Shape::IsMatch.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
   2091 
   2092 
   2093 bool SeededNumberDictionary::requires_slow_elements() {
   2094   Object* max_index_object = get(kMaxNumberKeyIndex);
   2095   if (!max_index_object->IsSmi()) return false;
   2096   return 0 !=
   2097       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
   2098 }
   2099 
// Returns the largest numeric key seen, decoded from the tagged slot (the
// low bits hold the slow-elements flag, hence the shift).
uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}
   2107 
// Sets the slow-elements flag (this clobbers any stored max number key).
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
   2111 
   2112 
   2113 // ------------------------------------
   2114 // Cast operations
   2115 
   2116 
   2117 CAST_ACCESSOR(FixedArray)
   2118 CAST_ACCESSOR(FixedDoubleArray)
   2119 CAST_ACCESSOR(DescriptorArray)
   2120 CAST_ACCESSOR(DeoptimizationInputData)
   2121 CAST_ACCESSOR(DeoptimizationOutputData)
   2122 CAST_ACCESSOR(TypeFeedbackCells)
   2123 CAST_ACCESSOR(SymbolTable)
   2124 CAST_ACCESSOR(JSFunctionResultCache)
   2125 CAST_ACCESSOR(NormalizedMapCache)
   2126 CAST_ACCESSOR(ScopeInfo)
   2127 CAST_ACCESSOR(CompilationCacheTable)
   2128 CAST_ACCESSOR(CodeCacheHashTable)
   2129 CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
   2130 CAST_ACCESSOR(MapCache)
   2131 CAST_ACCESSOR(String)
   2132 CAST_ACCESSOR(SeqString)
   2133 CAST_ACCESSOR(SeqAsciiString)
   2134 CAST_ACCESSOR(SeqTwoByteString)
   2135 CAST_ACCESSOR(SlicedString)
   2136 CAST_ACCESSOR(ConsString)
   2137 CAST_ACCESSOR(ExternalString)
   2138 CAST_ACCESSOR(ExternalAsciiString)
   2139 CAST_ACCESSOR(ExternalTwoByteString)
   2140 CAST_ACCESSOR(JSReceiver)
   2141 CAST_ACCESSOR(JSObject)
   2142 CAST_ACCESSOR(Smi)
   2143 CAST_ACCESSOR(HeapObject)
   2144 CAST_ACCESSOR(HeapNumber)
   2145 CAST_ACCESSOR(Oddball)
   2146 CAST_ACCESSOR(JSGlobalPropertyCell)
   2147 CAST_ACCESSOR(SharedFunctionInfo)
   2148 CAST_ACCESSOR(Map)
   2149 CAST_ACCESSOR(JSFunction)
   2150 CAST_ACCESSOR(GlobalObject)
   2151 CAST_ACCESSOR(JSGlobalProxy)
   2152 CAST_ACCESSOR(JSGlobalObject)
   2153 CAST_ACCESSOR(JSBuiltinsObject)
   2154 CAST_ACCESSOR(Code)
   2155 CAST_ACCESSOR(JSArray)
   2156 CAST_ACCESSOR(JSRegExp)
   2157 CAST_ACCESSOR(JSProxy)
   2158 CAST_ACCESSOR(JSFunctionProxy)
   2159 CAST_ACCESSOR(JSSet)
   2160 CAST_ACCESSOR(JSMap)
   2161 CAST_ACCESSOR(JSWeakMap)
   2162 CAST_ACCESSOR(Foreign)
   2163 CAST_ACCESSOR(ByteArray)
   2164 CAST_ACCESSOR(FreeSpace)
   2165 CAST_ACCESSOR(ExternalArray)
   2166 CAST_ACCESSOR(ExternalByteArray)
   2167 CAST_ACCESSOR(ExternalUnsignedByteArray)
   2168 CAST_ACCESSOR(ExternalShortArray)
   2169 CAST_ACCESSOR(ExternalUnsignedShortArray)
   2170 CAST_ACCESSOR(ExternalIntArray)
   2171 CAST_ACCESSOR(ExternalUnsignedIntArray)
   2172 CAST_ACCESSOR(ExternalFloatArray)
   2173 CAST_ACCESSOR(ExternalDoubleArray)
   2174 CAST_ACCESSOR(ExternalPixelArray)
   2175 CAST_ACCESSOR(Struct)
   2176 
   2177 
   2178 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
   2179   STRUCT_LIST(MAKE_STRUCT_CAST)
   2180 #undef MAKE_STRUCT_CAST
   2181 
   2182 
// Checked cast for the templated hash table (cannot use CAST_ACCESSOR here
// because of the template parameters).
template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
   2188 
   2189 
// Smi-encoded field accessors (macro defined elsewhere in this file).
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
   2194 
   2195 
// Returns the raw hash field (hash plus flag bits).
uint32_t String::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}
   2199 
   2200 
// Writes the hash field.  On 64-bit hosts the adjacent upper 32 bits of the
// word are zeroed so the full word has a deterministic value.
void String::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
   2207 
   2208 
   2209 bool String::Equals(String* other) {
   2210   if (other == this) return true;
   2211   if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
   2212     return false;
   2213   }
   2214   return SlowEquals(other);
   2215 }
   2216 
   2217 
// Attempts to produce a flat version of this string.  Non-cons strings are
// already flat; a cons whose second part is empty flattens to its first
// part; otherwise the slow path may allocate (hence MaybeObject).
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}
   2224 
   2225 
   2226 String* String::TryFlattenGetString(PretenureFlag pretenure) {
   2227   MaybeObject* flat = TryFlatten(pretenure);
   2228   Object* successfully_flattened;
   2229   if (!flat->ToObject(&successfully_flattened)) return this;
   2230   return String::cast(successfully_flattened);
   2231 }
   2232 
   2233 
// Returns the character at |index| by dispatching on the string's full
// representation tag (sequential/cons/external/sliced x ascii/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kAsciiStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
   2258 
   2259 
   2260 void String::Set(int index, uint16_t value) {
   2261   ASSERT(index >= 0 && index < length());
   2262   ASSERT(StringShape(this).IsSequential());
   2263 
   2264   return this->IsAsciiRepresentation()
   2265       ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
   2266       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
   2267 }
   2268 
   2269 
   2270 bool String::IsFlat() {
   2271   if (!StringShape(this).IsCons()) return true;
   2272   return ConsString::cast(this)->second()->length() == 0;
   2273 }
   2274 
   2275 
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  // The static assert lets one offset serve both cons and sliced strings.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
   2285 
   2286 
// Reads the byte-wide character at |index|.
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
   2291 
   2292 
// Writes the byte-wide character at |index|; |value| must fit in ASCII.
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}
   2298 
   2299 
// Address of the first character (just past the header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
   2303 
   2304 
// Typed view of the character storage.
char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}
   2308 
   2309 
// Address of the first character (just past the header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
   2313 
   2314 
   2315 uc16* SeqTwoByteString::GetChars() {
   2316   return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
   2317 }
   2318 
   2319 
// Reads the 16-bit character at |index|.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}
   2324 
   2325 
// Writes the 16-bit character at |index|.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}
   2330 
   2331 
// Object size in bytes; instance_type is unused (size depends only on
// length).
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
   2335 
   2336 
// Object size in bytes; instance_type is unused (size depends only on
// length).
int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
   2340 
   2341 
   2342 String* SlicedString::parent() {
   2343   return String::cast(READ_FIELD(this, kParentOffset));
   2344 }
   2345 
   2346 
// Sets the backing string; slices may only point at sequential or external
// strings (never at other cons/sliced strings).  NOTE(review): no write
// barrier is emitted here — presumably the caller guarantees one is not
// needed; confirm at call sites.
void SlicedString::set_parent(String* parent) {
  ASSERT(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
}
   2351 
   2352 
// Smi-encoded start offset of the slice within its parent.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
   2354 
   2355 
   2356 String* ConsString::first() {
   2357   return String::cast(READ_FIELD(this, kFirstOffset));
   2358 }
   2359 
   2360 
// First half without the String type check (for GC/verification code).
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}
   2364 
   2365 
// Stores the first half with a conditional write barrier.
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}
   2370 
   2371 
   2372 String* ConsString::second() {
   2373   return String::cast(READ_FIELD(this, kSecondOffset));
   2374 }
   2375 
   2376 
// Second half without the String type check (for GC/verification code).
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}
   2380 
   2381 
// Stores the second half with a conditional write barrier.
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
   2386 
   2387 
// True for "short" external strings, identified by the instance-type bits
// (short externals do not carry the cached data pointer — see
// update_data_cache below).
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
   2392 
   2393 
// The embedder-provided resource backing this external string.
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
   2397 
   2398 
// Copies the resource's data pointer into the object so later reads avoid a
// call through the resource; short external strings carry no such cache.
void ExternalAsciiString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
   2405 
   2406 
// Installs a new backing resource and refreshes the cached data pointer
// (unless the resource is being cleared).
void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}
   2413 
   2414 
// Character data, fetched from the resource.
const char* ExternalAsciiString::GetChars() {
  return resource()->data();
}
   2418 
   2419 
   2420 uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
   2421   ASSERT(index >= 0 && index < length());
   2422   return GetChars()[index];
   2423 }
   2424 
   2425 
// The embedder-provided resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
   2429 
   2430 
// Copies the resource's data pointer into the object so later reads avoid a
// call through the resource; short external strings carry no such cache.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
   2437 
   2438 
// Installs a new backing resource and refreshes the cached data pointer
// (unless the resource is being cleared).
void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}
   2445 
   2446 
// Character data, fetched from the resource.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}
   2450 
   2451 
   2452 uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
   2453   ASSERT(index >= 0 && index < length());
   2454   return GetChars()[index];
   2455 }
   2456 
   2457 
// Pointer into the external data beginning at character |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
   2462 
   2463 
// Resets size and finger to the first entry slot, logically emptying the
// cache without touching the entries themselves.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}
   2468 
   2469 
   2470 void JSFunctionResultCache::Clear() {
   2471   int cache_size = size();
   2472   Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
   2473   MemsetPointer(entries_start,
   2474                 GetHeap()->the_hole_value(),
   2475                 cache_size - kEntriesIndex);
   2476   MakeZeroSize();
   2477 }
   2478 
   2479 
   2480 int JSFunctionResultCache::size() {
   2481   return Smi::cast(get(kCacheSizeIndex))->value();
   2482 }
   2483 
   2484 
// Stores the used-slot count as a Smi.
void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}
   2488 
   2489 
// Current finger position (Smi-encoded in a fixed element).
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}
   2493 
   2494 
// Stores the finger position as a Smi.
void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
   2498 
   2499 
// Reads the byte at |index|.
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
   2504 
   2505 
// Writes the byte at |index|; plain data, so no write barrier is needed.
void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
   2510 
   2511 
// Reads the int-sized word at int index |index| (note: |index| counts ints,
// not bytes, hence the kIntSize scaling in both the assert and the read).
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}
   2516 
   2517 
   2518 ByteArray* ByteArray::FromDataStartAddress(Address address) {
   2519   ASSERT_TAG_ALIGNED(address);
   2520   return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
   2521 }
   2522 
   2523 
// Address of the first data byte (untags the pointer, skips the header).
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
   2527 
   2528 
// Typed view of the off-heap backing store.
uint8_t* ExternalPixelArray::external_pixel_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}
   2532 
   2533 
   2534 uint8_t ExternalPixelArray::get_scalar(int index) {
   2535   ASSERT((index >= 0) && (index < this->length()));
   2536   uint8_t* ptr = external_pixel_pointer();
   2537   return ptr[index];
   2538 }
   2539 
   2540 
// Boxes the pixel value; always fits in a Smi, so this never allocates.
MaybeObject* ExternalPixelArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
   2544 
   2545 
// Writes the pixel byte at |index| into the off-heap store.
void ExternalPixelArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_pixel_pointer();
  ptr[index] = value;
}
   2551 
   2552 
// Off-heap backing store address, stored as an untagged intptr field.
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}
   2557 
   2558 
// Stores the off-heap backing store address.  |mode| is unused: the field
// holds a raw pointer, not a heap object, so no barrier applies.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
   2563 
   2564 
   2565 int8_t ExternalByteArray::get_scalar(int index) {
   2566   ASSERT((index >= 0) && (index < this->length()));
   2567   int8_t* ptr = static_cast<int8_t*>(external_pointer());
   2568   return ptr[index];
   2569 }
   2570 
   2571 
// Boxes the element; int8 always fits in a Smi, so this never allocates.
MaybeObject* ExternalByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
   2575 
   2576 
// Writes the int8 element at |index| into the off-heap store.
void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
   2582 
   2583 
// Reads the uint8 element at |index| from the off-heap store.
uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}
   2589 
   2590 
// Boxes the element; uint8 always fits in a Smi, so this never allocates.
MaybeObject* ExternalUnsignedByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
   2594 
   2595 
// Writes the uint8 element at |index| into the off-heap store.
void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
   2601 
   2602 
// Reads the int16 element at |index| from the off-heap store.
int16_t ExternalShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}
   2608 
   2609 
// Boxes the element; int16 always fits in a Smi, so this never allocates.
MaybeObject* ExternalShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
   2613 
   2614 
   2615 void ExternalShortArray::set(int index, int16_t value) {
   2616   ASSERT((index >= 0) && (index < this->length()));
   2617   int16_t* ptr = static_cast<int16_t*>(external_pointer());
   2618   ptr[index] = value;
   2619 }
   2620 
   2621 
// Reads the uint16 element at |index| from the external store.
uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


// Returns the element at |index| boxed as a Smi (uint16_t always fits).
MaybeObject* ExternalUnsignedShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


// Writes |value| at |index| in the external store.
void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
   2639 
   2640 
// Reads the int32 element at |index| from the external store.
int32_t ExternalIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


// Boxes the element via the heap: int32 may exceed the Smi range, so
// this can allocate a HeapNumber (hence MaybeObject).
MaybeObject* ExternalIntArray::get(int index) {
    return GetHeap()->NumberFromInt32(get_scalar(index));
}


// Writes |value| at |index| in the external store.
void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}
   2658 
   2659 
// Reads the uint32 element at |index| from the external store.
uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


// Boxes the element via the heap: uint32 may exceed the Smi range, so
// this can allocate a HeapNumber (hence MaybeObject).
MaybeObject* ExternalUnsignedIntArray::get(int index) {
    return GetHeap()->NumberFromUint32(get_scalar(index));
}


// Writes |value| at |index| in the external store.
void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
   2677 
   2678 
// Reads the float element at |index| from the external store.
float ExternalFloatArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


// Boxes the element as a heap number; allocation may fail (MaybeObject).
MaybeObject* ExternalFloatArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


// Writes |value| at |index| in the external store.
void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
   2696 
   2697 
// Reads the double element at |index| from the external store.
double ExternalDoubleArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


// Boxes the element as a heap number; allocation may fail (MaybeObject).
MaybeObject* ExternalDoubleArray::get(int index) {
    return GetHeap()->NumberFromDouble(get_scalar(index));
}


// Writes |value| at |index| in the external store.
void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
   2715 
   2716 
// GC visitor id for this map; set_visitor_id enforces the 0..255 range.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes.  Stored compressed as a byte holding
// size >> kPointerSizeLog2 (see set_instance_size).
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Number of properties stored directly inside the object.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


// Number of property fields pre-allocated at construction time.
int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
   2741 
   2742 
// Computes this object's allocation size in bytes as described by |map|.
// Fixed-size types answer straight from the map; variable-sized types
// (signalled by kVariableSizeSentinel) are measured from the object's
// own length fields, dispatched on instance type.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // We can ignore the "symbol" bit because it is only set for symbols
  // and implies a string type.
  int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
  // Only inline the most frequent cases.
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE) {
    return SeqAsciiString::SizeFor(
        reinterpret_cast<SeqAsciiString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->size();
  }
  if (instance_type == STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  // Code is the only remaining variable-sized instance type handled here.
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
   2774 
   2775 
// Stores the instance size compressed by kPointerSizeLog2; the size must
// be pointer-aligned and fit in a byte after shifting.
void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


// Byte-sized counter; must fit in 0..255.
void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


// Byte-sized counter; must fit in 0..255.
void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
   2796 
   2797 
// Instance type tag of objects described by this map.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


// Number of allocated-but-unused property fields.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


// Clamped to 255 since the field is a single byte.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


// Raw access to the first byte of packed boolean map flags.
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


// Raw access to the second byte of packed boolean map flags.
byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
   2836 
   2837 
   2838 void Map::set_non_instance_prototype(bool value) {
   2839   if (value) {
   2840     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
   2841   } else {
   2842     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
   2843   }
   2844 }
   2845 
   2846 
   2847 bool Map::has_non_instance_prototype() {
   2848   return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
   2849 }
   2850 
   2851 
   2852 void Map::set_function_with_prototype(bool value) {
   2853   if (value) {
   2854     set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
   2855   } else {
   2856     set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
   2857   }
   2858 }
   2859 
   2860 
   2861 bool Map::function_with_prototype() {
   2862   return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
   2863 }
   2864 
   2865 
   2866 void Map::set_is_access_check_needed(bool access_check_needed) {
   2867   if (access_check_needed) {
   2868     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
   2869   } else {
   2870     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
   2871   }
   2872 }
   2873 
   2874 
   2875 bool Map::is_access_check_needed() {
   2876   return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
   2877 }
   2878 
   2879 
   2880 void Map::set_is_extensible(bool value) {
   2881   if (value) {
   2882     set_bit_field2(bit_field2() | (1 << kIsExtensible));
   2883   } else {
   2884     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
   2885   }
   2886 }
   2887 
   2888 bool Map::is_extensible() {
   2889   return ((1 << kIsExtensible) & bit_field2()) != 0;
   2890 }
   2891 
   2892 
   2893 void Map::set_attached_to_shared_function_info(bool value) {
   2894   if (value) {
   2895     set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
   2896   } else {
   2897     set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
   2898   }
   2899 }
   2900 
   2901 bool Map::attached_to_shared_function_info() {
   2902   return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
   2903 }
   2904 
   2905 
   2906 void Map::set_is_shared(bool value) {
   2907   if (value) {
   2908     set_bit_field3(bit_field3() | (1 << kIsShared));
   2909   } else {
   2910     set_bit_field3(bit_field3() & ~(1 << kIsShared));
   2911   }
   2912 }
   2913 
   2914 bool Map::is_shared() {
   2915   return ((1 << kIsShared) & bit_field3()) != 0;
   2916 }
   2917 
   2918 
// Reads the constructor slot without a type check (reinterpret_cast
// instead of JSFunction::cast) -- usable while maps are GC-marked.
JSFunction* Map::unchecked_constructor() {
  return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}
   2922 
   2923 
// The packed flags word (kind, IC state, type, extra state, argc,
// cache-holder -- see ComputeFlags for the encoding).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
   2937 
   2938 
// Kind of code object (STUB, FUNCTION, ...), decoded from flags.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


// IC state decoded from flags; asserts it is meaningful for this code.
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}


// Extra IC state; only valid for inline cache stubs (asserted).
Code::ExtraICState Code::extra_ic_state() {
  ASSERT(is_inline_cache_stub());
  return ExtractExtraICStateFromFlags(flags());
}


// Property type decoded from flags.
PropertyType Code::type() {
  return ExtractTypeFromFlags(flags());
}
   2966 
   2967 
// Argument count encoded in flags; only valid for call stubs and STUBs.
int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}


// Major key identifying which stub generator produced this code; only
// valid for the stub-like kinds asserted below.
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == TO_BOOLEAN_IC);
  return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == UNARY_OP_IC ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == TO_BOOLEAN_IC);
  // Byte-sized field.
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}
   2993 
   2994 
   2995 bool Code::is_pregenerated() {
   2996   return kind() == STUB && IsPregeneratedField::decode(flags());
   2997 }
   2998 
   2999 
   3000 void Code::set_is_pregenerated(bool value) {
   3001   ASSERT(kind() == STUB);
   3002   Flags f = flags();
   3003   f = static_cast<Flags>(IsPregeneratedField::update(f, value));
   3004   set_flags(f);
   3005 }
   3006 
   3007 
// Whether this full-code FUNCTION may be optimized (stored as 0/1 byte).
bool Code::optimizable() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
   3018 
   3019 
// Bit in the kFullCodeFlags byte: whether deoptimization support was
// compiled in.  FUNCTION code only.
bool Code::has_deoptimization_support() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


// Bit in the kFullCodeFlags byte: whether debug break slots were emitted.
bool Code::has_debug_break_slots() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
   3048 
   3049 
// Bit in the kFullCodeFlags byte: whether the code was compiled with
// optimization enabled.  FUNCTION code only.
bool Code::is_compiled_optimizable() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


// Bit in the kFullCodeFlags byte: whether a self-optimization header
// was emitted.  FUNCTION code only.
bool Code::has_self_optimization_header() {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasSelfOptimizationHeader::decode(flags);
}


void Code::set_self_optimization_header(bool value) {
  ASSERT(kind() == FUNCTION);
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasSelfOptimizationHeader::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
   3078 
   3079 
// Maximum loop nesting level at which on-stack replacement is allowed.
// FUNCTION code only.
int Code::allow_osr_at_loop_nesting_level() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT(kind() == FUNCTION);
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


// Profiler tick counter (byte-sized).  FUNCTION code only.
int Code::profiler_ticks() {
  ASSERT(kind() == FUNCTION);
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  ASSERT(kind() == FUNCTION);
  ASSERT(ticks < 256);
  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}
   3104 
   3105 
// Number of stack slots reserved by this code.  OPTIMIZED_FUNCTION only.
unsigned Code::stack_slots() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kStackSlotsOffset);
}


void Code::set_stack_slots(unsigned slots) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
}


// Byte offset of the safepoint table within this code object.
// OPTIMIZED_FUNCTION only; must be int-aligned.
unsigned Code::safepoint_table_offset() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
}


void Code::set_safepoint_table_offset(unsigned offset) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}


// Byte offset of the stack-check table.  FUNCTION code only; int-aligned.
unsigned Code::stack_check_table_offset() {
  ASSERT(kind() == FUNCTION);
  return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}


void Code::set_stack_check_table_offset(unsigned offset) {
  ASSERT(kind() == FUNCTION);
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
   3142 
   3143 
// Receiver check kind for (keyed) call stubs, stored as a byte.
CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}
   3155 
   3156 
// Operand-type byte recorded by unary-op IC stubs.
byte Code::unary_op_type() {
  ASSERT(is_unary_op_stub());
  return READ_BYTE_FIELD(this, kUnaryOpTypeOffset);
}


void Code::set_unary_op_type(byte value) {
  ASSERT(is_unary_op_stub());
  WRITE_BYTE_FIELD(this, kUnaryOpTypeOffset, value);
}


// Operand-type byte recorded by binary-op IC stubs.
byte Code::binary_op_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
}


void Code::set_binary_op_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
}


// Result-type byte recorded by binary-op IC stubs.
byte Code::binary_op_result_type() {
  ASSERT(is_binary_op_stub());
  return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
}


void Code::set_binary_op_result_type(byte value) {
  ASSERT(is_binary_op_stub());
  WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
}
   3191 
   3192 
// State byte recorded by compare IC stubs.
byte Code::compare_state() {
  ASSERT(is_compare_ic_stub());
  return READ_BYTE_FIELD(this, kCompareStateOffset);
}


void Code::set_compare_state(byte value) {
  ASSERT(is_compare_ic_stub());
  WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}


// State byte recorded by ToBoolean IC stubs.
byte Code::to_boolean_state() {
  ASSERT(is_to_boolean_ic_stub());
  return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
}


void Code::set_to_boolean_state(byte value) {
  ASSERT(is_to_boolean_ic_stub());
  WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
}
   3215 
   3216 
// Whether this STUB carries a function cache (stored as a byte flag).
bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
}


void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
}
   3227 
   3228 
   3229 bool Code::is_inline_cache_stub() {
   3230   Kind kind = this->kind();
   3231   return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
   3232 }
   3233 
   3234 
// Packs kind, IC state, property type, extra IC state, argument count
// and cache-holder flag into one Flags word (the inverse of the
// Extract*FromFlags helpers below).
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               PropertyType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  // Extra IC state is only allowed for call IC stubs or for store IC
  // stubs.
  ASSERT(extra_ic_state == kNoExtraICState ||
         kind == CALL_IC ||
         kind == STORE_IC ||
         kind == KEYED_STORE_IC);
  // Compute the bit mask.
  int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | (argc << kArgumentsCountShift)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}
   3256 
   3257 
// Convenience wrapper around ComputeFlags with ic_state fixed to
// MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          ExtraICState extra_ic_state,
                                          InlineCacheHolderFlag holder,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}
   3265 
   3266 
// Decoders for the individual bit fields packed by ComputeFlags.
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


PropertyType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


// Argument count occupies the bits above kArgumentsCountShift.
int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}
   3295 
   3296 
   3297 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
   3298   int bits = flags & ~TypeField::kMask;
   3299   return static_cast<Flags>(bits);
   3300 }
   3301 
   3302 
// Maps an instruction-start address back to its enclosing Code object
// by stepping back over the code header.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
   3312 
   3313 
// Dereferences a stored code-entry address and converts it back to the
// heap object that contains it (entry points just past the header).
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
   3318 
   3319 
// Prototype object for instances of this map.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// The prototype must be null or a JSReceiver; a conditional write
// barrier records the store for the GC.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
   3330 
   3331 
   3332 DescriptorArray* Map::instance_descriptors() {
   3333   Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
   3334   if (object->IsSmi()) {
   3335     return GetHeap()->empty_descriptor_array();
   3336   } else {
   3337     return DescriptorArray::cast(object);
   3338   }
   3339 }
   3340 
   3341 
// Initializes the overloaded slot to Smi(0): empty descriptors and a
// zeroed bit_field3.
void Map::init_instance_descriptors() {
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, Smi::FromInt(0));
}


// Drops the descriptor array (if any) while preserving bit_field3 by
// collapsing the slot back to its Smi encoding.
void Map::clear_instance_descriptors() {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (!object->IsSmi()) {
    WRITE_FIELD(
        this,
        kInstanceDescriptorsOrBitField3Offset,
        Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
  }
}
   3357 
   3358 
// Installs |value| as this map's descriptor array.  The bit_field3
// value currently held either directly in the Smi slot or inside the
// outgoing descriptor array is migrated into |value|'s storage first.
// Installing the empty descriptor array instead reverts the slot to
// its Smi encoding via clear_instance_descriptors().
void Map::set_instance_descriptors(DescriptorArray* value,
                                   WriteBarrierMode mode) {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  Heap* heap = GetHeap();
  if (value == heap->empty_descriptor_array()) {
    clear_instance_descriptors();
    return;
  } else {
    if (object->IsSmi()) {
      // Slot held bit_field3 directly; copy it into the new array.
      value->set_bit_field3_storage(Smi::cast(object)->value());
    } else {
      // Carry bit_field3 over from the old descriptor array.
      value->set_bit_field3_storage(
          DescriptorArray::cast(object)->bit_field3_storage());
    }
  }
  ASSERT(!is_shared());
  WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
  CONDITIONAL_WRITE_BARRIER(
      heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
}
   3380 
   3381 
// bit_field3 lives either directly in the overloaded slot (as a Smi)
// or inside the installed descriptor array's storage word.
int Map::bit_field3() {
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    return Smi::cast(object)->value();
  } else {
    return DescriptorArray::cast(object)->bit_field3_storage();
  }
}


// Writes bit_field3 to whichever representation the slot currently uses.
void Map::set_bit_field3(int value) {
  ASSERT(Smi::IsValid(value));
  Object* object = READ_FIELD(this,
                              kInstanceDescriptorsOrBitField3Offset);
  if (object->IsSmi()) {
    WRITE_FIELD(this,
                kInstanceDescriptorsOrBitField3Offset,
                Smi::FromInt(value));
  } else {
    DescriptorArray::cast(object)->set_bit_field3_storage(value);
  }
}
   3405 
   3406 
// Reads the prototype-transitions slot without a type check
// (reinterpret_cast instead of FixedArray::cast) -- safe during GC.
FixedArray* Map::unchecked_prototype_transitions() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kPrototypeTransitionsOffset));
}
   3411 
   3412 
   3413 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
   3414 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
   3415 ACCESSORS(Map, constructor, Object, kConstructorOffset)
   3416 
   3417 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
   3418 ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
   3419 ACCESSORS(JSFunction,
   3420           next_function_link,
   3421           Object,
   3422           kNextFunctionLinkOffset)
   3423 
   3424 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
   3425 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
   3426 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
   3427 
   3428 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
   3429 
   3430 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
   3431 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
   3432 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
   3433 ACCESSORS(AccessorInfo, name, Object, kNameOffset)
   3434 ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
   3435 
   3436 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
   3437 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
   3438 
   3439 ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
   3440 ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
   3441 ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
   3442 
   3443 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
   3444 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
   3445 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
   3446 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
   3447 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
   3448 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
   3449 
   3450 ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
   3451 ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
   3452 
   3453 ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
   3454 ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
   3455 
   3456 ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
   3457 ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
   3458 ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
   3459           kPropertyAccessorsOffset)
   3460 ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
   3461           kPrototypeTemplateOffset)
// Generated field accessors for the API template objects
// (FunctionTemplateInfo / ObjectTemplateInfo / SignatureInfo /
// TypeSwitchInfo) and for Script.
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
// 'flag' is a Smi bit field; the individual bits are exposed through the
// BOOL_ACCESSORS declarations further down in this file.
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

// Script fields: source text, name/offsets for positioning, and
// eval-origin bookkeeping.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
// NOTE(review): the constant name spells "instructions" with a lower-case
// 'i' (kEvalFrominstructionsOffsetOffset); it is declared elsewhere, so it
// cannot be renamed here.
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
   3501 
#ifdef ENABLE_DEBUGGER_SUPPORT
// Per-function debug bookkeeping; only compiled into builds with
// debugger support.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
   3513 
// Pointer-valued and Smi-valued fields of SharedFunctionInfo.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
          kThisPropertyAssignmentsOffset)
SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
   3526 
   3527 
// Single-bit boolean accessors.  The FunctionTemplateInfo bits live in the
// 'flag' Smi field; the SharedFunctionInfo bits live in
// 'start_position_and_type' or 'compiler_hints'.
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            has_only_simple_this_property_assignments,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
   3555 
   3556 
   3557 #if V8_HOST_ARCH_32_BIT
   3558 SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
   3559 SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
   3560               kFormalParameterCountOffset)
   3561 SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
   3562               kExpectedNofPropertiesOffset)
   3563 SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
   3564 SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
   3565               kStartPositionAndTypeOffset)
   3566 SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
   3567 SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
   3568               kFunctionTokenPositionOffset)
   3569 SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
   3570               kCompilerHintsOffset)
   3571 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
   3572               kThisPropertyAssignmentsCountOffset)
   3573 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
   3574 SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
   3575 SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
   3576 #else
   3577 
   3578 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
   3579   STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
   3580   int holder::name() {                                            \
   3581     int value = READ_INT_FIELD(this, offset);                     \
   3582     ASSERT(kHeapObjectTag == 1);                                  \
   3583     ASSERT((value & kHeapObjectTag) == 0);                        \
   3584     return value >> 1;                                            \
   3585   }                                                               \
   3586   void holder::set_##name(int value) {                            \
   3587     ASSERT(kHeapObjectTag == 1);                                  \
   3588     ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
   3589            (value & 0xC0000000) == 0x000000000);                  \
   3590     WRITE_INT_FIELD(this,                                         \
   3591                     offset,                                       \
   3592                     (value << 1) & ~kHeapObjectTag);              \
   3593   }
   3594 
   3595 #define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
   3596   STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
   3597   INT_ACCESSORS(holder, name, offset)
   3598 
   3599 
   3600 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
   3601 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
   3602                         formal_parameter_count,
   3603                         kFormalParameterCountOffset)
   3604 
   3605 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
   3606                         expected_nof_properties,
   3607                         kExpectedNofPropertiesOffset)
   3608 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
   3609 
   3610 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
   3611 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
   3612                         start_position_and_type,
   3613                         kStartPositionAndTypeOffset)
   3614 
   3615 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
   3616                         function_token_position,
   3617                         kFunctionTokenPositionOffset)
   3618 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
   3619                         compiler_hints,
   3620                         kCompilerHintsOffset)
   3621 
   3622 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
   3623                         this_property_assignments_count,
   3624                         kThisPropertyAssignmentsCountOffset)
   3625 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
   3626 
   3627 PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
   3628 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
   3629 #endif
   3630 
   3631 
// Counter stored as a single raw byte at kConstructionCountOffset.
int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  // The counter occupies one byte, so the value must fit in [0, 255].
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}
   3641 
   3642 
// Compiler-hints bit: whether live instances of this function may exist.
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)
   3647 
   3648 
   3649 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
   3650   return initial_map() != GetHeap()->undefined_value();
   3651 }
   3652 
   3653 
// Getter only: the setter is hand-written below because it also has to
// update the code object.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
   3658 
   3659 
   3660 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
   3661   set_compiler_hints(BooleanBit::set(compiler_hints(),
   3662                                      kOptimizationDisabled,
   3663                                      disable));
   3664   // If disabling optimizations we reflect that in the code object so
   3665   // it will not be counted as optimizable code.
   3666   if ((code()->kind() == Code::FUNCTION) && disable) {
   3667     code()->set_optimizable(false);
   3668   }
   3669 }
   3670 
   3671 
   3672 LanguageMode SharedFunctionInfo::language_mode() {
   3673   int hints = compiler_hints();
   3674   if (BooleanBit::get(hints, kExtendedModeFunction)) {
   3675     ASSERT(BooleanBit::get(hints, kStrictModeFunction));
   3676     return EXTENDED_MODE;
   3677   }
   3678   return BooleanBit::get(hints, kStrictModeFunction)
   3679       ? STRICT_MODE : CLASSIC_MODE;
   3680 }
   3681 
   3682 
   3683 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
   3684   // We only allow language mode transitions that go set the same language mode
   3685   // again or go up in the chain:
   3686   //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
   3687   ASSERT(this->language_mode() == CLASSIC_MODE ||
   3688          this->language_mode() == language_mode ||
   3689          language_mode == EXTENDED_MODE);
   3690   int hints = compiler_hints();
   3691   hints = BooleanBit::set(
   3692       hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
   3693   hints = BooleanBit::set(
   3694       hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
   3695   set_compiler_hints(hints);
   3696 }
   3697 
   3698 
   3699 bool SharedFunctionInfo::is_classic_mode() {
   3700   return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
   3701 }
   3702 
// Remaining compiler-hints bits, plus the code-cache field accessors.
BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
            kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
   3720 
   3721 bool Script::HasValidSource() {
   3722   Object* src = this->source();
   3723   if (!src->IsString()) return true;
   3724   String* src_str = String::cast(src);
   3725   if (!StringShape(src_str).IsExternal()) return true;
   3726   if (src_str->IsAsciiRepresentation()) {
   3727     return ExternalAsciiString::cast(src)->resource() != NULL;
   3728   } else if (src_str->IsTwoByteRepresentation()) {
   3729     return ExternalTwoByteString::cast(src)->resource() != NULL;
   3730   }
   3731   return true;
   3732 }
   3733 
   3734 
// Marks a builtin as not needing an arguments adaptor frame by setting the
// formal parameter count to the sentinel (see NeedsArgumentsAdaption).
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
   3739 
   3740 
// The start position shares a field with the type bits
// (start_position_and_type); shift and mask to extract or update it.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  // Preserve the low (type) bits while replacing the position bits.
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
   3750 
   3751 
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Same as code() but without the checked cast (safe during GC when the
// object graph may be in an intermediate state).
Code* SharedFunctionInfo::unchecked_code() {
  return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  // Record the write for the GC unless 'mode' says the barrier can be
  // skipped.
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


ScopeInfo* SharedFunctionInfo::scope_info() {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
   3782 
   3783 
// A function is compiled once its code is no longer the lazy-compile stub.
// NOTE(review): this uses Isolate::Current() while JSFunction::is_compiled
// below uses GetIsolate(); consider unifying.
bool SharedFunctionInfo::is_compiled() {
  return code() !=
      Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
}


// function_data doubles as a tagged union: a FunctionTemplateInfo for API
// functions, or a Smi builtin-function id.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  ASSERT(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  ASSERT(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
   3810 
   3811 
   3812 int SharedFunctionInfo::code_age() {
   3813   return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
   3814 }
   3815 
   3816 
   3817 void SharedFunctionInfo::set_code_age(int code_age) {
   3818   int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
   3819   set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
   3820 }
   3821 
   3822 
   3823 bool SharedFunctionInfo::has_deoptimization_support() {
   3824   Code* code = this->code();
   3825   return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
   3826 }
   3827 
   3828 
// A function is a builtin when its context's global object is the
// builtins object.
bool JSFunction::IsBuiltin() {
  return context()->global()->IsJSBuiltinsObject();
}


// Arguments adaption is skipped for functions whose parameter count is the
// don't-adapt sentinel (see SharedFunctionInfo::DontAdaptArguments).
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


// Marked for lazy recompilation when the code is the lazy-recompile stub.
bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
   3853 
   3854 
Code* JSFunction::code() {
  return Code::cast(unchecked_code());
}


// The function stores the code ENTRY address (not the Code pointer);
// recover the Code object from the entry address.
Code* JSFunction::unchecked_code() {
  return reinterpret_cast<Code*>(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  ASSERT(!HEAP->InNewSpace(value));
  Address entry = value->entry();
  // Store the raw entry address, then notify the incremental marker of the
  // code-entry write (ordinary write barriers do not cover this slot).
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}
   3875 
   3876 
   3877 void JSFunction::ReplaceCode(Code* code) {
   3878   bool was_optimized = IsOptimized();
   3879   bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
   3880 
   3881   set_code(code);
   3882 
   3883   // Add/remove the function from the list of optimized functions for this
   3884   // context based on the state change.
   3885   if (!was_optimized && is_optimized) {
   3886     context()->global_context()->AddOptimizedFunction(this);
   3887   }
   3888   if (was_optimized && !is_optimized) {
   3889     context()->global_context()->RemoveOptimizedFunction(this);
   3890   }
   3891 }
   3892 
   3893 
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Unchecked variant: no Context::cast type check.
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


// Unchecked variant: no SharedFunctionInfo type check.
SharedFunctionInfo* JSFunction::unchecked_shared() {
  return reinterpret_cast<SharedFunctionInfo*>(
      READ_FIELD(this, kSharedFunctionInfoOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}
   3914 }
   3915 
// Dual-use slot: holds the initial Map once set, otherwise the prototype
// object (or the-hole when neither exists); see the helpers below.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
   3918 
   3919 
// Callers must check has_initial_map() first; the cast asserts in debug.
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}
   3928 
   3929 
// Installs 'initial_map' as this function's initial map.  When this
// function is the global Array constructor, additionally creates maps for
// the FAST_DOUBLE_ELEMENTS and FAST_ELEMENTS kinds, wires up the
// elements-kind transitions between them, and caches all three maps on the
// global context.  Any allocation failure is propagated to the caller.
MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
    Map* initial_map) {
  Context* global_context = context()->global_context();
  Object* array_function =
      global_context->get(Context::ARRAY_FUNCTION_INDEX);
  if (array_function->IsJSFunction() &&
      this == JSFunction::cast(array_function)) {
    ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);

    // Smi-only -> double transition map.
    MaybeObject* maybe_map = initial_map->CopyDropTransitions();
    Map* new_double_map = NULL;
    if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
    new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
    maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
                                                   new_double_map);
    if (maybe_map->IsFailure()) return maybe_map;

    // Double -> object transition map.
    maybe_map = new_double_map->CopyDropTransitions();
    Map* new_object_map = NULL;
    if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
    new_object_map->set_elements_kind(FAST_ELEMENTS);
    maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
                                                      new_object_map);
    if (maybe_map->IsFailure()) return maybe_map;

    global_context->set_smi_js_array_map(initial_map);
    global_context->set_double_js_array_map(new_double_map);
    global_context->set_object_js_array_map(new_object_map);
  }
  set_initial_map(initial_map);
  return this;
}
   3962 
   3963 
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// The shared slot holds the-hole when neither a map nor a prototype has
// been installed yet.
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}
   3977 
   3978 
Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}
   3995 
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// Compiled once the code is no longer the lazy-compile stub.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
   4004 
   4005 
// literals_or_bindings is a dual-use slot: plain functions store their
// literals array there, bound functions store the bindings array.  The
// shared()->bound() flag says which interpretation applies, and the
// ASSERTs enforce it.
FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}
   4038 
   4039 
// Accessors for the per-id javascript builtin function/code slots stored
// directly in the builtins object.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  // No write barrier: the value must not live in new space (asserted).
  ASSERT(!HEAP->InNewSpace(value));
}
   4066 
   4067 
// Proxy fields: the handler object, identity hash, and (for function
// proxies) the call/construct traps.
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
   4072 
   4073 
   4074 void JSProxy::InitializeBody(int object_size, Object* value) {
   4075   ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
   4076   for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
   4077     WRITE_FIELD(this, offset, value);
   4078   }
   4079 }
   4080 
   4081 
// Backing hash table fields of the collection objects.
ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, next, Object, kNextOffset)
   4086 
   4087 
// Foreign wraps a raw external address stored as an intptr field.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
   4096 
   4097 
// JSValue wraps a primitive in its 'value' field.
ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
   4106 
   4107 
// JSDate fields: 'value' is the date value; the remaining fields
// (stamped by cache_stamp) hold cached date components -- see the JSDate
// class definition for the caching protocol.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}
   4124 
   4125 
// Fields of the error/message object delivered to message listeners.
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}
   4140 
   4141 
// Code object fields: raw instruction size plus the associated metadata
// arrays (relocation info, handler table, deopt data, type feedback).
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)
   4149 
// The instruction stream starts immediately after the Code header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to the heap object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Unchecked variants: no type-checked casts (usable during GC).
FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


// NOTE(review): the upper bound is inclusive, so a pointer one past the
// end of the object is considered contained -- presumably to accommodate
// return addresses; confirm before tightening to '<'.
bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
   4194 
   4195 
ACCESSORS(JSArray, length, Object, kLengthOffset)


// 'data' is either undefined (not compiled) or a FixedArray holding the
// regexp's tag, source, flags and implementation data (see below).
ACCESSORS(JSRegExp, data, Object, kDataOffset)
   4200 
   4201 
// Returns NOT_COMPILED while 'data' is still undefined, otherwise the tag
// Smi stored in the data array.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


// Unchecked variant: assumes 'data' is already a FixedArray.
JSRegExp::Type JSRegExp::TypeTagUnchecked() {
  Smi* smi = Smi::cast(DataAtUnchecked(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
   4214 
   4215 
   4216 int JSRegExp::CaptureCount() {
   4217   switch (TypeTag()) {
   4218     case ATOM:
   4219       return 0;
   4220     case IRREGEXP:
   4221       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
   4222     default:
   4223       UNREACHABLE();
   4224       return -1;
   4225   }
   4226 }
   4227 
   4228 
// Reads the flags Smi stored in the regexp data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
   4235 
   4236 
   4237 String* JSRegExp::Pattern() {
   4238   ASSERT(this->data()->IsFixedArray());
   4239   Object* data = this->data();
   4240   String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
   4241   return pattern;
   4242 }
   4243 
   4244 
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


// Unchecked variant: raw field read without cast or bounds checks
// (usable during GC).
Object* JSRegExp::DataAtUnchecked(int index) {
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  int offset = FixedArray::kHeaderSize + index * kPointerSize;
  return READ_FIELD(fa, offset);
}
   4256 
   4257 
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


// Unchecked variant used during GC; writes without the usual barriers.
void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray* fa = reinterpret_cast<FixedArray*>(data());
  if (value->IsSmi()) {
    fa->set_unchecked(index, Smi::cast(value));
  } else {
    // We only do this during GC, so we don't need to notify the write barrier.
    fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
  }
}
   4275 
   4276 
// Returns the elements kind recorded in the map.  Debug builds cross-check
// that the actual elements backing store is consistent with that kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  // NOTE(review): this guard uses "#if DEBUG" where an "#ifdef DEBUG" may
  // be intended -- if DEBUG is ever undefined the checks are silently
  // skipped; confirm the build always defines DEBUG=1.
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
  Map* map = fixed_array->map();
    ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (kind == FAST_DOUBLE_ELEMENTS &&
            (fixed_array->IsFixedDoubleArray() ||
            fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
  return kind;
}
   4298 
   4299 
// Returns the shared ElementsAccessor implementing element operations for
// this object's current elements kind.
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}
   4303 
   4304 
// Elements kind of exactly FAST_ELEMENTS (boxed values, no holes tracked).
bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


// Elements kind restricted to Smi values only.
bool JSObject::HasFastSmiOnlyElements() {
  return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
}


// Either of the two FixedArray-backed fast kinds (smi-only or general).
bool JSObject::HasFastTypeElements() {
  ElementsKind elements_kind = GetElementsKind();
  return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
      elements_kind == FAST_ELEMENTS;
}


// Unboxed-double backing store.
bool JSObject::HasFastDoubleElements() {
  return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
}


// Slow elements stored in a SeededNumberDictionary.
bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


// Aliased-arguments backing store used by non-strict 'arguments' objects.
bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}
   4335 
   4336 
// True when the elements are any kind of external (typed) array,
// determined from the backing store itself rather than the elements kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}
   4342 
   4343 
// Expands to a JSObject::HasExternal<name>Elements() predicate that tests
// whether the elements backing store has the given external-array
// instance type.
#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


// One predicate per supported external array element type.
EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)
   4367 
   4368 
// Interceptor presence is recorded on the map, not the object itself.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}
   4377 
   4378 
   4379 MaybeObject* JSObject::EnsureWritableFastElements() {
   4380   ASSERT(HasFastTypeElements());
   4381   FixedArray* elems = FixedArray::cast(elements());
   4382   Isolate* isolate = GetIsolate();
   4383   if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
   4384   Object* writable_elems;
   4385   { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
   4386       elems, isolate->heap()->fixed_array_map());
   4387     if (!maybe_writable_elems->ToObject(&writable_elems)) {
   4388       return maybe_writable_elems;
   4389     }
   4390   }
   4391   set_elements(FixedArray::cast(writable_elems));
   4392   isolate->counters()->cow_arrays_converted()->Increment();
   4393   return writable_elems;
   4394 }
   4395 
   4396 
// Properties store viewed as a StringDictionary; only valid in
// slow-properties mode.
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


// Elements store viewed as a SeededNumberDictionary; only valid when the
// elements kind is DICTIONARY_ELEMENTS.
SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
   4407 
   4408 
// The hash field packs "not yet computed" into a flag bit; a clear bit
// means the remaining bits hold a valid hash.
bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


// Returns the string's hash, computing and caching it on first use.
uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
   4426 
   4427 
// Incremental hasher seeded per-isolate.  Array-index tracking starts
// enabled only for lengths that could possibly encode an array index.
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) {
  // A nonzero seed is only expected when hash randomization is enabled.
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Strings beyond kMaxHashCalcLength get a trivial (length-based) hash
// instead of a computed one.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
   4442 
   4443 
   4444 void StringHasher::AddCharacter(uint32_t c) {
   4445   if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
   4446     AddSurrogatePair(c);  // Not inlined.
   4447     return;
   4448   }
   4449   // Use the Jenkins one-at-a-time hash function to update the hash
   4450   // for the given character.
   4451   raw_running_hash_ += c;
   4452   raw_running_hash_ += (raw_running_hash_ << 10);
   4453   raw_running_hash_ ^= (raw_running_hash_ >> 6);
   4454   // Incremental array index computation.
   4455   if (is_array_index_) {
   4456     if (c < '0' || c > '9') {
   4457       is_array_index_ = false;
   4458     } else {
   4459       int d = c - '0';
   4460       if (is_first_char_) {
   4461         is_first_char_ = false;
   4462         if (c == '0' && length_ > 1) {
   4463           is_array_index_ = false;
   4464           return;
   4465         }
   4466       }
   4467       if (array_index_ > 429496729U - ((d + 2) >> 3)) {
   4468         is_array_index_ = false;
   4469       } else {
   4470         array_index_ = array_index_ * 10 + d;
   4471       }
   4472     }
   4473   }
   4474 }
   4475 
   4476 
// Faster variant of AddCharacter for use once array-index tracking has
// already been abandoned: only the Jenkins hash is updated.
void StringHasher::AddCharacterNoIndex(uint32_t c) {
  ASSERT(!is_array_index());
  if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    AddSurrogatePairNoIndex(c);  // Not inlined.
    return;
  }
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
   4487 
   4488 
// Finalization step of the Jenkins one-at-a-time hash.
uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if ((result & String::kHashBitMask) == 0) {
    result = 27;  // Arbitrary nonzero substitute for a zero hash.
  }
  return result;
}
   4501 
   4502 
   4503 template <typename schar>
   4504 uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
   4505   StringHasher hasher(length, seed);
   4506   if (!hasher.has_trivial_hash()) {
   4507     int i;
   4508     for (i = 0; hasher.is_array_index() && (i < length); i++) {
   4509       hasher.AddCharacter(chars[i]);
   4510     }
   4511     for (; i < length; i++) {
   4512       hasher.AddCharacterNoIndex(chars[i]);
   4513     }
   4514   }
   4515   return hasher.GetHashField();
   4516 }
   4517 
   4518 
// If this string is a valid array index, stores it in *index and returns
// true.  Uses the cached hash field to reject non-indices cheaply before
// falling back to the slow path.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
   4526 
   4527 
Object* JSReceiver::GetPrototype() {
  return HeapObject::cast(this)->map()->prototype();
}


// Proxies answer through their handler trap; ordinary objects consult
// the property attribute lookup.
bool JSReceiver::HasProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetPropertyAttribute(name) != ABSENT;
}


// NOTE(review): the proxy branch calls the same HasPropertyWithHandler as
// HasProperty above, so for proxies "local" is not distinguished from the
// full lookup here — confirm this is intended.
bool JSReceiver::HasLocalProperty(String* name) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasPropertyWithHandler(name);
  }
  return GetLocalPropertyAttribute(name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}
   4552 
   4553 // TODO(504): this may be useful in other places too where JSGlobalProxy
   4554 // is used.
// For a global proxy, returns the real global object behind it (or
// undefined if the proxy has been detached, i.e. its prototype is null).
// Any other object is returned unchanged.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}
   4564 
   4565 
// Dispatches identity-hash retrieval to the proxy or plain-object
// implementation; |flag| controls whether a missing hash is created.
MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


// Element existence check; proxies go through their handler trap.
bool JSReceiver::HasElement(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->HasElementWithHandler(index);
  }
  return JSObject::cast(this)->HasElementWithReceiver(this, index);
}
   4579 
   4580 
// The following accessors pack several booleans and the property
// attributes into the single Smi-valued flag() field.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


// Property attributes occupy a bit field inside flag().
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}
   4619 
   4620 
// Convenience overload: stores an entry with empty property details.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}


// Writes the (key, value, details) triple into the entry's three
// consecutive slots.  The write barrier mode is computed once up front,
// which is only valid while no allocation can occur — hence the
// AssertNoAllocation scope.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::set(index+2, details.AsSmi());  // Smi needs no barrier.
}
   4642 
   4643 
// Shape policies for the numeric-keyed hash tables.  Keys are uint32
// indices; stored keys are heap numbers or Smis.
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


// Unseeded variant: hashes with a zero seed.
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

// Seeded variant: mixes in the isolate's hash seed.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

// Converts the uint32 key to a heap number / Smi for storage.
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}
   4675 
   4676 
// Shape policy for string-keyed dictionaries.
bool StringDictionaryShape::IsMatch(String* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (key->Hash() != String::cast(other)->Hash()) return false;
  return key->Equals(String::cast(other));
}


uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}


uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  return String::cast(other)->Hash();
}


// String keys are stored as-is; no conversion (and no allocation) needed.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
   4698 
   4699 
// Shape policy for object-keyed hash tables; keys compare with SameValue
// semantics.
template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


// OMIT_CREATION + ToObjectChecked: the identity hash must already exist
// for any key being hashed here.
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


// Object keys are stored directly.
template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
  return key;
}
   4725 
   4726 
// Resets the map's code cache to the canonical empty fixed array using a
// raw field write, so it is callable from within the collector.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
   4735 
   4736 
   4737 void JSArray::EnsureSize(int required_size) {
   4738   ASSERT(HasFastTypeElements());
   4739   FixedArray* elts = FixedArray::cast(elements());
   4740   const int kArraySizeThatFitsComfortablyInNewSpace = 128;
   4741   if (elts->length() < required_size) {
   4742     // Doubling in size would be overkill, but leave some slack to avoid
   4743     // constantly growing.
   4744     Expand(required_size + (required_size >> 3));
   4745     // It's a performance benefit to keep a frequently used array in new-space.
   4746   } else if (!GetHeap()->new_space()->Contains(elts) &&
   4747              required_size < kArraySizeThatFitsComfortablyInNewSpace) {
   4748     // Expand will allocate a new backing store in new space even if the size
   4749     // we asked for isn't larger than what we had before.
   4750     Expand(required_size);
   4751   }
   4752 }
   4753 
   4754 
void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


// Length is settable only for FixedArray/FixedDoubleArray backing stores;
// external (typed) arrays have fixed length.
bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}
   4766 
   4767 
// Replaces the array's backing store with |storage| and syncs the length.
// EnsureCanContainElements may transition the elements kind first; the
// ASSERT then checks storage/kind agreement: a double array requires
// FAST_DOUBLE_ELEMENTS, anything else requires a fast kind (smi-only is
// allowed only when the storage really holds only Smis or holes).
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          ((GetElementsKind() == FAST_ELEMENTS) ||
           (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}
   4782 
   4783 
// Copying an empty array is a no-op: the (shared) array itself is returned.
MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}
   4794 
   4795 
// Entries are stored as (cell, ast-id) pairs: the cell at slot 2*i, the
// AST id at slot 2*i + 1.
void TypeFeedbackCells::SetAstId(int index, Smi* id) {
  set(1 + index * 2, id);
}


Smi* TypeFeedbackCells::AstId(int index) {
  return Smi::cast(get(1 + index * 2));
}


void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
  set(index * 2, cell);
}


JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
  return JSGlobalPropertyCell::cast(get(index * 2));
}


// Sentinels distinguishing "no feedback yet" (the hole) from
// "megamorphic" (undefined).
Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}


Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}


// Raw (handle-free) form of the uninitialized sentinel, usable during GC.
Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->raw_unchecked_the_hole_value();
}
   4829 
   4830 
// Generated getters/setters for TypeFeedbackInfo and AliasedArgumentsEntry
// fields (macros defined earlier in this file).
SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
              kIcWithTypeinfoCountOffset)
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
   4839 
   4840 
// Relocatables form a per-isolate intrusive stack; construction pushes
// this object onto it.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


// Destruction pops; the asserts enforce strict LIFO use on one isolate.
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
   4854 
   4855 
// A JSObject's body size is fully determined by its map.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
   4859 
   4860 
// Visits the single external (non-heap) address a Foreign wraps; dynamic
// and static visitor variants.
void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}
   4872 
   4873 
// Visits the external resource pointer of an external ASCII string;
// dynamic and static visitor variants.
void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


// Same for two-byte external strings.
void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}
   4902 
   4903 #define SLOT_ADDR(obj, offset) \
   4904   reinterpret_cast<Object**>((obj)->address() + offset)
   4905 
   4906 template<int start_offset, int end_offset, int size>
   4907 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
   4908     HeapObject* obj,
   4909     ObjectVisitor* v) {
   4910     v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
   4911 }
   4912 
   4913 
   4914 template<int start_offset>
   4915 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
   4916                                                        int object_size,
   4917                                                        ObjectVisitor* v) {
   4918   v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
   4919 }
   4920 
#undef SLOT_ADDR

// Undefine all file-local helper macros so they do not leak out of this
// header into including translation units.
#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
   4948 
   4949 
   4950 } }  // namespace v8::internal
   4951 
   4952 #endif  // V8_OBJECTS_INL_H_
   4953