// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "src/base/atomicops.h"
#include "src/base/bits.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/elements.h"
#include "src/factory.h"
#include "src/field-index-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/layout-descriptor-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/transitions-inl.h"
#include "src/type-feedback-vector-inl.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
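
// The shift trick above is what makes the encoding reversible: value_ << 1
// followed by an arithmetic >> 1 forces bit 31 to mirror bit 30, so the
// 31-bit payload round-trips through the Smi unchanged.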


PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


int PropertyDetails::field_width_in_words() const {
  DCHECK(type() == DATA);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
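
// Example (illustrative): with FLAG_unbox_double_fields on a 32-bit target
// (kPointerSize == 4, kDoubleSize == 8), a double-represented DATA field
// reports kDoubleSize / kPointerSize == 2 words; all other fields take one.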


#define TYPE_CHECKER(type, instancetype)                                    \
  bool Object::Is##type() const {                                           \
    return Object::IsHeapObject() &&                                        \
           HeapObject::cast(this)->map()->instance_type() == instancetype; \
  }
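
// For instance, TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) below expands to:
//
//   bool Object::IsHeapNumber() const {
//     return Object::IsHeapObject() &&
//            HeapObject::cast(this)->map()->instance_type() ==
//                HEAP_NUMBER_TYPE;
//   }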


#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
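
// CAST_ACCESSOR(Map), for example, defines both const and non-const
// Map::cast() overloads; the SLOW_DCHECK only fires in slow-DCHECK builds,
// so the cast itself is free in release builds.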


#define INT_ACCESSORS(holder, name, offset)                           \
  int holder::name() const { return READ_INT_FIELD(this, offset); }  \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
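
// As an illustration, ACCESSORS(JSValue, value, Object, kValueOffset) defines
// JSValue::value() and JSValue::set_value(); the setter emits a conditional
// write barrier because the stored value may be a heap pointer.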


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
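
// Unlike ACCESSORS above, no write barrier is emitted here: a Smi is a tagged
// immediate, not a heap pointer, so the garbage collector never needs to
// trace the stored value.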


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)         \
  int holder::name() const {                        \
    Object* value = READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();               \
  }                                                 \
  void holder::set_##name(int value) {              \
    WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
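
// A hypothetical SMI_ACCESSORS(Holder, field_count, kFieldCountOffset) would
// let callers read and write a plain int while the field itself stays
// Smi-tagged in the heap.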


#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)         \
  int holder::nobarrier_##name() const {                      \
    Object* value = NOBARRIER_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                         \
  }                                                           \
  void holder::nobarrier_set_##name(int value) {              \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
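
// The three Smi accessor flavors differ only in memory ordering: plain
// SMI_ACCESSORS for fields owned by a single thread, acquire/release pairs
// for fields read concurrently, and relaxed ("nobarrier") atomics where
// tear-free access suffices without any ordering guarantee.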


#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }


#define BOOL_ACCESSORS(holder, field, name, offset)       \
  bool holder::name() const {                             \
    return BooleanBit::get(field(), offset);              \
  }                                                       \
  void holder::set_##name(bool value) {                   \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
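
// BOOL_ACCESSORS packs a flag into one bit of a wider field; for example,
// BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
// kAllowLazyCompilation) reads and writes a single bit of compiler_hints.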


bool Object::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
         IsFixedTypedArrayBase() || IsExternalArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map() ==
             HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }


bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() const {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() const {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}


Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
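
// Sketch of the intent: when a double-represented field is about to be
// written, NewStorageFor boxes the value in a fresh MUTABLE HeapNumber, so
// later stores to the same field can overwrite the double payload in place
// instead of allocating a new box each time.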


Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}


StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);


bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);


uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}


template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
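
// Illustrative use (a sketch, assuming the usual (isolate, handle)
// constructor): after flattening a string, a FlatStringReader gives O(1)
// access to characters of either encoding:
//
//   FlatStringReader reader(isolate, flat_string);
//   uc32 first = reader.Get(0);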


Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}

template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};


class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};


class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) OVERRIDE;
  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
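
// All of these key classes implement the same HashTableKey protocol: a
// string-table lookup calls Hash() to pick a bucket (and HashForObject() to
// re-hash existing entries), IsMatch() to compare candidates, and only on a
// miss calls AsHandle() to allocate the internalized string that actually
// gets inserted.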


bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


bool Object::IsExternalArray() const {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)     \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
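
// TYPED_ARRAYS invokes TYPED_ARRAY_TYPE_CHECKER once per element type, so
// this emits a pair of predicates per typed-array kind, e.g.
// IsExternalUint8Array() and IsFixedUint8Array() for the Uint8 case.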


bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return HeapObject::cast(this)->map()->IsJSProxyMap();
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)


bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }


bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
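
// Worked example of the check: a valid deoptimization input data array has
// length kFirstDeoptEntryIndex + n * kDeoptEntrySize for some n >= 0; the
// subtraction and modulo above verify exactly that shape.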


bool Object::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}


bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->script_context_map());
}


bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}


TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)


bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}


TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


bool Object::IsDictionary() const {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}
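
// IsDictionary() must exclude the string table explicitly: dictionaries and
// the string table share the same hash_table_map, so the map check alone
// cannot tell them apart.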
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsNameDictionary() const {
|
2014-06-23 08:51:13 +00:00
|
|
|
return IsDictionary();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsSeededNumberDictionary() const {
|
2014-06-23 08:51:13 +00:00
|
|
|
return IsDictionary();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsUnseededNumberDictionary() const {
|
2014-06-23 08:51:13 +00:00
|
|
|
return IsDictionary();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsStringTable() const {
|
2014-04-14 15:56:57 +00:00
|
|
|
return IsHashTable();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsJSFunctionResultCache() const {
|
2010-05-04 16:42:11 +00:00
|
|
|
if (!IsFixedArray()) return false;
|
2014-06-24 09:47:25 +00:00
|
|
|
const FixedArray* self = FixedArray::cast(this);
|
2010-05-04 16:42:11 +00:00
|
|
|
int length = self->length();
|
|
|
|
if (length < JSFunctionResultCache::kEntriesIndex) return false;
|
|
|
|
if ((length - JSFunctionResultCache::kEntriesIndex)
|
|
|
|
% JSFunctionResultCache::kEntrySize != 0) {
|
|
|
|
return false;
|
|
|
|
}
|
2012-10-12 11:41:14 +00:00
|
|
|
#ifdef VERIFY_HEAP
|
2011-10-25 13:27:46 +00:00
|
|
|
if (FLAG_verify_heap) {
|
2014-06-24 09:47:25 +00:00
|
|
|
// TODO(svenpanne) We use const_cast here and below to break our dependency
|
|
|
|
// cycle between the predicates and the verifiers. This can be removed when
|
|
|
|
// the verifiers are const-correct, too.
|
|
|
|
reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
|
2011-10-25 13:27:46 +00:00
|
|
|
JSFunctionResultCacheVerify();
|
|
|
|
}
|
2010-05-04 16:42:11 +00:00
|
|
|
#endif
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsNormalizedMapCache() const {
|
2014-05-02 10:27:12 +00:00
|
|
|
return NormalizedMapCache::IsNormalizedMapCache(this);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int NormalizedMapCache::GetIndex(Handle<Map> map) {
|
|
|
|
return map->Hash() % NormalizedMapCache::kEntries;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
|
2014-05-02 10:27:12 +00:00
|
|
|
if (!obj->IsFixedArray()) return false;
|
|
|
|
if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
|
2010-08-25 13:25:54 +00:00
|
|
|
return false;
|
|
|
|
}
|
2012-10-12 11:41:14 +00:00
|
|
|
#ifdef VERIFY_HEAP
|
2011-10-25 13:27:46 +00:00
|
|
|
if (FLAG_verify_heap) {
|
2014-06-24 09:47:25 +00:00
|
|
|
reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
|
|
|
|
NormalizedMapCacheVerify();
|
2011-10-25 13:27:46 +00:00
|
|
|
}
|
2010-08-25 13:25:54 +00:00
|
|
|
#endif
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsCompilationCacheTable() const {
|
2008-09-11 10:51:52 +00:00
|
|
|
return IsHashTable();
|
2008-09-05 16:27:56 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsCodeCacheHashTable() const {
|
2010-03-09 10:49:41 +00:00
|
|
|
return IsHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsPolymorphicCodeCacheHashTable() const {
|
2011-06-06 13:15:11 +00:00
|
|
|
return IsHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsMapCache() const {
|
2008-09-25 07:46:07 +00:00
|
|
|
return IsHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsObjectHashTable() const {
|
2012-11-06 16:47:15 +00:00
|
|
|
return IsHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsOrderedHashTable() const {
|
2014-04-08 20:06:35 +00:00
|
|
|
return IsHeapObject() &&
|
|
|
|
HeapObject::cast(this)->map() ==
|
|
|
|
HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsOrderedHashSet() const {
|
2014-06-23 08:51:13 +00:00
|
|
|
return IsOrderedHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsOrderedHashMap() const {
|
2014-06-23 08:51:13 +00:00
|
|
|
return IsOrderedHashTable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsPrimitive() const {
|
2008-07-03 15:10:15 +00:00
|
|
|
return IsOddball() || IsNumber() || IsString();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsJSGlobalProxy() const {
|
Split window support from V8.
Here is a description of the background and design of split window in Chrome and V8:
https://docs.google.com/a/google.com/Doc?id=chhjkpg_47fwddxbfr
This change list splits the window object into two parts: 1) an inner window object used as the global object of contexts; 2) an outer window object exposed to JavaScript and accessible by the name 'window'. Firefox did it awhile ago, here are some discussions: https://wiki.mozilla.org/Gecko:SplitWindow. One additional benefit of splitting window in Chrome is that accessing global variables don't need security checks anymore, it can improve applications that use many global variables.
V8 support of split window:
There are a small number of changes on V8 api to support split window:
Security context is removed from V8, so does related API functions;
A global object can be detached from its context and reused by a new context;
Access checks on an object template can be turned on/off by default;
An object can turn on its access checks later;
V8 has a new object type, ApiGlobalObject, which is the outer window object type. The existing JSGlobalObject becomes the inner window object type. Security checks are moved from JSGlobalObject to ApiGlobalObject. ApiGlobalObject is the one exposed to JavaScript, it is accessible through Context::Global(). ApiGlobalObject's prototype is set to JSGlobalObject so that property lookups are forwarded to JSGlobalObject. ApiGlobalObject forwards all other property access requests to JSGlobalObject, such as SetProperty, DeleteProperty, etc.
Security token is moved to a global context, and ApiGlobalObject has a reference to its global context. JSGlobalObject has a reference to its global context as well. When accessing properties on a global object in JavaScript, the domain security check is performed by comparing the security token of the lexical context (Top::global_context()) to the token of global object's context. The check is only needed when the receiver is a window object, such as 'window.document'. Accessing global variables, such as 'var foo = 3; foo' does not need checks because the receiver is the inner window object.
When an outer window is detached from its global context (when a frame navigates away from a page), it is completely detached from the inner window. A new context is created for the new page, and the outer global object is reused. At this point, the access check on the DOMWindow wrapper of the old context is turned on. The code in old context is still able to access DOMWindow properties, but it has to go through domain security checks.
It is debatable on how to implement the outer window object. Currently each property access function has to check if the receiver is ApiGlobalObject type. This approach might be error-prone that one may forget to check the receiver when adding new functions. It is unlikely a performance issue because accessing global variables are more common than 'window.foo' style coding.
I am still working on the ARM port, and I'd like to hear comments and suggestions on the best way to support it in V8.
Review URL: http://codereview.chromium.org/7366
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@540 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2008-10-21 19:07:58 +00:00
|
|
|
bool result = IsHeapObject() &&
|
|
|
|
(HeapObject::cast(this)->map()->instance_type() ==
|
|
|
|
JS_GLOBAL_PROXY_TYPE);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!result ||
|
2014-03-10 12:23:05 +00:00
|
|
|
HeapObject::cast(this)->map()->is_access_check_needed());
|
Split window support from V8.
Here is a description of the background and design of split window in Chrome and V8:
https://docs.google.com/a/google.com/Doc?id=chhjkpg_47fwddxbfr
This change list splits the window object into two parts: 1) an inner window object used as the global object of contexts; 2) an outer window object exposed to JavaScript and accessible by the name 'window'. Firefox did it awhile ago, here are some discussions: https://wiki.mozilla.org/Gecko:SplitWindow. One additional benefit of splitting window in Chrome is that accessing global variables don't need security checks anymore, it can improve applications that use many global variables.
V8 support of split window:
There are a small number of changes on V8 api to support split window:
Security context is removed from V8, so does related API functions;
A global object can be detached from its context and reused by a new context;
Access checks on an object template can be turned on/off by default;
An object can turn on its access checks later;
V8 has a new object type, ApiGlobalObject, which is the outer window object type. The existing JSGlobalObject becomes the inner window object type. Security checks are moved from JSGlobalObject to ApiGlobalObject. ApiGlobalObject is the one exposed to JavaScript, it is accessible through Context::Global(). ApiGlobalObject's prototype is set to JSGlobalObject so that property lookups are forwarded to JSGlobalObject. ApiGlobalObject forwards all other property access requests to JSGlobalObject, such as SetProperty, DeleteProperty, etc.
Security token is moved to a global context, and ApiGlobalObject has a reference to its global context. JSGlobalObject has a reference to its global context as well. When accessing properties on a global object in JavaScript, the domain security check is performed by comparing the security token of the lexical context (Top::global_context()) to the token of global object's context. The check is only needed when the receiver is a window object, such as 'window.document'. Accessing global variables, such as 'var foo = 3; foo' does not need checks because the receiver is the inner window object.
When an outer window is detached from its global context (when a frame navigates away from a page), it is completely detached from the inner window. A new context is created for the new page, and the outer global object is reused. At this point, the access check on the DOMWindow wrapper of the old context is turned on. The code in old context is still able to access DOMWindow properties, but it has to go through domain security checks.
It is debatable on how to implement the outer window object. Currently each property access function has to check if the receiver is ApiGlobalObject type. This approach might be error-prone that one may forget to check the receiver when adding new functions. It is unlikely a performance issue because accessing global variables are more common than 'window.foo' style coding.
I am still working on the ARM port, and I'd like to hear comments and suggestions on the best way to support it in V8.
Review URL: http://codereview.chromium.org/7366
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@540 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2008-10-21 19:07:58 +00:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsGlobalObject() const {
|
2008-10-21 19:07:58 +00:00
|
|
|
if (!IsHeapObject()) return false;
|
|
|
|
|
2008-10-28 13:46:04 +00:00
|
|
|
InstanceType type = HeapObject::cast(this)->map()->instance_type();
|
2008-10-21 19:07:58 +00:00
|
|
|
return type == JS_GLOBAL_OBJECT_TYPE ||
|
|
|
|
type == JS_BUILTINS_OBJECT_TYPE;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-10-25 14:14:56 +00:00
|
|
|
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
|
|
|
|
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsUndetectableObject() const {
|
2008-07-03 15:10:15 +00:00
|
|
|
return IsHeapObject()
|
|
|
|
&& HeapObject::cast(this)->map()->is_undetectable();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsAccessCheckNeeded() const {
|
2014-03-10 12:23:05 +00:00
|
|
|
if (!IsHeapObject()) return false;
|
|
|
|
if (IsJSGlobalProxy()) {
|
2014-06-24 09:47:25 +00:00
|
|
|
const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
|
|
|
|
GlobalObject* global = proxy->GetIsolate()->context()->global_object();
|
2014-03-10 12:23:05 +00:00
|
|
|
return proxy->IsDetachedFrom(global);
|
|
|
|
}
|
|
|
|
return HeapObject::cast(this)->map()->is_access_check_needed();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsStruct() const {
|
2008-07-03 15:10:15 +00:00
|
|
|
if (!IsHeapObject()) return false;
|
|
|
|
switch (HeapObject::cast(this)->map()->instance_type()) {
|
|
|
|
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
|
|
|
|
STRUCT_LIST(MAKE_STRUCT_CASE)
|
|
|
|
#undef MAKE_STRUCT_CASE
|
|
|
|
default: return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
|
|
|
|
bool Object::Is##Name() const { \
|
|
|
|
return Object::IsHeapObject() \
|
2008-07-03 15:10:15 +00:00
|
|
|
&& HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
|
|
|
|
}
|
|
|
|
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
|
|
|
|
#undef MAKE_STRUCT_PREDICATE
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsUndefined() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsNull() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsTheHole() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsException() const {
|
2014-04-22 07:33:20 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsUninitialized() const {
|
2013-06-06 14:21:35 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsTrue() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsFalse() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsArgumentsMarker() const {
|
2011-03-18 20:35:07 +00:00
|
|
|
return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
|
2011-01-07 10:06:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
double Object::Number() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(IsNumber());
|
2008-07-03 15:10:15 +00:00
|
|
|
return IsSmi()
|
|
|
|
? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
|
|
|
|
: reinterpret_cast<HeapNumber*>(this)->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
bool Object::IsNaN() const {
|
2013-04-19 13:26:47 +00:00
|
|
|
return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
|
2012-03-09 11:19:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-30 13:54:45 +00:00
|
|
|
bool Object::IsMinusZero() const {
|
|
|
|
return this->IsHeapNumber() &&
|
|
|
|
i::IsMinusZero(HeapNumber::cast(this)->value());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-29 13:18:27 +00:00
|
|
|
MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
|
|
|
|
if (object->IsSmi()) return Handle<Smi>::cast(object);
|
2014-03-19 16:29:19 +00:00
|
|
|
if (object->IsHeapNumber()) {
|
|
|
|
double value = Handle<HeapNumber>::cast(object)->value();
|
|
|
|
int int_value = FastD2I(value);
|
|
|
|
if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
|
|
|
|
return handle(Smi::FromInt(int_value), isolate);
|
|
|
|
}
|
|
|
|
}
|
2014-04-29 13:18:27 +00:00
|
|
|
return Handle<Smi>();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
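// Illustrative usage sketch (not part of the original file): a
// MaybeHandle<Smi> result must be checked before use, since ToSmi
// returns an empty handle when the value is not exactly representable
// as a Smi. Assuming an Isolate* isolate and a Handle<Object> obj:
//
//   Handle<Smi> smi;
//   if (Object::ToSmi(isolate, obj).ToHandle(&smi)) {
//     int raw = smi->value();  // Conversion succeeded.
//   }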
|
|
|
|
|
|
|
|
|
2014-04-14 09:19:09 +00:00
|
|
|
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
|
|
|
|
Handle<Object> object) {
|
|
|
|
return ToObject(
|
|
|
|
isolate, object, handle(isolate->context()->native_context(), isolate));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-23 08:25:23 +00:00
|
|
|
bool Object::HasSpecificClassOf(String* name) {
|
|
|
|
return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 12:47:34 +00:00
|
|
|
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
|
|
|
|
Handle<Name> name) {
|
2014-06-11 09:59:14 +00:00
|
|
|
LookupIterator it(object, name);
|
|
|
|
return GetProperty(&it);
|
2014-04-11 12:47:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-10 09:20:11 +00:00
|
|
|
MaybeHandle<Object> Object::GetElement(Isolate* isolate,
|
|
|
|
Handle<Object> object,
|
|
|
|
uint32_t index) {
|
2011-02-24 14:00:52 +00:00
|
|
|
// GetElement can trigger a getter which can cause allocation.
|
2014-08-04 11:34:54 +00:00
|
|
|
// This was not always the case. This DCHECK is here to catch
|
2011-02-24 14:00:52 +00:00
|
|
|
// leftover incorrect uses.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(AllowHeapAllocation::IsAllowed());
|
2014-03-25 10:15:12 +00:00
|
|
|
return Object::GetElementWithReceiver(isolate, object, object, index);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-30 08:23:02 +00:00
|
|
|
Handle<Object> Object::GetPrototypeSkipHiddenPrototypes(
|
|
|
|
Isolate* isolate, Handle<Object> receiver) {
|
|
|
|
PrototypeIterator iter(isolate, receiver);
|
|
|
|
while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
|
|
|
|
if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
|
|
|
|
return PrototypeIterator::GetCurrent(iter);
|
|
|
|
}
|
|
|
|
iter.Advance();
|
|
|
|
}
|
|
|
|
return PrototypeIterator::GetCurrent(iter);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 12:47:34 +00:00
|
|
|
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
|
|
|
|
Handle<Name> name) {
|
|
|
|
uint32_t index;
|
|
|
|
Isolate* isolate = name->GetIsolate();
|
|
|
|
if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
|
|
|
|
return GetProperty(object, name);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-16 13:28:11 +00:00
|
|
|
MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
|
|
|
|
Handle<Object> object,
|
|
|
|
const char* name) {
|
|
|
|
Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!str.is_null());
|
2014-04-16 13:28:11 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
uint32_t index; // Assert that the name is not an array index.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!str->AsArrayIndex(&index));
|
2014-04-16 13:28:11 +00:00
|
|
|
#endif // DEBUG
|
|
|
|
return GetProperty(object, str);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 12:47:34 +00:00
|
|
|
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
|
|
|
|
Handle<Object> receiver,
|
2014-03-20 12:22:13 +00:00
|
|
|
uint32_t index) {
|
2014-04-11 12:47:34 +00:00
|
|
|
return GetPropertyWithHandler(
|
|
|
|
proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
|
2014-04-11 11:26:22 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 12:47:34 +00:00
|
|
|
MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
|
|
|
|
Handle<JSReceiver> receiver,
|
|
|
|
uint32_t index,
|
|
|
|
Handle<Object> value,
|
2015-02-04 09:34:05 +00:00
|
|
|
LanguageMode language_mode) {
|
2014-04-11 12:47:34 +00:00
|
|
|
Isolate* isolate = proxy->GetIsolate();
|
|
|
|
Handle<String> name = isolate->factory()->Uint32ToString(index);
|
2015-02-04 09:34:05 +00:00
|
|
|
return SetPropertyWithHandler(proxy, receiver, name, value, language_mode);
|
2014-04-11 12:47:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-25 18:32:11 +00:00
|
|
|
Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
|
|
|
|
uint32_t index) {
|
2014-04-11 12:47:34 +00:00
|
|
|
Isolate* isolate = proxy->GetIsolate();
|
|
|
|
Handle<String> name = isolate->factory()->Uint32ToString(index);
|
|
|
|
return HasPropertyWithHandler(proxy, name);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
#define FIELD_ADDR(p, offset) \
|
|
|
|
(reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
#define FIELD_ADDR_CONST(p, offset) \
|
|
|
|
(reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)
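// Note on the tag arithmetic (illustrative): a HeapObject* is a tagged
// pointer with kHeapObjectTag (== 1) set in its low bits, so the field
// macros subtract the tag to recover the real address. For an object
// whose untagged address is 0x1000, FIELD_ADDR(obj, 8) computes
// 0x1001 + 8 - 1 == 0x1008.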
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
#define READ_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define ACQUIRE_READ_FIELD(p, offset) \
|
|
|
|
reinterpret_cast<Object*>(base::Acquire_Load( \
|
2014-06-20 10:31:17 +00:00
|
|
|
reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
|
2014-03-31 14:29:01 +00:00
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define NOBARRIER_READ_FIELD(p, offset) \
|
|
|
|
reinterpret_cast<Object*>(base::NoBarrier_Load( \
|
2014-06-20 10:31:17 +00:00
|
|
|
reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
|
2014-03-31 14:29:01 +00:00
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
#define WRITE_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define RELEASE_WRITE_FIELD(p, offset, value) \
|
|
|
|
base::Release_Store( \
|
|
|
|
reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
|
|
|
|
reinterpret_cast<base::AtomicWord>(value));
|
2014-03-31 14:29:01 +00:00
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define NOBARRIER_WRITE_FIELD(p, offset, value) \
|
|
|
|
base::NoBarrier_Store( \
|
|
|
|
reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
|
|
|
|
reinterpret_cast<base::AtomicWord>(value));
|
2014-03-31 14:29:01 +00:00
|
|
|
|
2011-09-19 18:36:47 +00:00
|
|
|
#define WRITE_BARRIER(heap, object, offset, value) \
|
|
|
|
heap->incremental_marking()->RecordWrite( \
|
|
|
|
object, HeapObject::RawField(object, offset), value); \
|
|
|
|
if (heap->InNewSpace(value)) { \
|
|
|
|
heap->RecordWrite(object->address(), offset); \
|
|
|
|
}
|
|
|
|
|
|
|
|
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
|
|
|
|
if (mode == UPDATE_WRITE_BARRIER) { \
|
|
|
|
heap->incremental_marking()->RecordWrite( \
|
|
|
|
object, HeapObject::RawField(object, offset), value); \
|
|
|
|
if (heap->InNewSpace(value)) { \
|
|
|
|
heap->RecordWrite(object->address(), offset); \
|
|
|
|
} \
|
2008-10-23 08:46:32 +00:00
|
|
|
}
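// Canonical usage sketch (illustrative only): a raw field store is
// always paired with the barrier so the incremental marker and the
// store buffer see the new pointer:
//
//   WRITE_FIELD(this, offset, value);
//   WRITE_BARRIER(GetHeap(), this, offset, value);
//
// CONDITIONAL_WRITE_BARRIER additionally lets callers skip the
// bookkeeping by passing SKIP_WRITE_BARRIER, e.g. for values that are
// known not to need it, such as Smis.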
|
|
|
|
|
2011-03-28 13:05:36 +00:00
|
|
|
#ifndef V8_TARGET_ARCH_MIPS
|
|
|
|
#define READ_DOUBLE_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
|
2011-03-28 13:05:36 +00:00
|
|
|
#else // V8_TARGET_ARCH_MIPS
|
|
|
|
// Prevent gcc from using load-double (mips ldc1) on (possibly)
|
|
|
|
// non-64-bit aligned HeapNumber::value.
|
2014-06-20 10:31:17 +00:00
|
|
|
static inline double read_double_field(const void* p, int offset) {
|
2011-03-28 13:05:36 +00:00
|
|
|
union conversion {
|
|
|
|
double d;
|
|
|
|
uint32_t u[2];
|
|
|
|
} c;
|
2014-06-20 20:36:49 +00:00
|
|
|
c.u[0] = (*reinterpret_cast<const uint32_t*>(
|
|
|
|
FIELD_ADDR_CONST(p, offset)));
|
|
|
|
c.u[1] = (*reinterpret_cast<const uint32_t*>(
|
|
|
|
FIELD_ADDR_CONST(p, offset + 4)));
|
2011-03-28 13:05:36 +00:00
|
|
|
return c.d;
|
|
|
|
}
|
|
|
|
#define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
|
|
|
|
#endif // V8_TARGET_ARCH_MIPS
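// Why the union works (a sketch, not authoritative): the union forces
// the compiler to assemble the double from two independent 32-bit
// loads, each of which only needs 4-byte alignment, instead of a
// single ldc1 that would fault on a non-8-byte-aligned address. A
// memcpy-based equivalent (needs <cstring>) would be:
//
//   static inline double read_double_field_memcpy(const void* p,
//                                                 int offset) {
//     double d;
//     memcpy(&d, FIELD_ADDR_CONST(p, offset), sizeof(d));
//     return d;
//   }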
|
|
|
|
|
|
|
|
#ifndef V8_TARGET_ARCH_MIPS
|
|
|
|
#define WRITE_DOUBLE_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
#else // V8_TARGET_ARCH_MIPS
|
|
|
|
// Prevent gcc from using store-double (mips sdc1) on (possibly)
|
|
|
|
// non-64-bit aligned HeapNumber::value.
|
2011-06-10 07:09:14 +00:00
|
|
|
static inline void write_double_field(void* p, int offset,
|
2011-03-28 13:05:36 +00:00
|
|
|
double value) {
|
|
|
|
union conversion {
|
|
|
|
double d;
|
|
|
|
uint32_t u[2];
|
|
|
|
} c;
|
|
|
|
c.d = value;
|
|
|
|
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
|
|
|
|
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
|
|
|
|
}
|
|
|
|
#define WRITE_DOUBLE_FIELD(p, offset, value) \
|
|
|
|
write_double_field(p, offset, value)
|
|
|
|
#endif // V8_TARGET_ARCH_MIPS
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
#define READ_INT_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
#define WRITE_INT_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2009-06-29 08:04:39 +00:00
|
|
|
#define READ_INTPTR_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))
|
2009-06-29 08:04:39 +00:00
|
|
|
|
|
|
|
#define WRITE_INTPTR_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2008-10-06 09:35:52 +00:00
|
|
|
#define READ_UINT32_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))
|
2008-10-06 09:35:52 +00:00
|
|
|
|
|
|
|
#define WRITE_UINT32_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2013-10-14 13:35:06 +00:00
|
|
|
#define READ_INT32_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))
|
2013-10-14 13:35:06 +00:00
|
|
|
|
|
|
|
#define WRITE_INT32_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2015-01-21 08:52:00 +00:00
|
|
|
#define READ_UINT64_FIELD(p, offset) \
|
|
|
|
(*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))
|
|
|
|
|
|
|
|
#define WRITE_UINT64_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2012-03-23 10:14:51 +00:00
|
|
|
#define READ_INT64_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))
|
2012-03-23 10:14:51 +00:00
|
|
|
|
|
|
|
#define WRITE_INT64_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
#define READ_SHORT_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
#define WRITE_SHORT_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
|
|
|
#define READ_BYTE_FIELD(p, offset) \
|
2014-06-20 10:31:17 +00:00
|
|
|
(*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
|
|
|
|
static_cast<byte>(base::NoBarrier_Load( \
|
|
|
|
reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
|
2014-04-09 09:50:25 +00:00
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
#define WRITE_BYTE_FIELD(p, offset, value) \
|
|
|
|
(*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
|
|
|
|
|
2014-06-05 12:14:47 +00:00
|
|
|
#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
|
|
|
|
base::NoBarrier_Store( \
|
|
|
|
reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
|
|
|
|
static_cast<base::Atomic8>(value));
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2008-10-29 10:37:14 +00:00
|
|
|
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
|
2014-06-20 10:31:17 +00:00
|
|
|
return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
int Smi::value() const {
|
2009-08-26 10:33:11 +00:00
|
|
|
return Internals::SmiValue(this);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Smi* Smi::FromInt(int value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(Smi::IsValid(value));
|
2013-05-22 06:35:38 +00:00
|
|
|
return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
|
2009-05-06 07:53:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Smi* Smi::FromIntptr(intptr_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(Smi::IsValid(value));
|
2009-10-08 12:36:12 +00:00
|
|
|
int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
|
|
|
|
return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
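// Worked example (assuming a 32-bit target, where kSmiTagSize == 1 and
// kSmiShiftSize == 0): Smi::FromInt(5) produces the word
// (5 << 1) | kSmiTag == 0b1010, and Smi::value() recovers 5 with an
// arithmetic shift right. On 64-bit targets kSmiShiftSize is 31, so
// the 32-bit payload lives in the upper half of the word.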
|
|
|
|
|
|
|
|
|
2009-08-24 11:56:29 +00:00
|
|
|
bool Smi::IsValid(intptr_t value) {
|
2013-05-22 06:35:38 +00:00
|
|
|
bool result = Internals::IsValidSmi(value);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
|
2009-05-06 07:53:08 +00:00
|
|
|
return result;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
MapWord MapWord::FromMap(const Map* map) {
|
2008-07-30 08:49:36 +00:00
|
|
|
return MapWord(reinterpret_cast<uintptr_t>(map));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Map* MapWord::ToMap() {
|
|
|
|
return reinterpret_cast<Map*>(value_);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool MapWord::IsForwardingAddress() {
|
2008-10-16 07:17:26 +00:00
|
|
|
return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
|
2008-07-30 08:49:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
|
2008-10-16 07:17:26 +00:00
|
|
|
Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
|
|
|
|
return MapWord(reinterpret_cast<uintptr_t>(raw));
|
2008-07-30 08:49:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
HeapObject* MapWord::ToForwardingAddress() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(IsForwardingAddress());
|
2008-10-16 07:17:26 +00:00
|
|
|
return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
|
2008-07-30 08:49:36 +00:00
|
|
|
}
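// Sketch of how a copying collector uses this encoding (illustrative
// only; 'object' and 'new_location' are assumed locals): during
// evacuation the map word of a copied object is overwritten with its
// new location, which IsForwardingAddress() detects via the smi tag:
//
//   MapWord word = object->map_word();
//   if (word.IsForwardingAddress()) {
//     // Object was already evacuated; follow the pointer.
//     HeapObject* target = word.ToForwardingAddress();
//   } else {
//     // Evacuate, then leave a forwarding pointer behind:
//     object->set_map_word(MapWord::FromForwardingAddress(new_location));
//   }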
|
|
|
|
|
|
|
|
|
2012-10-12 11:41:14 +00:00
|
|
|
#ifdef VERIFY_HEAP
|
2008-07-03 15:10:15 +00:00
|
|
|
void HeapObject::VerifyObjectField(int offset) {
|
|
|
|
VerifyPointer(READ_FIELD(this, offset));
|
|
|
|
}
|
2010-05-27 12:30:45 +00:00
|
|
|
|
|
|
|
void HeapObject::VerifySmiField(int offset) {
|
2012-10-12 11:41:14 +00:00
|
|
|
CHECK(READ_FIELD(this, offset)->IsSmi());
|
2010-05-27 12:30:45 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
#endif
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
Heap* HeapObject::GetHeap() const {
|
2011-09-19 18:36:47 +00:00
|
|
|
Heap* heap =
|
2014-06-20 10:31:17 +00:00
|
|
|
MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(heap != NULL);
|
2011-09-19 18:36:47 +00:00
|
|
|
return heap;
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
Isolate* HeapObject::GetIsolate() const {
|
2011-03-22 19:15:02 +00:00
|
|
|
return GetHeap()->isolate();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
Map* HeapObject::map() const {
|
2014-06-03 08:28:38 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
// Clear mark potentially added by PathTracer.
|
|
|
|
uintptr_t raw_value =
|
|
|
|
map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
|
|
|
|
return MapWord::FromRawValue(raw_value).ToMap();
|
|
|
|
#else
|
2008-07-30 08:49:36 +00:00
|
|
|
return map_word().ToMap();
|
2014-06-03 08:28:38 +00:00
|
|
|
#endif
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void HeapObject::set_map(Map* value) {
|
2008-07-30 08:49:36 +00:00
|
|
|
set_map_word(MapWord::FromMap(value));
|
2011-09-19 18:36:47 +00:00
|
|
|
if (value != NULL) {
|
|
|
|
// TODO(1600) We are passing NULL as a slot because maps can never be on
|
|
|
|
// an evacuation candidate.
|
|
|
|
value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
|
|
|
|
}
|
2008-07-30 08:49:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-31 14:29:01 +00:00
|
|
|
Map* HeapObject::synchronized_map() {
|
|
|
|
return synchronized_map_word().ToMap();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void HeapObject::synchronized_set_map(Map* value) {
|
|
|
|
synchronized_set_map_word(MapWord::FromMap(value));
|
|
|
|
if (value != NULL) {
|
|
|
|
// TODO(1600) We are passing NULL as a slot because maps can never be on
|
|
|
|
// an evacuation candidate.
|
|
|
|
value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 16:31:57 +00:00
|
|
|
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
|
|
|
|
synchronized_set_map_word(MapWord::FromMap(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-09-30 13:03:48 +00:00
|
|
|
// Unsafe accessor omitting write barrier.
|
2011-12-07 08:43:18 +00:00
|
|
|
void HeapObject::set_map_no_write_barrier(Map* value) {
|
2011-09-30 13:03:48 +00:00
|
|
|
set_map_word(MapWord::FromMap(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
MapWord HeapObject::map_word() const {
|
2014-03-31 14:29:01 +00:00
|
|
|
return MapWord(
|
2014-04-08 16:31:57 +00:00
|
|
|
reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-30 08:49:36 +00:00
|
|
|
void HeapObject::set_map_word(MapWord map_word) {
|
2014-04-08 16:31:57 +00:00
|
|
|
NOBARRIER_WRITE_FIELD(
|
2014-03-31 14:29:01 +00:00
|
|
|
this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
MapWord HeapObject::synchronized_map_word() const {
|
2014-03-31 14:29:01 +00:00
|
|
|
return MapWord(
|
|
|
|
reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void HeapObject::synchronized_set_map_word(MapWord map_word) {
|
|
|
|
RELEASE_WRITE_FIELD(
|
|
|
|
this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
|
2008-07-30 08:49:36 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
HeapObject* HeapObject::FromAddress(Address address) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK_TAG_ALIGNED(address);
|
2008-07-03 15:10:15 +00:00
|
|
|
return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Address HeapObject::address() {
|
|
|
|
return reinterpret_cast<Address>(this) - kHeapObjectTag;
|
|
|
|
}
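// Worked example (kHeapObjectTag == 1): HeapObject::FromAddress(0x1000)
// yields the tagged pointer 0x1001, and address() undoes it, returning
// 0x1000. The tag is what lets Object::IsHeapObject() distinguish heap
// pointers from Smis.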
|
|
|
|
|
|
|
|
|
|
|
|
int HeapObject::Size() {
|
|
|
|
return SizeFromMap(map());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-09 10:07:29 +00:00
|
|
|
bool HeapObject::MayContainRawValues() {
|
2014-07-24 10:45:37 +00:00
|
|
|
InstanceType type = map()->instance_type();
|
|
|
|
if (type <= LAST_NAME_TYPE) {
|
|
|
|
if (type == SYMBOL_TYPE) {
|
2014-09-09 10:07:29 +00:00
|
|
|
return false;
|
2014-07-24 10:45:37 +00:00
|
|
|
}
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(type < FIRST_NONSTRING_TYPE);
|
2014-07-24 10:45:37 +00:00
|
|
|
// There are four string representations: sequential strings, external
|
|
|
|
// strings, cons strings, and sliced strings.
|
2014-09-09 10:07:29 +00:00
|
|
|
// Only the first two contain raw values and no heap pointers (besides the
|
|
|
|
// map-word).
|
|
|
|
return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
|
2014-07-24 10:45:37 +00:00
|
|
|
}
|
2014-09-09 10:07:29 +00:00
|
|
|
// The ConstantPoolArray contains heap pointers, but also raw values.
|
|
|
|
if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
|
|
|
|
return (type <= LAST_DATA_TYPE);
|
2014-07-24 10:45:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
|
|
|
|
v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
|
|
|
|
reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
|
|
|
|
v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-26 15:14:51 +00:00
|
|
|
void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
|
|
|
|
v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
double HeapNumber::value() const {
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_DOUBLE_FIELD(this, kValueOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void HeapNumber::set_value(double value) {
|
|
|
|
WRITE_DOUBLE_FIELD(this, kValueOffset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-03-18 13:00:57 +00:00
|
|
|
int HeapNumber::get_exponent() {
|
|
|
|
return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
|
|
|
|
kExponentShift) - kExponentBias;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int HeapNumber::get_sign() {
|
|
|
|
return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
|
2009-07-28 08:43:51 +00:00
|
|
|
|
|
|
|
|
2011-12-09 08:50:19 +00:00
|
|
|
Object** FixedArray::GetFirstElementAddress() {
|
|
|
|
return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool FixedArray::ContainsOnlySmisOrHoles() {
|
|
|
|
Object* the_hole = GetHeap()->the_hole_value();
|
|
|
|
Object** current = GetFirstElementAddress();
|
|
|
|
for (int i = 0; i < length(); ++i) {
|
|
|
|
Object* candidate = *current++;
|
|
|
|
if (!candidate->IsSmi() && candidate != the_hole) return false;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
FixedArrayBase* JSObject::elements() const {
|
2009-07-28 08:43:51 +00:00
|
|
|
Object* array = READ_FIELD(this, kElementsOffset);
|
2011-08-12 13:54:27 +00:00
|
|
|
return static_cast<FixedArrayBase*>(array);
|
2009-07-28 08:43:51 +00:00
|
|
|
}
|
|
|
|
|
2012-05-23 14:24:29 +00:00
|
|
|
|
2014-04-07 10:00:14 +00:00
|
|
|
void JSObject::ValidateElements(Handle<JSObject> object) {
|
2014-08-04 11:34:54 +00:00
|
|
|
#ifdef ENABLE_SLOW_DCHECKS
|
2012-05-23 14:24:29 +00:00
|
|
|
if (FLAG_enable_slow_asserts) {
|
2014-04-07 10:00:14 +00:00
|
|
|
ElementsAccessor* accessor = object->GetElementsAccessor();
|
|
|
|
accessor->Validate(object);
|
2011-09-22 11:30:04 +00:00
|
|
|
}
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-09-19 14:13:34 +00:00
|
|
|
void AllocationSite::Initialize() {
|
2013-11-14 12:05:09 +00:00
|
|
|
set_transition_info(Smi::FromInt(0));
|
2013-09-19 14:13:34 +00:00
|
|
|
SetElementsKind(GetInitialFastElementsKind());
|
2013-09-24 10:30:41 +00:00
|
|
|
set_nested_site(Smi::FromInt(0));
|
2014-01-13 10:28:01 +00:00
|
|
|
set_pretenure_data(Smi::FromInt(0));
|
|
|
|
set_pretenure_create_count(Smi::FromInt(0));
|
2013-09-19 14:13:34 +00:00
|
|
|
set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
|
|
|
|
SKIP_WRITE_BARRIER);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-11-27 14:03:40 +00:00
|
|
|
void AllocationSite::MarkZombie() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!IsZombie());
|
2014-01-07 14:27:31 +00:00
|
|
|
Initialize();
|
2014-01-13 10:28:01 +00:00
|
|
|
set_pretenure_decision(kZombie);
|
2013-11-27 14:03:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-25 16:00:32 +00:00
|
|
|
// Heuristic: We only need to create allocation site info if the boilerplate
|
|
|
|
// elements kind is the initial elements kind.
|
2013-07-08 10:02:16 +00:00
|
|
|
AllocationSiteMode AllocationSite::GetMode(
|
2013-04-25 16:00:32 +00:00
|
|
|
ElementsKind boilerplate_elements_kind) {
|
2014-03-19 13:39:09 +00:00
|
|
|
if (FLAG_pretenuring_call_new ||
|
|
|
|
IsFastSmiElementsKind(boilerplate_elements_kind)) {
|
2013-04-25 16:00:32 +00:00
|
|
|
return TRACK_ALLOCATION_SITE;
|
|
|
|
}
|
|
|
|
|
|
|
|
return DONT_TRACK_ALLOCATION_SITE;
|
|
|
|
}
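// Example (assuming --pretenuring-call-new is off): a boilerplate with
// FAST_SMI_ELEMENTS is still in the initial state and may generalize,
// so GetMode(FAST_SMI_ELEMENTS) returns TRACK_ALLOCATION_SITE, while
// GetMode(FAST_DOUBLE_ELEMENTS) returns DONT_TRACK_ALLOCATION_SITE
// because that kind has already left the initial state.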
|
|
|
|
|
|
|
|
|
2013-07-08 10:02:16 +00:00
|
|
|
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
|
|
|
|
ElementsKind to) {
|
2014-03-19 13:39:09 +00:00
|
|
|
if (FLAG_pretenuring_call_new ||
|
|
|
|
(IsFastSmiElementsKind(from) &&
|
|
|
|
IsMoreGeneralElementsKindTransition(from, to))) {
|
2013-04-25 16:00:32 +00:00
|
|
|
return TRACK_ALLOCATION_SITE;
|
|
|
|
}
|
|
|
|
|
|
|
|
return DONT_TRACK_ALLOCATION_SITE;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-08-28 08:39:43 +00:00
|
|
|
inline bool AllocationSite::CanTrack(InstanceType type) {
|
2013-11-27 14:03:40 +00:00
|
|
|
if (FLAG_allocation_site_pretenuring) {
|
2014-01-02 15:31:27 +00:00
|
|
|
return type == JS_ARRAY_TYPE ||
|
|
|
|
type == JS_OBJECT_TYPE ||
|
|
|
|
type < FIRST_NONSTRING_TYPE;
|
2013-11-27 14:03:40 +00:00
|
|
|
}
|
2013-08-28 08:39:43 +00:00
|
|
|
return type == JS_ARRAY_TYPE;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-13 10:28:01 +00:00
|
|
|
inline void AllocationSite::set_memento_found_count(int count) {
|
|
|
|
int value = pretenure_data()->value();
|
|
|
|
// Verify that we can count more mementos than we can possibly find in one
|
|
|
|
// new space collection.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((GetHeap()->MaxSemiSpaceSize() /
|
2014-01-13 10:28:01 +00:00
|
|
|
(StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
|
|
|
|
AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(count < MementoFoundCountBits::kMax);
|
2014-01-13 10:28:01 +00:00
|
|
|
set_pretenure_data(
|
|
|
|
Smi::FromInt(MementoFoundCountBits::update(value, count)),
|
|
|
|
SKIP_WRITE_BARRIER);
|
|
|
|
}
|
|
|
|
|
2013-12-18 21:23:56 +00:00
|
|
|
inline bool AllocationSite::IncrementMementoFoundCount() {
|
|
|
|
if (IsZombie()) return false;
|
|
|
|
|
2014-01-13 10:28:01 +00:00
|
|
|
int value = memento_found_count();
|
|
|
|
set_memento_found_count(value + 1);
|
2014-05-27 07:48:36 +00:00
|
|
|
return memento_found_count() == kPretenureMinimumCreated;
|
2013-11-27 14:03:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
inline void AllocationSite::IncrementMementoCreateCount() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(FLAG_allocation_site_pretenuring);
|
2014-01-13 10:28:01 +00:00
|
|
|
int value = memento_create_count();
|
|
|
|
set_memento_create_count(value + 1);
|
2013-11-27 14:03:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 07:55:38 +00:00
|
|
|
inline bool AllocationSite::MakePretenureDecision(
|
|
|
|
PretenureDecision current_decision,
|
|
|
|
double ratio,
|
|
|
|
bool maximum_size_scavenge) {
|
|
|
|
// We only allow state transitions from undecided or maybe-tenure
|
|
|
|
// to don't-tenure, maybe-tenure, or tenure.
|
|
|
|
if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
|
|
|
|
if (ratio >= kPretenureRatio) {
|
|
|
|
// We only transition into the tenure state when the semi-space was at
|
|
|
|
// maximum capacity.
|
|
|
|
if (maximum_size_scavenge) {
|
|
|
|
set_deopt_dependent_code(true);
|
|
|
|
set_pretenure_decision(kTenure);
|
|
|
|
// Currently we just need to deopt when we make a state transition to
|
|
|
|
// tenure.
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
set_pretenure_decision(kMaybeTenure);
|
|
|
|
} else {
|
|
|
|
set_pretenure_decision(kDontTenure);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
inline bool AllocationSite::DigestPretenuringFeedback(
|
|
|
|
bool maximum_size_scavenge) {
|
|
|
|
bool deopt = false;
|
2014-01-13 10:28:01 +00:00
|
|
|
int create_count = memento_create_count();
|
2014-02-06 10:30:13 +00:00
|
|
|
int found_count = memento_found_count();
|
2014-02-06 11:27:08 +00:00
|
|
|
bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
|
|
|
|
double ratio =
|
|
|
|
minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
|
|
|
|
static_cast<double>(found_count) / create_count : 0.0;
|
2014-06-03 07:55:38 +00:00
|
|
|
PretenureDecision current_decision = pretenure_decision();
|
|
|
|
|
|
|
|
if (minimum_mementos_created) {
|
|
|
|
deopt = MakePretenureDecision(
|
|
|
|
current_decision, ratio, maximum_size_scavenge);
|
2013-11-27 14:03:40 +00:00
|
|
|
}
|
|
|
|
|
2014-02-06 10:30:13 +00:00
|
|
|
if (FLAG_trace_pretenuring_statistics) {
|
|
|
|
PrintF(
|
|
|
|
"AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
|
|
|
|
static_cast<void*>(this), create_count, found_count, ratio,
|
2014-06-03 07:55:38 +00:00
|
|
|
PretenureDecisionName(current_decision),
|
|
|
|
PretenureDecisionName(pretenure_decision()));
|
2014-02-06 10:30:13 +00:00
|
|
|
}
|
|
|
|
|
2013-11-27 14:03:40 +00:00
|
|
|
// Clear feedback calculation fields until the next gc.
|
2014-01-13 10:28:01 +00:00
|
|
|
set_memento_found_count(0);
|
|
|
|
set_memento_create_count(0);
|
2014-06-03 07:55:38 +00:00
|
|
|
return deopt;
|
2013-11-27 14:03:40 +00:00
|
|
|
}
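// Worked example (assuming kPretenureRatio == 0.85): with
// memento_create_count() == 100 and memento_found_count() == 90 the
// ratio is 0.9 >= 0.85, so an at-capacity scavenge moves the decision
// straight to kTenure (and requests a deopt), while a smaller scavenge
// only records kMaybeTenure. A ratio of, say, 0.5 resets the site to
// kDontTenure.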
|
|
|
|
|
|
|
|
|
2013-09-09 14:29:47 +00:00
|
|
|
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
|
2014-04-07 10:00:14 +00:00
|
|
|
JSObject::ValidateElements(object);
|
2013-09-09 14:29:47 +00:00
|
|
|
ElementsKind elements_kind = object->map()->elements_kind();
|
2012-05-23 14:24:29 +00:00
|
|
|
if (!IsFastObjectElementsKind(elements_kind)) {
|
|
|
|
if (IsFastHoleyElementsKind(elements_kind)) {
|
2013-09-09 14:29:47 +00:00
|
|
|
TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
|
2012-05-23 14:24:29 +00:00
|
|
|
} else {
|
2013-09-09 14:29:47 +00:00
|
|
|
TransitionElementsKind(object, FAST_ELEMENTS);
|
2012-05-23 14:24:29 +00:00
|
|
|
}
|
2011-09-22 11:30:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-21 14:29:27 +00:00
|
|
|
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
|
|
|
|
Object** objects,
|
|
|
|
uint32_t count,
|
|
|
|
EnsureElementsMode mode) {
|
|
|
|
ElementsKind current_kind = object->map()->elements_kind();
|
2011-12-09 08:50:19 +00:00
|
|
|
ElementsKind target_kind = current_kind;
|
2014-03-21 14:29:27 +00:00
|
|
|
{
|
|
|
|
DisallowHeapAllocation no_allocation;
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
|
2014-03-21 14:29:27 +00:00
|
|
|
bool is_holey = IsFastHoleyElementsKind(current_kind);
|
|
|
|
if (current_kind == FAST_HOLEY_ELEMENTS) return;
|
|
|
|
Heap* heap = object->GetHeap();
|
|
|
|
Object* the_hole = heap->the_hole_value();
|
|
|
|
for (uint32_t i = 0; i < count; ++i) {
|
|
|
|
Object* current = *objects++;
|
|
|
|
if (current == the_hole) {
|
|
|
|
is_holey = true;
|
|
|
|
target_kind = GetHoleyElementsKind(target_kind);
|
|
|
|
} else if (!current->IsSmi()) {
|
|
|
|
if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
|
|
|
|
if (IsFastSmiElementsKind(target_kind)) {
|
|
|
|
if (is_holey) {
|
|
|
|
target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
|
|
|
|
} else {
|
|
|
|
target_kind = FAST_DOUBLE_ELEMENTS;
|
|
|
|
}
|
2012-05-23 14:24:29 +00:00
|
|
|
}
|
2014-03-21 14:29:27 +00:00
|
|
|
} else if (is_holey) {
|
|
|
|
target_kind = FAST_HOLEY_ELEMENTS;
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
target_kind = FAST_ELEMENTS;
|
2012-05-23 14:24:29 +00:00
|
|
|
}
|
2011-09-22 11:30:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2011-12-09 08:50:19 +00:00
|
|
|
if (target_kind != current_kind) {
|
2014-03-21 14:29:27 +00:00
|
|
|
TransitionElementsKind(object, target_kind);
|
2011-12-09 08:50:19 +00:00
|
|
|
}
|
2011-09-22 11:30:04 +00:00
|
|
|
}
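// Example of the generalization this computes (illustrative): starting
// from FAST_SMI_ELEMENTS, encountering the hole yields
// FAST_HOLEY_SMI_ELEMENTS, a heap number (under
// ALLOW_CONVERTED_DOUBLE_ELEMENTS) yields FAST_DOUBLE_ELEMENTS, and any
// other heap object yields FAST_ELEMENTS; the transition is performed
// once at the end, only if the kind actually changed.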
|
|
|
|
|
|
|
|
|
2014-03-21 08:30:42 +00:00
|
|
|
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
|
|
|
|
Handle<FixedArrayBase> elements,
|
|
|
|
uint32_t length,
|
|
|
|
EnsureElementsMode mode) {
|
2014-03-21 14:29:27 +00:00
|
|
|
Heap* heap = object->GetHeap();
|
|
|
|
if (elements->map() != heap->fixed_double_array_map()) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(elements->map() == heap->fixed_array_map() ||
|
2014-03-21 14:29:27 +00:00
|
|
|
elements->map() == heap->fixed_cow_array_map());
|
2011-12-09 08:50:19 +00:00
|
|
|
if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
|
|
|
|
mode = DONT_ALLOW_DOUBLE_ELEMENTS;
|
|
|
|
}
|
2014-03-21 14:29:27 +00:00
|
|
|
Object** objects =
|
|
|
|
Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
|
|
|
|
EnsureCanContainElements(object, objects, length, mode);
|
|
|
|
return;
|
2011-12-09 08:50:19 +00:00
|
|
|
}
|
|
|
|
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
|
2014-03-21 14:29:27 +00:00
|
|
|
if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
|
|
|
|
TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
|
|
|
|
} else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
|
|
|
|
Handle<FixedDoubleArray> double_array =
|
|
|
|
Handle<FixedDoubleArray>::cast(elements);
|
2012-05-23 14:24:29 +00:00
|
|
|
for (uint32_t i = 0; i < length; ++i) {
|
|
|
|
if (double_array->is_the_hole(i)) {
|
2014-03-21 14:29:27 +00:00
|
|
|
TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
|
|
|
|
return;
|
2012-05-23 14:24:29 +00:00
|
|
|
}
|
|
|
|
}
|
2014-03-21 14:29:27 +00:00
|
|
|
TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
|
2011-12-09 08:50:19 +00:00
|
|
|
}
|
2011-09-22 11:30:04 +00:00
|
|
|
}
|
|
|
|
|
2009-07-28 08:43:51 +00:00
|
|
|
|
2014-03-31 15:30:13 +00:00
|
|
|
void JSObject::SetMapAndElements(Handle<JSObject> object,
|
|
|
|
Handle<Map> new_map,
|
|
|
|
Handle<FixedArrayBase> value) {
|
|
|
|
JSObject::MigrateToMap(object, new_map);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((object->map()->has_fast_smi_or_object_elements() ||
|
2014-03-31 15:30:13 +00:00
|
|
|
(*value == object->GetHeap()->empty_fixed_array())) ==
|
|
|
|
(value->map() == object->GetHeap()->fixed_array_map() ||
|
|
|
|
value->map() == object->GetHeap()->fixed_cow_array_map()));
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
|
2014-03-31 15:30:13 +00:00
|
|
|
(object->map()->has_fast_double_elements() ==
|
|
|
|
value->IsFixedDoubleArray()));
|
|
|
|
object->set_elements(*value);
|
2009-07-28 08:43:51 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
|
2011-12-30 14:28:14 +00:00
|
|
|
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
|
2014-03-31 15:30:13 +00:00
|
|
|
WRITE_FIELD(this, kElementsOffset, value);
|
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
|
2011-12-30 14:28:14 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void JSObject::initialize_properties() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
|
2011-03-18 20:35:07 +00:00
|
|
|
WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSObject::initialize_elements() {
|
2014-04-09 14:26:32 +00:00
|
|
|
FixedArrayBase* elements = map()->GetInitialElements();
|
|
|
|
WRITE_FIELD(this, kElementsOffset, elements);
|
2010-06-24 13:56:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-23 13:46:49 +00:00
|
|
|
Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
|
2013-06-03 15:32:22 +00:00
|
|
|
DisallowHeapAllocation no_gc;
|
2013-04-09 16:49:28 +00:00
|
|
|
if (!map->HasTransitionArray()) return Handle<String>::null();
|
|
|
|
TransitionArray* transitions = map->transitions();
|
|
|
|
if (!transitions->IsSimpleTransition()) return Handle<String>::null();
|
|
|
|
int transition = TransitionArray::kSimpleTransitionIndex;
|
|
|
|
PropertyDetails details = transitions->GetTargetDetails(transition);
|
|
|
|
Name* name = transitions->GetKey(transition);
|
2015-01-19 17:49:13 +00:00
|
|
|
if (details.type() != DATA) return Handle<String>::null();
|
2013-04-09 16:49:28 +00:00
|
|
|
if (details.attributes() != NONE) return Handle<String>::null();
|
|
|
|
if (!name->IsString()) return Handle<String>::null();
|
|
|
|
return Handle<String>(String::cast(name));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-23 13:46:49 +00:00
|
|
|
Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!ExpectedTransitionKey(map).is_null());
|
2013-04-09 16:49:28 +00:00
|
|
|
return Handle<Map>(map->transitions()->GetTarget(
|
|
|
|
TransitionArray::kSimpleTransitionIndex));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-23 13:46:49 +00:00
|
|
|
Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
|
2013-06-03 15:32:22 +00:00
|
|
|
DisallowHeapAllocation no_allocation;
|
2013-04-09 16:49:28 +00:00
|
|
|
if (!map->HasTransitionArray()) return Handle<Map>::null();
|
|
|
|
TransitionArray* transitions = map->transitions();
|
2015-01-19 17:49:13 +00:00
|
|
|
int transition = transitions->Search(kData, *key, NONE);
|
2013-04-09 16:49:28 +00:00
|
|
|
if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
|
2014-12-10 15:18:44 +00:00
|
|
|
PropertyDetails details = transitions->GetTargetDetails(transition);
|
2015-01-19 17:49:13 +00:00
|
|
|
if (details.type() != DATA) return Handle<Map>::null();
|
2014-12-10 15:18:44 +00:00
|
|
|
DCHECK_EQ(NONE, details.attributes());
|
2013-04-09 16:49:28 +00:00
|
|
|
return Handle<Map>(transitions->GetTarget(transition));
|
2012-10-17 14:09:42 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
ACCESSORS(Oddball, to_string, String, kToStringOffset)
|
|
|
|
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
|
|
|
|
|
|
|
|
|
2014-06-24 09:47:25 +00:00
|
|
|
byte Oddball::kind() const {
|
2011-09-19 18:36:47 +00:00
|
|
|
return Smi::cast(READ_FIELD(this, kKindOffset))->value();
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Oddball::set_kind(byte value) {
|
2011-09-19 18:36:47 +00:00
|
|
|
WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
Object* Cell::value() const {
|
2009-07-09 14:34:08 +00:00
|
|
|
return READ_FIELD(this, kValueOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 15:03:44 +00:00
|
|
|
void Cell::set_value(Object* val, WriteBarrierMode ignored) {
|
2009-07-09 14:34:08 +00:00
|
|
|
// The write barrier is not used for global property cells.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!val->IsPropertyCell() && !val->IsCell());
|
2009-07-09 14:34:08 +00:00
|
|
|
WRITE_FIELD(this, kValueOffset, val);
|
|
|
|
}
|
|
|
|
|
2013-06-26 16:17:12 +00:00
|
|
|
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
|
2009-06-30 10:05:36 +00:00
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
Object* PropertyCell::type_raw() const {
|
2013-06-12 15:03:44 +00:00
|
|
|
return READ_FIELD(this, kTypeOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-14 16:06:12 +00:00
|
|
|
void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
|
2013-06-12 15:03:44 +00:00
|
|
|
WRITE_FIELD(this, kTypeOffset, val);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-21 10:33:57 +00:00
|
|
|
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
|
2014-10-14 14:43:45 +00:00
|
|
|
|
|
|
|
|
2014-10-21 09:42:16 +00:00
|
|
|
void WeakCell::clear() {
|
|
|
|
DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
|
|
|
|
WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
|
2014-10-14 14:43:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void WeakCell::initialize(HeapObject* val) {
|
|
|
|
WRITE_FIELD(this, kValueOffset, val);
|
|
|
|
WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-21 09:42:16 +00:00
|
|
|
bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
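// Usage sketch (illustrative only): because the GC clears the cell
// during mark-compact, consumers must re-check it after any point that
// can trigger a collection:
//
//   if (!cell->cleared()) {
//     HeapObject* target = HeapObject::cast(cell->value());
//     // ... use target ...
//   }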
|
|
|
|
|
|
|
|
|
2014-10-14 14:43:45 +00:00
|
|
|
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
|
|
|
|
|
|
|
|
|
|
|
|
void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
|
|
|
|
WRITE_FIELD(this, kNextOffset, val);
|
|
|
|
if (mode == UPDATE_WRITE_BARRIER) {
|
|
|
|
WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
int JSObject::GetHeaderSize() {
|
2009-07-17 09:16:23 +00:00
|
|
|
InstanceType type = map()->instance_type();
|
|
|
|
// Check for the most common kind of JavaScript object before
|
|
|
|
// falling into the generic switch. This speeds up the internal
|
|
|
|
// field operations considerably on average.
|
|
|
|
if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
|
|
|
|
switch (type) {
|
2013-04-15 12:29:44 +00:00
|
|
|
case JS_GENERATOR_OBJECT_TYPE:
|
|
|
|
return JSGeneratorObject::kSize;
|
2012-04-16 14:43:27 +00:00
|
|
|
case JS_MODULE_TYPE:
|
|
|
|
return JSModule::kSize;
|
2008-10-21 19:07:58 +00:00
|
|
|
case JS_GLOBAL_PROXY_TYPE:
|
|
|
|
return JSGlobalProxy::kSize;
|
2008-07-03 15:10:15 +00:00
|
|
|
case JS_GLOBAL_OBJECT_TYPE:
|
|
|
|
return JSGlobalObject::kSize;
|
|
|
|
case JS_BUILTINS_OBJECT_TYPE:
|
|
|
|
return JSBuiltinsObject::kSize;
|
|
|
|
case JS_FUNCTION_TYPE:
|
|
|
|
return JSFunction::kSize;
|
|
|
|
case JS_VALUE_TYPE:
|
|
|
|
return JSValue::kSize;
|
2012-03-09 11:11:55 +00:00
|
|
|
case JS_DATE_TYPE:
|
|
|
|
return JSDate::kSize;
|
2008-07-03 15:10:15 +00:00
|
|
|
case JS_ARRAY_TYPE:
|
2012-01-18 17:01:57 +00:00
|
|
|
return JSArray::kSize;
|
2013-03-28 12:50:18 +00:00
|
|
|
case JS_ARRAY_BUFFER_TYPE:
|
|
|
|
return JSArrayBuffer::kSize;
|
2013-04-16 14:16:30 +00:00
|
|
|
case JS_TYPED_ARRAY_TYPE:
|
|
|
|
return JSTypedArray::kSize;
|
2013-06-21 13:02:38 +00:00
|
|
|
case JS_DATA_VIEW_TYPE:
|
|
|
|
return JSDataView::kSize;
|
2013-03-11 11:31:53 +00:00
|
|
|
case JS_SET_TYPE:
|
|
|
|
return JSSet::kSize;
|
|
|
|
case JS_MAP_TYPE:
|
|
|
|
return JSMap::kSize;
|
2014-04-17 17:45:32 +00:00
|
|
|
case JS_SET_ITERATOR_TYPE:
|
|
|
|
return JSSetIterator::kSize;
|
|
|
|
case JS_MAP_ITERATOR_TYPE:
|
|
|
|
return JSMapIterator::kSize;
|
2011-08-03 11:55:13 +00:00
|
|
|
case JS_WEAK_MAP_TYPE:
|
|
|
|
return JSWeakMap::kSize;
|
2013-07-22 08:32:24 +00:00
|
|
|
case JS_WEAK_SET_TYPE:
|
|
|
|
return JSWeakSet::kSize;
|
2008-09-23 11:45:43 +00:00
|
|
|
case JS_REGEXP_TYPE:
|
2012-01-18 17:01:57 +00:00
|
|
|
return JSRegExp::kSize;
|
2009-01-14 12:13:26 +00:00
|
|
|
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
|
2008-07-03 15:10:15 +00:00
|
|
|
return JSObject::kHeaderSize;
|
2011-02-02 13:31:52 +00:00
|
|
|
case JS_MESSAGE_OBJECT_TYPE:
|
|
|
|
return JSMessageObject::kSize;
|
2008-07-03 15:10:15 +00:00
|
|
|
default:
|
2013-10-28 17:54:43 +00:00
|
|
|
// TODO(jkummerow): Re-enable this. Blink currently hits this
|
|
|
|
// from its CustomElementConstructorBuilder.
|
|
|
|
// UNREACHABLE();
|
2008-07-03 15:10:15 +00:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int JSObject::GetInternalFieldCount() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(1 << kPointerSizeLog2 == kPointerSize);
|
2008-10-15 06:03:26 +00:00
|
|
|
// Make sure to adjust for the number of in-object properties. These
|
|
|
|
// properties do contribute to the size, but are not internal fields.
|
|
|
|
return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
|
|
|
|
map()->inobject_properties();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
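// Worked example (hypothetical sizes): for an API object of total size
// 48 bytes with a 24-byte header, (48 - 24) >> kPointerSizeLog2 is 3
// pointer slots on a 64-bit target; if the map declares one in-object
// property, GetInternalFieldCount() reports 2 internal fields.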
|
|
|
|
|
|
|
|
|
2011-03-18 12:49:27 +00:00
|
|
|
int JSObject::GetInternalFieldOffset(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index < GetInternalFieldCount() && index >= 0);
|
2011-03-18 12:49:27 +00:00
|
|
|
return GetHeaderSize() + (kPointerSize * index);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
Object* JSObject::GetInternalField(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index < GetInternalFieldCount() && index >= 0);
|
2008-10-15 06:03:26 +00:00
|
|
|
// Internal fields follow immediately after the header, whereas in-object
|
|
|
|
// properties are at the end of the object. Therefore there is no need
|
|
|
|
// to adjust the index here.
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSObject::SetInternalField(int index, Object* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index < GetInternalFieldCount() && index >= 0);
|
2008-10-15 06:03:26 +00:00
|
|
|
// Internal objects do follow immediately after the header, whereas in-object
|
|
|
|
// properties are at the end of the object. Therefore there is no need
|
|
|
|
// to adjust the index here.
|
2008-07-03 15:10:15 +00:00
|
|
|
int offset = GetHeaderSize() + (kPointerSize * index);
|
|
|
|
WRITE_FIELD(this, offset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
WRITE_BARRIER(GetHeap(), this, offset, value);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-11-15 12:18:24 +00:00
|
|
|
void JSObject::SetInternalField(int index, Smi* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index < GetInternalFieldCount() && index >= 0);
|
2011-11-15 12:18:24 +00:00
|
|
|
// Internal objects do follow immediately after the header, whereas in-object
|
|
|
|
// properties are at the end of the object. Therefore there is no need
|
|
|
|
// to adjust the index here.
|
|
|
|
int offset = GetHeaderSize() + (kPointerSize * index);
|
|
|
|
WRITE_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-11-11 10:24:52 +00:00
|
|
|
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
|
|
|
|
if (!FLAG_unbox_double_fields) return false;
|
|
|
|
return map()->IsUnboxedDoubleField(index);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::IsUnboxedDoubleField(FieldIndex index) {
|
|
|
|
if (!FLAG_unbox_double_fields) return false;
|
|
|
|
if (index.is_hidden_field() || !index.is_inobject()) return false;
|
|
|
|
return !layout_descriptor()->IsTagged(index.property_index());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-15 06:03:26 +00:00
|
|
|
// Access fast-case object properties at index. The use of these routines
|
|
|
|
// is needed to correctly distinguish between properties stored in-object and
|
|
|
|
// properties stored in the properties array.
|
2014-06-10 14:01:08 +00:00
|
|
|
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
|
2014-11-11 10:24:52 +00:00
|
|
|
DCHECK(!IsUnboxedDoubleField(index));
|
2014-06-10 14:01:08 +00:00
|
|
|
if (index.is_inobject()) {
|
|
|
|
return READ_FIELD(this, index.offset());
|
2008-10-15 06:03:26 +00:00
|
|
|
} else {
|
2014-06-10 14:01:08 +00:00
|
|
|
return properties()->get(index.outobject_array_index());
|
2008-10-15 06:03:26 +00:00
|
|
|
}
|
|
|
|
}
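// --- Illustrative sketch (not part of V8; all names invented) ------------
// A minimal standalone model of the addressing scheme used by the fast
// property routines in this section: the same logical field index resolves
// either to a slot inside the object or to an entry in the out-of-object
// properties backing store.
#include <vector>

struct FastObjectSketch {
  static const int kInObjectSlots = 2;
  int inobject[kInObjectSlots];  // fields stored inside the object itself
  std::vector<int> overflow;     // the out-of-object "properties array"

  int Get(int index) const {
    return index < kInObjectSlots ? inobject[index]
                                  : overflow[index - kInObjectSlots];
  }
  void Put(int index, int value) {
    if (index < kInObjectSlots) {
      inobject[index] = value;
    } else {
      overflow[index - kInObjectSlots] = value;
    }
  }
};
// --------------------------------------------------------------------------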
|
|
|
|
|
|
|
|
|
2014-11-11 10:24:52 +00:00
|
|
|
double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
|
|
|
|
DCHECK(IsUnboxedDoubleField(index));
|
|
|
|
return READ_DOUBLE_FIELD(this, index.offset());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
|
2014-06-10 14:01:08 +00:00
|
|
|
if (index.is_inobject()) {
|
|
|
|
int offset = index.offset();
|
2008-10-15 06:03:26 +00:00
|
|
|
WRITE_FIELD(this, offset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
WRITE_BARRIER(GetHeap(), this, offset, value);
|
2008-10-15 06:03:26 +00:00
|
|
|
} else {
|
2014-06-10 14:01:08 +00:00
|
|
|
properties()->set(index.outobject_array_index(), value);
|
2008-10-15 06:03:26 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-11-11 10:24:52 +00:00
|
|
|
void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
|
|
|
|
WRITE_DOUBLE_FIELD(this, index.offset(), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
|
|
|
|
if (IsUnboxedDoubleField(index)) {
|
|
|
|
DCHECK(value->IsMutableHeapNumber());
|
|
|
|
RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
|
|
|
|
} else {
|
|
|
|
RawFastPropertyAtPut(index, value);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 12:49:27 +00:00
|
|
|
int JSObject::GetInObjectPropertyOffset(int index) {
|
2014-02-04 10:48:49 +00:00
|
|
|
return map()->GetInObjectPropertyOffset(index);
|
2011-03-18 12:49:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-23 07:27:47 +00:00
|
|
|
Object* JSObject::InObjectPropertyAt(int index) {
|
2014-02-04 10:48:49 +00:00
|
|
|
int offset = GetInObjectPropertyOffset(index);
|
2009-03-23 07:27:47 +00:00
|
|
|
return READ_FIELD(this, offset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-23 08:46:32 +00:00
|
|
|
Object* JSObject::InObjectPropertyAtPut(int index,
|
|
|
|
Object* value,
|
|
|
|
WriteBarrierMode mode) {
|
|
|
|
// Adjust for the number of properties stored in the object.
|
2014-02-04 10:48:49 +00:00
|
|
|
int offset = GetInObjectPropertyOffset(index);
|
2008-10-23 08:46:32 +00:00
|
|
|
WRITE_FIELD(this, offset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
|
2008-10-23 08:46:32 +00:00
|
|
|
return value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
2011-09-20 10:06:23 +00:00
|
|
|
void JSObject::InitializeBody(Map* map,
|
|
|
|
Object* pre_allocated_value,
|
|
|
|
Object* filler_value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!filler_value->IsHeapObject() ||
|
2011-09-20 10:06:23 +00:00
|
|
|
!GetHeap()->InNewSpace(filler_value));
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!pre_allocated_value->IsHeapObject() ||
|
2011-09-20 10:06:23 +00:00
|
|
|
!GetHeap()->InNewSpace(pre_allocated_value));
|
|
|
|
int size = map->instance_size();
|
|
|
|
int offset = kHeaderSize;
|
|
|
|
if (filler_value != pre_allocated_value) {
|
|
|
|
int pre_allocated = map->pre_allocated_property_fields();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
|
2011-09-20 10:06:23 +00:00
|
|
|
for (int i = 0; i < pre_allocated; i++) {
|
|
|
|
WRITE_FIELD(this, offset, pre_allocated_value);
|
|
|
|
offset += kPointerSize;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
while (offset < size) {
|
|
|
|
WRITE_FIELD(this, offset, filler_value);
|
|
|
|
offset += kPointerSize;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-07-02 11:27:57 +00:00
|
|
|
bool JSObject::HasFastProperties() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
|
2010-07-02 11:27:57 +00:00
|
|
|
return !properties()->IsDictionary();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-02-04 17:23:07 +00:00
|
|
|
MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
|
|
|
|
uint32_t index,
|
|
|
|
Handle<Object> value,
|
|
|
|
LanguageMode language_mode) {
|
|
|
|
return JSObject::SetOwnElement(object, index, value, NONE, language_mode);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-18 13:47:25 +00:00
|
|
|
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
|
|
|
|
if (unused_property_fields() != 0) return false;
|
2014-08-04 15:02:07 +00:00
|
|
|
if (is_prototype_map()) return false;
|
2014-07-18 13:47:25 +00:00
|
|
|
int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
|
|
|
|
int limit = Max(minimum, inobject_properties());
|
|
|
|
int external = NumberOfFields() - inobject_properties();
|
|
|
|
return external > limit;
|
2010-07-02 11:27:57 +00:00
|
|
|
}
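// Worked example: for a store that may come from a keyed access
// (minimum == 12) on a map with 4 in-object properties, limit ==
// Max(12, 4) == 12, so the map is only considered to have too many fast
// properties once more than 12 fields have spilled into the out-of-object
// properties backing store.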
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void Struct::InitializeBody(int object_size) {
|
2011-03-18 20:35:07 +00:00
|
|
|
Object* value = GetHeap()->undefined_value();
|
2008-09-23 11:45:43 +00:00
|
|
|
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
2008-10-22 08:21:18 +00:00
|
|
|
WRITE_FIELD(this, offset, value);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-05-27 12:30:45 +00:00
|
|
|
bool Object::ToArrayIndex(uint32_t* index) {
|
|
|
|
if (IsSmi()) {
|
|
|
|
int value = Smi::cast(this)->value();
|
2008-07-03 15:10:15 +00:00
|
|
|
if (value < 0) return false;
|
|
|
|
*index = value;
|
|
|
|
return true;
|
|
|
|
}
|
2010-05-27 12:30:45 +00:00
|
|
|
if (IsHeapNumber()) {
|
|
|
|
double value = HeapNumber::cast(this)->value();
|
2008-07-03 15:10:15 +00:00
|
|
|
uint32_t uint_value = static_cast<uint32_t>(value);
|
|
|
|
if (value == static_cast<double>(uint_value)) {
|
|
|
|
*index = uint_value;
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
|
|
|
|
if (!this->IsJSValue()) return false;
|
|
|
|
|
|
|
|
JSValue* js_value = JSValue::cast(this);
|
|
|
|
if (!js_value->value()->IsString()) return false;
|
|
|
|
|
|
|
|
String* str = String::cast(js_value->value());
|
2013-01-07 15:02:56 +00:00
|
|
|
if (index >= static_cast<uint32_t>(str->length())) return false;
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-09-10 13:38:21 +00:00
|
|
|
void Object::VerifyApiCallResultType() {
|
2015-01-08 16:29:27 +00:00
|
|
|
#if DEBUG
|
2012-09-10 13:38:21 +00:00
|
|
|
if (!(IsSmi() ||
|
|
|
|
IsString() ||
|
2014-03-20 16:13:09 +00:00
|
|
|
IsSymbol() ||
|
2012-09-10 13:38:21 +00:00
|
|
|
IsSpecObject() ||
|
|
|
|
IsHeapNumber() ||
|
|
|
|
IsUndefined() ||
|
|
|
|
IsTrue() ||
|
|
|
|
IsFalse() ||
|
|
|
|
IsNull())) {
|
|
|
|
FATAL("API call returned invalid object");
|
|
|
|
}
|
2015-01-08 16:29:27 +00:00
|
|
|
#endif // DEBUG
|
2012-09-10 13:38:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-27 16:34:28 +00:00
|
|
|
Object* FixedArray::get(int index) const {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(index >= 0 && index < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_FIELD(this, kHeaderSize + index * kPointerSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
|
|
|
|
return handle(array->get(index), array->GetIsolate());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-05-23 14:24:29 +00:00
|
|
|
bool FixedArray::is_the_hole(int index) {
|
|
|
|
return get(index) == GetHeap()->the_hole_value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-25 11:07:48 +00:00
|
|
|
void FixedArray::set(int index, Smi* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < this->length());
|
|
|
|
DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
|
2008-11-25 11:07:48 +00:00
|
|
|
int offset = kHeaderSize + index * kPointerSize;
|
|
|
|
WRITE_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void FixedArray::set(int index, Object* value) {
|
2014-08-11 14:00:58 +00:00
|
|
|
DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
|
|
|
|
DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
int offset = kHeaderSize + index * kPointerSize;
|
|
|
|
WRITE_FIELD(this, offset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
WRITE_BARRIER(GetHeap(), this, offset, value);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-08-03 11:12:46 +00:00
|
|
|
double FixedDoubleArray::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
|
2013-09-11 07:14:41 +00:00
|
|
|
map() != GetHeap()->fixed_array_map());
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2015-01-21 08:52:00 +00:00
|
|
|
DCHECK(!is_the_hole(index));
|
|
|
|
return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
|
2011-06-09 10:03:35 +00:00
|
|
|
}
|
|
|
|
|
2015-01-21 08:52:00 +00:00
|
|
|
|
|
|
|
uint64_t FixedDoubleArray::get_representation(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
|
2013-09-11 07:14:41 +00:00
|
|
|
map() != GetHeap()->fixed_array_map());
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2015-01-21 08:52:00 +00:00
|
|
|
int offset = kHeaderSize + index * kDoubleSize;
|
|
|
|
return READ_UINT64_FIELD(this, offset);
|
2012-03-23 10:14:51 +00:00
|
|
|
}
|
2011-06-09 10:03:35 +00:00
|
|
|
|
2011-08-03 11:12:46 +00:00
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
|
|
|
|
int index) {
|
|
|
|
if (array->is_the_hole(index)) {
|
|
|
|
return array->GetIsolate()->factory()->the_hole_value();
|
2014-03-27 16:41:09 +00:00
|
|
|
} else {
|
2014-04-08 14:20:29 +00:00
|
|
|
return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
|
2014-03-27 16:41:09 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-06-09 10:03:35 +00:00
|
|
|
void FixedDoubleArray::set(int index, double value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
|
2013-09-11 07:14:41 +00:00
|
|
|
map() != GetHeap()->fixed_array_map());
|
2011-06-09 10:03:35 +00:00
|
|
|
int offset = kHeaderSize + index * kDoubleSize;
|
2015-01-21 08:52:00 +00:00
|
|
|
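// Canonicalizing NaNs ensures no stored value can alias the hole's
// dedicated NaN bit pattern (kHoleNanInt64).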
if (std::isnan(value)) {
|
2015-02-11 09:05:16 +00:00
|
|
|
WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
|
2015-01-21 08:52:00 +00:00
|
|
|
} else {
|
|
|
|
WRITE_DOUBLE_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
DCHECK(!is_the_hole(index));
|
2011-06-09 10:03:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void FixedDoubleArray::set_the_hole(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
|
2013-09-11 07:14:41 +00:00
|
|
|
map() != GetHeap()->fixed_array_map());
|
2011-06-09 10:03:35 +00:00
|
|
|
int offset = kHeaderSize + index * kDoubleSize;
|
2015-01-21 08:52:00 +00:00
|
|
|
WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
|
2011-06-09 10:03:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool FixedDoubleArray::is_the_hole(int index) {
|
2015-01-21 08:52:00 +00:00
|
|
|
return get_representation(index) == kHoleNanInt64;
|
2011-06-09 10:03:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-28 13:05:23 +00:00
|
|
|
double* FixedDoubleArray::data_start() {
|
|
|
|
return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void FixedDoubleArray::FillWithHoles(int from, int to) {
|
|
|
|
for (int i = from; i < to; i++) {
|
|
|
|
set_the_hole(i);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-12-15 19:57:37 +00:00
|
|
|
Object* WeakFixedArray::Get(int index) const {
|
|
|
|
Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
|
|
|
|
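// A cleared slot holds a Smi (clear() stores Smi zero); live entries are
// WeakCells, whose value may in turn have been cleared by the GC.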
if (raw->IsSmi()) return raw;
|
|
|
|
return WeakCell::cast(raw)->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool WeakFixedArray::IsEmptySlot(int index) const {
|
|
|
|
DCHECK(index < Length());
|
|
|
|
return Get(index)->IsSmi();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void WeakFixedArray::clear(int index) {
|
|
|
|
FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int WeakFixedArray::Length() const {
|
|
|
|
return FixedArray::cast(this)->length() - kFirstIndex;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int WeakFixedArray::last_used_index() const {
|
|
|
|
return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void WeakFixedArray::set_last_used_index(int index) {
|
|
|
|
FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-03 17:01:14 +00:00
|
|
|
void ConstantPoolArray::NumberOfEntries::increment(Type type) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(type < NUMBER_OF_TYPES);
|
2014-07-03 17:01:14 +00:00
|
|
|
element_counts_[type]++;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool ConstantPoolArray::NumberOfEntries::equals(
|
|
|
|
const ConstantPoolArray::NumberOfEntries& other) const {
|
|
|
|
for (int i = 0; i < NUMBER_OF_TYPES; i++) {
|
|
|
|
if (element_counts_[i] != other.element_counts_[i]) return false;
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool ConstantPoolArray::NumberOfEntries::is_empty() const {
|
|
|
|
return total_count() == 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(type < NUMBER_OF_TYPES);
|
2014-07-03 17:01:14 +00:00
|
|
|
return element_counts_[type];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
|
|
|
|
int base = 0;
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(type < NUMBER_OF_TYPES);
|
2014-07-03 17:01:14 +00:00
|
|
|
for (int i = 0; i < type; i++) {
|
|
|
|
base += element_counts_[i];
|
|
|
|
}
|
|
|
|
return base;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::NumberOfEntries::total_count() const {
|
|
|
|
int count = 0;
|
|
|
|
for (int i = 0; i < NUMBER_OF_TYPES; i++) {
|
|
|
|
count += element_counts_[i];
|
|
|
|
}
|
|
|
|
return count;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
|
|
|
|
for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
|
|
|
|
if (element_counts_[i] < min || element_counts_[i] > max) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::Iterator::next_index() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!is_finished());
|
2014-07-03 17:01:14 +00:00
|
|
|
int ret = next_index_++;
|
|
|
|
update_section();
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool ConstantPoolArray::Iterator::is_finished() {
|
|
|
|
return next_index_ > array_->last_index(type_, final_section_);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::Iterator::update_section() {
|
|
|
|
if (next_index_ > array_->last_index(type_, current_section_) &&
|
|
|
|
current_section_ != final_section_) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(final_section_ == EXTENDED_SECTION);
|
2014-07-03 17:01:14 +00:00
|
|
|
current_section_ = EXTENDED_SECTION;
|
|
|
|
next_index_ = array_->first_index(type_, EXTENDED_SECTION);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
bool ConstantPoolArray::is_extended_layout() {
|
|
|
|
uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
|
|
|
|
return IsExtendedField::decode(small_layout_1);
|
2014-04-08 10:00:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
|
|
|
|
return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
|
2014-04-08 10:00:57 +00:00
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
int ConstantPoolArray::first_extended_section_index() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(is_extended_layout());
|
2014-06-03 16:22:10 +00:00
|
|
|
uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
|
|
|
|
return TotalCountField::decode(small_layout_2);
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
int ConstantPoolArray::get_extended_section_header_offset() {
|
|
|
|
return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
|
2014-04-08 10:00:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
|
|
|
|
uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
|
|
|
|
return WeakObjectStateField::decode(small_layout_2);
|
2014-04-08 10:00:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
void ConstantPoolArray::set_weak_object_state(
|
|
|
|
ConstantPoolArray::WeakObjectState state) {
|
|
|
|
uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
|
|
|
|
small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
|
|
|
|
WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
|
2014-04-08 10:00:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
int ConstantPoolArray::first_index(Type type, LayoutSection section) {
|
|
|
|
int index = 0;
|
|
|
|
if (section == EXTENDED_SECTION) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(is_extended_layout());
|
2014-06-03 16:22:10 +00:00
|
|
|
index += first_extended_section_index();
|
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
for (Type type_iter = FIRST_TYPE; type_iter < type;
|
|
|
|
type_iter = next_type(type_iter)) {
|
|
|
|
index += number_of_entries(type_iter, section);
|
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
return index;
|
2014-03-10 19:05:43 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
int ConstantPoolArray::last_index(Type type, LayoutSection section) {
|
|
|
|
return first_index(type, section) + number_of_entries(type, section) - 1;
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
|
|
|
|
if (section == SMALL_SECTION) {
|
|
|
|
uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
|
|
|
|
uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
|
|
|
|
switch (type) {
|
|
|
|
case INT64:
|
|
|
|
return Int64CountField::decode(small_layout_1);
|
|
|
|
case CODE_PTR:
|
|
|
|
return CodePtrCountField::decode(small_layout_1);
|
|
|
|
case HEAP_PTR:
|
|
|
|
return HeapPtrCountField::decode(small_layout_1);
|
|
|
|
case INT32:
|
|
|
|
return Int32CountField::decode(small_layout_2);
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
} else {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(section == EXTENDED_SECTION && is_extended_layout());
|
2014-06-03 16:22:10 +00:00
|
|
|
int offset = get_extended_section_header_offset();
|
|
|
|
switch (type) {
|
|
|
|
case INT64:
|
|
|
|
offset += kExtendedInt64CountOffset;
|
|
|
|
break;
|
|
|
|
case CODE_PTR:
|
|
|
|
offset += kExtendedCodePtrCountOffset;
|
|
|
|
break;
|
|
|
|
case HEAP_PTR:
|
|
|
|
offset += kExtendedHeapPtrCountOffset;
|
|
|
|
break;
|
|
|
|
case INT32:
|
|
|
|
offset += kExtendedInt32CountOffset;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
|
|
|
}
|
|
|
|
return READ_INT_FIELD(this, offset);
|
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-09 09:08:37 +00:00
|
|
|
bool ConstantPoolArray::offset_is_type(int offset, Type type) {
|
|
|
|
return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
|
|
|
|
offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
|
|
|
|
(is_extended_layout() &&
|
|
|
|
offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
|
|
|
|
offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
|
|
|
|
LayoutSection section;
|
|
|
|
if (is_extended_layout() && index >= first_extended_section_index()) {
|
|
|
|
section = EXTENDED_SECTION;
|
|
|
|
} else {
|
|
|
|
section = SMALL_SECTION;
|
|
|
|
}
|
|
|
|
|
|
|
|
Type type = FIRST_TYPE;
|
|
|
|
while (index > last_index(type, section)) {
|
|
|
|
type = next_type(type);
|
|
|
|
}
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(type <= LAST_TYPE);
|
2014-06-03 16:22:10 +00:00
|
|
|
return type;
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int64_t ConstantPoolArray::get_int64_entry(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT64);
|
2013-10-14 13:35:06 +00:00
|
|
|
return READ_INT64_FIELD(this, OffsetOfElementAt(index));
|
|
|
|
}
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
|
2013-10-14 13:35:06 +00:00
|
|
|
double ConstantPoolArray::get_int64_entry_as_double(int index) {
|
|
|
|
STATIC_ASSERT(kDoubleSize == kInt64Size);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT64);
|
2013-10-14 13:35:06 +00:00
|
|
|
return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-10 19:05:43 +00:00
|
|
|
Address ConstantPoolArray::get_code_ptr_entry(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == CODE_PTR);
|
2014-03-10 19:05:43 +00:00
|
|
|
return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == HEAP_PTR);
|
2013-10-14 13:35:06 +00:00
|
|
|
return READ_FIELD(this, OffsetOfElementAt(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int32_t ConstantPoolArray::get_int32_entry(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT32);
|
2013-10-14 13:35:06 +00:00
|
|
|
return READ_INT32_FIELD(this, OffsetOfElementAt(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
void ConstantPoolArray::set(int index, int64_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT64);
|
2014-06-03 16:22:10 +00:00
|
|
|
WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::set(int index, double value) {
|
|
|
|
STATIC_ASSERT(kDoubleSize == kInt64Size);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT64);
|
2014-06-03 16:22:10 +00:00
|
|
|
WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-10 19:05:43 +00:00
|
|
|
void ConstantPoolArray::set(int index, Address value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == CODE_PTR);
|
2014-03-10 19:05:43 +00:00
|
|
|
WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-10-14 13:35:06 +00:00
|
|
|
void ConstantPoolArray::set(int index, Object* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(!GetHeap()->InNewSpace(value));
|
|
|
|
DCHECK(get_type(index) == HEAP_PTR);
|
2013-10-14 13:35:06 +00:00
|
|
|
WRITE_FIELD(this, OffsetOfElementAt(index), value);
|
|
|
|
WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
void ConstantPoolArray::set(int index, int32_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(get_type(index) == INT32);
|
2014-06-03 16:22:10 +00:00
|
|
|
WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-09 09:08:37 +00:00
|
|
|
void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(offset_is_type(offset, INT32));
|
2014-07-09 09:08:37 +00:00
|
|
|
WRITE_INT32_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(offset_is_type(offset, INT64));
|
2014-07-09 09:08:37 +00:00
|
|
|
WRITE_INT64_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::set_at_offset(int offset, double value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(offset_is_type(offset, INT64));
|
2014-07-09 09:08:37 +00:00
|
|
|
WRITE_DOUBLE_FIELD(this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::set_at_offset(int offset, Address value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(offset_is_type(offset, CODE_PTR));
|
2014-07-09 09:08:37 +00:00
|
|
|
WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
|
|
|
|
WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ConstantPoolArray::set_at_offset(int offset, Object* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() == GetHeap()->constant_pool_array_map());
|
|
|
|
DCHECK(!GetHeap()->InNewSpace(value));
|
|
|
|
DCHECK(offset_is_type(offset, HEAP_PTR));
|
2014-07-09 09:08:37 +00:00
|
|
|
WRITE_FIELD(this, offset, value);
|
|
|
|
WRITE_BARRIER(GetHeap(), this, offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
void ConstantPoolArray::Init(const NumberOfEntries& small) {
|
|
|
|
uint32_t small_layout_1 =
|
|
|
|
Int64CountField::encode(small.count_of(INT64)) |
|
|
|
|
CodePtrCountField::encode(small.count_of(CODE_PTR)) |
|
|
|
|
HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
|
|
|
|
IsExtendedField::encode(false);
|
|
|
|
uint32_t small_layout_2 =
|
|
|
|
Int32CountField::encode(small.count_of(INT32)) |
|
|
|
|
TotalCountField::encode(small.total_count()) |
|
|
|
|
WeakObjectStateField::encode(NO_WEAK_OBJECTS);
|
|
|
|
WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
|
|
|
|
WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
|
|
|
|
if (kHeaderSize != kFirstEntryOffset) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
|
2014-06-03 16:22:10 +00:00
|
|
|
WRITE_UINT32_FIELD(this, kHeaderSize, 0); // Zero out header padding.
|
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-03 16:22:10 +00:00
|
|
|
void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
|
|
|
|
const NumberOfEntries& extended) {
|
|
|
|
// Initialize small layout fields first.
|
|
|
|
Init(small);
|
|
|
|
|
|
|
|
// Set is_extended_layout field.
|
|
|
|
uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
|
|
|
|
small_layout_1 = IsExtendedField::update(small_layout_1, true);
|
|
|
|
WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
|
|
|
|
|
|
|
|
// Initialize the extended layout fields.
|
|
|
|
int extended_header_offset = get_extended_section_header_offset();
|
2014-10-01 13:14:14 +00:00
|
|
|
WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
|
|
|
|
extended.count_of(INT64));
|
|
|
|
WRITE_INT32_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
|
|
|
|
extended.count_of(CODE_PTR));
|
|
|
|
WRITE_INT32_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
|
|
|
|
extended.count_of(HEAP_PTR));
|
|
|
|
WRITE_INT32_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
|
|
|
|
extended.count_of(INT32));
|
2014-06-03 16:22:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::size() {
|
|
|
|
NumberOfEntries small(this, SMALL_SECTION);
|
|
|
|
if (!is_extended_layout()) {
|
|
|
|
return SizeFor(small);
|
|
|
|
} else {
|
|
|
|
NumberOfEntries extended(this, EXTENDED_SECTION);
|
|
|
|
return SizeForExtended(small, extended);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ConstantPoolArray::length() {
|
|
|
|
uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
|
|
|
|
int length = TotalCountField::decode(small_layout_2);
|
|
|
|
if (is_extended_layout()) {
|
|
|
|
length += number_of_entries(INT64, EXTENDED_SECTION) +
|
|
|
|
number_of_entries(CODE_PTR, EXTENDED_SECTION) +
|
|
|
|
number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
|
|
|
|
number_of_entries(INT32, EXTENDED_SECTION);
|
|
|
|
}
|
|
|
|
return length;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-03 15:32:22 +00:00
|
|
|
WriteBarrierMode HeapObject::GetWriteBarrierMode(
|
|
|
|
const DisallowHeapAllocation& promise) {
|
2011-09-19 18:36:47 +00:00
|
|
|
Heap* heap = GetHeap();
|
|
|
|
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
|
|
|
|
if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
2008-07-03 15:10:15 +00:00
|
|
|
return UPDATE_WRITE_BARRIER;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-11-27 09:22:10 +00:00
|
|
|
bool HeapObject::NeedsToEnsureDoubleAlignment() {
|
|
|
|
#ifndef V8_HOST_ARCH_64_BIT
|
|
|
|
return (IsFixedFloat64Array() || IsFixedDoubleArray() ||
|
|
|
|
IsConstantPoolArray()) &&
|
|
|
|
FixedArrayBase::cast(this)->length() != 0;
|
|
|
|
#else
|
|
|
|
return false;
|
|
|
|
#endif // V8_HOST_ARCH_64_BIT
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void FixedArray::set(int index,
|
|
|
|
Object* value,
|
2008-10-23 08:46:32 +00:00
|
|
|
WriteBarrierMode mode) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
int offset = kHeaderSize + index * kPointerSize;
|
|
|
|
WRITE_FIELD(this, offset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-12-06 12:11:08 +00:00
|
|
|
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
|
|
|
|
int index,
|
|
|
|
Object* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < array->length());
|
2011-12-06 12:11:08 +00:00
|
|
|
int offset = kHeaderSize + index * kPointerSize;
|
|
|
|
WRITE_FIELD(array, offset, value);
|
|
|
|
Heap* heap = array->GetHeap();
|
|
|
|
if (heap->InNewSpace(value)) {
|
|
|
|
heap->RecordWrite(array->address(), offset);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-10-21 10:32:38 +00:00
|
|
|
void FixedArray::NoWriteBarrierSet(FixedArray* array,
|
|
|
|
int index,
|
|
|
|
Object* value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < array->length());
|
|
|
|
DCHECK(!array->GetHeap()->InNewSpace(value));
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void FixedArray::set_undefined(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < this->length());
|
|
|
|
DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
|
2013-08-28 14:26:22 +00:00
|
|
|
WRITE_FIELD(this,
|
|
|
|
kHeaderSize + index * kPointerSize,
|
|
|
|
GetHeap()->undefined_value());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-09-18 11:18:27 +00:00
|
|
|
void FixedArray::set_null(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
|
|
|
DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
|
2013-08-28 14:26:22 +00:00
|
|
|
WRITE_FIELD(this,
|
|
|
|
kHeaderSize + index * kPointerSize,
|
|
|
|
GetHeap()->null_value());
|
2008-09-18 11:18:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void FixedArray::set_the_hole(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(map() != GetHeap()->fixed_cow_array_map());
|
|
|
|
DCHECK(index >= 0 && index < this->length());
|
|
|
|
DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
|
2011-03-18 20:35:07 +00:00
|
|
|
WRITE_FIELD(this,
|
|
|
|
kHeaderSize + index * kPointerSize,
|
|
|
|
GetHeap()->the_hole_value());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-28 13:05:23 +00:00
|
|
|
void FixedArray::FillWithHoles(int from, int to) {
|
|
|
|
for (int i = from; i < to; i++) {
|
|
|
|
set_the_hole(i);
|
|
|
|
}
|
2012-11-15 12:19:14 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-03-01 15:33:30 +00:00
|
|
|
Object** FixedArray::data_start() {
|
|
|
|
return HeapObject::RawField(this, kHeaderSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-08-27 10:11:39 +00:00
|
|
|
bool DescriptorArray::IsEmpty() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(length() >= kFirstIndex ||
|
2013-09-11 07:14:41 +00:00
|
|
|
this == GetHeap()->empty_descriptor_array());
|
2012-06-18 12:42:46 +00:00
|
|
|
return length() < kFirstIndex;
|
2012-06-11 06:59:56 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-09-18 13:25:12 +00:00
|
|
|
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
|
|
|
|
WRITE_FIELD(
|
|
|
|
this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-05 13:54:20 +00:00
|
|
|
// Perform a binary search in a fixed array. Low and high are entry indices. If
|
|
|
|
// there are three entries in this array it should be called with low=0 and
|
|
|
|
// high=2.
|
2014-11-24 14:31:31 +00:00
|
|
|
template <SearchMode search_mode, typename T>
|
|
|
|
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
|
|
|
|
int* out_insertion_index) {
|
|
|
|
DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
|
2012-07-05 13:54:20 +00:00
|
|
|
uint32_t hash = name->Hash();
|
|
|
|
int limit = high;
|
|
|
|
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(low <= high);
|
2012-07-05 13:54:20 +00:00
|
|
|
|
|
|
|
while (low != high) {
|
|
|
|
int mid = (low + high) / 2;
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* mid_name = array->GetSortedKey(mid);
|
2012-07-05 13:54:20 +00:00
|
|
|
uint32_t mid_hash = mid_name->Hash();
|
|
|
|
|
|
|
|
if (mid_hash >= hash) {
|
|
|
|
high = mid;
|
|
|
|
} else {
|
|
|
|
low = mid + 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-08-27 13:47:34 +00:00
|
|
|
for (; low <= limit; ++low) {
|
|
|
|
int sort_index = array->GetSortedKeyIndex(low);
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* entry = array->GetKey(sort_index);
|
2014-11-24 14:31:31 +00:00
|
|
|
uint32_t current_hash = entry->Hash();
|
|
|
|
if (current_hash != hash) {
|
|
|
|
if (out_insertion_index != NULL) {
|
|
|
|
*out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
|
|
|
|
}
|
|
|
|
return T::kNotFound;
|
|
|
|
}
|
2012-10-10 14:48:07 +00:00
|
|
|
if (entry->Equals(name)) {
|
|
|
|
if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
|
|
|
|
return sort_index;
|
|
|
|
}
|
|
|
|
return T::kNotFound;
|
|
|
|
}
|
2012-07-05 13:54:20 +00:00
|
|
|
}
|
|
|
|
|
2014-11-24 14:31:31 +00:00
|
|
|
if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
|
2012-07-05 13:54:20 +00:00
|
|
|
return T::kNotFound;
|
|
|
|
}
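// --- Illustrative sketch (not part of V8; all names invented) ------------
// Standalone version of the search contract above: keys are sorted by
// hash, low and high are inclusive entry indices, and the loop converges
// on the first entry whose hash is >= the needle's hash.
#include <cstdint>
#include <vector>

inline int BinarySearchByHash(const std::vector<uint32_t>& hashes,
                              uint32_t hash) {
  if (hashes.empty()) return -1;  // -1 plays the role of T::kNotFound
  int low = 0;
  int high = static_cast<int>(hashes.size()) - 1;
  while (low != high) {
    int mid = (low + high) / 2;
    if (hashes[mid] >= hash) {
      high = mid;  // first possible match is at mid or to its left
    } else {
      low = mid + 1;
    }
  }
  return hashes[low] == hash ? low : -1;
}
// --------------------------------------------------------------------------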
|
|
|
|
|
2012-10-10 14:48:07 +00:00
|
|
|
|
2012-07-05 13:54:20 +00:00
|
|
|
// Perform a linear search in this fixed array. len is the number of entry
|
|
|
|
// indices that are valid.
|
2014-11-24 14:31:31 +00:00
|
|
|
template <SearchMode search_mode, typename T>
|
|
|
|
int LinearSearch(T* array, Name* name, int len, int valid_entries,
|
|
|
|
int* out_insertion_index) {
|
2012-07-05 13:54:20 +00:00
|
|
|
uint32_t hash = name->Hash();
|
Sharing of descriptor arrays.
This CL adds multiple things:
Transition arrays do not directly point at their descriptor array anymore, but rather do so via an indirect pointer (a JSGlobalPropertyCell).
An ownership bit is added to maps, indicating whether a map owns its own descriptor array or not.
Maps owning a descriptor array can pass on ownership when a transition from that map is generated, but only if the descriptor array stays exactly the same or a descriptor is added.
Maps that don't have ownership get ownership back if the direct child to which ownership was passed is cleared in ClearNonLiveTransitions.
To detect which descriptors in an array are valid, each map knows its own NumberOfOwnDescriptors. Since the descriptors are sorted in order of addition, if we search and find a descriptor with an index bigger than this number, it is not valid for the given map.
We currently still build up an enumeration cache (although this may disappear). The enumeration cache is always built for the entire descriptor array, even if not all descriptors are owned by the map. Once a descriptor array has an enumeration cache for a given map, this invariant will always hold, even if the descriptor array is extended. The extended array will inherit the enumeration cache from the smaller descriptor array. If a map with more descriptors needs an enumeration cache, its EnumLength will still be set to invalid, so it will have to recompute the enumeration cache. This new cache will also be valid for smaller maps, since they have their own EnumLength and use it to loop over the cache. If the EnumLength is still invalid but there is already a cache present that is big enough, we just initialize the EnumLength field for the map.
When we apply ClearNonLiveTransitions and descriptor ownership is passed back to a parent map, the descriptor array is trimmed in-place and resorted. At the same time, the enumeration cache is trimmed in-place.
Only transition arrays contain descriptor arrays. If we transition to a map and pass ownership of the descriptor array along, the child map will not store the descriptor array it owns. Rather, its parent will keep the pointer. So for every leaf map, we find the descriptor array by following the back pointer, reading out the transition array, and fetching the descriptor array from the JSGlobalPropertyCell. If a map has a transition array, we fetch it from there. If a map has undefined as its back pointer and has no transition array, it is considered to have an empty descriptor array.
When we modify properties, we cannot share the descriptor array. To accommodate this, the child map will get its own transition array, even if there are not necessarily any transitions leaving from the child map. This is necessary since it is the only way to store its own descriptor array. (A standalone sketch of this ownership scheme follows the LinearSearch helper below.)
Review URL: https://chromiumcodereview.appspot.com/10909007
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12492 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2012-09-12 16:43:57 +00:00
|
|
|
if (search_mode == ALL_ENTRIES) {
|
|
|
|
for (int number = 0; number < len; number++) {
|
|
|
|
int sorted_index = array->GetSortedKeyIndex(number);
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* entry = array->GetKey(sorted_index);
|
2012-09-12 16:43:57 +00:00
|
|
|
uint32_t current_hash = entry->Hash();
|
2014-11-24 14:31:31 +00:00
|
|
|
if (current_hash > hash) {
|
|
|
|
if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
|
|
|
|
return T::kNotFound;
|
|
|
|
}
|
2012-09-12 16:43:57 +00:00
|
|
|
if (current_hash == hash && entry->Equals(name)) return sorted_index;
|
|
|
|
}
|
2014-11-24 14:31:31 +00:00
|
|
|
if (out_insertion_index != NULL) *out_insertion_index = len;
|
|
|
|
return T::kNotFound;
|
2012-09-12 16:43:57 +00:00
|
|
|
} else {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(len >= valid_entries);
|
2015-01-30 09:29:25 +00:00
|
|
|
DCHECK_NULL(out_insertion_index); // Not supported here.
|
2012-09-12 16:43:57 +00:00
|
|
|
for (int number = 0; number < valid_entries; number++) {
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* entry = array->GetKey(number);
|
2012-09-12 16:43:57 +00:00
|
|
|
uint32_t current_hash = entry->Hash();
|
|
|
|
if (current_hash == hash && entry->Equals(name)) return number;
|
|
|
|
}
|
2014-11-24 14:31:31 +00:00
|
|
|
return T::kNotFound;
|
2012-07-05 13:54:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-11-24 14:31:31 +00:00
|
|
|
template <SearchMode search_mode, typename T>
|
|
|
|
int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
|
2012-09-12 16:43:57 +00:00
|
|
|
if (search_mode == VALID_ENTRIES) {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
|
2012-09-12 16:43:57 +00:00
|
|
|
} else {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(array->IsSortedNoDuplicates());
|
2012-09-12 16:43:57 +00:00
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2012-07-05 13:54:20 +00:00
|
|
|
int nof = array->number_of_entries();
|
2014-11-24 14:31:31 +00:00
|
|
|
if (nof == 0) {
|
|
|
|
if (out_insertion_index != NULL) *out_insertion_index = 0;
|
|
|
|
return T::kNotFound;
|
|
|
|
}
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
// Fast case: do linear search for small arrays.
|
|
|
|
const int kMaxElementsForLinearSearch = 8;
|
2012-10-10 14:48:07 +00:00
|
|
|
if ((search_mode == ALL_ENTRIES &&
|
|
|
|
nof <= kMaxElementsForLinearSearch) ||
|
|
|
|
(search_mode == VALID_ENTRIES &&
|
|
|
|
valid_entries <= (kMaxElementsForLinearSearch * 3))) {
|
2014-11-24 14:31:31 +00:00
|
|
|
return LinearSearch<search_mode>(array, name, nof, valid_entries,
|
|
|
|
out_insertion_index);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Slow case: perform binary search.
|
2014-11-24 14:31:31 +00:00
|
|
|
return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
|
|
|
|
out_insertion_index);
|
2012-07-05 13:54:20 +00:00
|
|
|
}
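// Illustrative sketch, not part of the original source: the dispatch above
// prefers a linear scan while the array is small, since binary search only
// pays off once the element count outweighs its per-probe overhead. The
// VALID_ENTRIES threshold is three times higher, presumably because scanning
// only the valid prefix is cheaper per element. Reduced to a predicate over
// the same constants (the helper and namespace names are ours):
namespace search_sketch {
inline bool UseLinearSearch(SearchMode search_mode, int nof,
                            int valid_entries) {
  const int kMaxElementsForLinearSearch = 8;
  return (search_mode == ALL_ENTRIES && nof <= kMaxElementsForLinearSearch) ||
         (search_mode == VALID_ENTRIES &&
          valid_entries <= (kMaxElementsForLinearSearch * 3));
}
}  // namespace search_sketch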
|
|
|
|
|
|
|
|
|
2013-03-04 15:00:57 +00:00
|
|
|
int DescriptorArray::Search(Name* name, int valid_descriptors) {
|
2014-11-24 14:31:31 +00:00
|
|
|
return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-04 15:00:57 +00:00
|
|
|
int DescriptorArray::SearchWithCache(Name* name, Map* map) {
|
2012-09-12 16:43:57 +00:00
|
|
|
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
|
|
|
|
if (number_of_own_descriptors == 0) return kNotFound;
|
2012-08-27 13:47:34 +00:00
|
|
|
|
2012-07-05 13:54:20 +00:00
|
|
|
DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
|
2012-09-12 16:43:57 +00:00
|
|
|
int number = cache->Lookup(map, name);
|
2012-08-27 13:47:34 +00:00
|
|
|
|
2010-08-12 14:51:59 +00:00
|
|
|
if (number == DescriptorLookupCache::kAbsent) {
|
2012-09-12 16:43:57 +00:00
|
|
|
number = Search(name, number_of_own_descriptors);
|
|
|
|
cache->Update(map, name, number);
|
2010-08-12 14:51:59 +00:00
|
|
|
}
|
2012-08-27 13:47:34 +00:00
|
|
|
|
2010-08-12 14:51:59 +00:00
|
|
|
return number;
|
|
|
|
}
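// Illustrative sketch, not part of the original source: SearchWithCache is a
// cache-aside lookup, and note that it memoizes misses as well -- a kNotFound
// answer is stored in the DescriptorLookupCache just like a hit, so repeated
// probes for the same (map, name) pair never rescan the array. The general
// shape, with all names ours:
namespace lookup_cache_sketch {
template <typename Cache, typename Key, typename SlowLookup>
int CacheAside(Cache* cache, Key key, SlowLookup slow_lookup) {
  int result = cache->Lookup(key);
  if (result == Cache::kAbsent) {
    result = slow_lookup(key);   // authoritative computation
    cache->Update(key, result);  // memoize hits and misses alike
  }
  return result;
}
}  // namespace lookup_cache_sketch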
|
|
|
|
|
|
|
|
|
2014-02-18 12:19:32 +00:00
|
|
|
PropertyDetails Map::GetLastDescriptorDetails() {
|
|
|
|
return instance_descriptors()->GetDetails(LastAdded());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-08-27 13:47:34 +00:00
|
|
|
void Map::LookupDescriptor(JSObject* holder,
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* name,
|
2012-08-27 13:47:34 +00:00
|
|
|
LookupResult* result) {
|
|
|
|
DescriptorArray* descriptors = this->instance_descriptors();
|
2012-09-12 16:43:57 +00:00
|
|
|
int number = descriptors->SearchWithCache(name, this);
|
2012-08-27 13:47:34 +00:00
|
|
|
if (number == DescriptorArray::kNotFound) return result->NotFound();
|
|
|
|
result->DescriptorResult(holder, descriptors->GetDetails(number), number);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-12-10 15:18:44 +00:00
|
|
|
void Map::LookupTransition(JSObject* holder, Name* name,
|
|
|
|
PropertyAttributes attributes,
|
|
|
|
LookupResult* result) {
|
2015-01-19 17:49:13 +00:00
|
|
|
int transition_index = this->SearchTransition(kData, name, attributes);
|
2014-04-04 04:49:07 +00:00
|
|
|
if (transition_index == TransitionArray::kNotFound) return result->NotFound();
|
|
|
|
result->TransitionResult(holder, this->GetTransition(transition_index));
|
2012-08-27 13:47:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-09 14:26:32 +00:00
|
|
|
FixedArrayBase* Map::GetInitialElements() {
|
|
|
|
if (has_fast_smi_or_object_elements() ||
|
|
|
|
has_fast_double_elements()) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
|
2014-04-09 14:26:32 +00:00
|
|
|
return GetHeap()->empty_fixed_array();
|
|
|
|
} else if (has_external_array_elements()) {
|
|
|
|
ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!GetHeap()->InNewSpace(empty_array));
|
2014-04-09 14:26:32 +00:00
|
|
|
return empty_array;
|
|
|
|
} else if (has_fixed_typed_array_elements()) {
|
|
|
|
FixedTypedArrayBase* empty_array =
|
|
|
|
GetHeap()->EmptyFixedTypedArrayForMap(this);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!GetHeap()->InNewSpace(empty_array));
|
2014-04-09 14:26:32 +00:00
|
|
|
return empty_array;
|
|
|
|
} else {
|
|
|
|
UNREACHABLE();
|
|
|
|
}
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-06-04 07:20:11 +00:00
|
|
|
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2013-12-23 14:42:42 +00:00
|
|
|
return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
|
2012-06-04 07:20:11 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-01-31 10:50:42 +00:00
|
|
|
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
|
|
|
|
return GetKeySlot(descriptor_number);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
|
|
|
|
return GetValueSlot(descriptor_number - 1) + 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* DescriptorArray::GetKey(int descriptor_number) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2013-03-04 15:00:57 +00:00
|
|
|
return Name::cast(get(ToKeyIndex(descriptor_number)));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-08-27 13:47:34 +00:00
|
|
|
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
|
|
|
|
return GetDetails(descriptor_number).pointer();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
|
2012-08-27 13:47:34 +00:00
|
|
|
return GetKey(GetSortedKeyIndex(descriptor_number));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-09-12 16:43:57 +00:00
|
|
|
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
|
|
|
|
PropertyDetails details = GetDetails(descriptor_index);
|
|
|
|
set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
|
2012-08-27 13:47:34 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-26 15:30:41 +00:00
|
|
|
void DescriptorArray::SetRepresentation(int descriptor_index,
|
|
|
|
Representation representation) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!representation.IsNone());
|
2013-04-26 15:30:41 +00:00
|
|
|
PropertyDetails details = GetDetails(descriptor_index);
|
|
|
|
set(ToDetailsIndex(descriptor_index),
|
|
|
|
details.CopyWithRepresentation(representation).AsSmi());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-05-30 15:15:17 +00:00
|
|
|
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2013-12-23 14:42:42 +00:00
|
|
|
return RawFieldOfElementAt(ToValueIndex(descriptor_number));
|
2012-05-30 15:15:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-08-06 08:02:21 +00:00
|
|
|
int DescriptorArray::GetValueOffset(int descriptor_number) {
|
|
|
|
return OffsetOfElementAt(ToValueIndex(descriptor_number));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
Object* DescriptorArray::GetValue(int descriptor_number) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2012-06-04 07:20:11 +00:00
|
|
|
return get(ToValueIndex(descriptor_number));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-15 07:36:47 +00:00
|
|
|
void DescriptorArray::SetValue(int descriptor_index, Object* value) {
|
|
|
|
set(ToValueIndex(descriptor_index), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-04-17 07:16:19 +00:00
|
|
|
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2012-06-04 07:20:11 +00:00
|
|
|
Object* details = get(ToDetailsIndex(descriptor_number));
|
2012-04-17 07:16:19 +00:00
|
|
|
return PropertyDetails(Smi::cast(details));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-07-10 19:25:18 +00:00
|
|
|
PropertyType DescriptorArray::GetType(int descriptor_number) {
|
2012-04-17 07:16:19 +00:00
|
|
|
return GetDetails(descriptor_number).type();
|
2009-07-10 19:25:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int DescriptorArray::GetFieldIndex(int descriptor_number) {
|
2015-01-19 17:49:13 +00:00
|
|
|
DCHECK(GetDetails(descriptor_number).type() == DATA);
|
2013-05-31 19:11:09 +00:00
|
|
|
return GetDetails(descriptor_number).field_index();
|
2009-07-10 19:25:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-15 07:36:47 +00:00
|
|
|
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
|
2015-01-19 17:49:13 +00:00
|
|
|
DCHECK(GetDetails(descriptor_number).type() == DATA);
|
2014-04-15 07:36:47 +00:00
|
|
|
return HeapType::cast(GetValue(descriptor_number));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-07-24 12:34:50 +00:00
|
|
|
Object* DescriptorArray::GetConstant(int descriptor_number) {
|
|
|
|
return GetValue(descriptor_number);
|
2009-07-10 19:25:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
|
2015-01-19 17:49:13 +00:00
|
|
|
DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
|
2009-07-10 19:25:18 +00:00
|
|
|
return GetValue(descriptor_number);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
|
2015-01-19 17:49:13 +00:00
|
|
|
DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
|
2011-05-19 11:47:34 +00:00
|
|
|
Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
|
2011-10-28 12:37:29 +00:00
|
|
|
return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
|
2009-07-10 19:25:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
|
2014-04-09 14:26:32 +00:00
|
|
|
desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
|
|
|
|
handle(GetValue(descriptor_number), GetIsolate()),
|
2012-04-17 07:16:19 +00:00
|
|
|
GetDetails(descriptor_number));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-10-21 10:32:38 +00:00
|
|
|
void DescriptorArray::Set(int descriptor_number,
|
|
|
|
Descriptor* desc,
|
|
|
|
const WhitenessWitness&) {
|
2008-07-03 15:10:15 +00:00
|
|
|
// Range check.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2011-12-06 12:11:08 +00:00
|
|
|
NoIncrementalWriteBarrierSet(this,
|
|
|
|
ToKeyIndex(descriptor_number),
|
2014-04-09 14:26:32 +00:00
|
|
|
*desc->GetKey());
|
2012-06-04 07:20:11 +00:00
|
|
|
NoIncrementalWriteBarrierSet(this,
|
2011-12-06 12:11:08 +00:00
|
|
|
ToValueIndex(descriptor_number),
|
2014-04-09 14:26:32 +00:00
|
|
|
*desc->GetValue());
|
2014-11-11 10:24:52 +00:00
|
|
|
NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
|
2011-12-06 12:11:08 +00:00
|
|
|
desc->GetDetails().AsSmi());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-09-18 13:25:12 +00:00
|
|
|
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
|
|
|
|
// Range check.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(descriptor_number < number_of_descriptors());
|
2012-09-18 13:25:12 +00:00
|
|
|
|
2014-04-09 14:26:32 +00:00
|
|
|
set(ToKeyIndex(descriptor_number), *desc->GetKey());
|
|
|
|
set(ToValueIndex(descriptor_number), *desc->GetValue());
|
2012-09-18 13:25:12 +00:00
|
|
|
set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void DescriptorArray::Append(Descriptor* desc) {
|
2014-04-09 14:26:32 +00:00
|
|
|
DisallowHeapAllocation no_gc;
|
2012-09-18 13:25:12 +00:00
|
|
|
int descriptor_number = number_of_descriptors();
|
|
|
|
SetNumberOfDescriptors(descriptor_number + 1);
|
|
|
|
Set(descriptor_number, desc);
|
|
|
|
|
|
|
|
uint32_t hash = desc->GetKey()->Hash();
|
|
|
|
|
|
|
|
int insertion;
|
|
|
|
|
|
|
|
for (insertion = descriptor_number; insertion > 0; --insertion) {
|
2013-03-04 15:00:57 +00:00
|
|
|
Name* key = GetSortedKey(insertion - 1);
|
2012-09-18 13:25:12 +00:00
|
|
|
if (key->Hash() <= hash) break;
|
|
|
|
SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
|
|
|
|
}
|
|
|
|
|
|
|
|
SetSortedKey(insertion, descriptor_number);
|
|
|
|
}
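// Illustrative sketch, not part of the original source: Append leaves the
// descriptors themselves in insertion order and repairs the separate
// hash-sorted permutation with a single insertion-sort pass. On a plain
// array of hashes the same step looks like this (names are ours):
namespace append_sketch {
inline void InsertSortedHash(uint32_t* hashes, int count, uint32_t hash) {
  int insertion = count;
  for (; insertion > 0; --insertion) {
    if (hashes[insertion - 1] <= hash) break;  // <= keeps equal hashes stable
    hashes[insertion] = hashes[insertion - 1];  // shift larger hashes right
  }
  hashes[insertion] = hash;
}
}  // namespace append_sketch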
|
|
|
|
|
|
|
|
|
2012-08-27 13:47:34 +00:00
|
|
|
void DescriptorArray::SwapSortedKeys(int first, int second) {
|
|
|
|
int first_key = GetSortedKeyIndex(first);
|
|
|
|
SetSortedKey(first, GetSortedKeyIndex(second));
|
|
|
|
SetSortedKey(second, first_key);
|
2011-10-21 10:32:38 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 12:13:53 +00:00
|
|
|
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
|
2011-10-21 10:32:38 +00:00
|
|
|
: marking_(array->GetHeap()->incremental_marking()) {
|
|
|
|
marking_->EnterNoMarkingScope();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!marking_->IsMarking() ||
|
2014-03-19 10:48:54 +00:00
|
|
|
Marking::Color(array) == Marking::WHITE_OBJECT);
|
2011-10-21 10:32:38 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-09-14 15:10:31 +00:00
|
|
|
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
|
2011-10-21 10:32:38 +00:00
|
|
|
marking_->LeaveNoMarkingScope();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-14 15:56:57 +00:00
|
|
|
template<typename Derived, typename Shape, typename Key>
|
|
|
|
int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
|
2011-07-26 13:56:21 +00:00
|
|
|
const int kMinCapacity = 32;
|
2014-09-02 13:36:35 +00:00
|
|
|
int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
|
2011-07-26 13:56:21 +00:00
|
|
|
if (capacity < kMinCapacity) {
|
|
|
|
capacity = kMinCapacity; // Guarantee min capacity.
|
|
|
|
}
|
|
|
|
return capacity;
|
|
|
|
}
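// Worked example (ours): doubling the request keeps the table at most half
// full, rounding up to a power of two lets the probe arithmetic mask instead
// of taking a modulo, and the clamp avoids tiny tables that would grow
// immediately. For instance:
//   at_least_space_for = 20  ->  RoundUpToPowerOfTwo32(40) = 64
//   at_least_space_for = 4   ->  RoundUpToPowerOfTwo32(8) = 8, raised to 32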
|
|
|
|
|
|
|
|
|
2014-04-14 15:56:57 +00:00
|
|
|
template<typename Derived, typename Shape, typename Key>
|
|
|
|
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
|
2011-03-18 20:35:07 +00:00
|
|
|
return FindEntry(GetIsolate(), key);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Find entry for key; otherwise return kNotFound.
|
2014-04-14 15:56:57 +00:00
|
|
|
template<typename Derived, typename Shape, typename Key>
|
|
|
|
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
|
2011-03-18 20:35:07 +00:00
|
|
|
uint32_t capacity = Capacity();
|
2014-04-14 15:56:57 +00:00
|
|
|
uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
|
2011-03-18 20:35:07 +00:00
|
|
|
uint32_t count = 1;
|
|
|
|
// EnsureCapacity will guarantee the hash table is never full.
|
|
|
|
while (true) {
|
|
|
|
Object* element = KeyAt(entry);
|
2012-10-25 13:54:10 +00:00
|
|
|
// Empty entry. Uses raw unchecked accessors because it is called by the
|
2013-02-28 17:03:34 +00:00
|
|
|
// string table during bootstrapping.
|
2012-10-25 13:54:10 +00:00
|
|
|
if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
|
|
|
|
if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
|
2011-03-18 20:35:07 +00:00
|
|
|
Shape::IsMatch(key, element)) return entry;
|
|
|
|
entry = NextProbe(entry, count++, capacity);
|
|
|
|
}
|
|
|
|
return kNotFound;
|
|
|
|
}
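// Note (ours) on the probe loop above: the table distinguishes two vacant
// states. An undefined slot has never been used, so no probe chain can
// continue past it and the loop stops. A the-hole slot marks a deleted
// entry; a matching key may still live further along the chain, so probing
// continues. EnsureCapacity keeps the table from ever filling up, which is
// what guarantees that the while (true) loop reaches an undefined slot.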
|
|
|
|
|
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
bool SeededNumberDictionary::requires_slow_elements() {
|
2008-10-22 09:46:09 +00:00
|
|
|
Object* max_index_object = get(kMaxNumberKeyIndex);
|
2008-07-03 15:10:15 +00:00
|
|
|
if (!max_index_object->IsSmi()) return false;
|
|
|
|
return 0 !=
|
|
|
|
(Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
|
|
|
|
}
|
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
uint32_t SeededNumberDictionary::max_number_key() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!requires_slow_elements());
|
2008-10-22 09:46:09 +00:00
|
|
|
Object* max_index_object = get(kMaxNumberKeyIndex);
|
2008-07-03 15:10:15 +00:00
|
|
|
if (!max_index_object->IsSmi()) return 0;
|
|
|
|
uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
|
|
|
|
return value >> kRequiresSlowElementsTagSize;
|
|
|
|
}
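// Worked example (ours): the Smi at kMaxNumberKeyIndex packs two values --
// the low kRequiresSlowElementsTagSize bit(s) hold the requires-slow flag and
// the bits above them hold the largest number key seen so far. Assuming a tag
// size of 1, a stored value of 2 * k means "max key k, fast elements", while
// set_requires_slow_elements() below overwrites the whole field with just the
// flag, after which max_number_key() may no longer be consulted (hence its
// DCHECK(!requires_slow_elements())).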
|
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
void SeededNumberDictionary::set_requires_slow_elements() {
|
2010-01-29 11:46:55 +00:00
|
|
|
set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
|
2009-03-13 11:40:09 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
// ------------------------------------
|
|
|
|
// Cast operations
|
|
|
|
|
|
|
|
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(AccessorInfo)
|
|
|
|
CAST_ACCESSOR(ByteArray)
|
|
|
|
CAST_ACCESSOR(Cell)
|
|
|
|
CAST_ACCESSOR(Code)
|
|
|
|
CAST_ACCESSOR(CodeCacheHashTable)
|
|
|
|
CAST_ACCESSOR(CompilationCacheTable)
|
|
|
|
CAST_ACCESSOR(ConsString)
|
2013-10-14 13:35:06 +00:00
|
|
|
CAST_ACCESSOR(ConstantPoolArray)
|
2010-12-07 11:31:57 +00:00
|
|
|
CAST_ACCESSOR(DeoptimizationInputData)
|
|
|
|
CAST_ACCESSOR(DeoptimizationOutputData)
|
2013-02-20 11:49:54 +00:00
|
|
|
CAST_ACCESSOR(DependentCode)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(DescriptorArray)
|
|
|
|
CAST_ACCESSOR(ExternalArray)
|
2014-09-10 12:38:12 +00:00
|
|
|
CAST_ACCESSOR(ExternalOneByteString)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(ExternalFloat32Array)
|
|
|
|
CAST_ACCESSOR(ExternalFloat64Array)
|
|
|
|
CAST_ACCESSOR(ExternalInt16Array)
|
|
|
|
CAST_ACCESSOR(ExternalInt32Array)
|
|
|
|
CAST_ACCESSOR(ExternalInt8Array)
|
|
|
|
CAST_ACCESSOR(ExternalString)
|
2008-07-03 15:10:15 +00:00
|
|
|
CAST_ACCESSOR(ExternalTwoByteString)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(ExternalUint16Array)
|
|
|
|
CAST_ACCESSOR(ExternalUint32Array)
|
|
|
|
CAST_ACCESSOR(ExternalUint8Array)
|
|
|
|
CAST_ACCESSOR(ExternalUint8ClampedArray)
|
|
|
|
CAST_ACCESSOR(FixedArray)
|
|
|
|
CAST_ACCESSOR(FixedArrayBase)
|
|
|
|
CAST_ACCESSOR(FixedDoubleArray)
|
|
|
|
CAST_ACCESSOR(FixedTypedArrayBase)
|
|
|
|
CAST_ACCESSOR(Foreign)
|
2008-10-23 08:42:22 +00:00
|
|
|
CAST_ACCESSOR(GlobalObject)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(HeapObject)
|
2008-07-03 15:10:15 +00:00
|
|
|
CAST_ACCESSOR(JSArray)
|
2013-03-28 12:50:18 +00:00
|
|
|
CAST_ACCESSOR(JSArrayBuffer)
|
2013-06-21 13:02:38 +00:00
|
|
|
CAST_ACCESSOR(JSArrayBufferView)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(JSBuiltinsObject)
|
2013-06-21 13:02:38 +00:00
|
|
|
CAST_ACCESSOR(JSDataView)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(JSDate)
|
|
|
|
CAST_ACCESSOR(JSFunction)
|
Implement set trap for proxies, and revamp class hierarchy in preparation:
- Introduce a class JSReceiver, that is a common superclass of JSObject and
JSProxy. Use JSReceiver where appropriate (probably lots of places that we
still have to migrate, but we will find those later with proxy test suite).
- Move appropriate methods to JSReceiver class (SetProperty,
GetPropertyAttribute, Get/SetPrototype, Lookup, and so on).
- Introduce new JSFunctionProxy subclass of JSProxy. Currently only a stub.
- Overhaul enum InstanceType:
* Introduce FIRST/LAST_SPEC_OBJECT_TYPE that ranges over all types that
represent JS objects, and use that consistently to check language types.
* Rename FIRST/LAST_JS_OBJECT_TYPE and FIRST/LAST_FUNCTION_CLASS_TYPE
to FIRST/LAST_[NON]CALLABLE_SPEC_OBJECT_TYPE for clarity.
* Eliminate the overlap over JS_REGEXP_TYPE.
* Also replace FIRST_JS_OBJECT with FIRST_JS_RECEIVER, but only use it where
we exclusively talk about the internal representation type.
* Insert JS_PROXY and JS_FUNCTION_PROXY in the appropriate places.
- Fix all checks concerning classification, especially for functions, to
use the CALLABLE_SPEC_OBJECT range (that includes function proxies).
- Handle proxies in SetProperty (that was the easiest part :) ).
- A few simple test cases.
R=kmillikin@chromium.org
Review URL: http://codereview.chromium.org/6992072
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8126 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2011-05-31 16:38:40 +00:00
|
|
|
CAST_ACCESSOR(JSFunctionProxy)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(JSFunctionResultCache)
|
|
|
|
CAST_ACCESSOR(JSGeneratorObject)
|
|
|
|
CAST_ACCESSOR(JSGlobalObject)
|
|
|
|
CAST_ACCESSOR(JSGlobalProxy)
|
2011-10-25 14:14:56 +00:00
|
|
|
CAST_ACCESSOR(JSMap)
|
2014-04-17 17:45:32 +00:00
|
|
|
CAST_ACCESSOR(JSMapIterator)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(JSMessageObject)
|
|
|
|
CAST_ACCESSOR(JSModule)
|
|
|
|
CAST_ACCESSOR(JSObject)
|
|
|
|
CAST_ACCESSOR(JSProxy)
|
|
|
|
CAST_ACCESSOR(JSReceiver)
|
|
|
|
CAST_ACCESSOR(JSRegExp)
|
|
|
|
CAST_ACCESSOR(JSSet)
|
|
|
|
CAST_ACCESSOR(JSSetIterator)
|
|
|
|
CAST_ACCESSOR(JSTypedArray)
|
|
|
|
CAST_ACCESSOR(JSValue)
|
2011-08-03 11:55:13 +00:00
|
|
|
CAST_ACCESSOR(JSWeakMap)
|
2013-07-22 08:32:24 +00:00
|
|
|
CAST_ACCESSOR(JSWeakSet)
|
2014-11-11 10:24:52 +00:00
|
|
|
CAST_ACCESSOR(LayoutDescriptor)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(Map)
|
|
|
|
CAST_ACCESSOR(Name)
|
|
|
|
CAST_ACCESSOR(NameDictionary)
|
|
|
|
CAST_ACCESSOR(NormalizedMapCache)
|
|
|
|
CAST_ACCESSOR(Object)
|
|
|
|
CAST_ACCESSOR(ObjectHashTable)
|
|
|
|
CAST_ACCESSOR(Oddball)
|
|
|
|
CAST_ACCESSOR(OrderedHashMap)
|
|
|
|
CAST_ACCESSOR(OrderedHashSet)
|
|
|
|
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
|
|
|
|
CAST_ACCESSOR(PropertyCell)
|
|
|
|
CAST_ACCESSOR(ScopeInfo)
|
|
|
|
CAST_ACCESSOR(SeededNumberDictionary)
|
|
|
|
CAST_ACCESSOR(SeqOneByteString)
|
|
|
|
CAST_ACCESSOR(SeqString)
|
|
|
|
CAST_ACCESSOR(SeqTwoByteString)
|
|
|
|
CAST_ACCESSOR(SharedFunctionInfo)
|
|
|
|
CAST_ACCESSOR(SlicedString)
|
|
|
|
CAST_ACCESSOR(Smi)
|
|
|
|
CAST_ACCESSOR(String)
|
|
|
|
CAST_ACCESSOR(StringTable)
|
2008-07-03 15:10:15 +00:00
|
|
|
CAST_ACCESSOR(Struct)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(Symbol)
|
|
|
|
CAST_ACCESSOR(UnseededNumberDictionary)
|
2014-10-14 14:43:45 +00:00
|
|
|
CAST_ACCESSOR(WeakCell)
|
2014-12-15 19:57:37 +00:00
|
|
|
CAST_ACCESSOR(WeakFixedArray)
|
2014-06-23 08:51:13 +00:00
|
|
|
CAST_ACCESSOR(WeakHashTable)
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2015-01-30 09:29:25 +00:00
|
|
|
// static
|
|
|
|
template <class Traits>
|
|
|
|
STATIC_CONST_MEMBER_DEFINITION const InstanceType
|
|
|
|
FixedTypedArray<Traits>::kInstanceType;
|
|
|
|
|
|
|
|
|
2014-01-16 17:08:45 +00:00
|
|
|
template <class Traits>
|
|
|
|
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(object->IsHeapObject() &&
|
2014-06-23 08:51:13 +00:00
|
|
|
HeapObject::cast(object)->map()->instance_type() ==
|
|
|
|
Traits::kInstanceType);
|
|
|
|
return reinterpret_cast<FixedTypedArray<Traits>*>(object);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template <class Traits>
|
|
|
|
const FixedTypedArray<Traits>*
|
|
|
|
FixedTypedArray<Traits>::cast(const Object* object) {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(object->IsHeapObject() &&
|
2014-06-23 08:51:13 +00:00
|
|
|
HeapObject::cast(object)->map()->instance_type() ==
|
|
|
|
Traits::kInstanceType);
|
2014-01-16 17:08:45 +00:00
|
|
|
return reinterpret_cast<FixedTypedArray<Traits>*>(object);
|
|
|
|
}
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
|
|
|
|
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
|
|
|
|
STRUCT_LIST(MAKE_STRUCT_CAST)
|
|
|
|
#undef MAKE_STRUCT_CAST
|
|
|
|
|
2009-07-02 06:50:43 +00:00
|
|
|
|
2014-04-14 15:56:57 +00:00
|
|
|
template <typename Derived, typename Shape, typename Key>
|
|
|
|
HashTable<Derived, Shape, Key>*
|
|
|
|
HashTable<Derived, Shape, Key>::cast(Object* obj) {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(obj->IsHashTable());
|
2008-07-03 15:10:15 +00:00
|
|
|
return reinterpret_cast<HashTable*>(obj);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-23 08:51:13 +00:00
|
|
|
template <typename Derived, typename Shape, typename Key>
|
|
|
|
const HashTable<Derived, Shape, Key>*
|
|
|
|
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
|
2014-08-04 11:34:54 +00:00
|
|
|
SLOW_DCHECK(obj->IsHashTable());
|
2014-06-23 08:51:13 +00:00
|
|
|
return reinterpret_cast<const HashTable*>(obj);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-06-09 10:03:35 +00:00
|
|
|
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
|
2014-03-31 14:29:01 +00:00
|
|
|
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
|
|
|
|
|
2011-09-19 18:36:47 +00:00
|
|
|
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
|
2014-04-08 16:31:57 +00:00
|
|
|
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2010-05-04 14:49:50 +00:00
|
|
|
SMI_ACCESSORS(String, length, kLengthOffset)
|
2014-03-31 14:29:01 +00:00
|
|
|
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2008-10-27 12:39:34 +00:00
|
|
|
|
2015-01-27 14:56:51 +00:00
|
|
|
FreeSpace* FreeSpace::next() {
|
|
|
|
DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
|
|
|
|
(!GetHeap()->deserialization_complete() && map() == NULL));
|
|
|
|
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
|
|
|
|
return reinterpret_cast<FreeSpace*>(
|
|
|
|
Memory::Address_at(address() + kNextOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
FreeSpace** FreeSpace::next_address() {
|
|
|
|
DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
|
|
|
|
(!GetHeap()->deserialization_complete() && map() == NULL));
|
|
|
|
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
|
|
|
|
return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void FreeSpace::set_next(FreeSpace* next) {
|
|
|
|
DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
|
|
|
|
(!GetHeap()->deserialization_complete() && map() == NULL));
|
|
|
|
DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
|
|
|
|
base::NoBarrier_Store(
|
|
|
|
reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
|
|
|
|
reinterpret_cast<base::AtomicWord>(next));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
FreeSpace* FreeSpace::cast(HeapObject* o) {
|
|
|
|
SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
|
|
|
|
return reinterpret_cast<FreeSpace*>(o);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-01 10:34:31 +00:00
|
|
|
uint32_t Name::hash_field() {
|
2009-11-24 14:10:06 +00:00
|
|
|
return READ_UINT32_FIELD(this, kHashFieldOffset);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-01 10:34:31 +00:00
|
|
|
void Name::set_hash_field(uint32_t value) {
|
2009-11-24 14:10:06 +00:00
|
|
|
WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
|
2010-05-27 12:30:45 +00:00
|
|
|
#if V8_HOST_ARCH_64_BIT
|
2014-10-01 13:14:14 +00:00
|
|
|
#if V8_TARGET_LITTLE_ENDIAN
|
|
|
|
WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
|
|
|
|
#else
|
|
|
|
WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
|
|
|
|
#endif
|
2010-05-27 12:30:45 +00:00
|
|
|
#endif
|
2008-07-03 15:10:15 +00:00
|
|
|
}
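// Note (ours): on 64-bit targets the hash field occupies a pointer-sized
// slot of which only 32 bits carry the hash, so the extra store above zeroes
// the unused half to keep the whole word deterministic. Which half is unused
// depends on byte order, hence the endianness split on the offset.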
|
|
|
|
|
|
|
|
|
2013-03-04 15:00:57 +00:00
|
|
|
bool Name::Equals(Name* other) {
|
|
|
|
if (other == this) return true;
|
2013-04-10 09:53:43 +00:00
|
|
|
if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
|
|
|
|
this->IsSymbol() || other->IsSymbol()) {
|
2013-04-03 17:06:22 +00:00
|
|
|
return false;
|
|
|
|
}
|
2013-03-04 15:00:57 +00:00
|
|
|
return String::cast(this)->SlowEquals(String::cast(other));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 07:27:25 +00:00
|
|
|
bool Name::Equals(Handle<Name> one, Handle<Name> two) {
|
|
|
|
if (one.is_identical_to(two)) return true;
|
|
|
|
if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
|
|
|
|
one->IsSymbol() || two->IsSymbol()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return String::SlowEquals(Handle<String>::cast(one),
|
|
|
|
Handle<String>::cast(two));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-03-22 16:51:28 +00:00
|
|
|
ACCESSORS(Symbol, name, Object, kNameOffset)
|
Provide private symbols through internal APIs
Adds a notion of private symbols, mainly intended for internal use, especially self-hosting of built-in types that would otherwise require new C++ classes.
On the JS side (i.e., in built-ins), private properties can be created and accessed through a set of macros:
NEW_PRIVATE(print_name)
HAS_PRIVATE(obj, sym)
GET_PRIVATE(obj, sym)
SET_PRIVATE(obj, sym, val)
DELETE_PRIVATE(obj, sym)
In the V8 API, they are accessible via a new class Private, and respective HasPrivate/GetPrivate/SetPrivate/DeletePrivate methods on class Object.
These APIs are designed and restricted such that their implementation can later be replaced by whatever ES7+ will officially provide.
R=yangguo@chromium.org
BUG=
Review URL: https://codereview.chromium.org/48923002
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17683 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
2013-11-13 10:34:06 +00:00
|
|
|
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
|
|
|
|
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
|
2014-08-12 15:28:20 +00:00
|
|
|
BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
|
2013-03-22 16:51:28 +00:00
|
|
|
|
|
|
|
|
2009-11-24 14:10:06 +00:00
|
|
|
bool String::Equals(String* other) {
|
|
|
|
if (other == this) return true;
|
2013-03-04 15:00:57 +00:00
|
|
|
if (this->IsInternalizedString() && other->IsInternalizedString()) {
|
2009-11-24 14:10:06 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return SlowEquals(other);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-11 07:27:25 +00:00
|
|
|
bool String::Equals(Handle<String> one, Handle<String> two) {
|
|
|
|
if (one.is_identical_to(two)) return true;
|
|
|
|
if (one->IsInternalizedString() && two->IsInternalizedString()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return SlowEquals(one, two);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 09:49:49 +00:00
|
|
|
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
|
|
|
|
if (!string->IsConsString()) return string;
|
|
|
|
Handle<ConsString> cons = Handle<ConsString>::cast(string);
|
|
|
|
if (cons->IsFlat()) return handle(cons->first());
|
|
|
|
return SlowFlatten(cons, pretenure);
|
|
|
|
}
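// Usage note (ours): callers that index characters repeatedly flatten first,
// since Get on a ConsString walks the tree on every access:
//   Handle<String> flat = String::Flatten(string, NOT_TENURED);
//   uint16_t c = flat->Get(0);  // direct access, no cons traversal
// A non-ConsString is returned unchanged, and a flat ConsString just unwraps
// to its first component.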
|
|
|
|
|
|
|
|
|
2009-03-17 09:33:06 +00:00
|
|
|
uint16_t String::Get(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2009-03-17 09:33:06 +00:00
|
|
|
switch (StringShape(this).full_representation_tag()) {
|
2012-11-08 12:14:29 +00:00
|
|
|
case kSeqStringTag | kOneByteStringTag:
|
2012-11-15 13:31:27 +00:00
|
|
|
return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
|
2008-11-03 10:16:05 +00:00
|
|
|
case kSeqStringTag | kTwoByteStringTag:
|
|
|
|
return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
|
2012-11-08 12:14:29 +00:00
|
|
|
case kConsStringTag | kOneByteStringTag:
|
2008-11-03 10:16:05 +00:00
|
|
|
case kConsStringTag | kTwoByteStringTag:
|
2008-07-03 15:10:15 +00:00
|
|
|
return ConsString::cast(this)->ConsStringGet(index);
|
2012-11-08 12:14:29 +00:00
|
|
|
case kExternalStringTag | kOneByteStringTag:
|
2014-09-10 12:38:12 +00:00
|
|
|
return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
|
2008-11-03 10:16:05 +00:00
|
|
|
case kExternalStringTag | kTwoByteStringTag:
|
|
|
|
return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
|
2012-11-08 12:14:29 +00:00
|
|
|
case kSlicedStringTag | kOneByteStringTag:
|
2011-08-26 13:03:30 +00:00
|
|
|
case kSlicedStringTag | kTwoByteStringTag:
|
|
|
|
return SlicedString::cast(this)->SlicedStringGet(index);
|
2008-07-03 15:10:15 +00:00
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
UNREACHABLE();
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-17 09:33:06 +00:00
|
|
|
void String::Set(int index, uint16_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
|
|
|
DCHECK(StringShape(this).IsSequential());
|
2008-07-03 15:10:15 +00:00
|
|
|
|
2012-11-21 10:01:05 +00:00
|
|
|
return this->IsOneByteRepresentation()
|
2012-11-15 13:31:27 +00:00
|
|
|
? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
|
2008-10-09 08:08:04 +00:00
|
|
|
: SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-17 09:33:06 +00:00
|
|
|
bool String::IsFlat() {
|
2011-08-26 13:03:30 +00:00
|
|
|
if (!StringShape(this).IsCons()) return true;
|
|
|
|
return ConsString::cast(this)->second()->length() == 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
String* String::GetUnderlying() {
|
|
|
|
// Giving direct access to the underlying string only makes sense if the
|
|
|
|
// wrapping string is already flattened.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(this->IsFlat());
|
|
|
|
DCHECK(StringShape(this).IsIndirect());
|
2011-08-26 13:03:30 +00:00
|
|
|
STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
|
|
|
|
const int kUnderlyingOffset = SlicedString::kParentOffset;
|
|
|
|
return String::cast(READ_FIELD(this, kUnderlyingOffset));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-29 13:09:31 +00:00
|
|
|
template<class Visitor>
|
|
|
|
ConsString* String::VisitFlat(Visitor* visitor,
|
|
|
|
String* string,
|
|
|
|
const int offset) {
|
|
|
|
int slice_offset = offset;
|
|
|
|
const int length = string->length();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(offset <= length);
|
2012-12-06 11:49:15 +00:00
|
|
|
while (true) {
|
2014-04-29 13:09:31 +00:00
|
|
|
int32_t type = string->map()->instance_type();
|
2012-12-06 11:49:15 +00:00
|
|
|
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
|
|
|
|
case kSeqStringTag | kOneByteStringTag:
|
2014-04-29 13:09:31 +00:00
|
|
|
visitor->VisitOneByteString(
|
2013-01-09 15:47:53 +00:00
|
|
|
SeqOneByteString::cast(string)->GetChars() + slice_offset,
|
|
|
|
length - offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
return NULL;
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
case kSeqStringTag | kTwoByteStringTag:
|
2014-04-29 13:09:31 +00:00
|
|
|
visitor->VisitTwoByteString(
|
2013-01-09 15:47:53 +00:00
|
|
|
SeqTwoByteString::cast(string)->GetChars() + slice_offset,
|
|
|
|
length - offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
return NULL;
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
case kExternalStringTag | kOneByteStringTag:
|
2014-04-29 13:09:31 +00:00
|
|
|
visitor->VisitOneByteString(
|
2014-09-10 12:38:12 +00:00
|
|
|
ExternalOneByteString::cast(string)->GetChars() + slice_offset,
|
2013-01-09 15:47:53 +00:00
|
|
|
length - offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
return NULL;
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
case kExternalStringTag | kTwoByteStringTag:
|
2014-04-29 13:09:31 +00:00
|
|
|
visitor->VisitTwoByteString(
|
2013-01-09 15:47:53 +00:00
|
|
|
ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
|
|
|
|
length - offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
return NULL;
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
case kSlicedStringTag | kOneByteStringTag:
|
|
|
|
case kSlicedStringTag | kTwoByteStringTag: {
|
|
|
|
SlicedString* slicedString = SlicedString::cast(string);
|
2012-12-11 10:22:15 +00:00
|
|
|
slice_offset += slicedString->offset();
|
2012-12-06 11:49:15 +00:00
|
|
|
string = slicedString->parent();
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
case kConsStringTag | kOneByteStringTag:
|
|
|
|
case kConsStringTag | kTwoByteStringTag:
|
2014-04-29 13:09:31 +00:00
|
|
|
return ConsString::cast(string);
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
2014-04-29 13:09:31 +00:00
|
|
|
return NULL;
|
2012-12-06 11:49:15 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
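// Illustrative sketch (hypothetical visitor, not part of V8): VisitFlat
// reports the flat prefix of the string through the visitor callbacks and
// returns the first ConsString it cannot flatten inline (or NULL if the
// whole string was visited), so a caller can resume with a cons iterator:
//
//   class CountingVisitor {
//    public:
//     CountingVisitor() : seen_(0) {}
//     void VisitOneByteString(const uint8_t* chars, int length) {
//       seen_ += length;
//     }
//     void VisitTwoByteString(const uint16_t* chars, int length) {
//       seen_ += length;
//     }
//     int seen() const { return seen_; }
//    private:
//     int seen_;
//   };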
|
|
|
|
|
|
|
|
|
2014-11-25 15:29:50 +00:00
|
|
|
template <>
|
|
|
|
inline Vector<const uint8_t> String::GetCharVector() {
|
|
|
|
String::FlatContent flat = GetFlatContent();
|
|
|
|
DCHECK(flat.IsOneByte());
|
|
|
|
return flat.ToOneByteVector();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template <>
|
|
|
|
inline Vector<const uc16> String::GetCharVector() {
|
|
|
|
String::FlatContent flat = GetFlatContent();
|
|
|
|
DCHECK(flat.IsTwoByte());
|
|
|
|
return flat.ToUC16Vector();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-11-15 13:31:27 +00:00
|
|
|
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-11-15 13:31:27 +00:00
|
|
|
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
|
|
|
|
static_cast<byte>(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-11-15 13:31:27 +00:00
|
|
|
Address SeqOneByteString::GetCharsAddress() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return FIELD_ADDR(this, kHeaderSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-01-09 15:47:53 +00:00
|
|
|
uint8_t* SeqOneByteString::GetChars() {
|
2013-01-09 10:30:54 +00:00
|
|
|
return reinterpret_cast<uint8_t*>(GetCharsAddress());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-09 08:08:04 +00:00
|
|
|
Address SeqTwoByteString::GetCharsAddress() {
|
2008-10-07 08:11:44 +00:00
|
|
|
return FIELD_ADDR(this, kHeaderSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-22 09:09:07 +00:00
|
|
|
uc16* SeqTwoByteString::GetChars() {
|
|
|
|
return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-09 08:08:04 +00:00
|
|
|
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-09 08:08:04 +00:00
|
|
|
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-03-17 09:33:06 +00:00
|
|
|
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
|
2010-05-04 14:49:50 +00:00
|
|
|
return SizeFor(length());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-11-15 13:31:27 +00:00
|
|
|
int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
|
2010-05-04 14:49:50 +00:00
|
|
|
return SizeFor(length());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-08-26 13:03:30 +00:00
|
|
|
String* SlicedString::parent() {
|
|
|
|
return String::cast(READ_FIELD(this, kParentOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-07-18 13:39:53 +00:00
|
|
|
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(parent->IsSeqString() || parent->IsExternalString());
|
2011-08-26 13:03:30 +00:00
|
|
|
WRITE_FIELD(this, kParentOffset, parent);
|
2012-07-18 13:39:53 +00:00
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
|
2011-08-26 13:03:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
|
|
|
|
|
|
|
|
|
2008-11-03 10:16:05 +00:00
|
|
|
String* ConsString::first() {
|
|
|
|
return String::cast(READ_FIELD(this, kFirstOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object* ConsString::unchecked_first() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_FIELD(this, kFirstOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-03 10:16:05 +00:00
|
|
|
void ConsString::set_first(String* value, WriteBarrierMode mode) {
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_FIELD(this, kFirstOffset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-03 10:16:05 +00:00
|
|
|
String* ConsString::second() {
|
|
|
|
return String::cast(READ_FIELD(this, kSecondOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object* ConsString::unchecked_second() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_FIELD(this, kSecondOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-11-03 10:16:05 +00:00
|
|
|
void ConsString::set_second(String* value, WriteBarrierMode mode) {
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_FIELD(this, kSecondOffset, value);
|
2011-09-19 18:36:47 +00:00
|
|
|
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-11-23 13:08:28 +00:00
|
|
|
bool ExternalString::is_short() {
|
|
|
|
InstanceType type = map()->instance_type();
|
|
|
|
return (type & kShortExternalStringMask) == kShortExternalStringTag;
|
2011-11-17 17:05:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 12:38:12 +00:00
|
|
|
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 12:38:12 +00:00
|
|
|
void ExternalOneByteString::update_data_cache() {
|
2011-11-23 13:08:28 +00:00
|
|
|
if (is_short()) return;
|
|
|
|
const char** data_field =
|
|
|
|
reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
|
|
|
|
*data_field = resource()->data();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 12:38:12 +00:00
|
|
|
void ExternalOneByteString::set_resource(
|
|
|
|
const ExternalOneByteString::Resource* resource) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
|
2011-09-21 13:28:09 +00:00
|
|
|
*reinterpret_cast<const Resource**>(
|
|
|
|
FIELD_ADDR(this, kResourceOffset)) = resource;
|
2011-11-23 13:08:28 +00:00
|
|
|
if (resource != NULL) update_data_cache();
|
2011-11-17 17:05:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 12:38:12 +00:00
|
|
|
const uint8_t* ExternalOneByteString::GetChars() {
|
2013-01-09 15:47:53 +00:00
|
|
|
return reinterpret_cast<const uint8_t*>(resource()->data());
|
2011-11-17 17:05:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 12:38:12 +00:00
|
|
|
uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2011-11-17 17:05:12 +00:00
|
|
|
return GetChars()[index];
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-09-21 13:28:09 +00:00
|
|
|
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-11-23 13:08:28 +00:00
|
|
|
void ExternalTwoByteString::update_data_cache() {
|
|
|
|
if (is_short()) return;
|
|
|
|
const uint16_t** data_field =
|
|
|
|
reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
|
|
|
|
*data_field = resource()->data();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void ExternalTwoByteString::set_resource(
|
2011-09-21 13:28:09 +00:00
|
|
|
const ExternalTwoByteString::Resource* resource) {
|
|
|
|
*reinterpret_cast<const Resource**>(
|
|
|
|
FIELD_ADDR(this, kResourceOffset)) = resource;
|
2011-11-23 13:08:28 +00:00
|
|
|
if (resource != NULL) update_data_cache();
|
2011-11-17 17:05:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
const uint16_t* ExternalTwoByteString::GetChars() {
|
2011-11-23 13:08:28 +00:00
|
|
|
return resource()->data();
|
2011-11-17 17:05:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < length());
|
2011-11-17 17:05:12 +00:00
|
|
|
return GetChars()[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
|
|
|
|
unsigned start) {
|
|
|
|
return GetChars() + start;
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
|
2012-12-06 11:49:15 +00:00
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
void ConsStringIterator::PushLeft(ConsString* string) {
|
2012-12-06 11:49:15 +00:00
|
|
|
frames_[depth_++ & kDepthMask] = string;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
void ConsStringIterator::PushRight(ConsString* string) {
|
2012-12-11 10:22:15 +00:00
|
|
|
// In-place update of the topmost frame.
|
2012-12-06 11:49:15 +00:00
|
|
|
frames_[(depth_-1) & kDepthMask] = string;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
void ConsStringIterator::AdjustMaximumDepth() {
|
2012-12-06 11:49:15 +00:00
|
|
|
if (depth_ > maximum_depth_) maximum_depth_ = depth_;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
void ConsStringIterator::Pop() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(depth_ > 0);
|
|
|
|
DCHECK(depth_ <= maximum_depth_);
|
2012-12-06 11:49:15 +00:00
|
|
|
depth_--;
|
|
|
|
}
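// Note (illustrative): frames_ is a fixed-capacity stack addressed modulo
// its size; masking with kDepthMask in PushLeft/PushRight keeps the index
// in range even for arbitrarily deep cons trees, while maximum_depth_
// records how deep the traversal actually went.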
|
|
|
|
|
|
|
|
|
|
|
|
uint16_t StringCharacterStream::GetNext() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(buffer8_ != NULL && end_ != NULL);
|
2012-12-31 11:13:50 +00:00
|
|
|
// Advance cursor if needed.
|
|
|
|
if (buffer8_ == end_) HasMore();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(buffer8_ < end_);
|
2012-12-06 11:49:15 +00:00
|
|
|
return is_one_byte_ ? *buffer8_++ : *buffer16_++;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-10-23 05:57:01 +00:00
|
|
|
StringCharacterStream::StringCharacterStream(String* string, int offset)
|
|
|
|
: is_one_byte_(false) {
|
2012-12-31 11:13:50 +00:00
|
|
|
Reset(string, offset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-29 13:09:31 +00:00
|
|
|
void StringCharacterStream::Reset(String* string, int offset) {
|
2012-12-31 11:13:50 +00:00
|
|
|
buffer8_ = NULL;
|
|
|
|
end_ = NULL;
|
2014-04-29 13:09:31 +00:00
|
|
|
ConsString* cons_string = String::VisitFlat(this, string, offset);
|
2014-10-23 05:57:01 +00:00
|
|
|
iter_.Reset(cons_string, offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
if (cons_string != NULL) {
|
2014-10-23 05:57:01 +00:00
|
|
|
string = iter_.Next(&offset);
|
2014-04-29 13:09:31 +00:00
|
|
|
if (string != NULL) String::VisitFlat(this, string, offset);
|
|
|
|
}
|
2012-12-06 11:49:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool StringCharacterStream::HasMore() {
|
|
|
|
if (buffer8_ != end_) return true;
|
2014-04-29 13:09:31 +00:00
|
|
|
int offset;
|
2014-10-23 05:57:01 +00:00
|
|
|
String* string = iter_.Next(&offset);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK_EQ(offset, 0);
|
2012-12-19 13:27:20 +00:00
|
|
|
if (string == NULL) return false;
|
2014-04-29 13:09:31 +00:00
|
|
|
String::VisitFlat(this, string);
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(buffer8_ != end_);
|
2012-12-06 11:49:15 +00:00
|
|
|
return true;
|
|
|
|
}
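// Illustrative sketch (not part of V8): the intended usage is a
// HasMore()/GetNext() loop, which walks any rope shape one flat segment at
// a time using only the members defined above:
//
//   StringCharacterStream stream(string, 0);
//   while (stream.HasMore()) {
//     uint16_t c = stream.GetNext();
//     // ... consume c ...
//   }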
|
|
|
|
|
|
|
|
|
|
|
|
void StringCharacterStream::VisitOneByteString(
|
2014-04-29 13:09:31 +00:00
|
|
|
const uint8_t* chars, int length) {
|
2012-12-06 11:49:15 +00:00
|
|
|
is_one_byte_ = true;
|
|
|
|
buffer8_ = chars;
|
|
|
|
end_ = chars + length;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void StringCharacterStream::VisitTwoByteString(
|
2014-04-29 13:09:31 +00:00
|
|
|
const uint16_t* chars, int length) {
|
2012-12-06 11:49:15 +00:00
|
|
|
is_one_byte_ = false;
|
|
|
|
buffer16_ = chars;
|
|
|
|
end_ = reinterpret_cast<const uint8_t*>(chars + length);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-05-04 16:42:11 +00:00
|
|
|
void JSFunctionResultCache::MakeZeroSize() {
|
2011-01-17 16:54:56 +00:00
|
|
|
set_finger_index(kEntriesIndex);
|
|
|
|
set_size(kEntriesIndex);
|
2010-05-04 16:42:11 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSFunctionResultCache::Clear() {
|
2011-01-17 16:54:56 +00:00
|
|
|
int cache_size = size();
|
2013-12-23 14:42:42 +00:00
|
|
|
Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
|
2010-10-29 08:13:19 +00:00
|
|
|
MemsetPointer(entries_start,
|
2011-03-18 20:35:07 +00:00
|
|
|
GetHeap()->the_hole_value(),
|
2010-10-29 08:13:19 +00:00
|
|
|
cache_size - kEntriesIndex);
|
2010-05-04 16:42:11 +00:00
|
|
|
MakeZeroSize();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-01-17 16:54:56 +00:00
|
|
|
int JSFunctionResultCache::size() {
|
|
|
|
return Smi::cast(get(kCacheSizeIndex))->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSFunctionResultCache::set_size(int size) {
|
|
|
|
set(kCacheSizeIndex, Smi::FromInt(size));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int JSFunctionResultCache::finger_index() {
|
|
|
|
return Smi::cast(get(kFingerIndex))->value();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void JSFunctionResultCache::set_finger_index(int finger_index) {
|
|
|
|
set(kFingerIndex, Smi::FromInt(finger_index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
byte ByteArray::get(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ByteArray::set(int index, byte value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && index < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int ByteArray::get_int(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index >= 0 && (index * kIntSize) < this->length());
|
2008-07-03 15:10:15 +00:00
|
|
|
return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
ByteArray* ByteArray::FromDataStartAddress(Address address) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK_TAG_ALIGNED(address);
|
2008-07-03 15:10:15 +00:00
|
|
|
return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Address ByteArray::GetDataStartAddress() {
|
|
|
|
return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
|
|
|
|
}
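// Note (illustrative): the two functions above are inverses. Heap pointers
// carry kHeapObjectTag, so the payload starts at
// object - kHeapObjectTag + kHeaderSize, and FromDataStartAddress simply
// reverses that arithmetic (DCHECK_TAG_ALIGNED guards the round trip).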
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
|
2011-03-09 15:01:16 +00:00
|
|
|
return reinterpret_cast<uint8_t*>(external_pointer());
|
2009-07-28 08:43:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-24 16:01:15 +00:00
|
|
|
uint8_t* ptr = external_uint8_clamped_pointer();
|
2009-07-28 08:43:51 +00:00
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalUint8ClampedArray::get(
|
|
|
|
Handle<ExternalUint8ClampedArray> array,
|
|
|
|
int index) {
|
2014-04-16 06:18:37 +00:00
|
|
|
return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
|
|
|
|
array->GetIsolate());
|
2014-04-08 14:20:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalUint8ClampedArray::set(int index, uint8_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-24 16:01:15 +00:00
|
|
|
uint8_t* ptr = external_uint8_clamped_pointer();
|
2009-07-28 08:43:51 +00:00
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-06-20 10:31:17 +00:00
|
|
|
void* ExternalArray::external_pointer() const {
|
2009-10-20 15:26:17 +00:00
|
|
|
intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
|
|
|
|
return reinterpret_cast<void*>(ptr);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
|
|
|
|
intptr_t ptr = reinterpret_cast<intptr_t>(value);
|
|
|
|
WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
int8_t ExternalInt8Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int8_t* ptr = static_cast<int8_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
|
|
|
|
int index) {
|
2014-04-16 06:18:37 +00:00
|
|
|
return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
|
|
|
|
array->GetIsolate());
|
2014-04-08 14:20:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalInt8Array::set(int index, int8_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int8_t* ptr = static_cast<int8_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
uint8_t ExternalUint8Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
|
|
|
|
int index) {
|
2014-04-16 06:18:37 +00:00
|
|
|
return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
|
|
|
|
array->GetIsolate());
|
2014-04-08 14:20:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalUint8Array::set(int index, uint8_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
int16_t ExternalInt16Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int16_t* ptr = static_cast<int16_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
|
|
|
|
int index) {
|
2014-04-16 06:18:37 +00:00
|
|
|
return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
|
|
|
|
array->GetIsolate());
|
2014-04-08 14:20:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalInt16Array::set(int index, int16_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int16_t* ptr = static_cast<int16_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
uint16_t ExternalUint16Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
|
|
|
|
int index) {
|
2014-04-16 06:18:37 +00:00
|
|
|
return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
|
|
|
|
array->GetIsolate());
|
2014-04-08 14:20:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalUint16Array::set(int index, uint16_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
int32_t ExternalInt32Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int32_t* ptr = static_cast<int32_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
|
|
|
|
int index) {
|
|
|
|
return array->GetIsolate()->factory()->
|
|
|
|
NewNumberFromInt(array->get_scalar(index));
|
2011-08-03 11:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalInt32Array::set(int index, int32_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
int32_t* ptr = static_cast<int32_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
uint32_t ExternalUint32Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
|
|
|
|
int index) {
|
|
|
|
return array->GetIsolate()->factory()->
|
|
|
|
NewNumberFromUint(array->get_scalar(index));
|
2011-08-03 11:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalUint32Array::set(int index, uint32_t value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
float ExternalFloat32Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
float* ptr = static_cast<float*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
|
|
|
|
int index) {
|
|
|
|
return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
|
2011-08-03 11:12:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalFloat32Array::set(int index, float value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2009-10-20 15:26:17 +00:00
|
|
|
float* ptr = static_cast<float*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
2010-07-13 08:05:10 +00:00
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
double ExternalFloat64Array::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2011-04-21 07:15:43 +00:00
|
|
|
double* ptr = static_cast<double*>(external_pointer());
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
|
|
|
|
int index) {
|
|
|
|
return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-24 16:01:15 +00:00
|
|
|
void ExternalFloat64Array::set(int index, double value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2011-04-21 07:15:43 +00:00
|
|
|
double* ptr = static_cast<double*>(external_pointer());
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-26 12:50:13 +00:00
|
|
|
void* FixedTypedArrayBase::DataPtr() {
|
|
|
|
return FIELD_ADDR(this, kDataOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-05-27 13:05:11 +00:00
|
|
|
int FixedTypedArrayBase::DataSize(InstanceType type) {
|
2014-01-16 17:08:45 +00:00
|
|
|
int element_size;
|
2014-05-27 13:05:11 +00:00
|
|
|
switch (type) {
|
2014-03-26 12:50:13 +00:00
|
|
|
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
|
|
|
|
case FIXED_##TYPE##_ARRAY_TYPE: \
|
|
|
|
element_size = size; \
|
2014-01-16 17:08:45 +00:00
|
|
|
break;
|
2014-03-26 12:50:13 +00:00
|
|
|
|
|
|
|
TYPED_ARRAYS(TYPED_ARRAY_CASE)
|
|
|
|
#undef TYPED_ARRAY_CASE
|
2014-01-16 17:08:45 +00:00
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
|
|
|
return 0;
|
|
|
|
}
|
2014-03-26 12:50:13 +00:00
|
|
|
return length() * element_size;
|
|
|
|
}
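// Note (illustrative): TYPED_ARRAYS(TYPED_ARRAY_CASE) above expands to one
// case per typed-array kind; e.g. for Uint8 it yields
//   case FIXED_UINT8_ARRAY_TYPE: element_size = 1; break;
// so DataSize() is just length() * sizeof(element) for the concrete type.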
|
|
|
|
|
|
|
|
|
2014-05-27 13:05:11 +00:00
|
|
|
int FixedTypedArrayBase::DataSize() {
|
|
|
|
return DataSize(map()->instance_type());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-26 12:50:13 +00:00
|
|
|
int FixedTypedArrayBase::size() {
|
|
|
|
return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-05-27 13:05:11 +00:00
|
|
|
int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
|
|
|
|
return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-26 12:50:13 +00:00
|
|
|
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
int8_t Int8ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
int16_t Int16ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
int32_t Int32ArrayTraits::defaultValue() { return 0; }
|
|
|
|
|
|
|
|
|
|
|
|
float Float32ArrayTraits::defaultValue() {
|
2015-01-21 14:38:49 +00:00
|
|
|
return std::numeric_limits<float>::quiet_NaN();
|
2014-01-16 17:08:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-01-21 14:38:49 +00:00
|
|
|
double Float64ArrayTraits::defaultValue() {
|
|
|
|
return std::numeric_limits<double>::quiet_NaN();
|
|
|
|
}
|
2014-03-26 12:50:13 +00:00
|
|
|
|
|
|
|
|
2014-01-16 17:08:45 +00:00
|
|
|
template <class Traits>
|
|
|
|
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-16 17:08:45 +00:00
|
|
|
ElementType* ptr = reinterpret_cast<ElementType*>(
|
|
|
|
FIELD_ADDR(this, kDataOffset));
|
|
|
|
return ptr[index];
|
|
|
|
}
|
|
|
|
|
2014-01-30 20:05:11 +00:00
|
|
|
|
|
|
|
template<> inline
|
|
|
|
FixedTypedArray<Float64ArrayTraits>::ElementType
|
|
|
|
FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-30 20:05:11 +00:00
|
|
|
return READ_DOUBLE_FIELD(this, ElementOffset(index));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-16 17:08:45 +00:00
|
|
|
template <class Traits>
|
|
|
|
void FixedTypedArray<Traits>::set(int index, ElementType value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-16 17:08:45 +00:00
|
|
|
ElementType* ptr = reinterpret_cast<ElementType*>(
|
|
|
|
FIELD_ADDR(this, kDataOffset));
|
|
|
|
ptr[index] = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-01-30 20:05:11 +00:00
|
|
|
template<> inline
|
|
|
|
void FixedTypedArray<Float64ArrayTraits>::set(
|
|
|
|
int index, Float64ArrayTraits::ElementType value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK((index >= 0) && (index < this->length()));
|
2014-01-30 20:05:11 +00:00
|
|
|
WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-03-26 12:50:13 +00:00
|
|
|
template <class Traits>
|
|
|
|
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
|
|
|
|
return static_cast<ElementType>(value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template <> inline
|
|
|
|
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
|
|
|
|
if (value < 0) return 0;
|
|
|
|
if (value > 0xFF) return 0xFF;
|
|
|
|
return static_cast<uint8_t>(value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template <class Traits>
|
|
|
|
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
|
|
|
|
double value) {
|
|
|
|
return static_cast<ElementType>(DoubleToInt32(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template<> inline
|
|
|
|
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
|
2014-10-21 11:10:13 +00:00
|
|
|
// Handle NaN and values less than zero, which clamp to zero.
|
|
|
|
if (!(value > 0)) return 0;
|
2014-03-26 12:50:13 +00:00
|
|
|
if (value > 0xFF) return 0xFF;
|
|
|
|
return static_cast<uint8_t>(lrint(value));
|
|
|
|
}
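// Note (illustrative): lrint() rounds halfway cases to even under the
// default rounding mode, matching the clamped conversion required for
// Uint8Clamped arrays; e.g. from_double(0.5) stores 0 and from_double(1.5)
// stores 2, while the !(value > 0) test above folds NaN into the zero case.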
|
|
|
|
|
|
|
|
|
|
|
|
template<> inline
|
|
|
|
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
|
|
|
|
return static_cast<float>(value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
template<> inline
|
|
|
|
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
|
|
|
|
return value;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
template <class Traits>
|
|
|
|
Handle<Object> FixedTypedArray<Traits>::get(
|
|
|
|
Handle<FixedTypedArray<Traits> > array,
|
|
|
|
int index) {
|
|
|
|
return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
|
|
|
|
}
|
|
|
|
|
2014-04-16 06:18:37 +00:00
|
|
|
|
2014-01-16 17:08:45 +00:00
|
|
|
template <class Traits>
|
2014-04-16 06:18:37 +00:00
|
|
|
Handle<Object> FixedTypedArray<Traits>::SetValue(
|
|
|
|
Handle<FixedTypedArray<Traits> > array,
|
|
|
|
uint32_t index,
|
|
|
|
Handle<Object> value) {
|
2014-01-16 17:08:45 +00:00
|
|
|
ElementType cast_value = Traits::defaultValue();
|
2014-04-16 06:18:37 +00:00
|
|
|
if (index < static_cast<uint32_t>(array->length())) {
|
2014-01-16 17:08:45 +00:00
|
|
|
if (value->IsSmi()) {
|
2014-04-16 06:18:37 +00:00
|
|
|
int int_value = Handle<Smi>::cast(value)->value();
|
2014-03-26 12:50:13 +00:00
|
|
|
cast_value = from_int(int_value);
|
2014-01-16 17:08:45 +00:00
|
|
|
} else if (value->IsHeapNumber()) {
|
2014-04-16 06:18:37 +00:00
|
|
|
double double_value = Handle<HeapNumber>::cast(value)->value();
|
2014-03-26 12:50:13 +00:00
|
|
|
cast_value = from_double(double_value);
|
2014-01-16 17:08:45 +00:00
|
|
|
} else {
|
|
|
|
// Clamp undefined to the default value. All other types have been
|
|
|
|
// converted to a number type further up in the call chain.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(value->IsUndefined());
|
2014-01-16 17:08:45 +00:00
|
|
|
}
|
2014-04-16 06:18:37 +00:00
|
|
|
array->set(index, cast_value);
|
2014-01-16 17:08:45 +00:00
|
|
|
}
|
2014-04-16 06:18:37 +00:00
|
|
|
return Traits::ToHandle(array->GetIsolate(), cast_value);
|
2014-01-16 17:08:45 +00:00
|
|
|
}
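// Note (illustrative): SetValue funnels every store through the from_int()
// and from_double() conversions above, so e.g. storing the Smi 300 into a
// Uint8Clamped array writes 255, and an out-of-bounds index skips the store
// while still returning the converted value.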
|
|
|
|
|
|
|
|
|
2014-04-08 14:20:29 +00:00
|
|
|
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
|
|
|
|
return handle(Smi::FromInt(scalar), isolate);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
|
|
|
|
uint8_t scalar) {
|
|
|
|
return handle(Smi::FromInt(scalar), isolate);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
|
|
|
|
return handle(Smi::FromInt(scalar), isolate);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
|
|
|
|
return handle(Smi::FromInt(scalar), isolate);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
|
|
|
|
return handle(Smi::FromInt(scalar), isolate);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
|
|
|
|
return isolate->factory()->NewNumberFromUint(scalar);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
|
|
|
|
return isolate->factory()->NewNumberFromInt(scalar);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
|
|
|
|
return isolate->factory()->NewNumber(scalar);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
|
|
|
|
return isolate->factory()->NewNumber(scalar);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-09-06 12:50:11 +00:00
|
|
|
int Map::visitor_id() {
|
|
|
|
return READ_BYTE_FIELD(this, kVisitorIdOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_visitor_id(int id) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(0 <= id && id < 256);
|
2010-09-06 12:50:11 +00:00
|
|
|
WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
|
|
|
|
}
|
|
|
|
|
2009-10-20 15:26:17 +00:00
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
int Map::instance_size() {
|
2014-04-09 09:50:25 +00:00
|
|
|
return NOBARRIER_READ_BYTE_FIELD(
|
|
|
|
this, kInstanceSizeOffset) << kPointerSizeLog2;
|
2008-10-15 06:03:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int Map::inobject_properties() {
|
|
|
|
return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-08-19 07:30:20 +00:00
|
|
|
int Map::pre_allocated_property_fields() {
|
|
|
|
return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-02-04 10:48:49 +00:00
|
|
|
int Map::GetInObjectPropertyOffset(int index) {
|
|
|
|
// Adjust for the number of properties stored in the object.
|
|
|
|
index -= inobject_properties();
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(index <= 0);
|
2014-02-04 10:48:49 +00:00
|
|
|
return instance_size() + (index * kPointerSize);
|
|
|
|
}
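// Note (illustrative): in-object properties sit at the end of the object,
// so after the adjustment above |index| is non-positive and e.g. the last
// in-object property lives at instance_size() - kPointerSize.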
|
|
|
|
|
|
|
|
|
2014-11-11 10:24:52 +00:00
|
|
|
Handle<Map> Map::CopyInstallDescriptorsForTesting(
|
|
|
|
Handle<Map> map, int new_descriptor, Handle<DescriptorArray> descriptors,
|
|
|
|
Handle<LayoutDescriptor> layout_descriptor) {
|
|
|
|
return CopyInstallDescriptors(map, new_descriptor, descriptors,
|
|
|
|
layout_descriptor);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
int HeapObject::SizeFromMap(Map* map) {
|
2010-08-18 13:00:38 +00:00
|
|
|
int instance_size = map->instance_size();
|
|
|
|
if (instance_size != kVariableSizeSentinel) return instance_size;
|
2009-06-16 13:31:31 +00:00
|
|
|
// Only inline the most frequent cases.
|
2014-05-27 13:05:11 +00:00
|
|
|
InstanceType instance_type = map->instance_type();
|
2008-07-03 15:10:15 +00:00
|
|
|
if (instance_type == FIXED_ARRAY_TYPE) {
|
2010-08-11 14:30:14 +00:00
|
|
|
return FixedArray::BodyDescriptor::SizeOf(map, this);
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
2014-09-10 12:38:12 +00:00
|
|
|
if (instance_type == ONE_BYTE_STRING_TYPE ||
|
|
|
|
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
|
2014-11-05 07:30:07 +00:00
|
|
|
// The string may get concurrently truncated, hence we have to access its
|
|
|
|
// length in a synchronized fashion.
|
2012-11-15 13:31:27 +00:00
|
|
|
return SeqOneByteString::SizeFor(
|
2014-11-05 07:30:07 +00:00
|
|
|
reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
|
2010-08-18 13:00:38 +00:00
|
|
|
}
|
2009-06-16 13:31:31 +00:00
|
|
|
if (instance_type == BYTE_ARRAY_TYPE) {
|
|
|
|
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
|
|
|
|
}
|
2011-09-19 18:36:47 +00:00
|
|
|
if (instance_type == FREE_SPACE_TYPE) {
|
2014-04-08 16:31:57 +00:00
|
|
|
return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
|
2011-09-19 18:36:47 +00:00
|
|
|
}
|
2013-06-27 13:39:44 +00:00
|
|
|
if (instance_type == STRING_TYPE ||
|
|
|
|
instance_type == INTERNALIZED_STRING_TYPE) {
|
2014-11-05 07:30:07 +00:00
|
|
|
// The string may get concurrently truncated, hence we have to access its
|
|
|
|
// length in a synchronized fashion.
|
2010-08-18 13:00:38 +00:00
|
|
|
return SeqTwoByteString::SizeFor(
|
2014-11-05 07:30:07 +00:00
|
|
|
reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
|
2010-08-18 13:00:38 +00:00
|
|
|
}
|
2011-06-09 10:03:35 +00:00
|
|
|
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
|
|
|
|
return FixedDoubleArray::SizeFor(
|
|
|
|
reinterpret_cast<FixedDoubleArray*>(this)->length());
|
|
|
|
}
|
2013-10-14 13:35:06 +00:00
|
|
|
if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
|
2014-06-03 16:22:10 +00:00
|
|
|
return reinterpret_cast<ConstantPoolArray*>(this)->size();
|
2013-10-14 13:35:06 +00:00
|
|
|
}
|
2014-01-16 17:08:45 +00:00
|
|
|
if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
|
|
|
|
instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
|
2014-05-27 13:05:11 +00:00
|
|
|
return reinterpret_cast<FixedTypedArrayBase*>(
|
|
|
|
this)->TypedArraySize(instance_type);
|
2014-01-16 17:08:45 +00:00
|
|
|
}
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(instance_type == CODE_TYPE);
|
2010-08-18 13:00:38 +00:00
|
|
|
return reinterpret_cast<Code*>(this)->CodeSize();
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_instance_size(int value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK_EQ(0, value & (kPointerSize - 1));
|
2008-10-15 06:03:26 +00:00
|
|
|
value >>= kPointerSizeLog2;
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(0 <= value && value < 256);
|
2014-04-09 09:50:25 +00:00
|
|
|
NOBARRIER_WRITE_BYTE_FIELD(
|
|
|
|
this, kInstanceSizeOffset, static_cast<byte>(value));
|
2008-07-03 15:10:15 +00:00
|
|
|
}
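// Note (illustrative): instance sizes are stored in words so the value fits
// in a single byte; e.g. on a 64-bit build a 256-byte instance is stored as
// 32 here and scaled back by << kPointerSizeLog2 in instance_size() above.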
|
|
|
|
|
|
|
|
|
2008-10-15 06:03:26 +00:00
|
|
|
void Map::set_inobject_properties(int value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(0 <= value && value < 256);
|
2008-10-15 06:03:26 +00:00
|
|
|
WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-08-19 07:30:20 +00:00
|
|
|
void Map::set_pre_allocated_property_fields(int value) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(0 <= value && value < 256);
|
2009-08-19 07:30:20 +00:00
|
|
|
WRITE_BYTE_FIELD(this,
|
|
|
|
kPreAllocatedPropertyFieldsOffset,
|
|
|
|
static_cast<byte>(value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
InstanceType Map::instance_type() {
|
|
|
|
return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_instance_type(InstanceType value) {
|
|
|
|
WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
int Map::unused_property_fields() {
|
|
|
|
return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_unused_property_fields(int value) {
|
|
|
|
WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
byte Map::bit_field() {
|
|
|
|
return READ_BYTE_FIELD(this, kBitFieldOffset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_bit_field(byte value) {
|
|
|
|
WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2009-04-24 08:13:09 +00:00
|
|
|
byte Map::bit_field2() {
|
|
|
|
return READ_BYTE_FIELD(this, kBitField2Offset);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Map::set_bit_field2(byte value) {
|
|
|
|
WRITE_BYTE_FIELD(this, kBitField2Offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void Map::set_non_instance_prototype(bool value) {
|
|
|
|
if (value) {
|
|
|
|
set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
|
|
|
|
} else {
|
|
|
|
set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::has_non_instance_prototype() {
|
|
|
|
return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-04-28 12:05:40 +00:00
|
|
|
void Map::set_function_with_prototype(bool value) {
|
2014-05-09 16:18:58 +00:00
|
|
|
set_bit_field(FunctionWithPrototype::update(bit_field(), value));
|
2010-04-28 12:05:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::function_with_prototype() {
|
2014-05-09 16:18:58 +00:00
|
|
|
return FunctionWithPrototype::decode(bit_field());
|
2010-04-28 12:05:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-10-30 12:51:06 +00:00
|
|
|
void Map::set_is_access_check_needed(bool access_check_needed) {
|
|
|
|
if (access_check_needed) {
|
|
|
|
set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
|
|
|
|
} else {
|
|
|
|
set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::is_access_check_needed() {
|
|
|
|
return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-07-02 14:36:34 +00:00
|
|
|
void Map::set_is_extensible(bool value) {
|
|
|
|
if (value) {
|
|
|
|
set_bit_field2(bit_field2() | (1 << kIsExtensible));
|
|
|
|
} else {
|
|
|
|
set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
bool Map::is_extensible() {
|
|
|
|
return ((1 << kIsExtensible) & bit_field2()) != 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-08-07 16:14:22 +00:00
|
|
|
void Map::set_is_prototype_map(bool value) {
|
|
|
|
set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
|
2014-08-04 15:02:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
bool Map::is_prototype_map() {
|
|
|
|
return IsPrototypeMapBits::decode(bit_field2());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-08-06 14:25:19 +00:00
|
|
|
void Map::set_dictionary_map(bool value) {
|
2014-01-31 11:38:43 +00:00
|
|
|
uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
|
|
|
|
new_bit_field3 = IsUnstable::update(new_bit_field3, value);
|
|
|
|
set_bit_field3(new_bit_field3);
|
2012-08-06 14:25:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::is_dictionary_map() {
|
|
|
|
return DictionaryMap::decode(bit_field3());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
Code::Flags Code::flags() {
|
|
|
|
return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-08-11 14:00:58 +00:00
|
|
|
void Map::set_owns_descriptors(bool owns_descriptors) {
|
|
|
|
set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
|
2012-09-12 16:43:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::owns_descriptors() {
|
|
|
|
return OwnsDescriptors::decode(bit_field3());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-11-14 21:47:39 +00:00
|
|
|
void Map::set_has_instance_call_handler() {
|
|
|
|
set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
|
2012-11-06 12:30:22 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-11-14 21:47:39 +00:00
|
|
|
bool Map::has_instance_call_handler() {
|
|
|
|
return HasInstanceCallHandler::decode(bit_field3());
|
2012-11-06 12:30:22 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-26 15:30:41 +00:00
|
|
|
void Map::deprecate() {
|
|
|
|
set_bit_field3(Deprecated::update(bit_field3(), true));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::is_deprecated() {
|
|
|
|
return Deprecated::decode(bit_field3());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-08-05 16:42:39 +00:00
|
|
|
void Map::set_migration_target(bool value) {
|
|
|
|
set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::is_migration_target() {
|
|
|
|
return IsMigrationTarget::decode(bit_field3());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-12-05 15:28:09 +00:00
|
|
|
void Map::set_counter(int value) {
|
|
|
|
set_bit_field3(Counter::update(bit_field3(), value));
|
2014-05-23 08:52:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-12-05 15:28:09 +00:00
|
|
|
int Map::counter() { return Counter::decode(bit_field3()); }
|
2014-05-23 08:52:05 +00:00
|
|
|
|
|
|
|
|
2013-07-30 16:33:58 +00:00
|
|
|
void Map::mark_unstable() {
|
|
|
|
set_bit_field3(IsUnstable::update(bit_field3(), true));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
bool Map::is_stable() {
|
|
|
|
return !IsUnstable::decode(bit_field3());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-07-18 13:00:40 +00:00
|
|
|
bool Map::has_code_cache() {
|
|
|
|
return code_cache() != GetIsolate()->heap()->empty_fixed_array();
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-26 15:30:41 +00:00
|
|
|
bool Map::CanBeDeprecated() {
|
|
|
|
int descriptor = LastAdded();
|
|
|
|
for (int i = 0; i <= descriptor; i++) {
|
|
|
|
PropertyDetails details = instance_descriptors()->GetDetails(i);
|
2014-03-04 12:48:17 +00:00
|
|
|
if (details.representation().IsNone()) return true;
|
|
|
|
if (details.representation().IsSmi()) return true;
|
|
|
|
if (details.representation().IsDouble()) return true;
|
|
|
|
if (details.representation().IsHeapObject()) return true;
|
2015-01-19 17:49:13 +00:00
|
|
|
if (details.type() == DATA_CONSTANT) return true;
|
2013-04-26 15:30:41 +00:00
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-02-20 11:49:54 +00:00
|
|
|
void Map::NotifyLeafMapLayoutChange() {
|
2013-07-30 16:33:58 +00:00
|
|
|
if (is_stable()) {
|
|
|
|
mark_unstable();
|
|
|
|
dependent_code()->DeoptimizeDependentCodeGroup(
|
|
|
|
GetIsolate(),
|
|
|
|
DependentCode::kPrototypeCheckGroup);
|
|
|
|
}
|
2013-02-20 11:49:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-07-23 09:18:42 +00:00
|
|
|
bool Map::CanOmitMapChecks() {
|
2013-07-30 16:33:58 +00:00
|
|
|
return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
|
2013-07-23 09:18:42 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-02-20 11:49:54 +00:00
|
|
|
int DependentCode::number_of_entries(DependencyGroup group) {
|
2013-01-24 11:55:05 +00:00
|
|
|
if (length() == 0) return 0;
|
2013-02-20 11:49:54 +00:00
|
|
|
return Smi::cast(get(group))->value();
|
2013-01-24 11:55:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-02-20 11:49:54 +00:00
|
|
|
void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
|
|
|
|
set(group, Smi::FromInt(value));
|
2013-01-24 11:55:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 09:43:22 +00:00
|
|
|
bool DependentCode::is_code_at(int i) {
|
|
|
|
return get(kCodesStartIndex + i)->IsCode();
|
|
|
|
}
|
|
|
|
|
2013-02-20 11:49:54 +00:00
|
|
|
Code* DependentCode::code_at(int i) {
|
|
|
|
return Code::cast(get(kCodesStartIndex + i));
|
2013-01-24 11:55:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 09:43:22 +00:00
|
|
|
CompilationInfo* DependentCode::compilation_info_at(int i) {
|
|
|
|
return reinterpret_cast<CompilationInfo*>(
|
|
|
|
Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
|
2013-01-24 11:55:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 09:43:22 +00:00
|
|
|
void DependentCode::set_object_at(int i, Object* object) {
|
|
|
|
set(kCodesStartIndex + i, object);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object* DependentCode::object_at(int i) {
|
|
|
|
return get(kCodesStartIndex + i);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Object** DependentCode::slot_at(int i) {
|
2013-12-23 14:42:42 +00:00
|
|
|
return RawFieldOfElementAt(kCodesStartIndex + i);
|
2013-01-24 11:55:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 09:43:22 +00:00
|
|
|
void DependentCode::clear_at(int i) {
|
2013-02-20 11:49:54 +00:00
|
|
|
set_undefined(kCodesStartIndex + i);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-06-12 09:43:22 +00:00
|
|
|
void DependentCode::copy(int from, int to) {
|
|
|
|
set(kCodesStartIndex + to, get(kCodesStartIndex + from));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-02-20 11:49:54 +00:00
|
|
|
void DependentCode::ExtendGroup(DependencyGroup group) {
|
|
|
|
GroupStartIndexes starts(this);
|
|
|
|
for (int g = kGroupCount - 1; g > group; g--) {
|
|
|
|
if (starts.at(g) < starts.at(g + 1)) {
|
2013-06-12 09:43:22 +00:00
|
|
|
copy(starts.at(g), starts.at(g + 1));
|
2013-02-20 11:49:54 +00:00
|
|
|
}
|
|
|
|
}
|
2013-01-24 11:55:05 +00:00
|
|
|
}
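// Note (illustrative): ExtendGroup makes room for one new entry in |group|
// by moving the first element of each later group to that group's end; a
// single element per group suffices because ordering within a group is not
// significant.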
|
|
|
|
|
|
|
|
|
2008-07-03 15:10:15 +00:00
|
|
|
void Code::set_flags(Code::Flags flags) {
|
2011-09-12 10:50:50 +00:00
|
|
|
STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
|
2008-07-03 15:10:15 +00:00
|
|
|
WRITE_INT_FIELD(this, kFlagsOffset, flags);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Code::Kind Code::kind() {
|
|
|
|
return ExtractKindFromFlags(flags());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-21 13:10:14 +00:00
|
|
|
bool Code::IsCodeStubOrIC() {
|
|
|
|
return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
|
|
|
|
kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
|
|
|
|
kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
|
|
|
|
kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
|
|
|
|
kind() == TO_BOOLEAN_IC;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2008-07-30 08:49:36 +00:00
|
|
|
InlineCacheState Code::ic_state() {
|
|
|
|
InlineCacheState result = ExtractICStateFromFlags(flags());
|
2008-07-03 15:10:15 +00:00
|
|
|
// Only allow uninitialized or debugger states for non-IC code
|
|
|
|
// objects. This is used in the debugger to determine whether or not
|
|
|
|
// a call to a code object has been replaced with a debug break call.
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(is_inline_cache_stub() ||
|
2008-07-03 15:10:15 +00:00
|
|
|
result == UNINITIALIZED ||
|
2013-01-10 14:15:12 +00:00
|
|
|
result == DEBUG_STUB);
|
2008-07-03 15:10:15 +00:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-11-28 15:32:55 +00:00
|
|
|
ExtraICState Code::extra_ic_state() {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
|
2014-02-11 15:01:44 +00:00
|
|
|
return ExtractExtraICStateFromFlags(flags());
|
2013-04-24 11:32:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-06-25 11:35:23 +00:00
|
|
|
Code::StubType Code::type() {
|
2008-07-03 15:10:15 +00:00
|
|
|
return ExtractTypeFromFlags(flags());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-12-13 10:27:19 +00:00
|
|
|
// For initialization.
|
|
|
|
void Code::set_raw_kind_specific_flags1(int value) {
|
|
|
|
WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void Code::set_raw_kind_specific_flags2(int value) {
|
|
|
|
WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-18 09:50:46 +00:00
|
|
|
inline bool Code::is_crankshafted() {
|
|
|
|
return IsCrankshaftedField::decode(
|
|
|
|
READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-07-03 11:47:31 +00:00
|
|
|
inline bool Code::is_hydrogen_stub() {
|
|
|
|
return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-04-18 09:50:46 +00:00
|
|
|
inline void Code::set_is_crankshafted(bool value) {
|
|
|
|
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
|
|
|
|
int updated = IsCrankshaftedField::update(previous, value);
|
|
|
|
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
|
|
|
|
}


inline bool Code::is_turbofanned() {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::is_compiled_optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  DCHECK(ticks < 256);
  if (kind() == FUNCTION) {
    WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
  }
}


int Code::builtin_index() {
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}


void Code::set_builtin_index(int index) {
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}


unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
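
// The back edge table offset is stored compressed by kPointerSizeLog2 so that
// a larger range fits into the bit field. A hedged worked example on a 64-bit
// target (kPointerSize == 8, kPointerSizeLog2 == 3), with a made-up offset:
//
//   code->set_back_edge_table_offset(0x140);  // 0x140 >> 3 == 0x28 stored.
//   DCHECK_EQ(0x140u, code->back_edge_table_offset());  // 0x28 << 3.
//
// The alignment DCHECK above is what makes this lossless: only
// pointer-aligned offsets survive the shift round trip.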


bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}


byte Code::to_boolean_state() {
  return extra_ic_state();
}


bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
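
// For reference, IC_KIND_LIST is an X-macro, so the switch above expands to
// one "case ...: return true;" per IC kind. A hedged sketch of the expansion
// (the actual list lives elsewhere and may differ in members):
//
//   switch (kind) {
//     case LOAD_IC: return true;
//     case KEYED_LOAD_IC: return true;
//     // ... one case per entry in IC_KIND_LIST ...
//     default: return false;
//   }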


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}


bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}


ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}


void Code::set_constant_pool(Object* value) {
  DCHECK(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}


Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
                               ExtraICState extra_ic_state, StubType type,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}
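
// The flags word is just the OR of disjoint BitField encodings, so each
// component can be recovered independently with the Extract* helpers below.
// A hedged round-trip sketch (the argument values are chosen for illustration
// only):
//
//   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC,
//                                          kNoExtraICState, Code::NORMAL,
//                                          kCacheOnReceiver);
//   DCHECK_EQ(Code::LOAD_IC, Code::ExtractKindFromFlags(flags));
//   DCHECK_EQ(MONOMORPHIC, Code::ExtractICStateFromFlags(flags));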


Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}


Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}


Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during
  // mark-sweep. reinterpret_cast is therefore used instead of the more
  // appropriate Code::cast. Code::cast does not work when the object's map
  // is marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
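
// The address arithmetic above relies on the instruction stream starting
// exactly Code::kHeaderSize bytes past the start of the Code object. A hedged
// sketch of the invariant (not actual code from this file):
//
//   Address entry = code->address() + Code::kHeaderSize;  // First instruction.
//   DCHECK_EQ(code, Code::GetCodeFromTargetAddress(entry));
//
// Subtracting the header size from any entry address therefore recovers the
// enclosing heap object.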


Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (!FLAG_collect_maps) return false;
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) object = Cell::cast(object)->value();
  if (object->IsJSObject()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsFixedArray()) {
    // Contexts of inlined functions are embedded in optimized code.
    Map* map = HeapObject::cast(object)->map();
    Heap* heap = map->GetHeap();
    return FLAG_weak_embedded_objects_in_optimized_code &&
           map == heap->function_context_map();
  }
  return false;
}


class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
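
// A hedged usage sketch for FindAndReplacePattern: callers accumulate up to
// kMaxCount (map -> replacement) pairs and hand the pattern to
// Code::FindAndReplace (defined elsewhere), which patches matching embedded
// maps in the code object. The handle names here are illustrative:
//
//   Code::FindAndReplacePattern pattern;
//   pattern.Add(map_to_patch, replacement_object);
//   code->FindAndReplace(pattern);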


Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


// If the map is using the empty transition array, install a new empty
// transition array that has room for an element transition.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}


LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}


bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}


void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
  }
}


void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
  }
}


ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)


void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}
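
// On 64-bit targets, bit_field3 only occupies the low half of a pointer-sized
// slot; the write above first zeroes the upper half so the whole word holds a
// deterministic value (an assumption about intent, hedged). Illustrative
// layout, assuming kPointerSize == 8 and kInt32Size == 4:
//
//   [kBitField3Offset + 0 .. 3]  the 32 bits of flags being stored
//   [kBitField3Offset + 4 .. 7]  explicitly zeroed padding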


uint32_t Map::bit_field3() {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}


LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}


void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}


Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    DCHECK(object->IsMap() || object->IsUndefined());
    return object;
  }
}
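
// kTransitionsOrBackPointerOffset is an overloaded slot: it holds either a
// TransitionArray (which in turn stores the back pointer) or the back pointer
// itself (a Map, or undefined for root maps). A hedged decision sketch:
//
//   Object* slot = READ_FIELD(map, kTransitionsOrBackPointerOffset);
//   if (slot->IsTransitionArray()) {
//     // transitions() is valid; the back pointer lives inside the array.
//   } else {
//     // No transitions; the slot is the back pointer itself.
//   }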


bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}


bool Map::HasTransitionArray() const {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}


Map* Map::elements_transition_map() {
  int index =
      transitions()->SearchSpecial(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}


bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return transitions()->number_of_transitions() <
         TransitionArray::kMaxNumberOfTransitions;
}


Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}


int Map::SearchSpecialTransition(Symbol* name) {
  if (HasTransitionArray()) {
    return transitions()->SearchSpecial(name);
  }
  return TransitionArray::kNotFound;
}


int Map::SearchTransition(PropertyKind kind, Name* name,
                          PropertyAttributes attributes) {
  if (HasTransitionArray()) {
    return transitions()->Search(kind, name, attributes);
  }
  return TransitionArray::kNotFound;
}


FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}


void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
  if (Heap::ShouldZapGarbage() && map->HasPrototypeTransitions()) {
    DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}


bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}


TransitionArray* Map::transitions() const {
  DCHECK(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}


void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index;
        if (TransitionArray::IsSpecialTransition(key)) {
          new_target_index = transition_array->SearchSpecial(Symbol::cast(key));
        } else {
          PropertyDetails details =
              TransitionArray::GetTargetDetails(key, target);
          new_target_index = transition_array->Search(details.kind(), key,
                                                      details.attributes());
        }
        DCHECK_NE(TransitionArray::kNotFound, new_target_index);
        DCHECK_EQ(target, transition_array->GetTarget(new_target_index));
      }
    }
#endif
    DCHECK(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


void Map::init_back_pointer(Object* undefined) {
  DCHECK(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_embedder_debug_script,
               kIsEmbedderDebugScriptBit)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)


Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)


#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)
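
// A hedged worked example of the pseudo-smi encoding used by
// PSEUDO_SMI_ACCESSORS_LO (the values are invented for illustration): storing
// 5 writes (5 << 1) == 10, which keeps the low heap-object tag bit clear;
// reading 10 shifts back down to 5. The 0xC0000000 DCHECK rejects any value
// whose top two bits differ, i.e. anything that would not survive the
// << 1 / >> 1 round trip with its sign intact.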


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction),
      BooleanBit::get(compiler_hints(), kStrongModeFunction));
}
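
// The two compiler-hint bits combine into one LanguageMode. A hedged sketch
// of the mapping implied by construct_language_mode (bit order: strict,
// strong; the exact enum values live elsewhere):
//
//   (false, false) -> sloppy
//   (true,  false) -> strict
//   (true,  true)  -> strong
//
// Strong mode implies strict mode, so the (false, true) combination should
// never be stored.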
|
|
|
|
|
|
|
|
|
2015-02-04 09:34:05 +00:00
|
|
|
void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
|
2015-02-05 14:11:34 +00:00
|
|
|
STATIC_ASSERT(LANGUAGE_END == 3);
|
2015-02-04 09:34:05 +00:00
|
|
|
// We only allow language mode transitions that set the same language mode
|
|
|
|
// again or go up in the chain:
|
|
|
|
DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
|
2011-11-24 15:17:04 +00:00
|
|
|
int hints = compiler_hints();
|
2015-02-04 09:34:05 +00:00
|
|
|
hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
|
2015-02-05 14:11:34 +00:00
|
|
|
hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
|
2011-11-24 15:17:04 +00:00
|
|
|
set_compiler_hints(hints);
|
2011-10-24 07:47:22 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-09-10 16:39:42 +00:00
|
|
|
FunctionKind SharedFunctionInfo::kind() {
|
|
|
|
return FunctionKindBits::decode(compiler_hints());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void SharedFunctionInfo::set_kind(FunctionKind kind) {
|
|
|
|
DCHECK(IsValidFunctionKind(kind));
|
|
|
|
int hints = compiler_hints();
|
|
|
|
hints = FunctionKindBits::update(hints, kind);
|
|
|
|
set_compiler_hints(hints);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-11-28 04:08:48 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_property,
|
|
|
|
kUsesSuperProperty)
|
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, uses_super_constructor_call,
|
|
|
|
kUsesSuperConstructorCall)
|
2011-08-08 16:14:46 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
|
2013-11-12 14:43:18 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
|
|
|
|
kInlineBuiltin)
|
2011-08-08 16:14:46 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
|
|
|
|
name_should_print_as_anonymous,
|
|
|
|
kNameShouldPrintAsAnonymous)
|
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
|
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
|
2012-02-14 14:14:51 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
|
2012-07-09 08:59:03 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
|
2013-05-13 10:59:00 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
|
2014-07-21 09:58:01 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
|
2014-09-10 16:39:42 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
|
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
|
|
|
|
kIsConciseMethod)
|
2015-02-05 23:34:16 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
|
|
|
|
kIsAccessorFunction)
|
2014-11-07 16:39:00 +00:00
|
|
|
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
|
|
|
|
kIsDefaultConstructor)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
ACCESSORS(CodeCache, weak_cell_cache, Object, kWeakCellCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)


bool Script::HasValidSource() {
  Object* src = this->source();
  if (!src->IsString()) return true;
  String* src_str = String::cast(src);
  if (!StringShape(src_str).IsExternal()) return true;
  if (src_str->IsOneByteRepresentation()) {
    return ExternalOneByteString::cast(src)->resource() != NULL;
  } else if (src_str->IsTwoByteRepresentation()) {
    return ExternalTwoByteString::cast(src)->resource() != NULL;
  }
  return true;
}


void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
      | (start_position_and_type() & ~kStartPositionMask));
}


Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}


void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);
}


ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}


bool SharedFunctionInfo::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}


bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}


bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}


BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}


int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}


bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
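

// Re-enables optimization with exponential backoff. For tries > 0,
// ((tries - 1) & tries) == 0 holds exactly when tries is a power of two
// (e.g. 16 & 15 == 0, 32 & 31 == 0), so the reset below fires on try
// 16, 32, 64, and so on.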
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}


bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::IsFromNativeScript() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  DCHECK(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}


bool JSFunction::IsFromExtensionScript() {
  Object* script = shared()->script();
  return script->IsScript() &&
         Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
         SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
         initial_map()->counter() >= Map::kSlackTrackingCounterEnd;
}


Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}


FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  DCHECK(!GetHeap()->InNewSpace(value));
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSCollection, table, Object, kTableOffset)
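

// The macro below stamps out a typed getter and a setter (with write
// barrier) for one field of OrderedHashTableIterator; it is instantiated
// for the table, index, and kind fields and #undef'd right after use.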
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)
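

// Continuation encoding: kGeneratorClosed is 0 and kGeneratorExecuting is
// negative (the DCHECKs in is_suspended() pin down this ordering), so any
// strictly positive continuation value is a suspended resume point.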
bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)


HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}


ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)


void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC.
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}


Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}


bool JSArrayBuffer::is_neuterable() {
  return BooleanBit::get(flag(), kIsNeuterableBit);
}


void JSArrayBuffer::set_is_neuterable(bool value) {
  set_flag(BooleanBit::set(flag(), kIsNeuterableBit, value));
}


ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)


ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}


ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)            \
  bool JSObject::HasExternal##Type##Elements() {                          \
    HeapObject* array = elements();                                       \
    DCHECK(array != NULL);                                                \
    if (!array->IsHeapObject())                                           \
      return false;                                                       \
    return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject())                                        \
      return false;                                                    \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}
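

// The hash field packs the hash value together with flag bits: the low bits
// record whether the hash has been computed at all (kHashNotComputedMask)
// and whether the string is an array index (kIsNotArrayIndexMask), so the
// hash proper is recovered with a shift by kHashShift.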
uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}

bool Name::IsOwn() {
  return this->IsSymbol() && Symbol::cast(this)->is_own();
}


StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}
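

// Finalization ("avalanche") steps of the Jenkins one-at-a-time hash. A
// result whose hash bits are all zero is remapped to the nonzero constant
// kZeroHash, so a valid computed hash is never zero.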
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
                                          const uc16* chars, int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    running_hash = AddCharacterCore(running_hash, *chars++);
  }
  return running_hash;
}


uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
                                                 const char* chars,
                                                 int length) {
  DCHECK_NOT_NULL(chars);
  DCHECK(length >= 0);
  for (int i = 0; i < length; ++i) {
    uint16_t c = static_cast<uint16_t>(*chars++);
    running_hash = AddCharacterCore(running_hash, c);
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}
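

// Incrementally parses the string as a decimal array index. The guard
// array_index_ > 429496729U - ((d + 2) >> 3) ensures that
// array_index_ * 10 + d still fits in 32 bits: 429496729 is
// floor(2^32 / 10), and ((d + 2) >> 3) is 1 exactly for digits d >= 6.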
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}


template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
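

// Hashes a string without flattening it first: String::VisitFlat feeds the
// flat content to the hasher and returns the remaining ConsString, or
// nullptr if the whole string was already flat.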
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}


void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized. We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}
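

// The Maybe<...> results below use an empty Maybe (has_value == false) to
// propagate failure (typically a pending exception) out of the proxy and
// attribute lookup machinery; callers must check has_value before reading
// value.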
Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}


Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}


bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}


Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}


Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


void ExecutableAccessorInfo::clear_setter() {
  auto foreign = GetIsolate()->factory()->NewForeign(
      reinterpret_cast<v8::internal::Address>(
          reinterpret_cast<intptr_t>(nullptr)));
  set_setter(*foreign);
}
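

// A dictionary entry occupies three consecutive FixedArray slots (key,
// value, and a PropertyDetails smi) starting at
// DerivedHashTable::EntryToIndex(entry); SetEntry below writes all three.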
|
|
|
|
|
|
|
|
|
2014-04-14 15:56:57 +00:00
|
|
|
template<typename Derived, typename Shape, typename Key>
|
|
|
|
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
|
2014-04-25 13:21:16 +00:00
|
|
|
Handle<Object> key,
|
|
|
|
Handle<Object> value) {
|
2011-04-11 11:38:34 +00:00
|
|
|
SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-04-14 15:56:57 +00:00
|
|
|
template<typename Derived, typename Shape, typename Key>
|
|
|
|
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
|
2014-04-25 13:21:16 +00:00
|
|
|
Handle<Object> key,
|
|
|
|
Handle<Object> value,
|
2014-04-14 15:56:57 +00:00
|
|
|
PropertyDetails details) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(!key->IsName() ||
|
2012-08-27 13:47:34 +00:00
|
|
|
details.IsDeleted() ||
|
|
|
|
details.dictionary_index() > 0);
|
2014-04-14 15:56:57 +00:00
|
|
|
int index = DerivedHashTable::EntryToIndex(entry);
|
2013-06-03 15:32:22 +00:00
|
|
|
DisallowHeapAllocation no_gc;
|
2010-01-29 11:46:55 +00:00
|
|
|
WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
|
2014-04-25 13:21:16 +00:00
|
|
|
FixedArray::set(index, *key, mode);
|
|
|
|
FixedArray::set(index+1, *value, mode);
|
2011-10-21 10:32:38 +00:00
|
|
|
FixedArray::set(index+2, details.AsSmi());
|
2008-07-03 15:10:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(other->IsNumber());
|
2011-03-18 20:35:07 +00:00
|
|
|
return key == static_cast<uint32_t>(other->Number());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
|
|
|
|
return ComputeIntegerHash(key, 0);
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
|
|
|
|
Object* other) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(other->IsNumber());
|
2012-01-16 09:44:35 +00:00
|
|
|
return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
|
2011-03-18 20:35:07 +00:00
|
|
|
}
|
|
|
|
|
2014-04-25 13:50:19 +00:00
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
|
2012-01-10 12:58:41 +00:00
|
|
|
return ComputeIntegerHash(key, seed);
|
|
|
|
}
|
|
|
|
|
2014-04-25 13:50:19 +00:00
|
|
|
|
2012-01-16 09:44:35 +00:00
|
|
|
uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
|
|
|
|
uint32_t seed,
|
|
|
|
Object* other) {
|
2014-08-04 11:34:54 +00:00
|
|
|
DCHECK(other->IsNumber());
|
2012-01-10 12:58:41 +00:00
|
|
|
return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
|
|
|
|
}
|
2011-03-18 20:35:07 +00:00
|
|
|
|
|
|
|
|
2014-04-24 08:55:31 +00:00
|
|
|
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
|
|
|
|
return isolate->factory()->NewNumberFromUint(key);
|
|
|
|
}
|
|
|
|
|
2011-03-18 20:35:07 +00:00
|
|
|
|
2014-04-24 14:59:09 +00:00
|
|
|
bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
|
2011-03-18 20:35:07 +00:00
|
|
|
// We know that all entries in a hash table had their hash keys created.
|
|
|
|
// Use that knowledge to have fast failure.
|
2013-03-04 15:00:57 +00:00
|
|
|
if (key->Hash() != Name::cast(other)->Hash()) return false;
|
|
|
|
return key->Equals(Name::cast(other));
|
2011-03-18 20:35:07 +00:00
|
|
|
}


uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
  return Name::cast(other)->Hash();
}


Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}


Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}


bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
  return Smi::cast(key->GetHash())->value();
}


uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
                                             Object* other) {
  return Smi::cast(other->GetHash())->value();
}
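
// Both hash functions read the object's identity hash and assume it
// already exists; GetHash() yields a Smi only for an object that has been
// hashed before, so callers are expected to have forced hash creation
// (e.g. via Object::GetOrCreateHash) before probing or inserting.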


Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}


Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
  intptr_t hash = reinterpret_cast<intptr_t>(*key);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}
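
// The hash is the key's address truncated to 32 bits, i.e. pointer
// identity. Illustrative caveat: this is consistent with the
// SameValue-based IsMatch above only when keys are distinguished by
// identity, as the heap objects stored here are; two distinct but
// value-equal keys would hash to different buckets.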


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}


template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK(max_slack >= 0);
  if (old_size < 4) return Min(max_slack, 1);
  return Min(max_slack, old_size / 2);
}
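
// Worked examples (illustrative):
//
//   SlackForArraySize(2, 1024)   -> Min(1022, 1) == 1  // tiny: one slot
//   SlackForArraySize(10, 1024)  -> Min(1014, 5) == 5  // half the old size
//   SlackForArraySize(10, 12)    -> Min(2, 5)    == 2  // capped by limit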


void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
  DCHECK(array->HasFastSmiOrObjectElements());
  Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(array, required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(array, required_size);
  }
}
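
// Growth sketch (illustrative): required_size + (required_size >> 3)
// leaves 12.5% slack, so a request for 80 elements allocates 90 slots.
// The second branch re-expands even when capacity is already sufficient,
// purely to move the backing store back into new space; that move is the
// performance benefit the comment above refers to.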


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  DCHECK(result == !HasExternalArrayElements());
  return result;
}


void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
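
// Illustrative trace: with a stored count of 3, a delta of -5 yields
// new_count == -2, so the update is skipped and the shared-code case
// described above simply keeps the old count. The `&= kMask` truncates an
// oversized count to the width of its bit field before re-encoding.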


int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}


void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
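
// The two clamping lines force the packed word back into Smi range by
// folding away the out-of-range high bits; the checksum and count fields
// occupy the low-order bits, so no field data is lost in the adjustment.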


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template <typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalOneByteStringResource Resource;
  v->VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalOneByteString::ExternalOneByteStringIterateBody() {
  typedef v8::String::ExternalOneByteStringResource Resource;
  StaticVisitor::VisitExternalOneByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template <typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
                                                    int start_offset,
                                                    int end_offset,
                                                    ObjectVisitor* v) {
  DCHECK(FLAG_unbox_double_fields);
  DCHECK(IsAligned(start_offset, kPointerSize) &&
         IsAligned(end_offset, kPointerSize));

  LayoutDescriptorHelper helper(object->map());
  DCHECK(!helper.all_fields_tagged());

  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
    // Visit all tagged fields.
    if (helper.IsTagged(offset)) {
      v->VisitPointer(HeapObject::RawField(object, offset));
    }
  }
}
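
// Illustrative example: on a 32-bit target an unboxed double field spans
// two pointer-sized words, say offsets 8 and 12. Given
//
//   offset  4:     tagged pointer
//   offsets 8, 12: unboxed double
//   offset 16:     tagged pointer
//
// the loop visits offsets 4 and 16 and skips 8 and 12, where
// helper.IsTagged() is false, so the raw double bits are never handed to
// the visitor as if they were pointers.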


template <int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, end_offset));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
  }
}


template <int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) {
    v->VisitPointers(HeapObject::RawField(obj, start_offset),
                     HeapObject::RawField(obj, object_size));
  } else {
    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
  }
}


template <class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}


void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}
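
// Taken together (illustrative): for a map entry {k -> v}, JSMapIterator
// fills array[0] = k and array[1] = v, matching the [key, value] pairs
// JavaScript map iteration exposes, while JSSetIterator fills only
// array[0] with the element itself.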


class String::SubStringRange::iterator FINAL {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  uc16 operator*() { return content_.Get(offset_); }
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};


String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}


String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}
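
// Illustrative usage (assuming the SubStringRange constructor takes the
// string plus a start index and length):
//
//   String::SubStringRange range(string, first, length);
//   for (uc16 c : range) {
//     // ... consume one UTF-16 code unit ...
//   }
//
// The iterators cache String::FlatContent, so the range must not be used
// across anything that can allocate and move the underlying string.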


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
#undef NOBARRIER_SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef NOBARRIER_READ_FIELD
#undef WRITE_FIELD
#undef NOBARRIER_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD
#undef NOBARRIER_READ_BYTE_FIELD
#undef NOBARRIER_WRITE_BYTE_FIELD

} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_