Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/nodejs/node.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/objects/map-inl.h')
-rw-r--r--deps/v8/src/objects/map-inl.h45
1 file changed, 37 insertions, 8 deletions
diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h
index 05d24169966..59f061dc050 100644
--- a/deps/v8/src/objects/map-inl.h
+++ b/deps/v8/src/objects/map-inl.h
@@ -6,7 +6,6 @@
#define V8_OBJECTS_MAP_INL_H_
#include "src/objects/map.h"
-
#include "src/field-type.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
@@ -298,6 +297,17 @@ int Map::UnusedPropertyFields() const {
return unused;
}
+int Map::UnusedInObjectProperties() const {
+ // Like Map::UnusedPropertyFields(), but returns 0 for out of object
+ // properties.
+ int value = used_or_unused_instance_size_in_words();
+ DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
+ if (value >= JSObject::kFieldsAdded) {
+ return instance_size_in_words() - value;
+ }
+ return 0;
+}
+
int Map::used_or_unused_instance_size_in_words() const {
return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}
@@ -349,6 +359,17 @@ void Map::CopyUnusedPropertyFields(Map* map) {
DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}
+void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map* map) {
+ int value = map->used_or_unused_instance_size_in_words();
+ if (value >= JSValue::kFieldsAdded) {
+  // Unused in-object fields. Adjust the offset from the object's start
+  // so it matches the distance to the object's end.
+ value += instance_size_in_words() - map->instance_size_in_words();
+ }
+ set_used_or_unused_instance_size_in_words(value);
+ DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
+}
+
void Map::AccountAddedPropertyField() {
// Update used instance size and unused property fields number.
STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
@@ -503,6 +524,17 @@ bool Map::CanTransition() const {
bool Map::IsBooleanMap() const {
return this == GetReadOnlyRoots().boolean_map();
}
+
+bool Map::IsNullMap() const { return this == GetReadOnlyRoots().null_map(); }
+
+bool Map::IsUndefinedMap() const {
+ return this == GetReadOnlyRoots().undefined_map();
+}
+
+bool Map::IsNullOrUndefinedMap() const {
+ return IsNullMap() || IsUndefinedMap();
+}
+
bool Map::IsPrimitiveMap() const {
return instance_type() <= LAST_PRIMITIVE_TYPE;
}
@@ -536,8 +568,7 @@ Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
DCHECK(value->IsNull() || value->IsJSReceiver());
WRITE_FIELD(this, kPrototypeOffset, value);
- CONDITIONAL_WRITE_BARRIER(Heap::FromWritableHeapObject(this), this,
- kPrototypeOffset, value, mode);
+ CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, value, mode);
}
LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
@@ -657,8 +688,7 @@ Object* Map::prototype_info() const {
void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
CHECK(is_prototype_map());
WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
- CONDITIONAL_WRITE_BARRIER(Heap::FromWritableHeapObject(this), this,
- Map::kTransitionsOrPrototypeInfoOffset, value,
+ CONDITIONAL_WRITE_BARRIER(this, Map::kTransitionsOrPrototypeInfoOffset, value,
mode);
}
@@ -672,7 +702,6 @@ void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
}
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
-ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
kConstructorOrBackPointerOffset)
@@ -746,8 +775,8 @@ int NormalizedMapCache::GetIndex(Handle<Map> map) {
}
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
- if (!obj->IsFixedArray()) return false;
- if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
+ if (!obj->IsWeakFixedArray()) return false;
+ if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
return false;
}
#ifdef VERIFY_HEAP