
github.com/nodejs/node.git
path: root/deps
author    isaacs <i@izs.me>  2012-03-13 08:46:36 +0400
committer isaacs <i@izs.me>  2012-03-13 08:46:36 +0400
commit    e4fc2cbfd354d8ad74c465e508531d92de2b4d52 (patch)
tree      ce1e20dddced61d5b56750743ddb7304846f537a /deps
parent    bcb0cc0b184c61313541002e504948ed21f59dad (diff)
Upgrade v8 to 3.9.17
Diffstat (limited to 'deps')
-rw-r--r--  deps/v8/ChangeLog | 80
-rw-r--r--  deps/v8/Makefile | 4
-rw-r--r--  deps/v8/SConstruct | 7
-rw-r--r--  deps/v8/build/common.gypi | 3
-rw-r--r--  deps/v8/include/v8-profiler.h | 11
-rw-r--r--  deps/v8/include/v8.h | 23
-rwxr-xr-x  deps/v8/src/SConscript | 2
-rw-r--r--  deps/v8/src/allocation.h | 2
-rw-r--r--  deps/v8/src/api.cc | 28
-rw-r--r--  deps/v8/src/arm/assembler-arm.cc | 10
-rw-r--r--  deps/v8/src/arm/builtins-arm.cc | 7
-rw-r--r--  deps/v8/src/arm/code-stubs-arm.cc | 78
-rw-r--r--  deps/v8/src/arm/codegen-arm.cc | 13
-rw-r--r--  deps/v8/src/arm/deoptimizer-arm.cc | 126
-rw-r--r--  deps/v8/src/arm/full-codegen-arm.cc | 69
-rw-r--r--  deps/v8/src/arm/ic-arm.cc | 8
-rw-r--r--  deps/v8/src/arm/lithium-arm.cc | 443
-rw-r--r--  deps/v8/src/arm/lithium-arm.h | 55
-rw-r--r--  deps/v8/src/arm/lithium-codegen-arm.cc | 192
-rw-r--r--  deps/v8/src/arm/lithium-codegen-arm.h | 2
-rw-r--r--  deps/v8/src/arm/regexp-macro-assembler-arm.cc | 2
-rw-r--r--  deps/v8/src/arm/stub-cache-arm.cc | 100
-rw-r--r--  deps/v8/src/assembler.cc | 11
-rw-r--r--  deps/v8/src/assembler.h | 3
-rw-r--r--  deps/v8/src/ast.cc | 131
-rw-r--r--  deps/v8/src/ast.h | 188
-rw-r--r--  deps/v8/src/bootstrapper.cc | 2
-rw-r--r--  deps/v8/src/builtins.cc | 56
-rw-r--r--  deps/v8/src/codegen.h | 9
-rw-r--r--  deps/v8/src/compiler.cc | 2
-rw-r--r--  deps/v8/src/d8.gyp | 3
-rw-r--r--  deps/v8/src/date.cc | 384
-rw-r--r--  deps/v8/src/date.h | 260
-rw-r--r--  deps/v8/src/date.js | 669
-rw-r--r--  deps/v8/src/debug-debugger.js | 5
-rw-r--r--  deps/v8/src/deoptimizer.cc | 40
-rw-r--r--  deps/v8/src/deoptimizer.h | 23
-rw-r--r--  deps/v8/src/elements.cc | 659
-rw-r--r--  deps/v8/src/elements.h | 88
-rw-r--r--  deps/v8/src/execution.cc | 12
-rw-r--r--  deps/v8/src/execution.h | 9
-rw-r--r--  deps/v8/src/flag-definitions.h | 28
-rw-r--r--  deps/v8/src/frames-inl.h | 2
-rw-r--r--  deps/v8/src/frames.cc | 49
-rw-r--r--  deps/v8/src/frames.h | 5
-rw-r--r--  deps/v8/src/full-codegen.cc | 73
-rw-r--r--  deps/v8/src/full-codegen.h | 3
-rw-r--r--  deps/v8/src/global-handles.cc | 3
-rw-r--r--  deps/v8/src/global-handles.h | 8
-rw-r--r--  deps/v8/src/globals.h | 16
-rw-r--r--  deps/v8/src/heap-inl.h | 9
-rw-r--r--  deps/v8/src/heap.cc | 72
-rw-r--r--  deps/v8/src/heap.h | 20
-rw-r--r--  deps/v8/src/hydrogen-instructions.cc | 168
-rw-r--r--  deps/v8/src/hydrogen-instructions.h | 155
-rw-r--r--  deps/v8/src/hydrogen.cc | 483
-rw-r--r--  deps/v8/src/hydrogen.h | 80
-rw-r--r--  deps/v8/src/ia32/builtins-ia32.cc | 7
-rw-r--r--  deps/v8/src/ia32/code-stubs-ia32.cc | 135
-rw-r--r--  deps/v8/src/ia32/code-stubs-ia32.h | 3
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.cc | 48
-rw-r--r--  deps/v8/src/ia32/deoptimizer-ia32.cc | 139
-rw-r--r--  deps/v8/src/ia32/full-codegen-ia32.cc | 158
-rw-r--r--  deps/v8/src/ia32/lithium-codegen-ia32.cc | 208
-rw-r--r--  deps/v8/src/ia32/lithium-codegen-ia32.h | 2
-rw-r--r--  deps/v8/src/ia32/lithium-ia32.cc | 60
-rw-r--r--  deps/v8/src/ia32/lithium-ia32.h | 42
-rw-r--r--  deps/v8/src/ia32/regexp-macro-assembler-ia32.cc | 2
-rw-r--r--  deps/v8/src/ia32/stub-cache-ia32.cc | 96
-rw-r--r--  deps/v8/src/ic.cc | 16
-rw-r--r--  deps/v8/src/interface.cc | 226
-rw-r--r--  deps/v8/src/interface.h | 156
-rw-r--r--  deps/v8/src/isolate.cc | 8
-rw-r--r--  deps/v8/src/isolate.h | 15
-rw-r--r--  deps/v8/src/jsregexp.cc | 12
-rw-r--r--  deps/v8/src/jsregexp.h | 4
-rw-r--r--  deps/v8/src/lithium-allocator.cc | 16
-rw-r--r--  deps/v8/src/lithium.h | 9
-rw-r--r--  deps/v8/src/macros.py | 40
-rw-r--r--  deps/v8/src/mark-compact-inl.h | 4
-rw-r--r--  deps/v8/src/mark-compact.cc | 4
-rw-r--r--  deps/v8/src/mark-compact.h | 9
-rw-r--r--  deps/v8/src/messages.js | 30
-rw-r--r--  deps/v8/src/mips/builtins-mips.cc | 19
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc | 53
-rw-r--r--  deps/v8/src/mips/codegen-mips.cc | 13
-rw-r--r--  deps/v8/src/mips/deoptimizer-mips.cc | 157
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc | 78
-rw-r--r--  deps/v8/src/mips/ic-mips.cc | 13
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.cc | 236
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.h | 2
-rw-r--r--  deps/v8/src/mips/lithium-mips.cc | 461
-rw-r--r--  deps/v8/src/mips/lithium-mips.h | 78
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.cc | 42
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.h | 4
-rw-r--r--  deps/v8/src/mips/regexp-macro-assembler-mips.cc | 2
-rw-r--r--  deps/v8/src/mips/stub-cache-mips.cc | 131
-rw-r--r--  deps/v8/src/objects-debug.cc | 50
-rw-r--r--  deps/v8/src/objects-inl.h | 29
-rw-r--r--  deps/v8/src/objects-printer.cc | 27
-rw-r--r--  deps/v8/src/objects-visiting.cc | 1
-rw-r--r--  deps/v8/src/objects.cc | 657
-rw-r--r--  deps/v8/src/objects.h | 176
-rw-r--r--  deps/v8/src/parser.cc | 431
-rw-r--r--  deps/v8/src/parser.h | 23
-rw-r--r--  deps/v8/src/platform-linux.cc | 16
-rw-r--r--  deps/v8/src/platform-nullos.cc | 24
-rw-r--r--  deps/v8/src/platform-posix.cc | 24
-rw-r--r--  deps/v8/src/platform-win32.cc | 37
-rw-r--r--  deps/v8/src/platform.h | 6
-rw-r--r--  deps/v8/src/prettyprinter.cc | 67
-rw-r--r--  deps/v8/src/profile-generator-inl.h | 6
-rw-r--r--  deps/v8/src/profile-generator.cc | 282
-rw-r--r--  deps/v8/src/profile-generator.h | 95
-rw-r--r--  deps/v8/src/property-details.h | 20
-rw-r--r--  deps/v8/src/property.h | 41
-rw-r--r--  deps/v8/src/regexp.js | 81
-rw-r--r--  deps/v8/src/rewriter.cc | 3
-rw-r--r--  deps/v8/src/runtime-profiler.cc | 3
-rw-r--r--  deps/v8/src/runtime.cc | 526
-rw-r--r--  deps/v8/src/runtime.h | 8
-rw-r--r--  deps/v8/src/scopes.cc | 89
-rw-r--r--  deps/v8/src/scopes.h | 57
-rw-r--r--  deps/v8/src/serialize.cc | 20
-rw-r--r--  deps/v8/src/spaces.cc | 4
-rw-r--r--  deps/v8/src/spaces.h | 5
-rw-r--r--  deps/v8/src/strtod.cc | 12
-rw-r--r--  deps/v8/src/stub-cache.cc | 30
-rw-r--r--  deps/v8/src/stub-cache.h | 40
-rw-r--r--  deps/v8/src/token.h | 4
-rw-r--r--  deps/v8/src/type-info.cc | 19
-rw-r--r--  deps/v8/src/type-info.h | 4
-rw-r--r--  deps/v8/src/unicode.cc | 1302
-rw-r--r--  deps/v8/src/v8-counters.h | 3
-rw-r--r--  deps/v8/src/v8.cc | 20
-rw-r--r--  deps/v8/src/v8.h | 3
-rw-r--r--  deps/v8/src/v8natives.js | 69
-rw-r--r--  deps/v8/src/variables.cc | 6
-rw-r--r--  deps/v8/src/variables.h | 8
-rw-r--r--  deps/v8/src/version.cc | 2
-rw-r--r--  deps/v8/src/win32-headers.h | 3
-rw-r--r--  deps/v8/src/x64/builtins-x64.cc | 134
-rw-r--r--  deps/v8/src/x64/code-stubs-x64.cc | 102
-rw-r--r--  deps/v8/src/x64/code-stubs-x64.h | 3
-rw-r--r--  deps/v8/src/x64/codegen-x64.cc | 70
-rw-r--r--  deps/v8/src/x64/deoptimizer-x64.cc | 119
-rw-r--r--  deps/v8/src/x64/full-codegen-x64.cc | 70
-rw-r--r--  deps/v8/src/x64/lithium-codegen-x64.cc | 214
-rw-r--r--  deps/v8/src/x64/lithium-codegen-x64.h | 2
-rw-r--r--  deps/v8/src/x64/lithium-x64.cc | 435
-rw-r--r--  deps/v8/src/x64/lithium-x64.h | 34
-rw-r--r--  deps/v8/src/x64/regexp-macro-assembler-x64.cc | 2
-rw-r--r--  deps/v8/src/x64/stub-cache-x64.cc | 60
-rw-r--r--  deps/v8/test/cctest/SConscript | 4
-rw-r--r--  deps/v8/test/cctest/cctest.gyp | 4
-rw-r--r--  deps/v8/test/cctest/test-api.cc | 90
-rw-r--r--  deps/v8/test/cctest/test-date.cc | 168
-rw-r--r--  deps/v8/test/cctest/test-debug.cc | 20
-rw-r--r--  deps/v8/test/cctest/test-deoptimization.cc | 3
-rw-r--r--  deps/v8/test/cctest/test-heap-profiler.cc | 83
-rw-r--r--  deps/v8/test/cctest/test-heap.cc | 16
-rw-r--r--  deps/v8/test/cctest/test-random.cc | 109
-rw-r--r--  deps/v8/test/mjsunit/comparison-ops-and-undefined.js | 128
-rw-r--r--  deps/v8/test/mjsunit/compiler/alloc-object-huge.js | 308
-rw-r--r--  deps/v8/test/mjsunit/compiler/alloc-object.js | 90
-rw-r--r--  deps/v8/test/mjsunit/compiler/inline-construct.js | 152
-rw-r--r--  deps/v8/test/mjsunit/compiler/optimized-for-in.js | 16
-rw-r--r--  deps/v8/test/mjsunit/compiler/regress-toint32.js | 45
-rw-r--r--  deps/v8/test/mjsunit/date.js | 6
-rw-r--r--  deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js | 17
-rw-r--r--  deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js | 17
-rw-r--r--  deps/v8/test/mjsunit/debug-stepin-accessor.js | 4
-rw-r--r--  deps/v8/test/mjsunit/function-call.js | 6
-rw-r--r--  deps/v8/test/mjsunit/fuzz-natives.js | 5
-rw-r--r--  deps/v8/test/mjsunit/harmony/module-parsing.js | 89
-rw-r--r--  deps/v8/test/mjsunit/harmony/module-resolution.js | 139
-rw-r--r--  deps/v8/test/mjsunit/mjsunit.js | 2
-rw-r--r--  deps/v8/test/mjsunit/number-is.js | 58
-rw-r--r--  deps/v8/test/mjsunit/object-define-property.js | 14
-rw-r--r--  deps/v8/test/mjsunit/object-is.js | 47
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-102153.js | 57
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-1229.js | 2
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-1853.js | 116
-rw-r--r--  deps/v8/test/mjsunit/regress/regress-1980.js | 40
-rw-r--r--  deps/v8/test/test262/test262.status | 12
-rw-r--r--  deps/v8/test/test262/testcfg.py | 28
-rw-r--r--  deps/v8/tools/common-includes.sh | 194
-rw-r--r--  deps/v8/tools/gen-postmortem-metadata.py | 3
-rw-r--r--  deps/v8/tools/gyp/v8.gyp | 4
-rw-r--r--  deps/v8/tools/jsmin.py | 4
-rw-r--r--  deps/v8/tools/merge-to-branch.sh | 208
-rwxr-xr-x  deps/v8/tools/push-to-trunk.sh | 353
-rwxr-xr-x  deps/v8/tools/test-wrapper-gypbuild.py | 4
-rwxr-xr-x  deps/v8/tools/test.py | 35
194 files changed, 11152 insertions, 4857 deletions
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 8922db6400d..9ba13624ae0 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,6 +1,74 @@
+2012-03-12: Version 3.9.17
+
+ Fixed VFP detection through compiler defines. (issue 1996)
+
+ Add Code-related fields to postmortem metadata.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-09: Version 3.9.16
+
+ Added basic interface inference for modules (behind the --harmony flag).
+
+ Added Object.is, Number.isFinite, Number.isNaN.
+
+ Updated the Unicode tables to Unicode version 6.1.0.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-06: Version 3.9.15
+
+ Fix the heap profiler crash caused by memory layout changes between
+ passes.
+
+ Fix Error.prototype.toString to throw TypeError. (issue 1980)
+
+ Fix double-rounding in strtod for MinGW. (issue 1062)
+
+ Fix corrupted snapshot serializaton on ia32. (Chromium issue v8/1985)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-01: Version 3.9.14
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-29: Version 3.9.13
+
+ Added code kind check before preparing for OSR. (issue 1900, 115073)
+
+ Fixed issue 1802: Pass zone explicitly to zone-allocation on x64 and
+ ARM.
+
+ Ported string construct stub to x64. (issue 849)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-28: Version 3.9.12
+
+ Fixed the negative lookup stub to handle deleted entries in a
+ dictionary. (issue 1964)
+
+ Added a new API where the host can supply a callback function. The
+ callback function can resolve the location of a return address on stack
+ to the location where a return-address rewriting profiler stashed the
+ original return address.
+
+ Fixed Chromium issue http://crbug.com/115646: When compiling for-in
+ pass correct context value to the increment instruction.
+
+ Fixed issue 1853: Update breakpoints set with partial file name after
+ compile.
+
+
2012-02-27: Version 3.9.11
- Make 'module' a context-sensitive keyword (V8 issue 1957).
+ Made 'module' a context-sensitive keyword (V8 issue 1957).
2012-02-24: Version 3.9.10
@@ -55,11 +123,11 @@
2012-02-14: Version 3.9.6
- Fix template-related linker error. (issue 1936)
+ Fixed template-related linker error. (issue 1936)
- Allow inlining of functions containing object literals. (issue 1322)
+ Allowed inlining of functions containing object literals. (issue 1322)
- Add --call-graph-size option to tickprocessor. (issue 1937)
+ Added --call-graph-size option to tickprocessor. (issue 1937)
Heap Snapshot maximum size limit is too low for really big apps. At the
moment the limit is 256MB. (Chromium issue 113015)
@@ -93,7 +161,7 @@
2012-02-06: Version 3.9.2
- Add timestamp to --trace-gc output. (issue 1932)
+ Added timestamp to --trace-gc output. (issue 1932)
Heap profiler reports implicit references.
@@ -115,7 +183,7 @@
2012-02-01: Version 3.9.0
- Reduce memory use immediately after starting V8.
+ Reduced memory use immediately after starting V8.
Stability fixes and performance improvements on all platforms.
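
The 3.9.16 entry above introduces Object.is, Number.isFinite and Number.isNaN (covered by the new mjsunit tests object-is.js and number-is.js in the diffstat). A minimal sketch of what the new builtins return, written against the classic pre-isolate C++ embedding API of this era; the script source and expected output are illustrative and not part of the patch:

    #include <v8.h>
    #include <cstdio>

    int main() {
      v8::HandleScope scope;
      v8::Persistent<v8::Context> context = v8::Context::New();
      v8::Context::Scope context_scope(context);

      // Object.is is like === except that NaN compares equal to itself
      // and +0 is distinguished from -0; Number.isFinite/isNaN do not
      // coerce their argument the way the global functions do.
      const char* src =
          "[Object.is(NaN, NaN),"      // true
          " Object.is(0, -0),"         // false
          " Number.isFinite('42'),"    // false: strings are not coerced
          " Number.isNaN('foo')"       // false: only a real NaN qualifies
          "].join(',')";

      v8::Handle<v8::Script> script =
          v8::Script::Compile(v8::String::New(src));
      v8::String::Utf8Value result(script->Run());
      printf("%s\n", *result);  // expected: true,false,false,false

      context.Dispose();
      return 0;
    }
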
diff --git a/deps/v8/Makefile b/deps/v8/Makefile
index 73e84216b4d..5dc6ca5ad60 100644
--- a/deps/v8/Makefile
+++ b/deps/v8/Makefile
@@ -75,6 +75,10 @@ ifeq ($(vfp3), off)
else
GYPFLAGS += -Dv8_can_use_vfp_instructions=true
endif
+# debuggersupport=off
+ifeq ($(debuggersupport), off)
+ GYPFLAGS += -Dv8_enable_debugger_support=0
+endif
# soname_version=1.2.3
ifdef soname_version
GYPFLAGS += -Dsoname_version=$(soname_version)
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 84863e38caf..bfa53a7af40 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -300,7 +300,12 @@ V8_EXTRA_FLAGS = {
'-Wnon-virtual-dtor']
},
'os:win32': {
- 'WARNINGFLAGS': ['-pedantic', '-Wno-long-long', '-Wno-pedantic-ms-format']
+ 'WARNINGFLAGS': ['-pedantic',
+ '-Wno-long-long',
+ '-Wno-pedantic-ms-format'],
+ 'library:shared': {
+ 'LIBS': ['winmm', 'ws2_32']
+ }
},
'os:linux': {
'WARNINGFLAGS': ['-pedantic'],
diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi
index 6e1cbea7a10..9976d25047a 100644
--- a/deps/v8/build/common.gypi
+++ b/deps/v8/build/common.gypi
@@ -305,7 +305,7 @@
'cflags': [ '-I/usr/pkg/include' ],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', {
- 'cflags': [ '-Wno-unused-parameter',
+ 'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor', '-Woverloaded-virtual' ],
}],
],
@@ -352,6 +352,7 @@
}], # OS=="mac"
['OS=="win"', {
'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
'CharacterSet': '1',
},
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index 5a3a40ff6f6..2499bbf050e 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -284,14 +284,8 @@ class V8EXPORT HeapGraphNode {
* the objects that are reachable only from this object. In other
* words, the size of memory that will be reclaimed having this node
* collected.
- *
- * Exact retained size calculation has O(N) (number of nodes)
- * computational complexity, while approximate has O(1). It is
- * assumed that initially heap profiling tools provide approximate
- * sizes for all nodes, and then exact sizes are calculated for the
- * most 'interesting' nodes.
*/
- int GetRetainedSize(bool exact) const;
+ int GetRetainedSize() const;
/** Returns child nodes count of the node. */
int GetChildrenCount() const;
@@ -436,6 +430,9 @@ class V8EXPORT HeapProfiler {
* handle.
*/
static const uint16_t kPersistentHandleNoClassId = 0;
+
+ /** Returns the number of currently existing persistent handles. */
+ static int GetPersistentHandleCount();
};
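
The two v8-profiler.h hunks above drop the bool argument from HeapGraphNode::GetRetainedSize() (retained sizes are now computed up front when the snapshot is built; see the profile-generator changes in the diffstat) and add HeapProfiler::GetPersistentHandleCount(). A hedged sketch of an embedder calling both; only the two signatures come from this patch, while the snapshot title and the surrounding setup are assumptions:

    #include <v8.h>
    #include <v8-profiler.h>
    #include <cstdio>

    // Print the root node's retained size and the number of live
    // persistent handles. Assumes V8 is initialized and a context is
    // entered; "dump" is an arbitrary snapshot title for this sketch.
    void DumpHeapStats() {
      v8::HandleScope scope;
      const v8::HeapSnapshot* snapshot =
          v8::HeapProfiler::TakeSnapshot(v8::String::New("dump"));
      const v8::HeapGraphNode* root = snapshot->GetRoot();

      // No 'bool exact' parameter any more.
      printf("root retained size: %d bytes\n", root->GetRetainedSize());
      printf("persistent handles: %d\n",
             v8::HeapProfiler::GetPersistentHandleCount());
    }
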
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index a29cd9f718d..e4037b9ac6d 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -2858,6 +2858,20 @@ typedef bool (*EntropySource)(unsigned char* buffer, size_t length);
/**
+ * ReturnAddressLocationResolver is used as a callback function when v8 is
+ * resolving the location of a return address on the stack. Profilers that
+ * change the return address on the stack can use this to resolve the stack
+ * location to whereever the profiler stashed the original return address.
+ * When invoked, return_addr_location will point to a location on stack where
+ * a machine return address resides, this function should return either the
+ * same pointer, or a pointer to the profiler's copy of the original return
+ * address.
+ */
+typedef uintptr_t (*ReturnAddressLocationResolver)(
+ uintptr_t return_addr_location);
+
+
+/**
* Interface for iterating though all external resources in the heap.
*/
class V8EXPORT ExternalResourceVisitor { // NOLINT
@@ -3111,6 +3125,13 @@ class V8EXPORT V8 {
static void SetEntropySource(EntropySource source);
/**
+ * Allows the host application to provide a callback that allows v8 to
+ * cooperate with a profiler that rewrites return addresses on stack.
+ */
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver return_address_resolver);
+
+ /**
* Adjusts the amount of registered external memory. Used to give
* V8 an indication of the amount of externally allocated memory
* that is kept alive by JavaScript objects. V8 uses this to decide
@@ -3850,7 +3871,7 @@ class Internals {
static const int kFullStringRepresentationMask = 0x07;
static const int kExternalTwoByteRepresentationTag = 0x02;
- static const int kJSObjectType = 0xa9;
+ static const int kJSObjectType = 0xaa;
static const int kFirstNonstringType = 0x80;
static const int kForeignType = 0x85;
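
The v8.h hunks above add the ReturnAddressLocationResolver callback and V8::SetReturnAddressLocationResolver() (wired up in api.cc below). A hedged sketch of a resolver such as a return-address-rewriting profiler might register; the side table and its lookup are hypothetical, only the typedef and the setter come from this patch:

    #include <v8.h>
    #include <stdint.h>
    #include <map>

    // Hypothetical bookkeeping kept by a return-address-rewriting profiler:
    // address of a patched stack slot -> original return address.
    static std::map<uintptr_t, uintptr_t> g_stashed_return_addresses;

    // Called by V8 while walking the stack. It receives the address of a
    // stack slot holding a (possibly rewritten) return address and must
    // return either that same location or a location holding the original.
    static uintptr_t ResolveReturnAddress(uintptr_t return_addr_location) {
      std::map<uintptr_t, uintptr_t>::const_iterator it =
          g_stashed_return_addresses.find(return_addr_location);
      if (it == g_stashed_return_addresses.end())
        return return_addr_location;                     // not rewritten
      return reinterpret_cast<uintptr_t>(&it->second);   // profiler's copy
    }

    void InstallResolver() {
      v8::V8::SetReturnAddressLocationResolver(ResolveReturnAddress);
    }
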
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index 94840dc8b5d..fde7a80765d 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -59,6 +59,7 @@ SOURCES = {
counters.cc
cpu-profiler.cc
data-flow.cc
+ date.cc
dateparser.cc
debug-agent.cc
debug.cc
@@ -84,6 +85,7 @@ SOURCES = {
hydrogen-instructions.cc
ic.cc
incremental-marking.cc
+ interface.cc
inspector.cc
interpreter-irregexp.cc
isolate.cc
diff --git a/deps/v8/src/allocation.h b/deps/v8/src/allocation.h
index 69e72bdbad3..31067dda819 100644
--- a/deps/v8/src/allocation.h
+++ b/deps/v8/src/allocation.h
@@ -80,7 +80,7 @@ class AllStatic {
template <typename T>
-T* NewArray(int size) {
+T* NewArray(size_t size) {
T* result = new T[size];
if (result == NULL) Malloced::FatalProcessOutOfMemory();
return result;
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 775c884c593..d8c7ba0eaaf 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -525,7 +525,8 @@ Extension::Extension(const char* name,
int source_length)
: name_(name),
source_length_(source_length >= 0 ?
- source_length : (source ? strlen(source) : 0)),
+ source_length :
+ (source ? static_cast<int>(strlen(source)) : 0)),
source_(source, source_length_),
dep_count_(dep_count),
deps_(deps),
@@ -4026,6 +4027,12 @@ void v8::V8::SetEntropySource(EntropySource source) {
}
+void v8::V8::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver return_address_resolver) {
+ i::V8::SetReturnAddressLocationResolver(return_address_resolver);
+}
+
+
bool v8::V8::Dispose() {
i::Isolate* isolate = i::Isolate::Current();
if (!ApiCheck(isolate != NULL && isolate->IsDefaultIsolate(),
@@ -4728,8 +4735,8 @@ double v8::Date::NumberValue() const {
if (IsDeadCheck(isolate, "v8::Date::NumberValue()")) return 0;
LOG_API(isolate, "Date::NumberValue");
i::Handle<i::Object> obj = Utils::OpenHandle(this);
- i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj);
- return jsvalue->value()->Number();
+ i::Handle<i::JSDate> jsdate = i::Handle<i::JSDate>::cast(obj);
+ return jsdate->value()->Number();
}
@@ -4740,8 +4747,10 @@ void v8::Date::DateTimeConfigurationChangeNotification() {
LOG_API(isolate, "Date::DateTimeConfigurationChangeNotification");
ENTER_V8(isolate);
+ isolate->date_cache()->ResetDateCache();
+
i::HandleScope scope(isolate);
- // Get the function ResetDateCache (defined in date-delay.js).
+ // Get the function ResetDateCache (defined in date.js).
i::Handle<i::String> func_name_str =
isolate->factory()->LookupAsciiSymbol("ResetDateCache");
i::MaybeObject* result =
@@ -5867,10 +5876,10 @@ int HeapGraphNode::GetSelfSize() const {
}
-int HeapGraphNode::GetRetainedSize(bool exact) const {
+int HeapGraphNode::GetRetainedSize() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
- return ToInternal(this)->RetainedSize(exact);
+ return ToInternal(this)->retained_size();
}
@@ -5972,7 +5981,7 @@ const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById");
return reinterpret_cast<const HeapGraphNode*>(
- ToInternal(this)->GetEntryById(id));
+ ToInternal(this)->GetEntryById(static_cast<i::SnapshotObjectId>(id)));
}
@@ -6065,6 +6074,11 @@ void HeapProfiler::DefineWrapperClass(uint16_t class_id,
}
+int HeapProfiler::GetPersistentHandleCount() {
+ i::Isolate* isolate = i::Isolate::Current();
+ return isolate->global_handles()->NumberOfGlobalHandles();
+}
+
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc
index 25922361a26..ff15221119b 100644
--- a/deps/v8/src/arm/assembler-arm.cc
+++ b/deps/v8/src/arm/assembler-arm.cc
@@ -66,11 +66,13 @@ static uint64_t CpuFeaturesImpliedByCompiler() {
#ifdef __arm__
// If the compiler is allowed to use VFP then we can use VFP too in our code
- // generation even when generating snapshots. This won't work for cross
- // compilation. VFPv3 implies ARMv7, see ARM DDI 0406B, page A1-6.
-#if defined(__VFP_FP__) && !defined(__SOFTFP__)
+ // generation even when generating snapshots. ARMv7 and hardware floating
+ // point support implies VFPv3, see ARM DDI 0406B, page A1-6.
+#if defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(__VFP_FP__) \
+ && !defined(__SOFTFP__)
answer |= 1u << VFP3 | 1u << ARMv7;
-#endif // defined(__VFP_FP__) && !defined(__SOFTFP__)
+#endif // defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(__VFP_FP__)
+ // && !defined(__SOFTFP__)
#endif // def __arm__
return answer;
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index 993addca9ee..c99e778a7f0 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -978,6 +978,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
// r0: result
// sp[0]: receiver
@@ -1740,7 +1745,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&invoke);
__ Call(r3);
+ // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ Jump(lr);
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index 62e6c80271e..250f020a624 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -5930,8 +5930,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&sliced_string);
// Sliced string. Fetch parent and correct start index by offset.
- __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
// Update instance type.
__ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
@@ -5969,8 +5969,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ AllocateTwoByteSlicedString(r0, r2, r6, r7, &runtime);
__ bind(&set_slice_header);
__ mov(r3, Operand(r3, LSL, 1));
- __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ jmp(&return_r0);
__ bind(&copy_routine);
@@ -6560,15 +6560,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ and_(r2, r1, Operand(r0));
__ JumpIfSmi(r2, &generic_stub);
__ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE);
- __ b(ne, &miss);
+ __ b(ne, &maybe_undefined1);
__ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
- __ b(ne, &miss);
+ __ b(ne, &maybe_undefined2);
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or VFP3 is unsupported.
@@ -6592,14 +6592,28 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ mov(r0, Operand(LESS), LeaveCC, lt);
__ mov(r0, Operand(GREATER), LeaveCC, gt);
__ Ret();
-
- __ bind(&unordered);
}
+ __ bind(&unordered);
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
__ bind(&generic_stub);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ b(ne, &miss);
+ __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
+ __ b(ne, &maybe_undefined2);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
+ __ b(eq, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -6647,6 +6661,8 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = r1;
Register right = r0;
@@ -6680,28 +6696,39 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// Check that both strings are symbols. If they are, we're done
// because we already know they are not identical.
- ASSERT(GetCondition() == eq);
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp3, tmp1, Operand(tmp2));
- __ tst(tmp3, Operand(kIsSymbolMask));
- // Make sure r0 is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(r0));
- __ Ret(ne);
+ if (equality) {
+ ASSERT(GetCondition() == eq);
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp3, tmp1, Operand(tmp2));
+ __ tst(tmp3, Operand(kIsSymbolMask));
+ // Make sure r0 is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(r0));
+ __ Ret(ne);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ JumpIfBothInstanceTypesAreNotSequentialAscii(tmp1, tmp2, tmp3, tmp4,
- &runtime);
+ __ JumpIfBothInstanceTypesAreNotSequentialAscii(
+ tmp1, tmp2, tmp3, tmp4, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2, tmp3);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2, tmp3);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3, tmp4);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -6812,7 +6839,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// scratch0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -6840,10 +6867,17 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ b(eq, done);
if (i != kInlinedProbes - 1) {
+ // Load the hole ready for use below:
+ __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
// Stop if found the property.
__ cmp(entity_name, Operand(Handle<String>(name)));
__ b(eq, miss);
+ Label the_hole;
+ __ cmp(entity_name, tmp);
+ __ b(eq, &the_hole);
+
// Check if the entry name is not a symbol.
__ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
__ ldrb(entity_name,
@@ -6851,6 +6885,8 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ tst(entity_name, Operand(kIsSymbolMask));
__ b(eq, miss);
+ __ bind(&the_hole);
+
// Restore the properties.
__ ldr(properties,
FieldMemOperand(receiver, JSObject::kPropertiesOffset));
diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc
index 506f9b2d5dc..6e182777830 100644
--- a/deps/v8/src/arm/codegen-arm.cc
+++ b/deps/v8/src/arm/codegen-arm.cc
@@ -37,6 +37,19 @@ namespace internal {
#define __ ACCESS_MASM(masm)
+TranscendentalFunction CreateTranscendentalFunction(
+ TranscendentalCache::Type type) {
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ return NULL;
+}
+
+
// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
diff --git a/deps/v8/src/arm/deoptimizer-arm.cc b/deps/v8/src/arm/deoptimizer-arm.cc
index 76d89541c59..d9a4d4b0f41 100644
--- a/deps/v8/src/arm/deoptimizer-arm.cc
+++ b/deps/v8/src/arm/deoptimizer-arm.cc
@@ -351,7 +351,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
- unsigned input_frame_size = input_->GetFrameSize();
unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Allocate and store the output frame description.
@@ -373,16 +372,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Compute the incoming parameter translation.
int parameter_count = height;
unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
}
- input_offset -= (parameter_count * kPointerSize);
// Read caller's PC from the previous frame.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t callers_pc = output_[frame_index - 1]->GetPc();
output_frame->SetFrameSlot(output_offset, callers_pc);
if (FLAG_trace_deopt) {
@@ -392,7 +388,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Read caller's FP from the previous frame, and set this frame's FP.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t value = output_[frame_index - 1]->GetFp();
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
@@ -404,7 +399,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// A marker value is used in place of the context.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t context = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
output_frame->SetFrameSlot(output_offset, context);
@@ -415,7 +409,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(function);
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -425,7 +418,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Number of incoming arguments.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -445,6 +437,119 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 7 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // Constructor function being invoked by the stub.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32 code, but relies on register names (fp, sp)
// and how the frame is laid out.
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
@@ -557,9 +662,8 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
- if (is_topmost) {
- output_frame->SetRegister(cp.code(), value);
- }
+ output_frame->SetContext(value);
+ if (is_topmost) output_frame->SetRegister(cp.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
top_address + output_offset, output_offset, value);
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index 51e92083327..77f4e4414da 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -1004,6 +1004,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register r0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(r1, cell);
+ __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+ __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
+
__ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1488,11 +1498,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ ldr(r0, MemOperand(sp));
__ push(r0);
VisitForStackValue(key);
- __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- __ push(r1);
- VisitForStackValue(value);
+ if (property->kind() == ObjectLiteral::Property::GETTER) {
+ VisitForStackValue(value);
+ __ LoadRoot(r1, Heap::kNullValueRootIndex);
+ __ push(r1);
+ } else {
+ __ LoadRoot(r1, Heap::kNullValueRootIndex);
+ __ push(r1);
+ VisitForStackValue(value);
+ }
__ mov(r0, Operand(Smi::FromInt(NONE)));
__ push(r0);
__ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
@@ -2382,6 +2396,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
CallConstructStub stub(flags);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(r0);
}
@@ -2932,6 +2947,50 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = r0;
+ Register result = r0;
+ Register scratch0 = r9;
+ Register scratch1 = r1;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
+ __ Assert(eq, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch1, Operand(stamp));
+ __ ldr(scratch1, MemOperand(scratch1));
+ __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ cmp(scratch1, scratch0);
+ __ b(ne, &runtime);
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch1);
+ __ mov(r1, Operand(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+ context()->Plug(r0);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc
index 14daadaea95..e84365789be 100644
--- a/deps/v8/src/arm/ic-arm.cc
+++ b/deps/v8/src/arm/ic-arm.cc
@@ -399,7 +399,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
// If the stub cache probing failed, the receiver might be a value.
// For value objects, we use the map of the prototype objects for
@@ -438,7 +438,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Probe the stub cache for the value object.
__ bind(&probe);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
__ bind(&miss);
}
@@ -706,7 +706,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
Code::Flags flags =
Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r0, r2, r3, r4, r5);
+ masm, flags, r0, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1516,7 +1516,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc
index a934aacd36a..36421d9bd3c 100644
--- a/deps/v8/src/arm/lithium-arm.cc
+++ b/deps/v8/src/arm/lithium-arm.cc
@@ -440,7 +440,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -476,7 +476,7 @@ void LChunk::MarkEmptyBlocks() {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -551,8 +551,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -582,14 +582,14 @@ void LChunkBuilder::Abort(const char* format, ...) {
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- DoubleRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ DoubleRegister::ToAllocationIndex(reg));
}
@@ -604,30 +604,30 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
- LUnallocated::USED_AT_START));
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
- LUnallocated::USED_AT_START));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
+ LUnallocated::USED_AT_START));
}
@@ -662,7 +662,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
@@ -688,21 +688,24 @@ LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr, int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -784,13 +787,14 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
operand->set_virtual_register(allocator_->GetVirtualRegister());
if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
@@ -812,17 +816,17 @@ LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -834,7 +838,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -868,7 +872,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineAsRegister(new LShiftI(op, left, right, does_deopt));
+ DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -881,7 +885,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineAsRegister(result);
}
@@ -899,7 +903,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, r1);
LOperand* right_operand = UseFixed(right, r0);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -995,15 +1000,17 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- hydrogen_env->is_arguments_adaptor(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1013,14 +1020,14 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument(argument_index++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
- if (!hydrogen_env->is_arguments_adaptor()) {
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
*argument_index_accumulator = argument_index;
}
@@ -1029,7 +1036,7 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
@@ -1039,10 +1046,10 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
}
- LBranch* result = new LBranch(UseRegister(value));
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
// Tagged values that are not known smis or booleans require a
// deoptimization environment.
Representation rep = value->representation();
@@ -1059,23 +1066,24 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LCmpMapAndBranch(value, temp);
+ return new(zone()) LCmpMapAndBranch(value, temp);
}
-LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(UseRegister(length->value())));
+LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
+ LOperand* value = UseRegister(instr->value());
+ return DefineAsRegister(new(zone()) LArgumentsLength(value));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstanceOf* result =
- new LInstanceOf(UseFixed(instr->left(), r0),
+ new(zone()) LInstanceOf(UseFixed(instr->left(), r0),
UseFixed(instr->right(), r1));
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1084,7 +1092,8 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), r0), FixedTemp(r4));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), r0),
+ FixedTemp(r4));
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1094,7 +1103,7 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* receiver = UseFixed(instr->receiver(), r0);
LOperand* length = UseFixed(instr->length(), r2);
LOperand* elements = UseFixed(instr->elements(), r3);
- LApplyArguments* result = new LApplyArguments(function,
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
receiver,
length,
elements);
@@ -1105,73 +1114,75 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = Use(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
}
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
- return MarkAsCall(new LDeclareGlobals, instr);
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalObject(context));
+ return DefineAsRegister(new(zone()) LGlobalObject(context));
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, r0), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), r1);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), d2);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, NULL);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
return MarkAsCall(DefineFixedDouble(result, d2), instr);
} else if (op == kMathPowHalf) {
LOperand* input = UseFixedDouble(instr->value(), d2);
LOperand* temp = FixedTemp(d3);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
return DefineFixedDouble(result, d2);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
@@ -1193,32 +1204,32 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
argument_count_ -= instr->argument_count();
LOperand* key = UseFixed(instr->key(), r2);
- return MarkAsCall(DefineFixed(new LCallKeyed(key), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), r0), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, r0), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, r0), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, r0), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), r1);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1226,13 +1237,14 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* function = UseFixed(instr->function(), r1);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallFunction(function), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), r0),
+ instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, r0), instr);
}
@@ -1258,7 +1270,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineAsRegister(new LBitI(left, right));
+ return DefineAsRegister(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1266,7 +1278,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
}
@@ -1275,7 +1287,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
- return DefineAsRegister(new LBitNotI(UseRegisterAtStart(instr->value())));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1291,7 +1304,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* dividend = UseFixed(instr->left(), r0);
LOperand* divisor = UseFixed(instr->right(), r1);
return AssignEnvironment(AssignPointerMap(
- DefineFixed(new LDivI(dividend, divisor), r0)));
+ DefineFixed(new(zone()) LDivI(dividend, divisor), r0)));
} else {
return DoArithmeticT(Token::DIV, instr);
}
@@ -1307,15 +1320,15 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- mod = new LModI(value, UseOrConstant(instr->right()));
+ mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
} else {
LOperand* dividend = UseRegister(instr->left());
LOperand* divisor = UseRegister(instr->right());
- mod = new LModI(dividend,
- divisor,
- TempRegister(),
- FixedTemp(d10),
- FixedTemp(d11));
+ mod = new(zone()) LModI(dividend,
+ divisor,
+ TempRegister(),
+ FixedTemp(d10),
+ FixedTemp(d11));
}
if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
@@ -1333,7 +1346,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), d1);
LOperand* right = UseFixedDouble(instr->right(), d2);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, d1), instr);
}
}
@@ -1354,7 +1367,7 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
} else {
left = UseRegisterAtStart(instr->LeastConstantOperand());
}
- LMulI* mul = new LMulI(left, right, temp);
+ LMulI* mul = new(zone()) LMulI(left, right, temp);
if (instr->CheckFlag(HValue::kCanOverflow) ||
instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
AssignEnvironment(mul);
@@ -1376,7 +1389,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineAsRegister(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1396,7 +1409,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineAsRegister(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1421,7 +1434,7 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
LOperand* right = exponent_type.IsDouble() ?
UseFixedDouble(instr->right(), d2) :
UseFixed(instr->right(), r2);
- LPower* result = new LPower(left, right);
+ LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, d3),
instr,
CAN_DEOPTIMIZE_EAGERLY);
@@ -1432,7 +1445,7 @@ LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
ASSERT(instr->representation().IsDouble());
ASSERT(instr->global_object()->representation().IsTagged());
LOperand* global_object = UseFixed(instr->global_object(), r0);
- LRandom* result = new LRandom(global_object);
+ LRandom* result = new(zone()) LRandom(global_object);
return MarkAsCall(DefineFixedDouble(result, d7), instr);
}
@@ -1442,7 +1455,7 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1455,14 +1468,14 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseRegisterOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1471,47 +1484,50 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LCmpConstantEqAndBranch(value);
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsObjectAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LIsStringAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
}
@@ -1521,7 +1537,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LStringCompareAndBranch* result = new LStringCompareAndBranch(left, right);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
return MarkAsCall(result, instr);
}
@@ -1529,7 +1546,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1538,14 +1556,14 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(
+ return new(zone()) LHasCachedArrayIndexAndBranch(
UseRegisterAtStart(instr->value()));
}
@@ -1553,40 +1571,48 @@ LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LClassOfTestAndBranch(UseRegister(instr->value()),
- TempRegister());
+ LOperand* value = UseRegister(instr->value());
+ return new(zone()) LClassOfTestAndBranch(value, TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object, TempRegister());
+ LValueOf* result = new(zone()) LValueOf(object, TempRegister());
return DefineAsRegister(result);
}
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* object = UseFixed(instr->value(), r0);
+ LDateField* result = new LDateField(object, FixedTemp(r1), instr->index());
+ return MarkAsCall(DefineFixed(result, r0), instr);
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- UseRegister(instr->length())));
+ LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* length = UseRegister(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1599,7 +1625,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), r0);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1622,7 +1648,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
@@ -1630,14 +1656,17 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
if (!needs_check) {
- res = DefineAsRegister(new LSmiUntag(value, needs_check));
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
: NULL;
LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(d11)
: NULL;
- res = DefineSameAsFirst(new LTaggedToI(value, temp1, temp2, temp3));
+ res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
+ temp1,
+ temp2,
+ temp3));
res = AssignEnvironment(res);
}
return res;
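
This hunk keeps the two tagged→int32 strategies: `LSmiUntag` when type feedback proves the value is a smi, and `LTaggedToI` (with temps and a deopt environment) when it may be a heap number. The smi encoding that makes the cheap path a single shift looks roughly like this on 32-bit targets (a sketch, not V8 source):

    #include <cstdint>

    // On 32-bit V8 a small integer ("smi") keeps its value in the upper 31 bits
    // with a 0 tag bit; heap object pointers have the low bit set.
    const int kSmiTagSize = 1;

    inline bool IsSmi(intptr_t tagged)    { return (tagged & 1) == 0; }
    inline int32_t SmiUntag(intptr_t t)   { return static_cast<int32_t>(t >> kSmiTagSize); }
    inline intptr_t SmiTag(int32_t value) { return static_cast<intptr_t>(value) << kSmiTagSize; }

    // needs_check == false corresponds to LSmiUntag: the input is known to be a
    // smi, so one arithmetic shift suffices. Otherwise LTaggedToI must also
    // handle the heap-number case and may deoptimize.
    int32_t TaggedToInt32(intptr_t tagged, bool needs_check) {
      if (!needs_check || IsSmi(tagged)) return SmiUntag(tagged);
      // Heap-number path omitted: load the double payload and truncate,
      // deoptimizing if the value is not representable as an int32.
      return 0;  // placeholder for the slow path
    }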
@@ -1651,16 +1680,15 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that the temp and result_temp registers are
// different.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp1, temp2);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
Define(result, result_temp);
return AssignPointerMap(result);
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- LDoubleToI* res =
- new LDoubleToI(value,
- TempRegister(),
- instr->CanTruncateToInt32() ? TempRegister() : NULL);
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
+ LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
return AssignEnvironment(DefineAsRegister(res));
}
} else if (from.IsInteger32()) {
@@ -1668,15 +1696,15 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
HValue* val = instr->value();
LOperand* value = UseRegisterAtStart(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineAsRegister(new LSmiTag(value));
+ return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
} else {
ASSERT(to.IsDouble());
LOperand* value = Use(instr->value());
- return DefineAsRegister(new LInteger32ToDouble(value));
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
@@ -1686,13 +1714,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckInstanceType(value);
+ LInstruction* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
@@ -1700,26 +1728,26 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new LCheckPrototypeMaps(temp1, temp2);
+ LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMap(value);
return AssignEnvironment(result);
}
@@ -1729,57 +1757,32 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
- return DefineAsRegister(new LClampDToUint8(reg, FixedTemp(d11)));
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(d11)));
} else if (input_rep.IsInteger32()) {
- return DefineAsRegister(new LClampIToUint8(reg));
+ return DefineAsRegister(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve d1 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg, FixedTemp(d11));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(d11));
return AssignEnvironment(DefineAsRegister(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LDoubleToI* res = new LDoubleToI(reg, temp1, temp2);
- return AssignEnvironment(DefineAsRegister(res));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LOperand* temp3 = FixedTemp(d11);
- LTaggedToI* res = new LTaggedToI(reg, temp1, temp2, temp3);
- return AssignEnvironment(DefineSameAsFirst(res));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), r0));
+ return new(zone()) LReturn(UseFixed(instr->value(), r0));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
- return DefineAsRegister(new LConstantD);
+ return DefineAsRegister(new(zone()) LConstantD);
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1788,7 +1791,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1797,7 +1800,7 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), r0);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1807,8 +1810,8 @@ LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
// Use a temp to check the value in the cell in the case where we perform
// a hole check.
return instr->RequiresHoleCheck()
- ? AssignEnvironment(new LStoreGlobalCell(value, TempRegister()))
- : new LStoreGlobalCell(value, NULL);
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
@@ -1816,14 +1819,15 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), r1);
LOperand* value = UseFixed(instr->value(), r0);
LStoreGlobalGeneric* result =
- new LStoreGlobalGeneric(global_object, value);
+ new(zone()) LStoreGlobalGeneric(global_object, value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- LInstruction* result = DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1838,14 +1842,14 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
context = UseRegister(instr->context());
value = UseRegister(instr->value());
}
- LInstruction* result = new LStoreContextSlot(context, value);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value);
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
return DefineAsRegister(
- new LLoadNamedField(UseRegisterAtStart(instr->object())));
+ new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
}
@@ -1854,11 +1858,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), r0);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, r0), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1866,7 +1872,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), r0);
- LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), r0);
+ LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), r0);
return MarkAsCall(result, instr);
}
@@ -1874,20 +1880,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1897,7 +1903,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
if (instr->RequiresHoleCheck()) AssignEnvironment(result);
return DefineAsRegister(result);
}
@@ -1910,7 +1916,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseTempRegister(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1929,7 +1935,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1943,7 +1949,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* key = UseFixed(instr->key(), r0);
LInstruction* result =
- DefineFixed(new LLoadKeyedGeneric(object, key), r0);
+ DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), r0);
return MarkAsCall(result, instr);
}
@@ -1962,7 +1968,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastElement(obj, key, val);
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1976,7 +1982,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
@@ -2002,9 +2008,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -2017,7 +2023,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+ return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
}
@@ -2028,14 +2034,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, NULL);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), r0);
LOperand* fixed_object_reg = FixedTemp(r2);
LOperand* new_map_reg = FixedTemp(r3);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, r0), instr);
}
}
@@ -2052,7 +2060,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new LStoreNamedField(obj, val);
+ return new(zone()) LStoreNamedField(obj, val);
}
@@ -2060,7 +2068,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* obj = UseFixed(instr->object(), r1);
LOperand* val = UseFixed(instr->value(), r0);
- LInstruction* result = new LStoreNamedGeneric(obj, val);
+ LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
return MarkAsCall(result, instr);
}
@@ -2068,60 +2076,67 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), r0),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new LAllocateObject(TempRegister(), TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFastLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LOperand* object = UseFixed(instr->object(), r0);
LOperand* key = UseFixed(instr->key(), r1);
- LDeleteProperty* result = new LDeleteProperty(object, key);
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -2129,13 +2144,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2145,13 +2160,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr);
}
@@ -2168,32 +2183,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), r0);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseFixed(instr->value(), r0));
+ LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), r0));
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2216,7 +2232,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new LLazyBailout;
+ LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2230,10 +2246,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2245,7 +2261,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2263,14 +2280,14 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseRegisterAtStart(instr->key());
LOperand* object = UseRegisterAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LOperand* object = UseFixed(instr->enumerable(), r0);
- LForInPrepareMap* result = new LForInPrepareMap(object);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -2278,21 +2295,21 @@ LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
return AssignEnvironment(DefineAsRegister(
- new LForInCacheArray(map)));
+ new(zone()) LForInCacheArray(map)));
}
LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* map = UseRegisterAtStart(instr->map());
- return AssignEnvironment(new LCheckMapValue(value, map));
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
}
LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* index = UseRegister(instr->index());
- return DefineAsRegister(new LLoadFieldByIndex(object, index));
+ return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
}
diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h
index 1846922dbcd..ae19677fda3 100644
--- a/deps/v8/src/arm/lithium-arm.h
+++ b/deps/v8/src/arm/lithium-arm.h
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -176,8 +177,8 @@ class LCodeGen;
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
- V(LoadFieldByIndex)
-
+ V(LoadFieldByIndex) \
+ V(DateField)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -989,6 +990,41 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
};
+class LDateField: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LDateField(LOperand* date, LOperand* temp, Smi* index) : index_(index) {
+ inputs_[0] = date;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(ValueOf)
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
+class LSetDateField: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LSetDateField(LOperand* date, LOperand* value, LOperand* temp, int index)
+ : index_(index) {
+ inputs_[0] = date;
+ inputs_[1] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField, "date-set-field")
+ DECLARE_HYDROGEN_ACCESSOR(DateField)
+
+ int index() const { return index_; }
+
+ private:
+ int index_;
+};
+
+
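
For readers unfamiliar with the template, `LTemplateInstruction<R, I, T>` fixes the number of results, inputs and temps at compile time, which is why `LDateField` is `<1, 1, 1>` (one result, the date object input, one scratch register) and `LSetDateField` is `<1, 2, 1>`. A stripped-down illustration of that operand layout (names are illustrative only, and the real index is a `Smi*` rather than an `int`):

    #include <cassert>

    struct LOperand;  // opaque here; only pointers are stored

    // Toy version of LTemplateInstruction<R, I, T>: fixed-size operand arrays.
    template <int R, int I, int T>
    class InstrTemplate {
     public:
      LOperand* InputAt(int i) const { assert(i < I); return inputs_[i]; }
      LOperand* TempAt(int i) const { assert(i < T); return temps_[i]; }
     protected:
      LOperand* results_[R == 0 ? 1 : R];
      LOperand* inputs_[I == 0 ? 1 : I];
      LOperand* temps_[T == 0 ? 1 : T];
    };

    // Shape of LDateField above: one result, one input (the date), one temp.
    class DateFieldInstr : public InstrTemplate<1, 1, 1> {
     public:
      DateFieldInstr(LOperand* date, LOperand* temp, int index) : index_(index) {
        inputs_[0] = date;
        temps_[0] = temp;
      }
      int index() const { return index_; }
     private:
      int index_;
    };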
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
@@ -1922,6 +1958,18 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
+ public:
+ LAllocateObject(LOperand* temp1, LOperand* temp2) {
+ temps_[0] = temp1;
+ temps_[1] = temp2;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
@@ -2192,6 +2240,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2221,6 +2270,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2325,6 +2375,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index 80455564061..012ea458ff2 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -62,7 +62,7 @@ class SafepointGenerator : public CallWrapper {
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope1(VFP3);
@@ -479,10 +479,18 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- if (environment->is_arguments_adaptor()) {
- translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
- } else {
- translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
}
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
@@ -619,7 +627,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
- if (!e->is_arguments_adaptor()) {
+ if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
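
The boolean `is_arguments_adaptor()` becomes a three-valued `frame_type()` so deoptimization translations can also describe construct-stub frames (paired with the `is_construct` flag threaded into `DoEnterInlined` later in this file). Counting JS frames then filters on the type instead of negating a flag; schematically (enum values as in the diff, the rest illustrative):

    enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };

    // Mirrors RegisterEnvironmentForDeoptimization: only JS_FUNCTION frames
    // count toward jsframe_count, adaptor and construct-stub frames do not.
    int CountJsFrames(const FrameType* frames, int n) {
      int jsframe_count = 0;
      for (int i = 0; i < n; ++i) {
        if (frames[i] == JS_FUNCTION) ++jsframe_count;
      }
      return jsframe_count;
    }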
@@ -1430,6 +1438,46 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(r0));
+ ASSERT(!scratch.is(scratch0()));
+ ASSERT(!scratch.is(object));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
+ __ Assert(eq, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand(stamp));
+ __ ldr(scratch, MemOperand(scratch));
+ __ ldr(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ cmp(scratch, scratch0());
+ __ b(ne, &runtime);
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(r1, Operand(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+}
+
+
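
The fast path relies on `JSDate` caching derived fields (year, month, day, ...) alongside the stamp of the isolate's date cache; a cached slot is only trusted while that stamp still matches, otherwise the code falls through to the C helper behind `get_date_field_function`. A rough C++ rendering of the control flow (the layout, the `kFirstUncachedField` value and the runtime helper below are assumptions for illustration, not V8 API):

    #include <cstdint>

    const int kFirstUncachedField = 7;           // assumed value, see JSDate

    struct DateCache { uint32_t stamp; };        // per-isolate date cache stamp
    struct JSDateFields {
      double   value;                            // index 0: the raw time value
      uint32_t cache_stamp;                      // stamp when fields were cached
      double   cached_fields[8];                 // year, month, day, ...
    };

    double GetDateFieldRuntime(JSDateFields* date, int index) {
      // Placeholder for the C call made on the slow path.
      (void)date; (void)index;
      return 0.0;
    }

    // Same control flow as DoDateField: index 0 is always a direct load;
    // cached fields are only used while the object's stamp matches the cache.
    double GetDateField(JSDateFields* date, const DateCache* cache, int index) {
      if (index == 0) return date->value;
      if (index < kFirstUncachedField && date->cache_stamp == cache->stamp) {
        return date->cached_fields[index - 1];   // fast path: cached field
      }
      return GetDateFieldRuntime(date, index);   // slow path: C call
    }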
void LCodeGen::DoBitNotI(LBitNotI* instr) {
Register input = ToRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
@@ -3222,15 +3270,62 @@ void LCodeGen::DoPower(LPower* instr) {
void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
+
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(d7));
ASSERT(ToRegister(instr->InputAt(0)).is(r0));
- __ PrepareCallCFunction(1, scratch0());
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
+
__ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
- __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ ldr(r2, FieldMemOperand(r0, kRandomSeedOffset));
+ // r2: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ ldr(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
+ __ cmp(r1, Operand(0));
+ __ b(eq, deferred->entry());
+ // Load state[1].
+ __ ldr(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));
+ // r1: state[0].
+ // r0: state[1].
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ and_(r3, r1, Operand(0xFFFF));
+ __ mov(r4, Operand(18273));
+ __ mul(r3, r3, r4);
+ __ add(r1, r3, Operand(r1, LSR, 16));
+ // Save state[0].
+ __ str(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ and_(r3, r0, Operand(0xFFFF));
+ __ mov(r4, Operand(36969));
+ __ mul(r3, r3, r4);
+ __ add(r0, r3, Operand(r0, LSR, 16));
+ // Save state[1].
+ __ str(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ and_(r0, r0, Operand(0x3FFFF));
+ __ add(r0, r0, Operand(r1, LSL, 14));
+ __ bind(deferred->exit());
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
// Create this constant using mov/orr to avoid PC relative load.
__ mov(r1, Operand(0x41000000));
@@ -3245,6 +3340,13 @@ void LCodeGen::DoRandom(LRandom* instr) {
}
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, scratch0());
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in r0.
+}
+
+
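
`DoRandom` now inlines a multiply-with-carry generator over two 32-bit seeds stored in the global context, and only calls into C (the deferred path) when the seeds are still zero, i.e. uninitialized. In plain C++ the per-call update is roughly the following; the final double in [0, 1) is produced by the 0x41300000 trick above, which works out to dividing the 32-bit pattern by 2^32 (seed storage here is simplified):

    #include <cstdint>

    // Two 32-bit seeds, as stored in the global context's random-seed array.
    struct RandomSeed { uint32_t state0, state1; };

    // Same arithmetic as the emitted ARM code:
    //   state0 = 18273 * (state0 & 0xFFFF) + (state0 >> 16)
    //   state1 = 36969 * (state1 & 0xFFFF) + (state1 >> 16)
    //   bits   = (state0 << 14) + (state1 & 0x3FFFF)
    uint32_t NextRandomBits(RandomSeed* seed) {
      seed->state0 = 18273 * (seed->state0 & 0xFFFF) + (seed->state0 >> 16);
      seed->state1 = 36969 * (seed->state1 & 0xFFFF) + (seed->state1 >> 16);
      return (seed->state0 << 14) + (seed->state1 & 0x3FFFF);
    }

    // The generated code builds the double 2^20 + bits * 2^-32 and subtracts
    // 2^20 (whose high word is 0x41300000); numerically that is bits / 2^32.
    double NextRandomDouble(RandomSeed* seed) {
      return NextRandomBits(seed) / 4294967296.0;  // 2^32
    }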
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(d2));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -4322,6 +4424,80 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
}
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->TempAt(1));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ scratch,
+ scratch2,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(map, constructor);
+ __ ldr(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ str(map, FieldMemOperand(result, JSObject::kMapOffset));
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ str(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
+ __ str(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ str(scratch, FieldMemOperand(result, property_offset));
+ }
+ }
+
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ mov(result, Operand(0));
+
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ LoadHeapObject(r0, constructor);
+ __ push(r0);
+ CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ __ StoreToSafepointRegisterSlot(r0, result);
+}
+
+
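
`DoAllocateObject` bump-allocates the instance in new space and initializes it entirely from the constructor's initial map -- map word, empty backing stores, and `undefined` for each in-object property -- deferring to `Runtime::kNewObject` only when allocation fails. A sketch of that initialization order under simplified, untagged types (illustrative layout, not V8's heap format):

    #include <cassert>
    #include <cstdlib>

    struct Map    { int inobject_properties; };
    struct Object {};

    Object* const kEmptyFixedArray = nullptr;  // placeholder sentinels
    Object* const kUndefinedValue  = nullptr;

    const int kMaxInObjectProperties = 8;      // bound chosen for the sketch

    struct JSObjectLayout {
      Map*    map;                             // JSObject::kMapOffset
      Object* properties;                      // JSObject::kPropertiesOffset
      Object* elements;                        // JSObject::kElementsOffset
      Object* fields[kMaxInObjectProperties];  // in-object properties
    };

    void* AllocateInNewSpace(std::size_t size) {
      return std::malloc(size);  // stand-in; the real allocation can fail
    }

    // Mirrors DoAllocateObject: allocate, install the initial map, point the
    // elements/properties backing stores at the empty fixed array, and pre-fill
    // every in-object property with undefined. A null result corresponds to
    // jumping to DoDeferredAllocateObject (Runtime::kNewObject).
    JSObjectLayout* AllocateObjectFast(Map* initial_map) {
      assert(initial_map->inobject_properties <= kMaxInObjectProperties);
      void* raw = AllocateInNewSpace(sizeof(JSObjectLayout));  // real code: instance_size()
      if (raw == nullptr) return nullptr;
      JSObjectLayout* obj = static_cast<JSObjectLayout*>(raw);
      obj->map        = initial_map;
      obj->elements   = kEmptyFixedArray;
      obj->properties = kEmptyFixedArray;
      for (int i = 0; i < initial_map->inobject_properties; i++) {
        obj->fields[i] = kUndefinedValue;
      }
      return obj;
    }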
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Heap* heap = isolate()->heap();
ElementsKind boilerplate_elements_kind =
diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h
index 00823e1638c..adb6e1bb73e 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.h
+++ b/deps/v8/src/arm/lithium-codegen-arm.h
@@ -114,8 +114,10 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
index 880c372538d..de83c13e150 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -1055,7 +1055,7 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
int delta = code_handle->address() - re_code->address();
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index 9a0793e12f4..74fca2e4c57 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -43,59 +43,83 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // Number of the cache entry, not scaled.
Register offset,
- int offset_shift_bits,
Register scratch,
- Register scratch2) {
+ Register scratch2,
+ Register offset_scratch) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
+ uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
// Check the relative positions of the address fields.
ASSERT(value_off_addr > key_off_addr);
ASSERT((value_off_addr - key_off_addr) % 4 == 0);
ASSERT((value_off_addr - key_off_addr) < (256 * 4));
+ ASSERT(map_off_addr > key_off_addr);
+ ASSERT((map_off_addr - key_off_addr) % 4 == 0);
+ ASSERT((map_off_addr - key_off_addr) < (256 * 4));
Label miss;
- Register offsets_base_addr = scratch;
+ Register base_addr = scratch;
+ scratch = no_reg;
+
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ add(offset_scratch, offset, Operand(offset, LSL, 1));
+
+ // Calculate the base address of the entry.
+ __ mov(base_addr, Operand(key_offset));
+ __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
// Check that the key in the entry matches the name.
- __ mov(offsets_base_addr, Operand(key_offset));
- __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits));
+ __ ldr(ip, MemOperand(base_addr, 0));
__ cmp(name, ip);
__ b(ne, &miss);
+ // Check the map matches.
+ __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
+ __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ cmp(ip, scratch2);
+ __ b(ne, &miss);
+
// Get the code entry from the cache.
- __ add(offsets_base_addr, offsets_base_addr,
- Operand(value_off_addr - key_off_addr));
- __ ldr(scratch2,
- MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits));
+ Register code = scratch2;
+ scratch2 = no_reg;
+ __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
// Check that the flags match what we're looking for.
- __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
+ Register flags_reg = base_addr;
+ base_addr = no_reg;
+ __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
// It's a nice optimization if this constant is encodable in the bic insn.
uint32_t mask = Code::kFlagsNotUsedInLookup;
ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
- __ bic(scratch2, scratch2, Operand(mask));
+ __ bic(flags_reg, flags_reg, Operand(mask));
// Using cmn and the negative instead of cmp means we can use movw.
if (flags < 0) {
- __ cmn(scratch2, Operand(-flags));
+ __ cmn(flags_reg, Operand(-flags));
} else {
- __ cmp(scratch2, Operand(flags));
+ __ cmp(flags_reg, Operand(flags));
}
__ b(ne, &miss);
- // Re-load code entry from cache.
- __ ldr(offset,
- MemOperand(offsets_base_addr, offset, LSL, 1 + offset_shift_bits));
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
// Jump to the first instruction in the code stub.
- __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(offset);
+ __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
// Miss: fall through.
__ bind(&miss);
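
A stub cache entry grows from two words (name, code) to three (name, code, map), so the probe now checks the receiver's map as well, and the entry index is scaled by 3 via `offset + (offset << 1)` before the pointer-size shift. In C++ terms the check each table probe performs is roughly (structure names are illustrative):

    #include <cstdint>

    struct String {};                       // stand-ins; only identity matters here
    struct Map {};
    struct Code { uint32_t flags; };

    // One stub cache entry after this patch: three words instead of two,
    // which is why the index is multiplied by 3 rather than 2.
    struct Entry {
      String* key;    // property name
      Code*   value;  // cached stub
      Map*    map;    // receiver map -- the newly added field
    };

    // What ProbeTable checks for the candidate entry, given the probe inputs.
    // flags_not_used stands for Code::kFlagsNotUsedInLookup.
    Code* Probe(const Entry* table, uint32_t index, String* name,
                Map* receiver_map, uint32_t flags, uint32_t flags_not_used) {
      const Entry& e = table[index];
      if (e.key != name) return nullptr;                        // name must match
      if (e.map != receiver_map) return nullptr;                // new map check
      if ((e.value->flags & ~flags_not_used) != flags) return nullptr;
      return e.value;                                           // jump to the stub
    }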
@@ -167,13 +191,14 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 12.
+ ASSERT(sizeof(Entry) == 12);
// Make sure the flags does not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -193,6 +218,11 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!scratch.is(no_reg));
ASSERT(!extra.is(no_reg));
ASSERT(!extra2.is(no_reg));
+ ASSERT(!extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
+ extra2, extra3);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
@@ -201,29 +231,32 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
__ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
__ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ add(scratch, scratch, Operand(ip));
- uint32_t mask = (kPrimaryTableSize - 1) << kHeapObjectTagSize;
+ uint32_t mask = kPrimaryTableSize - 1;
+ // We shift out the last two bits because they are not part of the hash and
+ // they are always 01 for maps.
+ __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
// Mask down the eor argument to the minimum to keep the immediate
// ARM-encodable.
- __ eor(scratch, scratch, Operand(flags & mask));
+ __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
// Prefer and_ to ubfx here because ubfx takes 2 cycles.
__ and_(scratch, scratch, Operand(mask));
- __ mov(scratch, Operand(scratch, LSR, 1));
// Probe the primary table.
ProbeTable(isolate,
masm,
flags,
kPrimary,
+ receiver,
name,
scratch,
- 1,
extra,
- extra2);
+ extra2,
+ extra3);
// Primary miss: Compute hash for secondary probe.
- __ sub(scratch, scratch, Operand(name, LSR, 1));
- uint32_t mask2 = (kSecondaryTableSize - 1) << (kHeapObjectTagSize - 1);
- __ add(scratch, scratch, Operand((flags >> 1) & mask2));
+ __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
+ uint32_t mask2 = kSecondaryTableSize - 1;
+ __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
__ and_(scratch, scratch, Operand(mask2));
// Probe the secondary table.
@@ -231,15 +264,18 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
masm,
flags,
kSecondary,
+ receiver,
name,
scratch,
- 1,
extra,
- extra2);
+ extra2,
+ extra3);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
+ extra2, extra3);
}
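
To make the probe arithmetic above easier to follow outside of ARM assembly, here is a minimal C++ sketch of the same primary/secondary offset computation. kHeapObjectTagSize is 2 in V8; the 2048/512 table sizes are illustrative assumptions rather than values taken from this diff, and raw_hash, raw_map and raw_name stand in for the words the stub actually loads. The resulting offset indexes the 12-byte entries that the ASSERT near the top of GenerateProbe insists on.

    #include <stdint.h>

    static const int kHeapObjectTagSize = 2;           // low two bits are the tag
    static const uint32_t kPrimaryTableSize = 2048;    // assumption for illustration
    static const uint32_t kSecondaryTableSize = 512;   // assumption for illustration

    // Primary probe: hash + map, drop the tag bits (always 01 for maps),
    // fold in the code flags, and mask down to the table size.
    uint32_t PrimaryOffset(uint32_t raw_hash, uint32_t raw_map, uint32_t flags) {
      uint32_t mask = kPrimaryTableSize - 1;
      uint32_t scratch = (raw_hash + raw_map) >> kHeapObjectTagSize;
      scratch ^= (flags >> kHeapObjectTagSize) & mask;
      return scratch & mask;
    }

    // Secondary probe: start from the primary offset, subtract the name word,
    // add the flags back in, and mask with the (smaller) secondary table size.
    uint32_t SecondaryOffset(uint32_t primary, uint32_t raw_name, uint32_t flags) {
      uint32_t mask2 = kSecondaryTableSize - 1;
      uint32_t scratch = primary - (raw_name >> kHeapObjectTagSize);
      scratch += (flags >> kHeapObjectTagSize) & mask2;
      return scratch & mask2;
    }
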
diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc
index 0bec57752f3..07509b5b28e 100644
--- a/deps/v8/src/assembler.cc
+++ b/deps/v8/src/assembler.cc
@@ -813,6 +813,17 @@ ExternalReference ExternalReference::random_uint32_function(
}
+ExternalReference ExternalReference::get_date_field_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
+}
+
+
+ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
+ return ExternalReference(isolate->date_cache()->stamp_address());
+}
+
+
ExternalReference ExternalReference::transcendental_cache_array_address(
Isolate* isolate) {
return ExternalReference(
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h
index e7c92b451c0..5063879ae4c 100644
--- a/deps/v8/src/assembler.h
+++ b/deps/v8/src/assembler.h
@@ -595,6 +595,9 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference transcendental_cache_array_address(Isolate* isolate);
static ExternalReference delete_handle_scope_extensions(Isolate* isolate);
+ static ExternalReference get_date_field_function(Isolate* isolate);
+ static ExternalReference date_cache_stamp(Isolate* isolate);
+
// Deoptimization support.
static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate* isolate);
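
The two references added above are what let generated code talk to the new DateCache directly: date_cache_stamp points at the cache's stamp word, and get_date_field_function is the slow path (JSDate::GetField). A rough, purely illustrative sketch of the stamp check they enable; JSDateLike, RecomputeField and ReadDateField are hypothetical names invented for this sketch, not V8 API.

    struct JSDateLike {
      int cache_stamp;      // stamp saved when the cached fields were computed
      double cached_field;  // e.g. the cached local year
    };

    // Placeholder for the expensive path that real code reaches through
    // get_date_field_function: recompute the field from the time value.
    double RecomputeField(JSDateLike* date) {
      (void)date;
      return 0.0;
    }

    double ReadDateField(JSDateLike* date, const int* date_cache_stamp) {
      if (date->cache_stamp == *date_cache_stamp) {
        return date->cached_field;            // fast path: timezone info unchanged
      }
      double value = RecomputeField(date);    // slow path through the runtime
      date->cached_field = value;
      date->cache_stamp = *date_cache_stamp;  // revalidate against the current stamp
      return value;
    }
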
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc
index ca3ab7851d3..239e5d0ffe4 100644
--- a/deps/v8/src/ast.cc
+++ b/deps/v8/src/ast.cc
@@ -76,7 +76,8 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
is_this_(var->is_this()),
is_trivial_(false),
is_lvalue_(false),
- position_(RelocInfo::kNoPosition) {
+ position_(RelocInfo::kNoPosition),
+ interface_(var->interface()) {
BindTo(var);
}
@@ -84,14 +85,16 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
VariableProxy::VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position)
+ int position,
+ Interface* interface)
: Expression(isolate),
name_(name),
var_(NULL),
is_this_(is_this),
is_trivial_(false),
is_lvalue_(false),
- position_(position) {
+ position_(position),
+ interface_(interface) {
// Names must be canonicalized for fast equality checks.
ASSERT(name->IsSymbol());
}
@@ -168,12 +171,15 @@ LanguageMode FunctionLiteral::language_mode() const {
}
-ObjectLiteral::Property::Property(Literal* key, Expression* value) {
+ObjectLiteral::Property::Property(Literal* key,
+ Expression* value,
+ Isolate* isolate) {
emit_store_ = true;
key_ = key;
value_ = value;
Object* k = *key->handle();
- if (k->IsSymbol() && HEAP->Proto_symbol()->Equals(String::cast(k))) {
+ if (k->IsSymbol() &&
+ isolate->heap()->Proto_symbol()->Equals(String::cast(k))) {
kind_ = PROTOTYPE;
} else if (value_->AsMaterializedLiteral() != NULL) {
kind_ = MATERIALIZED_LITERAL;
@@ -237,55 +243,21 @@ bool IsEqualNumber(void* first, void* second) {
void ObjectLiteral::CalculateEmitStore() {
- ZoneHashMap properties(&IsEqualString);
- ZoneHashMap elements(&IsEqualNumber);
- for (int i = this->properties()->length() - 1; i >= 0; i--) {
- ObjectLiteral::Property* property = this->properties()->at(i);
+ ZoneHashMap table(Literal::Match);
+ for (int i = properties()->length() - 1; i >= 0; i--) {
+ ObjectLiteral::Property* property = properties()->at(i);
Literal* literal = property->key();
- Handle<Object> handle = literal->handle();
-
- if (handle->IsNull()) {
- continue;
- }
-
- uint32_t hash;
- ZoneHashMap* table;
- void* key;
- Factory* factory = Isolate::Current()->factory();
- if (handle->IsSymbol()) {
- Handle<String> name(String::cast(*handle));
- if (name->AsArrayIndex(&hash)) {
- Handle<Object> key_handle = factory->NewNumberFromUint(hash);
- key = key_handle.location();
- table = &elements;
- } else {
- key = name.location();
- hash = name->Hash();
- table = &properties;
- }
- } else if (handle->ToArrayIndex(&hash)) {
- key = handle.location();
- table = &elements;
- } else {
- ASSERT(handle->IsNumber());
- double num = handle->Number();
- char arr[100];
- Vector<char> buffer(arr, ARRAY_SIZE(arr));
- const char* str = DoubleToCString(num, buffer);
- Handle<String> name = factory->NewStringFromAscii(CStrVector(str));
- key = name.location();
- hash = name->Hash();
- table = &properties;
- }
+ if (literal->handle()->IsNull()) continue;
+ uint32_t hash = literal->Hash();
// If the key of a computed property is in the table, do not emit
// a store for the property later.
- if (property->kind() == ObjectLiteral::Property::COMPUTED) {
- if (table->Lookup(key, hash, false) != NULL) {
- property->set_emit_store(false);
- }
+ if (property->kind() == ObjectLiteral::Property::COMPUTED &&
+ table.Lookup(literal, hash, false) != NULL) {
+ property->set_emit_store(false);
+ } else {
+ // Add key to the table.
+ table.Lookup(literal, hash, true);
}
- // Add key to the table.
- table->Lookup(key, hash, true);
}
}
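
The rewritten CalculateEmitStore walks the properties backwards and suppresses the store for any computed property whose key occurs again later in the literal, so the lexically last write wins. A minimal sketch of that logic, assuming keys have already been canonicalized to strings; std::unordered_set stands in for the ZoneHashMap, and Property/emit_store are illustrative stand-ins.

    #include <string>
    #include <unordered_set>
    #include <vector>

    struct Property {
      std::string key;        // canonical form: numbers already printed as strings
      bool computed;
      bool emit_store = true;
    };

    void CalculateEmitStore(std::vector<Property>* props) {
      std::unordered_set<std::string> seen;
      for (int i = static_cast<int>(props->size()) - 1; i >= 0; i--) {
        Property* p = &(*props)[i];
        if (p->computed && seen.count(p->key) != 0) {
          p->emit_store = false;  // a later property with the same key overwrites this one
        } else {
          seen.insert(p->key);
        }
      }
    }
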
@@ -417,8 +389,8 @@ bool Declaration::IsInlineable() const {
return proxy()->var()->IsStackAllocated();
}
-bool VariableDeclaration::IsInlineable() const {
- return Declaration::IsInlineable() && fun() == NULL;
+bool FunctionDeclaration::IsInlineable() const {
+ return false;
}
@@ -517,13 +489,27 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
LookupResult lookup(type->GetIsolate());
while (true) {
type->LookupInDescriptors(NULL, *name, &lookup);
- // For properties we know the target iff we have a constant function.
- if (lookup.IsFound() && lookup.IsProperty()) {
- if (lookup.type() == CONSTANT_FUNCTION) {
- target_ = Handle<JSFunction>(lookup.GetConstantFunctionFromMap(*type));
- return true;
+ if (lookup.IsFound()) {
+ switch (lookup.type()) {
+ case CONSTANT_FUNCTION:
+ // We surely know the target for a constant function.
+ target_ =
+ Handle<JSFunction>(lookup.GetConstantFunctionFromMap(*type));
+ return true;
+ case NORMAL:
+ case FIELD:
+ case CALLBACKS:
+ case HANDLER:
+ case INTERCEPTOR:
+ // We don't know the target.
+ return false;
+ case MAP_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ // Perhaps something interesting is up in the prototype chain...
+ break;
}
- return false;
}
// If we reach the end of the prototype chain, we don't know the target.
if (!type->prototype()->IsJSObject()) return false;
@@ -596,6 +582,14 @@ void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
}
+void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+ is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
+ if (is_monomorphic_) {
+ target_ = oracle->GetCallNewTarget(this);
+ }
+}
+
+
void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
TypeInfo info = oracle->CompareType(this);
if (info.IsSmi()) {
@@ -995,7 +989,10 @@ CaseClause::CaseClause(Isolate* isolate,
}
INCREASE_NODE_COUNT(VariableDeclaration)
+INCREASE_NODE_COUNT(FunctionDeclaration)
INCREASE_NODE_COUNT(ModuleDeclaration)
+INCREASE_NODE_COUNT(ImportDeclaration)
+INCREASE_NODE_COUNT(ExportDeclaration)
INCREASE_NODE_COUNT(ModuleLiteral)
INCREASE_NODE_COUNT(ModuleVariable)
INCREASE_NODE_COUNT(ModulePath)
@@ -1137,4 +1134,22 @@ void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
}
}
+
+Handle<String> Literal::ToString() {
+ if (handle_->IsString()) return Handle<String>::cast(handle_);
+ ASSERT(handle_->IsNumber());
+ char arr[100];
+ Vector<char> buffer(arr, ARRAY_SIZE(arr));
+ const char* str;
+ if (handle_->IsSmi()) {
+ // Optimization only, the heap number case would subsume this.
+ OS::SNPrintF(buffer, "%d", Smi::cast(*handle_)->value());
+ str = arr;
+ } else {
+ str = DoubleToCString(handle_->Number(), buffer);
+ }
+ return FACTORY->NewStringFromAscii(CStrVector(str));
+}
+
+
} } // namespace v8::internal
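
Literal::ToString above is what makes the table in CalculateEmitStore treat the numeric key 1 and the string key "1" as the same property name: strings are used as-is, Smis are printed with "%d", and other numbers go through DoubleToCString. A rough standard-library equivalent; "%g" is only a stand-in for DoubleToCString, and LiteralKey is a hypothetical helper.

    #include <climits>
    #include <cstdio>
    #include <string>

    std::string LiteralKey(bool is_string, const std::string& s, double number) {
      if (is_string) return s;
      char buffer[100];
      if (number >= INT_MIN && number <= INT_MAX &&
          number == static_cast<int>(number)) {
        // Smi-style fast path: small integers print directly.
        std::snprintf(buffer, sizeof(buffer), "%d", static_cast<int>(number));
      } else {
        std::snprintf(buffer, sizeof(buffer), "%g", number);  // stand-in for DoubleToCString
      }
      return buffer;
    }

    // Usage: both spellings of the key canonicalize to the same string, so the
    // duplicate detection above sees them as one property.
    //   LiteralKey(false, "", 1) == LiteralKey(true, "1", 0)   // both yield "1"
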
diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h
index 3acd121582a..09864885e93 100644
--- a/deps/v8/src/ast.h
+++ b/deps/v8/src/ast.h
@@ -41,6 +41,7 @@
#include "token.h"
#include "utils.h"
#include "variables.h"
+#include "interface.h"
#include "zone-inl.h"
namespace v8 {
@@ -61,7 +62,10 @@ namespace internal {
#define DECLARATION_NODE_LIST(V) \
V(VariableDeclaration) \
+ V(FunctionDeclaration) \
V(ModuleDeclaration) \
+ V(ImportDeclaration) \
+ V(ExportDeclaration) \
#define MODULE_NODE_LIST(V) \
V(ModuleLiteral) \
@@ -444,10 +448,10 @@ class Declaration: public AstNode {
VariableProxy* proxy() const { return proxy_; }
VariableMode mode() const { return mode_; }
Scope* scope() const { return scope_; }
+ virtual InitializationFlag initialization() const = 0;
virtual bool IsInlineable() const;
virtual Declaration* AsDeclaration() { return this; }
- virtual VariableDeclaration* AsVariableDeclaration() { return NULL; }
protected:
Declaration(VariableProxy* proxy,
@@ -475,22 +479,43 @@ class VariableDeclaration: public Declaration {
public:
DECLARE_NODE_TYPE(VariableDeclaration)
- virtual VariableDeclaration* AsVariableDeclaration() { return this; }
+ virtual InitializationFlag initialization() const {
+ return mode() == VAR ? kCreatedInitialized : kNeedsInitialization;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ VariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ Scope* scope)
+ : Declaration(proxy, mode, scope) {
+ }
+};
+
- FunctionLiteral* fun() const { return fun_; } // may be NULL
+class FunctionDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(FunctionDeclaration)
+
+ FunctionLiteral* fun() const { return fun_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
virtual bool IsInlineable() const;
protected:
template<class> friend class AstNodeFactory;
- VariableDeclaration(VariableProxy* proxy,
+ FunctionDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* fun,
Scope* scope)
: Declaration(proxy, mode, scope),
fun_(fun) {
- // At the moment there are no "const functions"'s in JavaScript...
- ASSERT(fun == NULL || mode == VAR || mode == LET);
+ // At the moment there are no "const functions" in JavaScript...
+ ASSERT(mode == VAR || mode == LET);
+ ASSERT(fun != NULL);
}
private:
@@ -503,6 +528,9 @@ class ModuleDeclaration: public Declaration {
DECLARE_NODE_TYPE(ModuleDeclaration)
Module* module() const { return module_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
protected:
template<class> friend class AstNodeFactory;
@@ -519,10 +547,58 @@ class ModuleDeclaration: public Declaration {
};
+class ImportDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ImportDeclaration)
+
+ Module* module() const { return module_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ImportDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope)
+ : Declaration(proxy, LET, scope),
+ module_(module) {
+ }
+
+ private:
+ Module* module_;
+};
+
+
+class ExportDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ExportDeclaration)
+
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ExportDeclaration(VariableProxy* proxy,
+ Scope* scope)
+ : Declaration(proxy, LET, scope) {
+ }
+};
+
+
class Module: public AstNode {
- // TODO(rossberg): stuff to come...
+ public:
+ Interface* interface() const { return interface_; }
+
protected:
- Module() {}
+ Module() : interface_(Interface::NewModule()) {}
+ explicit Module(Interface* interface) : interface_(interface) {}
+
+ private:
+ Interface* interface_;
};
@@ -535,8 +611,9 @@ class ModuleLiteral: public Module {
protected:
template<class> friend class AstNodeFactory;
- explicit ModuleLiteral(Block* body)
- : body_(body) {
+ ModuleLiteral(Block* body, Interface* interface)
+ : Module(interface),
+ body_(body) {
}
private:
@@ -553,9 +630,7 @@ class ModuleVariable: public Module {
protected:
template<class> friend class AstNodeFactory;
- explicit ModuleVariable(VariableProxy* proxy)
- : proxy_(proxy) {
- }
+ inline explicit ModuleVariable(VariableProxy* proxy);
private:
VariableProxy* proxy_;
@@ -1136,11 +1211,6 @@ class Literal: public Expression {
public:
DECLARE_NODE_TYPE(Literal)
- // Check if this literal is identical to the other literal.
- bool IsIdenticalTo(const Literal* other) const {
- return handle_.is_identical_to(other->handle_);
- }
-
virtual bool IsPropertyName() {
if (handle_->IsSymbol()) {
uint32_t ignored;
@@ -1173,6 +1243,16 @@ class Literal: public Expression {
Handle<Object> handle() const { return handle_; }
+ // Support for using Literal as a HashMap key. NOTE: Currently, this works
+ // only for string and number literals!
+ uint32_t Hash() { return ToString()->Hash(); }
+
+ static bool Match(void* literal1, void* literal2) {
+ Handle<String> s1 = static_cast<Literal*>(literal1)->ToString();
+ Handle<String> s2 = static_cast<Literal*>(literal2)->ToString();
+ return s1->Equals(*s2);
+ }
+
protected:
template<class> friend class AstNodeFactory;
@@ -1181,6 +1261,8 @@ class Literal: public Expression {
handle_(handle) { }
private:
+ Handle<String> ToString();
+
Handle<Object> handle_;
};
@@ -1232,7 +1314,7 @@ class ObjectLiteral: public MaterializedLiteral {
PROTOTYPE // Property is __proto__.
};
- Property(Literal* key, Expression* value);
+ Property(Literal* key, Expression* value, Isolate* isolate);
Literal* key() { return key_; }
Expression* value() { return value_; }
@@ -1382,6 +1464,8 @@ class VariableProxy: public Expression {
Variable* var() const { return var_; }
bool is_this() const { return is_this_; }
int position() const { return position_; }
+ Interface* interface() const { return interface_; }
+
void MarkAsTrivial() { is_trivial_ = true; }
void MarkAsLValue() { is_lvalue_ = true; }
@@ -1397,7 +1481,8 @@ class VariableProxy: public Expression {
VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position);
+ int position,
+ Interface* interface);
Handle<String> name_;
Variable* var_; // resolved variable, or NULL
@@ -1407,6 +1492,7 @@ class VariableProxy: public Expression {
// or with a increment/decrement operator.
bool is_lvalue_;
int position_;
+ Interface* interface_;
};
@@ -1528,6 +1614,13 @@ class CallNew: public Expression {
ZoneList<Expression*>* arguments() const { return arguments_; }
virtual int position() const { return pos_; }
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ virtual bool IsMonomorphic() { return is_monomorphic_; }
+ Handle<JSFunction> target() { return target_; }
+
+ // Bailout support.
+ int ReturnId() const { return return_id_; }
+
protected:
template<class> friend class AstNodeFactory;
@@ -1538,12 +1631,19 @@ class CallNew: public Expression {
: Expression(isolate),
expression_(expression),
arguments_(arguments),
- pos_(pos) { }
+ pos_(pos),
+ is_monomorphic_(false),
+ return_id_(GetNextId(isolate)) { }
private:
Expression* expression_;
ZoneList<Expression*>* arguments_;
int pos_;
+
+ bool is_monomorphic_;
+ Handle<JSFunction> target_;
+
+ int return_id_;
};
@@ -2423,6 +2523,15 @@ class RegExpEmpty: public RegExpTree {
// ----------------------------------------------------------------------------
+// Out-of-line inline constructors (to side-step cyclic dependencies).
+
+inline ModuleVariable::ModuleVariable(VariableProxy* proxy)
+ : Module(proxy->interface()),
+ proxy_(proxy) {
+}
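
The "out-of-line inline constructor" above is a general C++ technique for breaking a cyclic dependency inside one header: ModuleVariable's constructor needs VariableProxy::interface(), but VariableProxy is defined further down, so the constructor is only declared in the class and defined inline once both classes are complete. The same pattern in miniature, with hypothetical classes A and B:

    class B;  // forward declaration: B is incomplete here

    class A {
     public:
      inline explicit A(B* b);  // body deferred until B is defined
     private:
      int value_;
    };

    class B {
     public:
      int value() const { return value_; }
     private:
      int value_ = 42;
    };

    inline A::A(B* b) : value_(b->value()) {}  // now B is complete, so this compiles
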
+
+
+// ----------------------------------------------------------------------------
// Basic visitor
// - leaf node visitors are abstract.
@@ -2518,13 +2627,21 @@ class AstNodeFactory BASE_EMBEDDED {
VariableDeclaration* NewVariableDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* fun,
Scope* scope) {
VariableDeclaration* decl =
- new(zone_) VariableDeclaration(proxy, mode, fun, scope);
+ new(zone_) VariableDeclaration(proxy, mode, scope);
VISIT_AND_RETURN(VariableDeclaration, decl)
}
+ FunctionDeclaration* NewFunctionDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* fun,
+ Scope* scope) {
+ FunctionDeclaration* decl =
+ new(zone_) FunctionDeclaration(proxy, mode, fun, scope);
+ VISIT_AND_RETURN(FunctionDeclaration, decl)
+ }
+
ModuleDeclaration* NewModuleDeclaration(VariableProxy* proxy,
Module* module,
Scope* scope) {
@@ -2533,8 +2650,23 @@ class AstNodeFactory BASE_EMBEDDED {
VISIT_AND_RETURN(ModuleDeclaration, decl)
}
- ModuleLiteral* NewModuleLiteral(Block* body) {
- ModuleLiteral* module = new(zone_) ModuleLiteral(body);
+ ImportDeclaration* NewImportDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope) {
+ ImportDeclaration* decl =
+ new(zone_) ImportDeclaration(proxy, module, scope);
+ VISIT_AND_RETURN(ImportDeclaration, decl)
+ }
+
+ ExportDeclaration* NewExportDeclaration(VariableProxy* proxy,
+ Scope* scope) {
+ ExportDeclaration* decl =
+ new(zone_) ExportDeclaration(proxy, scope);
+ VISIT_AND_RETURN(ExportDeclaration, decl)
+ }
+
+ ModuleLiteral* NewModuleLiteral(Block* body, Interface* interface) {
+ ModuleLiteral* module = new(zone_) ModuleLiteral(body, interface);
VISIT_AND_RETURN(ModuleLiteral, module)
}
@@ -2690,9 +2822,11 @@ class AstNodeFactory BASE_EMBEDDED {
VariableProxy* NewVariableProxy(Handle<String> name,
bool is_this,
- int position = RelocInfo::kNoPosition) {
+ int position = RelocInfo::kNoPosition,
+ Interface* interface =
+ Interface::NewValue()) {
VariableProxy* proxy =
- new(zone_) VariableProxy(isolate_, name, is_this, position);
+ new(zone_) VariableProxy(isolate_, name, is_this, position, interface);
VISIT_AND_RETURN(VariableProxy, proxy)
}
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index 19a6a159903..a48bb4df4d7 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -927,7 +927,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
{ // --- D a t e ---
// Builtin functions for Date.prototype.
Handle<JSFunction> date_fun =
- InstallFunction(global, "Date", JS_VALUE_TYPE, JSValue::kSize,
+ InstallFunction(global, "Date", JS_DATE_TYPE, JSDate::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc
index 7290a2cf1ce..ca202f2dc40 100644
--- a/deps/v8/src/builtins.cc
+++ b/deps/v8/src/builtins.cc
@@ -310,28 +310,6 @@ BUILTIN(ArrayCodeGeneric) {
}
-static void CopyElements(Heap* heap,
- AssertNoAllocation* no_gc,
- FixedArray* dst,
- int dst_index,
- FixedArray* src,
- int src_index,
- int len) {
- if (len == 0) return;
- ASSERT(dst != src); // Use MoveElements instead.
- ASSERT(dst->map() != HEAP->fixed_cow_array_map());
- ASSERT(len > 0);
- CopyWords(dst->data_start() + dst_index,
- src->data_start() + src_index,
- len);
- WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
- if (mode == UPDATE_WRITE_BARRIER) {
- heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
- }
- heap->incremental_marking()->RecordWrites(dst);
-}
-
-
static void MoveElements(Heap* heap,
AssertNoAllocation* no_gc,
FixedArray* dst,
@@ -531,7 +509,8 @@ BUILTIN(ArrayPush) {
FixedArray* new_elms = FixedArray::cast(obj);
AssertNoAllocation no_gc;
- CopyElements(heap, &no_gc, new_elms, 0, elms, 0, len);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, 0, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
@@ -667,7 +646,8 @@ BUILTIN(ArrayUnshift) {
}
FixedArray* new_elms = FixedArray::cast(obj);
AssertNoAllocation no_gc;
- CopyElements(heap, &no_gc, new_elms, to_add, elms, 0, len);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, to_add, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
@@ -778,8 +758,9 @@ BUILTIN(ArraySlice) {
if (!maybe_array->To(&result_array)) return maybe_array;
AssertNoAllocation no_gc;
- CopyElements(heap, &no_gc, FixedArray::cast(result_array->elements()), 0,
- elms, k, result_len);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, k,
+ FixedArray::cast(result_array->elements()),
+ FAST_ELEMENTS, 0, result_len);
return result_array;
}
@@ -852,11 +833,9 @@ BUILTIN(ArraySplice) {
{
AssertNoAllocation no_gc;
// Fill newly created array.
- CopyElements(heap,
- &no_gc,
- FixedArray::cast(result_array->elements()), 0,
- elms, actual_start,
- actual_delete_count);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, actual_start,
+ FixedArray::cast(result_array->elements()),
+ FAST_ELEMENTS, 0, actual_delete_count);
}
int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
@@ -906,12 +885,13 @@ BUILTIN(ArraySplice) {
{
AssertNoAllocation no_gc;
// Copy the part before actual_start as is.
- CopyElements(heap, &no_gc, new_elms, 0, elms, 0, actual_start);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, 0, actual_start);
const int to_copy = len - actual_delete_count - actual_start;
- CopyElements(heap, &no_gc,
- new_elms, actual_start + item_count,
- elms, actual_start + actual_delete_count,
- to_copy);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS,
+ actual_start + actual_delete_count,
+ new_elms, FAST_ELEMENTS,
+ actual_start + item_count, to_copy);
}
FillWithHoles(heap, new_elms, new_length, capacity);
@@ -1000,7 +980,9 @@ BUILTIN(ArrayConcat) {
JSArray* array = JSArray::cast(args[i]);
int len = Smi::cast(array->length())->value();
FixedArray* elms = FixedArray::cast(array->elements());
- CopyElements(heap, &no_gc, result_elms, start_pos, elms, 0, len);
+ CopyObjectToObjectElements(&no_gc, elms, FAST_ELEMENTS, 0,
+ result_elms, FAST_ELEMENTS,
+ start_pos, len);
start_pos += len;
}
ASSERT(start_pos == result_len);
diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h
index 5360d3ef3ca..28a3006e1cc 100644
--- a/deps/v8/src/codegen.h
+++ b/deps/v8/src/codegen.h
@@ -84,6 +84,15 @@ enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
namespace v8 {
namespace internal {
+// Results of the library implementation of transcendental functions may differ
+// from the one we use in our generated code. Therefore we use the same
+// generated code both in runtime and compiled code.
+typedef double (*TranscendentalFunction)(double x);
+
+TranscendentalFunction CreateTranscendentalFunction(
+ TranscendentalCache::Type type);
+
+
class ElementsTransitionGenerator : public AllStatic {
public:
static void GenerateSmiOnlyToObject(MacroAssembler* masm);
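
The typedef and factory declared above exist so that the runtime and compiled code share one implementation per transcendental function instead of mixing libm with generated code, whose results may differ, as the comment notes. A hedged sketch of the idea; FastSine, CreateFastSine and RuntimeSin are hypothetical and simply fall back to libm where the real factory would return a generated stub. Only the TranscendentalFunction typedef and the CreateTranscendentalFunction declaration come from this header.

    #include <cmath>

    typedef double (*TranscendentalFunction)(double x);

    // Stand-in for a generated stub; here it just defers to the C library.
    static double FastSine(double x) { return std::sin(x); }

    TranscendentalFunction CreateFastSine() { return &FastSine; }

    double RuntimeSin(double x) {
      // Created once and reused, so every caller, runtime or compiled code,
      // goes through the very same function pointer.
      static TranscendentalFunction fast_sin = CreateFastSine();
      return fast_sin(x);
    }
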
diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc
index cbf53a5e86e..d689e871b7e 100644
--- a/deps/v8/src/compiler.cc
+++ b/deps/v8/src/compiler.cc
@@ -116,9 +116,9 @@ void CompilationInfo::DisableOptimization() {
bool CompilationInfo::ShouldSelfOptimize() {
return FLAG_self_optimization &&
FLAG_crankshaft &&
- !Serializer::enabled() &&
!function()->flags()->Contains(kDontSelfOptimize) &&
!function()->flags()->Contains(kDontOptimize) &&
+ function()->scope()->allows_lazy_recompilation() &&
(shared_info().is_null() || !shared_info()->optimization_disabled());
}
diff --git a/deps/v8/src/d8.gyp b/deps/v8/src/d8.gyp
index 3b92d03681a..a8361e6b4e5 100644
--- a/deps/v8/src/d8.gyp
+++ b/deps/v8/src/d8.gyp
@@ -41,9 +41,6 @@
'include_dirs+': [
'../src',
],
- 'defines': [
- 'ENABLE_DEBUGGER_SUPPORT',
- ],
'sources': [
'd8.cc',
],
diff --git a/deps/v8/src/date.cc b/deps/v8/src/date.cc
new file mode 100644
index 00000000000..a377451770f
--- /dev/null
+++ b/deps/v8/src/date.cc
@@ -0,0 +1,384 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "date.h"
+
+#include "v8.h"
+
+#include "objects.h"
+#include "objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+
+static const int kDays4Years[] = {0, 365, 2 * 365, 3 * 365 + 1};
+static const int kDaysIn4Years = 4 * 365 + 1;
+static const int kDaysIn100Years = 25 * kDaysIn4Years - 1;
+static const int kDaysIn400Years = 4 * kDaysIn100Years + 1;
+static const int kDays1970to2000 = 30 * 365 + 7;
+static const int kDaysOffset = 1000 * kDaysIn400Years + 5 * kDaysIn400Years -
+ kDays1970to2000;
+static const int kYearsOffset = 400000;
+static const char kDaysInMonths[] =
+ {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
+
+
+void DateCache::ResetDateCache() {
+ static const int kMaxStamp = Smi::kMaxValue;
+ stamp_ = Smi::FromInt(stamp_->value() + 1);
+ if (stamp_->value() > kMaxStamp) {
+ stamp_ = Smi::FromInt(0);
+ }
+ ASSERT(stamp_ != Smi::FromInt(kInvalidStamp));
+ for (int i = 0; i < kDSTSize; ++i) {
+ ClearSegment(&dst_[i]);
+ }
+ dst_usage_counter_ = 0;
+ before_ = &dst_[0];
+ after_ = &dst_[1];
+ local_offset_ms_ = kInvalidLocalOffsetInMs;
+ ymd_valid_ = false;
+}
+
+
+void DateCache::ClearSegment(DST* segment) {
+ segment->start_sec = kMaxEpochTimeInSec;
+ segment->end_sec = -kMaxEpochTimeInSec;
+ segment->offset_ms = 0;
+ segment->last_used = 0;
+}
+
+
+void DateCache::YearMonthDayFromDays(
+ int days, int* year, int* month, int* day) {
+ if (ymd_valid_) {
+ // Check conservatively if the given 'days' has
+ // the same year and month as the cached 'days'.
+ int new_day = ymd_day_ + (days - ymd_days_);
+ if (new_day >= 1 && new_day <= 28) {
+ ymd_day_ = new_day;
+ ymd_days_ = days;
+ *year = ymd_year_;
+ *month = ymd_month_;
+ *day = new_day;
+ return;
+ }
+ }
+ int save_days = days;
+
+ days += kDaysOffset;
+ *year = 400 * (days / kDaysIn400Years) - kYearsOffset;
+ days %= kDaysIn400Years;
+
+ ASSERT(DaysFromYearMonth(*year, 0) + days == save_days);
+
+ days--;
+ int yd1 = days / kDaysIn100Years;
+ days %= kDaysIn100Years;
+ *year += 100 * yd1;
+
+ days++;
+ int yd2 = days / kDaysIn4Years;
+ days %= kDaysIn4Years;
+ *year += 4 * yd2;
+
+ days--;
+ int yd3 = days / 365;
+ days %= 365;
+ *year += yd3;
+
+
+ bool is_leap = (!yd1 || yd2) && !yd3;
+
+ ASSERT(days >= -1);
+ ASSERT(is_leap || (days >= 0));
+ ASSERT((days < 365) || (is_leap && (days < 366)));
+ ASSERT(is_leap == ((*year % 4 == 0) && (*year % 100 || (*year % 400 == 0))));
+ ASSERT(is_leap || ((DaysFromYearMonth(*year, 0) + days) == save_days));
+ ASSERT(!is_leap || ((DaysFromYearMonth(*year, 0) + days + 1) == save_days));
+
+ days += is_leap;
+
+ // Check if the date is after February.
+ if (days >= 31 + 28 + is_leap) {
+ days -= 31 + 28 + is_leap;
+ // Find the date starting from March.
+ for (int i = 2; i < 12; i++) {
+ if (days < kDaysInMonths[i]) {
+ *month = i;
+ *day = days + 1;
+ break;
+ }
+ days -= kDaysInMonths[i];
+ }
+ } else {
+ // Check January and February.
+ if (days < 31) {
+ *month = 0;
+ *day = days + 1;
+ } else {
+ *month = 1;
+ *day = days - 31 + 1;
+ }
+ }
+ ASSERT(DaysFromYearMonth(*year, *month) + *day - 1 == save_days);
+ ymd_valid_ = true;
+ ymd_year_ = *year;
+ ymd_month_ = *month;
+ ymd_day_ = *day;
+ ymd_days_ = save_days;
+}
+
+
+int DateCache::DaysFromYearMonth(int year, int month) {
+ static const int day_from_month[] = {0, 31, 59, 90, 120, 151,
+ 181, 212, 243, 273, 304, 334};
+ static const int day_from_month_leap[] = {0, 31, 60, 91, 121, 152,
+ 182, 213, 244, 274, 305, 335};
+
+ year += month / 12;
+ month %= 12;
+ if (month < 0) {
+ year--;
+ month += 12;
+ }
+
+ ASSERT(month >= 0);
+ ASSERT(month < 12);
+
+ // year_delta is an arbitrary number such that:
+ // a) year_delta = -1 (mod 400)
+ // b) year + year_delta > 0 for years in the range defined by
+  //    ECMA 262 - 15.9.1.1, i.e. up to 100,000,000 days on either side of
+ // Jan 1 1970. This is required so that we don't run into integer
+ // division of negative numbers.
+ // c) there shouldn't be an overflow for 32-bit integers in the following
+ // operations.
+ static const int year_delta = 399999;
+ static const int base_day = 365 * (1970 + year_delta) +
+ (1970 + year_delta) / 4 -
+ (1970 + year_delta) / 100 +
+ (1970 + year_delta) / 400;
+
+ int year1 = year + year_delta;
+ int day_from_year = 365 * year1 +
+ year1 / 4 -
+ year1 / 100 +
+ year1 / 400 -
+ base_day;
+
+ if ((year % 4 != 0) || (year % 100 == 0 && year % 400 != 0)) {
+ return day_from_year + day_from_month[month];
+ }
+ return day_from_year + day_from_month_leap[month];
+}
+
+
+void DateCache::ExtendTheAfterSegment(int time_sec, int offset_ms) {
+ if (after_->offset_ms == offset_ms &&
+ after_->start_sec <= time_sec + kDefaultDSTDeltaInSec &&
+ time_sec <= after_->end_sec) {
+ // Extend the after_ segment.
+ after_->start_sec = time_sec;
+ } else {
+ // The after_ segment is either invalid or starts too late.
+ if (after_->start_sec <= after_->end_sec) {
+ // If the after_ segment is valid, replace it with a new segment.
+ after_ = LeastRecentlyUsedDST(before_);
+ }
+ after_->start_sec = time_sec;
+ after_->end_sec = time_sec;
+ after_->offset_ms = offset_ms;
+ after_->last_used = ++dst_usage_counter_;
+ }
+}
+
+
+int DateCache::DaylightSavingsOffsetInMs(int64_t time_ms) {
+ int time_sec = (time_ms >= 0 && time_ms <= kMaxEpochTimeInMs)
+ ? static_cast<int>(time_ms / 1000)
+ : static_cast<int>(EquivalentTime(time_ms) / 1000);
+
+ // Invalidate cache if the usage counter is close to overflow.
+ // Note that dst_usage_counter is incremented less than ten times
+ // in this function.
+ if (dst_usage_counter_ >= kMaxInt - 10) {
+ dst_usage_counter_ = 0;
+ for (int i = 0; i < kDSTSize; ++i) {
+ ClearSegment(&dst_[i]);
+ }
+ }
+
+ // Optimistic fast check.
+ if (before_->start_sec <= time_sec &&
+ time_sec <= before_->end_sec) {
+ // Cache hit.
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ ProbeDST(time_sec);
+
+ ASSERT(InvalidSegment(before_) || before_->start_sec <= time_sec);
+ ASSERT(InvalidSegment(after_) || time_sec < after_->start_sec);
+
+ if (InvalidSegment(before_)) {
+ // Cache miss.
+ before_->start_sec = time_sec;
+ before_->end_sec = time_sec;
+ before_->offset_ms = GetDaylightSavingsOffsetFromOS(time_sec);
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ if (time_sec <= before_->end_sec) {
+ // Cache hit.
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ if (time_sec > before_->end_sec + kDefaultDSTDeltaInSec) {
+ // If the before_ segment ends too early, then just
+ // query for the offset of the time_sec
+ int offset_ms = GetDaylightSavingsOffsetFromOS(time_sec);
+ ExtendTheAfterSegment(time_sec, offset_ms);
+ // This swap helps the optimistic fast check in subsequent invocations.
+ DST* temp = before_;
+ before_ = after_;
+ after_ = temp;
+ return offset_ms;
+ }
+
+ // Now the time_sec is between
+ // before_->end_sec and before_->end_sec + default DST delta.
+ // Update the usage counter of before_ since it is going to be used.
+ before_->last_used = ++dst_usage_counter_;
+
+ // Check if after_ segment is invalid or starts too late.
+ // Note that start_sec of invalid segments is kMaxEpochTimeInSec.
+ if (before_->end_sec + kDefaultDSTDeltaInSec <= after_->start_sec) {
+ int new_after_start_sec = before_->end_sec + kDefaultDSTDeltaInSec;
+ int new_offset_ms = GetDaylightSavingsOffsetFromOS(new_after_start_sec);
+ ExtendTheAfterSegment(new_after_start_sec, new_offset_ms);
+ } else {
+ ASSERT(!InvalidSegment(after_));
+ // Update the usage counter of after_ since it is going to be used.
+ after_->last_used = ++dst_usage_counter_;
+ }
+
+ // Now the time_sec is between before_->end_sec and after_->start_sec.
+ // Only one daylight savings offset change can occur in this interval.
+
+ if (before_->offset_ms == after_->offset_ms) {
+ // Merge two segments if they have the same offset.
+ before_->end_sec = after_->end_sec;
+ ClearSegment(after_);
+ return before_->offset_ms;
+ }
+
+ // Binary search for daylight savings offset change point,
+ // but give up if we don't find it in four iterations.
+ for (int i = 4; i >= 0; --i) {
+ int delta = after_->start_sec - before_->end_sec;
+ int middle_sec = (i == 0) ? time_sec : before_->end_sec + delta / 2;
+ int offset_ms = GetDaylightSavingsOffsetFromOS(middle_sec);
+ if (before_->offset_ms == offset_ms) {
+ before_->end_sec = middle_sec;
+ if (time_sec <= before_->end_sec) {
+ return offset_ms;
+ }
+ } else {
+ ASSERT(after_->offset_ms == offset_ms);
+ after_->start_sec = middle_sec;
+ if (time_sec >= after_->start_sec) {
+ // This swap helps the optimistic fast check in subsequent invocations.
+ DST* temp = before_;
+ before_ = after_;
+ after_ = temp;
+ return offset_ms;
+ }
+ }
+ }
+ UNREACHABLE();
+ return 0;
+}
+
+
+void DateCache::ProbeDST(int time_sec) {
+ DST* before = NULL;
+ DST* after = NULL;
+ ASSERT(before_ != after_);
+
+ for (int i = 0; i < kDSTSize; ++i) {
+ if (dst_[i].start_sec <= time_sec) {
+ if (before == NULL || before->start_sec < dst_[i].start_sec) {
+ before = &dst_[i];
+ }
+ } else if (time_sec < dst_[i].end_sec) {
+ if (after == NULL || after->end_sec > dst_[i].end_sec) {
+ after = &dst_[i];
+ }
+ }
+ }
+
+ // If before or after segments were not found,
+ // then set them to any invalid segment.
+ if (before == NULL) {
+ before = InvalidSegment(before_) ? before_ : LeastRecentlyUsedDST(after);
+ }
+ if (after == NULL) {
+ after = InvalidSegment(after_) && before != after_
+ ? after_ : LeastRecentlyUsedDST(before);
+ }
+
+ ASSERT(before != NULL);
+ ASSERT(after != NULL);
+ ASSERT(before != after);
+ ASSERT(InvalidSegment(before) || before->start_sec <= time_sec);
+ ASSERT(InvalidSegment(after) || time_sec < after->start_sec);
+ ASSERT(InvalidSegment(before) || InvalidSegment(after) ||
+ before->end_sec < after->start_sec);
+
+ before_ = before;
+ after_ = after;
+}
+
+
+DateCache::DST* DateCache::LeastRecentlyUsedDST(DST* skip) {
+ DST* result = NULL;
+ for (int i = 0; i < kDSTSize; ++i) {
+ if (&dst_[i] == skip) continue;
+ if (result == NULL || result->last_used > dst_[i].last_used) {
+ result = &dst_[i];
+ }
+ }
+ ClearSegment(result);
+ return result;
+}
+
+} } // namespace v8::internal
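
DaysFromYearMonth above leans on the year_delta trick: a constant that is -1 mod 400 and large enough that every intermediate quantity stays positive, so integer division never has to deal with negative operands. Below is a self-contained restatement with a few sanity checks; the expected values follow from kDays1970to2000 = 30 * 365 + 7 at the top of this file and from 2000 being a leap year (31 + 29 days before March 1). Month normalization for out-of-range months is omitted, so month must be in 0..11 here.

    #include <cassert>

    static int DaysFromYearMonthSketch(int year, int month) {
      static const int day_from_month[] = {0, 31, 59, 90, 120, 151,
                                           181, 212, 243, 273, 304, 334};
      static const int day_from_month_leap[] = {0, 31, 60, 91, 121, 152,
                                                182, 213, 244, 274, 305, 335};
      static const int year_delta = 399999;  // -1 (mod 400), keeps values positive
      static const int base_day = 365 * (1970 + year_delta) +
                                  (1970 + year_delta) / 4 -
                                  (1970 + year_delta) / 100 +
                                  (1970 + year_delta) / 400;
      int year1 = year + year_delta;
      int day_from_year =
          365 * year1 + year1 / 4 - year1 / 100 + year1 / 400 - base_day;
      bool leap = (year % 4 == 0) && (year % 100 != 0 || year % 400 == 0);
      return day_from_year +
             (leap ? day_from_month_leap[month] : day_from_month[month]);
    }

    int main() {
      assert(DaysFromYearMonthSketch(1970, 0) == 0);                // the epoch
      assert(DaysFromYearMonthSketch(2000, 0) == 30 * 365 + 7);     // kDays1970to2000
      assert(DaysFromYearMonthSketch(2000, 2) == 30 * 365 + 7 + 31 + 29);  // 2000-03-01
      return 0;
    }
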
diff --git a/deps/v8/src/date.h b/deps/v8/src/date.h
new file mode 100644
index 00000000000..fcd61db0467
--- /dev/null
+++ b/deps/v8/src/date.h
@@ -0,0 +1,260 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_DATE_H_
+#define V8_DATE_H_
+
+#include "allocation.h"
+#include "globals.h"
+#include "platform.h"
+
+
+namespace v8 {
+namespace internal {
+
+class DateCache {
+ public:
+ static const int kMsPerMin = 60 * 1000;
+ static const int kSecPerDay = 24 * 60 * 60;
+ static const int64_t kMsPerDay = kSecPerDay * 1000;
+
+ // The largest time that can be passed to OS date-time library functions.
+ static const int kMaxEpochTimeInSec = kMaxInt;
+ static const int64_t kMaxEpochTimeInMs =
+ static_cast<int64_t>(kMaxInt) * 1000;
+
+ // The largest time that can be stored in JSDate.
+ static const int64_t kMaxTimeInMs =
+ static_cast<int64_t>(864000000) * 10000000;
+
+ // Conservative upper bound on time that can be stored in JSDate
+ // before UTC conversion.
+ static const int64_t kMaxTimeBeforeUTCInMs =
+ kMaxTimeInMs + 10 * kMsPerDay;
+
+ // Sentinel that denotes an invalid local offset.
+ static const int kInvalidLocalOffsetInMs = kMaxInt;
+ // Sentinel that denotes an invalid cache stamp.
+ // It is an invariant of DateCache that cache stamp is non-negative.
+ static const int kInvalidStamp = -1;
+
+ DateCache() : stamp_(0) {
+ ResetDateCache();
+ }
+
+ virtual ~DateCache() {}
+
+
+ // Clears cached timezone information and increments the cache stamp.
+ void ResetDateCache();
+
+
+ // Computes floor(time_ms / kMsPerDay).
+ static int DaysFromTime(int64_t time_ms) {
+ if (time_ms < 0) time_ms -= (kMsPerDay - 1);
+ return static_cast<int>(time_ms / kMsPerDay);
+ }
+
+
+ // Computes modulo(time_ms, kMsPerDay) given that
+ // days = floor(time_ms / kMsPerDay).
+ static int TimeInDay(int64_t time_ms, int days) {
+ return static_cast<int>(time_ms - days * kMsPerDay);
+ }
+
+
+ // Given the number of days since the epoch, computes the weekday.
+ // ECMA 262 - 15.9.1.6.
+ int Weekday(int days) {
+ int result = (days + 4) % 7;
+ return result >= 0 ? result : result + 7;
+ }
+
+
+ bool IsLeap(int year) {
+ return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
+ }
+
+
+ // ECMA 262 - 15.9.1.7.
+ int LocalOffsetInMs() {
+ if (local_offset_ms_ == kInvalidLocalOffsetInMs) {
+ local_offset_ms_ = GetLocalOffsetFromOS();
+ }
+ return local_offset_ms_;
+ }
+
+
+ const char* LocalTimezone(int64_t time_ms) {
+ if (time_ms < 0 || time_ms > kMaxEpochTimeInMs) {
+ time_ms = EquivalentTime(time_ms);
+ }
+ return OS::LocalTimezone(static_cast<double>(time_ms));
+ }
+
+ // ECMA 262 - 15.9.5.26
+ int TimezoneOffset(int64_t time_ms) {
+ int64_t local_ms = ToLocal(time_ms);
+ return static_cast<int>((time_ms - local_ms) / kMsPerMin);
+ }
+
+ // ECMA 262 - 15.9.1.9
+ int64_t ToLocal(int64_t time_ms) {
+ return time_ms + LocalOffsetInMs() + DaylightSavingsOffsetInMs(time_ms);
+ }
+
+ // ECMA 262 - 15.9.1.9
+ int64_t ToUTC(int64_t time_ms) {
+ time_ms -= LocalOffsetInMs();
+ return time_ms - DaylightSavingsOffsetInMs(time_ms);
+ }
+
+
+ // Computes a time equivalent to the given time according
+ // to ECMA 262 - 15.9.1.9.
+ // The issue here is that some library calls don't work right for dates
+ // that cannot be represented using a non-negative signed 32 bit integer
+ // (measured in whole seconds based on the 1970 epoch).
+ // We solve this by mapping the time to a year with same leap-year-ness
+  // and same starting day for the year. The ECMAScript specification says
+ // we must do this, but for compatibility with other browsers, we use
+ // the actual year if it is in the range 1970..2037
+ int64_t EquivalentTime(int64_t time_ms) {
+ int days = DaysFromTime(time_ms);
+ int time_within_day_ms = static_cast<int>(time_ms - days * kMsPerDay);
+ int year, month, day;
+ YearMonthDayFromDays(days, &year, &month, &day);
+ int new_days = DaysFromYearMonth(EquivalentYear(year), month) + day - 1;
+ return static_cast<int64_t>(new_days) * kMsPerDay + time_within_day_ms;
+ }
+
+ // Returns an equivalent year in the range [2008-2035] matching
+ // - leap year,
+ // - week day of first day.
+ // ECMA 262 - 15.9.1.9.
+ int EquivalentYear(int year) {
+ int week_day = Weekday(DaysFromYearMonth(year, 0));
+ int recent_year = (IsLeap(year) ? 1956 : 1967) + (week_day * 12) % 28;
+ // Find the year in the range 2008..2037 that is equivalent mod 28.
+ // Add 3*28 to give a positive argument to the modulus operator.
+ return 2008 + (recent_year + 3 * 28 - 2008) % 28;
+ }
+
+ // Given the number of days since the epoch, computes
+ // the corresponding year, month, and day.
+ void YearMonthDayFromDays(int days, int* year, int* month, int* day);
+
+ // Computes the number of days since the epoch for
+ // the first day of the given month in the given year.
+ int DaysFromYearMonth(int year, int month);
+
+ // Cache stamp is used for invalidating caches in JSDate.
+ // We increment the stamp each time when the timezone information changes.
+ // JSDate objects perform stamp check and invalidate their caches if
+ // their saved stamp is not equal to the current stamp.
+ Smi* stamp() { return stamp_; }
+ void* stamp_address() { return &stamp_; }
+
+ // These functions are virtual so that we can override them when testing.
+ virtual int GetDaylightSavingsOffsetFromOS(int64_t time_sec) {
+ double time_ms = static_cast<double>(time_sec * 1000);
+ return static_cast<int>(OS::DaylightSavingsOffset(time_ms));
+ }
+
+ virtual int GetLocalOffsetFromOS() {
+ double offset = OS::LocalTimeOffset();
+ ASSERT(offset < kInvalidLocalOffsetInMs);
+ return static_cast<int>(offset);
+ }
+
+ private:
+ // The implementation relies on the fact that no time zones have
+ // more than one daylight savings offset change per 19 days.
+ // In Egypt in 2010 they decided to suspend DST during Ramadan. This
+ // led to a short interval where DST is in effect from September 10 to
+ // September 30.
+ static const int kDefaultDSTDeltaInSec = 19 * kSecPerDay;
+
+ // Size of the Daylight Savings Time cache.
+ static const int kDSTSize = 32;
+
+ // Daylight Savings Time segment stores a segment of time where
+ // daylight savings offset does not change.
+ struct DST {
+ int start_sec;
+ int end_sec;
+ int offset_ms;
+ int last_used;
+ };
+
+ // Computes the daylight savings offset for the given time.
+ // ECMA 262 - 15.9.1.8
+ int DaylightSavingsOffsetInMs(int64_t time_ms);
+
+ // Sets the before_ and the after_ segments from the DST cache such that
+ // the before_ segment starts earlier than the given time and
+ // the after_ segment start later than the given time.
+ // Both segments might be invalid.
+ // The last_used counters of the before_ and after_ are updated.
+ void ProbeDST(int time_sec);
+
+ // Finds the least recently used segment from the DST cache that is not
+ // equal to the given 'skip' segment.
+ DST* LeastRecentlyUsedDST(DST* skip);
+
+ // Extends the after_ segment with the given point or resets it
+ // if it starts later than the given time + kDefaultDSTDeltaInSec.
+ inline void ExtendTheAfterSegment(int time_sec, int offset_ms);
+
+ // Makes the given segment invalid.
+ inline void ClearSegment(DST* segment);
+
+ bool InvalidSegment(DST* segment) {
+ return segment->start_sec > segment->end_sec;
+ }
+
+ Smi* stamp_;
+
+ // Daylight Saving Time cache.
+ DST dst_[kDSTSize];
+ int dst_usage_counter_;
+ DST* before_;
+ DST* after_;
+
+ int local_offset_ms_;
+
+ // Year/Month/Day cache.
+ bool ymd_valid_;
+ int ymd_days_;
+ int ymd_year_;
+ int ymd_month_;
+ int ymd_day_;
+};
+
+} } // namespace v8::internal
+
+#endif
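
EquivalentYear above maps an arbitrary year onto one in [2008, 2035] with the same leap-year-ness and the same weekday for January 1, which is all EquivalentTime needs before it hands a time to the OS functions. A worked instance: 2200 is not a leap year and January 1, 2200 falls on a Wednesday, and the formula lands on 2031, which shares both properties. The helpers below are illustrative restatements, not the V8 code.

    #include <cassert>

    static bool IsLeapSketch(int year) {
      return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
    }

    // Weekday of January 1 of the given year, 0 = Sunday. Day 0 of the epoch
    // (January 1, 1970) was a Thursday, hence the "+ 4".
    static int WeekdayOfJan1(int year) {
      static const int year_delta = 399999;  // same trick as DaysFromYearMonth
      int year1 = year + year_delta;
      int base1 = 1970 + year_delta;
      int days = 365 * (year1 - base1) +
                 (year1 / 4 - base1 / 4) -
                 (year1 / 100 - base1 / 100) +
                 (year1 / 400 - base1 / 400);
      return ((days + 4) % 7 + 7) % 7;
    }

    static int EquivalentYearSketch(int year) {
      int week_day = WeekdayOfJan1(year);
      int recent_year = (IsLeapSketch(year) ? 1956 : 1967) + (week_day * 12) % 28;
      return 2008 + (recent_year + 3 * 28 - 2008) % 28;
    }

    int main() {
      assert(!IsLeapSketch(2200) && WeekdayOfJan1(2200) == 3);  // Wednesday
      assert(EquivalentYearSketch(2200) == 2031);
      assert(!IsLeapSketch(2031) && WeekdayOfJan1(2031) == 3);  // same year shape
      return 0;
    }
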
diff --git a/deps/v8/src/date.js b/deps/v8/src/date.js
index 8c51a931e4b..75edf6d32d1 100644
--- a/deps/v8/src/date.js
+++ b/deps/v8/src/date.js
@@ -44,173 +44,6 @@ function ThrowDateTypeError() {
throw new $TypeError('this is not a Date object.');
}
-// ECMA 262 - 5.2
-function Modulo(value, remainder) {
- var mod = value % remainder;
- // Guard against returning -0.
- if (mod == 0) return 0;
- return mod >= 0 ? mod : mod + remainder;
-}
-
-
-function TimeWithinDay(time) {
- return Modulo(time, msPerDay);
-}
-
-
-// ECMA 262 - 15.9.1.3
-function DaysInYear(year) {
- if (year % 4 != 0) return 365;
- if ((year % 100 == 0) && (year % 400 != 0)) return 365;
- return 366;
-}
-
-
-function DayFromYear(year) {
- return 365 * (year-1970)
- + FLOOR((year-1969)/4)
- - FLOOR((year-1901)/100)
- + FLOOR((year-1601)/400);
-}
-
-
-function TimeFromYear(year) {
- return msPerDay * DayFromYear(year);
-}
-
-
-function InLeapYear(time) {
- return DaysInYear(YearFromTime(time)) - 365; // Returns 1 or 0.
-}
-
-
-// ECMA 262 - 15.9.1.9
-function EquivalentYear(year) {
- // Returns an equivalent year in the range [2008-2035] matching
- // - leap year.
- // - week day of first day.
- var time = TimeFromYear(year);
- var recent_year = (InLeapYear(time) == 0 ? 1967 : 1956) +
- (WeekDay(time) * 12) % 28;
- // Find the year in the range 2008..2037 that is equivalent mod 28.
- // Add 3*28 to give a positive argument to the modulus operator.
- return 2008 + (recent_year + 3*28 - 2008) % 28;
-}
-
-
-function EquivalentTime(t) {
- // The issue here is that some library calls don't work right for dates
- // that cannot be represented using a non-negative signed 32 bit integer
- // (measured in whole seconds based on the 1970 epoch).
- // We solve this by mapping the time to a year with same leap-year-ness
- // and same starting day for the year. The ECMAscript specification says
- // we must do this, but for compatibility with other browsers, we use
- // the actual year if it is in the range 1970..2037
- if (t >= 0 && t <= 2.1e12) return t;
-
- var day = MakeDay(EquivalentYear(YearFromTime(t)),
- MonthFromTime(t),
- DateFromTime(t));
- return MakeDate(day, TimeWithinDay(t));
-}
-
-
-// local_time_offset is initialized when the DST_offset_cache is missed.
-// It must not be used until after a call to DaylightSavingsOffset().
-// In this way, only one check, for a DST cache miss, is needed.
-var local_time_offset;
-
-
-// Because computing the DST offset is an expensive operation,
-// we keep a cache of the last computed DST offset along with a time interval
-// where we know the cache is valid.
-// When the cache is valid, local_time_offset is also valid.
-var DST_offset_cache = {
- // Cached DST offset.
- offset: 0,
- // Time interval where the cached offset is valid.
- start: 0, end: -1,
- // Size of next interval expansion.
- increment: 0,
- initial_increment: 19 * msPerDay
-};
-
-
-// NOTE: The implementation relies on the fact that no time zones have
-// more than one daylight savings offset change per 19 days.
-//
-// In Egypt in 2010 they decided to suspend DST during Ramadan. This
-// led to a short interval where DST is in effect from September 10 to
-// September 30.
-//
-// If this function is called with NaN it returns NaN.
-function DaylightSavingsOffset(t) {
- // Load the cache object from the builtins object.
- var cache = DST_offset_cache;
-
- // Cache the start and the end in local variables for fast access.
- var start = cache.start;
- var end = cache.end;
-
- if (start <= t) {
- // If the time fits in the cached interval, return the cached offset.
- if (t <= end) return cache.offset;
-
- // If the cache misses, the local_time_offset may not be initialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
-
- // Compute a possible new interval end.
- var new_end = end + cache.increment;
-
- if (t <= new_end) {
- var end_offset = %DateDaylightSavingsOffset(EquivalentTime(new_end));
- if (cache.offset == end_offset) {
- // If the offset at the end of the new interval still matches
- // the offset in the cache, we grow the cached time interval
- // and return the offset.
- cache.end = new_end;
- cache.increment = cache.initial_increment;
- return end_offset;
- } else {
- var offset = %DateDaylightSavingsOffset(EquivalentTime(t));
- if (offset == end_offset) {
- // The offset at the given time is equal to the offset at the
- // new end of the interval, so that means that we've just skipped
- // the point in time where the DST offset change occurred. Updated
- // the interval to reflect this and reset the increment.
- cache.start = t;
- cache.end = new_end;
- cache.increment = cache.initial_increment;
- } else {
- // The interval contains a DST offset change and the given time is
- // before it. Adjust the increment to avoid a linear search for
- // the offset change point and change the end of the interval.
- cache.increment /= 3;
- cache.end = t;
- }
- // Update the offset in the cache and return it.
- cache.offset = offset;
- return offset;
- }
- }
- }
-
- // If the cache misses, the local_time_offset may not be initialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
- // Compute the DST offset for the time and shrink the cache interval
- // to only contain the time. This allows fast repeated DST offset
- // computations for the same time.
- var offset = %DateDaylightSavingsOffset(EquivalentTime(t));
- cache.offset = offset;
- cache.start = cache.end = t;
- cache.increment = cache.initial_increment;
- return offset;
-}
-
var timezone_cache_time = $NaN;
var timezone_cache_timezone;
@@ -220,57 +53,18 @@ function LocalTimezone(t) {
if (t == timezone_cache_time) {
return timezone_cache_timezone;
}
- var timezone = %DateLocalTimezone(EquivalentTime(t));
+ var timezone = %DateLocalTimezone(t);
timezone_cache_time = t;
timezone_cache_timezone = timezone;
return timezone;
}
-function WeekDay(time) {
- return Modulo(DAY(time) + 4, 7);
-}
-
-
-function LocalTime(time) {
- if (NUMBER_IS_NAN(time)) return time;
- // DaylightSavingsOffset called before local_time_offset used.
- return time + DaylightSavingsOffset(time) + local_time_offset;
-}
-
-
-var ltcache = {
- key: null,
- val: null
-};
-
-function LocalTimeNoCheck(time) {
- var ltc = ltcache;
- if (%_ObjectEquals(time, ltc.key)) return ltc.val;
-
- // Inline the DST offset cache checks for speed.
- // The cache is hit, or DaylightSavingsOffset is called,
- // before local_time_offset is used.
- var cache = DST_offset_cache;
- if (cache.start <= time && time <= cache.end) {
- var dst_offset = cache.offset;
- } else {
- var dst_offset = DaylightSavingsOffset(time);
- }
- ltc.key = time;
- return (ltc.val = time + local_time_offset + dst_offset);
-}
-
-
function UTC(time) {
if (NUMBER_IS_NAN(time)) return time;
// local_time_offset is needed before the call to DaylightSavingsOffset,
// so it may be uninitialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
- var tmp = time - local_time_offset;
- return tmp - DaylightSavingsOffset(tmp);
+ return %DateToUTC(time);
}
@@ -293,48 +87,6 @@ function TimeInYear(year) {
}
-var ymd_from_time_cache = [1970, 0, 1];
-var ymd_from_time_cached_time = 0;
-
-function YearFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
-
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t;
- }
-
- return ymd_from_time_cache[0];
-}
-
-function MonthFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t;
- }
-
- return ymd_from_time_cache[1];
-}
-
-function DateFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
-
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t;
- }
-
- return ymd_from_time_cache[2];
-}
-
-
// Compute number of days given a year, month, date.
// Note that month and date can lie outside the normal range.
// For example:
@@ -385,9 +137,6 @@ function TimeClip(time) {
var Date_cache = {
// Cached time value.
time: $NaN,
- // Cached year when interpreting the time as a local time. Only
- // valid when the time matches cached time.
- year: $NaN,
// String input for which the cached time is valid.
string: null
};
@@ -404,11 +153,10 @@ var Date_cache = {
var value;
if (argc == 0) {
value = %DateCurrentTime();
-
+ SET_UTC_DATE_VALUE(this, value);
} else if (argc == 1) {
if (IS_NUMBER(year)) {
- value = TimeClip(year);
-
+ value = year;
} else if (IS_STRING(year)) {
// Probe the Date cache. If we already have a time value for the
// given time, we re-use that instead of parsing the string again.
@@ -419,7 +167,6 @@ var Date_cache = {
value = DateParse(year);
if (!NUMBER_IS_NAN(value)) {
cache.time = value;
- cache.year = YearFromTime(LocalTimeNoCheck(value));
cache.string = year;
}
}
@@ -433,9 +180,9 @@ var Date_cache = {
// which is the default for everything else than Date objects.
// This makes us behave like KJS and SpiderMonkey.
var time = ToPrimitive(year, NUMBER_HINT);
- value = IS_STRING(time) ? DateParse(time) : TimeClip(ToNumber(time));
+ value = IS_STRING(time) ? DateParse(time) : ToNumber(time);
}
-
+ SET_UTC_DATE_VALUE(this, value);
} else {
year = ToNumber(year);
month = ToNumber(month);
@@ -449,9 +196,9 @@ var Date_cache = {
TO_INTEGER(year) <= 99) ? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
- value = TimeClip(UTC(MakeDate(day, time)));
+ value = MakeDate(day, time);
+ SET_LOCAL_DATE_VALUE(this, value);
}
- %_SetValueOf(this, value);
});
@@ -468,11 +215,11 @@ function TwoDigitString(value) {
}
-function DateString(time) {
- return WeekDays[WeekDay(time)] + ' '
- + Months[MonthFromTime(time)] + ' '
- + TwoDigitString(DateFromTime(time)) + ' '
- + YearFromTime(time);
+function DateString(date) {
+ return WeekDays[LOCAL_WEEKDAY(date)] + ' '
+ + Months[LOCAL_MONTH(date)] + ' '
+ + TwoDigitString(LOCAL_DAY(date)) + ' '
+ + LOCAL_YEAR(date);
}
@@ -482,38 +229,32 @@ var LongMonths = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December'];
-function LongDateString(time) {
- return LongWeekDays[WeekDay(time)] + ', '
- + LongMonths[MonthFromTime(time)] + ' '
- + TwoDigitString(DateFromTime(time)) + ', '
- + YearFromTime(time);
+function LongDateString(date) {
+ return LongWeekDays[LOCAL_WEEKDAY(date)] + ', '
+ + LongMonths[LOCAL_MONTH(date)] + ' '
+ + TwoDigitString(LOCAL_DAY(date)) + ', '
+ + LOCAL_YEAR(date);
}
-function TimeString(time) {
- return TwoDigitString(HOUR_FROM_TIME(time)) + ':'
- + TwoDigitString(MIN_FROM_TIME(time)) + ':'
- + TwoDigitString(SEC_FROM_TIME(time));
+function TimeString(date) {
+ return TwoDigitString(LOCAL_HOUR(date)) + ':'
+ + TwoDigitString(LOCAL_MIN(date)) + ':'
+ + TwoDigitString(LOCAL_SEC(date));
}
-function LocalTimezoneString(time) {
- var old_timezone = timezone_cache_timezone;
- var timezone = LocalTimezone(time);
- if (old_timezone && timezone != old_timezone) {
- // If the timezone string has changed from the one that we cached,
- // the local time offset may now be wrong. So we need to update it
- // and try again.
- local_time_offset = %DateLocalTimeOffset();
- // We also need to invalidate the DST cache as the new timezone may have
- // different DST times.
- var dst_cache = DST_offset_cache;
- dst_cache.start = 0;
- dst_cache.end = -1;
- }
+function TimeStringUTC(date) {
+ return TwoDigitString(UTC_HOUR(date)) + ':'
+ + TwoDigitString(UTC_MIN(date)) + ':'
+ + TwoDigitString(UTC_SEC(date));
+}
+
+
+function LocalTimezoneString(date) {
+ var timezone = LocalTimezone(UTC_DATE_VALUE(date));
- var timezoneOffset =
- (DaylightSavingsOffset(time) + local_time_offset) / msPerMinute;
+ var timezoneOffset = -TIMEZONE_OFFSET(date);
var sign = (timezoneOffset >= 0) ? 1 : -1;
var hours = FLOOR((sign * timezoneOffset)/60);
var min = FLOOR((sign * timezoneOffset)%60);
@@ -523,8 +264,8 @@ function LocalTimezoneString(time) {
}
-function DatePrintString(time) {
- return DateString(time) + ' ' + TimeString(time);
+function DatePrintString(date) {
+ return DateString(date) + ' ' + TimeString(date);
}
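
LocalTimezoneString now takes the offset straight from -TIMEZONE_OFFSET(date) instead of re-deriving DST and the cached local offset. The sign and zero-padding arithmetic that turns an offset in minutes east of UTC into the +HHMM / -HHMM suffix, sketched on its own (illustrative names, not V8's):

    #include <cstdio>
    #include <string>

    // TwoDigitString equivalent: zero-pad to two digits.
    static std::string TwoDigit(int value) {
      char buf[8];
      std::snprintf(buf, sizeof(buf), "%02d", value);
      return buf;
    }

    // The offset is in minutes east of UTC, i.e. the negation of the value
    // getTimezoneOffset() reports.
    static std::string OffsetSuffix(int offset_minutes) {
      int sign = offset_minutes >= 0 ? 1 : -1;
      int magnitude = sign * offset_minutes;
      return std::string(sign >= 0 ? "+" : "-") +
             TwoDigit(magnitude / 60) + TwoDigit(magnitude % 60);
    }

    int main() {
      std::printf("%s\n", OffsetSuffix(-330).c_str());  // prints -0530
    }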
// -------------------------------------------------------------------
@@ -564,7 +305,7 @@ function DateUTC(year, month, date, hours, minutes, seconds, ms) {
TO_INTEGER(year) <= 99) ? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(day, time)));
+ return TimeClip(MakeDate(day, time));
}
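
Both the constructor and DateUTC apply the legacy two-digit-year rule before MakeDay: integer years 0 through 99 are read as 1900 through 1999. In isolation (a crude stand-in for the TO_INTEGER check, not V8 code):

    #include <cmath>
    #include <cstdio>

    // Truncated years 0..99 map to 1900..1999; everything else, including
    // NaN, passes through unchanged.
    static double NormalizeYear(double year) {
      double truncated = std::trunc(year);
      if (truncated >= 0.0 && truncated <= 99.0) return 1900.0 + truncated;
      return year;
    }

    int main() {
      std::printf("%.0f %.0f\n", NormalizeYear(76), NormalizeYear(1976));  // 1976 1976
    }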
@@ -577,27 +318,30 @@ function DateNow() {
// ECMA 262 - 15.9.5.2
function DateToString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this)
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var time_zone_string = LocalTimezoneString(t); // May update local offset.
- return DatePrintString(LocalTimeNoCheck(t)) + time_zone_string;
+ var time_zone_string = LocalTimezoneString(this)
+ return DatePrintString(this) + time_zone_string;
}
// ECMA 262 - 15.9.5.3
function DateToDateString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- return DateString(LocalTimeNoCheck(t));
+ return DateString(this);
}
// ECMA 262 - 15.9.5.4
function DateToTimeString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var time_zone_string = LocalTimezoneString(t); // May update local offset.
- return TimeString(LocalTimeNoCheck(t)) + time_zone_string;
+ var time_zone_string = LocalTimezoneString(this);
+ return TimeString(this) + time_zone_string;
}
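
The conversions above, and the getters that follow, no longer recompute local time on every call: CHECK_DATE validates the receiver, and the LOCAL_*/UTC_* macros read fields cached on the date object that are refreshed whenever the time value is set. A standalone sketch of that caching idea (field names invented; std::localtime is only a stand-in for the real calendar code, and error handling is ignored):

    #include <ctime>

    // Keep the broken-down local time next to the raw time value and refill
    // it lazily after the value changes.
    class CachedDate {
     public:
      void SetTime(double ms_since_epoch) { value_ = ms_since_epoch; valid_ = false; }

      int LocalYear()  { Refresh(); return fields_.tm_year + 1900; }
      int LocalMonth() { Refresh(); return fields_.tm_mon; }
      int LocalDay()   { Refresh(); return fields_.tm_mday; }

     private:
      void Refresh() {
        if (valid_) return;
        std::time_t seconds = static_cast<std::time_t>(value_ / 1000.0);
        fields_ = *std::localtime(&seconds);  // stand-in for the real calendar math
        valid_ = true;
      }

      double value_ = 0;
      bool valid_ = false;
      std::tm fields_{};
    };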
@@ -609,363 +353,388 @@ function DateToLocaleString() {
// ECMA 262 - 15.9.5.6
function DateToLocaleDateString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- return LongDateString(LocalTimeNoCheck(t));
+ return LongDateString(this);
}
// ECMA 262 - 15.9.5.7
function DateToLocaleTimeString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var lt = LocalTimeNoCheck(t);
- return TimeString(lt);
+ return TimeString(this);
}
// ECMA 262 - 15.9.5.8
function DateValueOf() {
- return DATE_VALUE(this);
+ CHECK_DATE(this);
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.9
function DateGetTime() {
- return DATE_VALUE(this);
+ CHECK_DATE(this);
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.10
function DateGetFullYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- var cache = Date_cache;
- if (cache.time === t) return cache.year;
- return YearFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_YEAR(this);
}
// ECMA 262 - 15.9.5.11
function DateGetUTCFullYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return YearFromTime(t);
+ CHECK_DATE(this);
+ return UTC_YEAR(this);
}
// ECMA 262 - 15.9.5.12
function DateGetMonth() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MonthFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MONTH(this);
}
// ECMA 262 - 15.9.5.13
function DateGetUTCMonth() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MonthFromTime(t);
+ CHECK_DATE(this);
+ return UTC_MONTH(this);
}
// ECMA 262 - 15.9.5.14
function DateGetDate() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return DateFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_DAY(this);
}
// ECMA 262 - 15.9.5.15
function DateGetUTCDate() {
- var t = DATE_VALUE(this);
- return NAN_OR_DATE_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_DAY(this);
}
// ECMA 262 - 15.9.5.16
function DateGetDay() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return WeekDay(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.17
function DateGetUTCDay() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return WeekDay(t);
+ CHECK_DATE(this);
+ return UTC_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.18
function DateGetHours() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return HOUR_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_HOUR(this);
}
// ECMA 262 - 15.9.5.19
function DateGetUTCHours() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return HOUR_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_HOUR(this);
}
// ECMA 262 - 15.9.5.20
function DateGetMinutes() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MIN_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MIN(this);
}
// ECMA 262 - 15.9.5.21
function DateGetUTCMinutes() {
- var t = DATE_VALUE(this);
- return NAN_OR_MIN_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_MIN(this);
}
// ECMA 262 - 15.9.5.22
function DateGetSeconds() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return SEC_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_SEC(this);
}
// ECMA 262 - 15.9.5.23
function DateGetUTCSeconds() {
- var t = DATE_VALUE(this);
- return NAN_OR_SEC_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_SEC(this)
}
// ECMA 262 - 15.9.5.24
function DateGetMilliseconds() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MS_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MS(this);
}
// ECMA 262 - 15.9.5.25
function DateGetUTCMilliseconds() {
- var t = DATE_VALUE(this);
- return NAN_OR_MS_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_MS(this);
}
// ECMA 262 - 15.9.5.26
function DateGetTimezoneOffset() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return (t - LocalTimeNoCheck(t)) / msPerMinute;
+ CHECK_DATE(this);
+ return TIMEZONE_OFFSET(this);
}
// ECMA 262 - 15.9.5.27
function DateSetTime(ms) {
- if (!IS_DATE(this)) ThrowDateTypeError();
- return %_SetValueOf(this, TimeClip(ToNumber(ms)));
+ CHECK_DATE(this);
+ SET_UTC_DATE_VALUE(this, ToNumber(ms));
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.28
function DateSetMilliseconds(ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
ms = ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t),
- MIN_FROM_TIME(t),
- SEC_FROM_TIME(t),
- ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), LOCAL_SEC(this), ms);
+ SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
+ return this;
}
// ECMA 262 - 15.9.5.29
function DateSetUTCMilliseconds(ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
ms = ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t),
- MIN_FROM_TIME(t),
- SEC_FROM_TIME(t),
+ var time = MakeTime(UTC_HOUR(this),
+ UTC_MIN(this),
+ UTC_SEC(this),
ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.30
function DateSetSeconds(sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
sec = ToNumber(sec);
- ms = %_ArgumentsLength() < 2 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ ms = %_ArgumentsLength() < 2 ? LOCAL_MS(this) : ToNumber(ms);
+ var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), sec, ms);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.31
function DateSetUTCSeconds(sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
sec = ToNumber(sec);
- ms = %_ArgumentsLength() < 2 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ ms = %_ArgumentsLength() < 2 ? UTC_MS(this) : ToNumber(ms);
+ var time = MakeTime(UTC_HOUR(this), UTC_MIN(this), sec, ms);
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.33
function DateSetMinutes(min, sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
- sec = argc < 2 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 3 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), min, sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ sec = argc < 2 ? LOCAL_SEC(this) : ToNumber(sec);
+ ms = argc < 3 ? LOCAL_MS(this) : ToNumber(ms);
+ var time = MakeTime(LOCAL_HOUR(this), min, sec, ms);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.34
function DateSetUTCMinutes(min, sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
- sec = argc < 2 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 3 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), min, sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ sec = argc < 2 ? UTC_SEC(this) : ToNumber(sec);
+ ms = argc < 3 ? UTC_MS(this) : ToNumber(ms);
+ var time = MakeTime(UTC_HOUR(this), min, sec, ms);
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.35
function DateSetHours(hour, min, sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
- min = argc < 2 ? NAN_OR_MIN_FROM_TIME(t) : ToNumber(min);
- sec = argc < 3 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 4 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
+ min = argc < 2 ? LOCAL_MIN(this) : ToNumber(min);
+ sec = argc < 3 ? LOCAL_SEC(this) : ToNumber(sec);
+ ms = argc < 4 ? LOCAL_MS(this) : ToNumber(ms);
var time = MakeTime(hour, min, sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.34
function DateSetUTCHours(hour, min, sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
- min = argc < 2 ? NAN_OR_MIN_FROM_TIME(t) : ToNumber(min);
- sec = argc < 3 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 4 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
+ min = argc < 2 ? UTC_MIN(this) : ToNumber(min);
+ sec = argc < 3 ? UTC_SEC(this) : ToNumber(sec);
+ ms = argc < 4 ? UTC_MS(this) : ToNumber(ms);
var time = MakeTime(hour, min, sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.36
function DateSetDate(date) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
date = ToNumber(date);
- var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ var day = MakeDay(LOCAL_YEAR(this), LOCAL_MONTH(this), date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, LOCAL_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.37
function DateSetUTCDate(date) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
date = ToNumber(date);
- var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ var day = MakeDay(UTC_YEAR(this), UTC_MONTH(this), date);
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, UTC_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.38
function DateSetMonth(month, date) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
month = ToNumber(month);
- date = %_ArgumentsLength() < 2 ? NAN_OR_DATE_FROM_TIME(t) : ToNumber(date);
- var day = MakeDay(YearFromTime(t), month, date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ date = %_ArgumentsLength() < 2 ? LOCAL_DAY(this) : ToNumber(date);
+ var day = MakeDay(LOCAL_YEAR(this), month, date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, LOCAL_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.39
function DateSetUTCMonth(month, date) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
month = ToNumber(month);
- date = %_ArgumentsLength() < 2 ? NAN_OR_DATE_FROM_TIME(t) : ToNumber(date);
- var day = MakeDay(YearFromTime(t), month, date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ date = %_ArgumentsLength() < 2 ? UTC_DAY(this) : ToNumber(date);
+ var day = MakeDay(UTC_YEAR(this), month, date);
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, UTC_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.40
function DateSetFullYear(year, month, date) {
- var t = DATE_VALUE(this);
- t = NUMBER_IS_NAN(t) ? 0 : LocalTimeNoCheck(t);
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
year = ToNumber(year);
var argc = %_ArgumentsLength();
- month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
- date = argc < 3 ? DateFromTime(t) : ToNumber(date);
+ var time ;
+ if (NUMBER_IS_NAN(t)) {
+ month = argc < 2 ? 0 : ToNumber(month);
+ date = argc < 3 ? 1 : ToNumber(date);
+ time = 0;
+ } else {
+ month = argc < 2 ? LOCAL_MONTH(this) : ToNumber(month);
+ date = argc < 3 ? LOCAL_DAY(this) : ToNumber(date);
+ time = LOCAL_TIME_IN_DAY(this);
+ }
var day = MakeDay(year, month, date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, time));
}
// ECMA 262 - 15.9.5.41
function DateSetUTCFullYear(year, month, date) {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) t = 0;
- var argc = %_ArgumentsLength();
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
year = ToNumber(year);
- month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
- date = argc < 3 ? DateFromTime(t) : ToNumber(date);
+ var argc = %_ArgumentsLength();
+ var time ;
+ if (NUMBER_IS_NAN(t)) {
+ month = argc < 2 ? 0 : ToNumber(month);
+ date = argc < 3 ? 1 : ToNumber(date);
+ time = 0;
+ } else {
+ month = argc < 2 ? UTC_MONTH(this) : ToNumber(month);
+ date = argc < 3 ? UTC_DAY(this) : ToNumber(date);
+ time = UTC_TIME_IN_DAY(this);
+ }
var day = MakeDay(year, month, date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, time));
}
// ECMA 262 - 15.9.5.42
function DateToUTCString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
// Return UTC string of the form: Sat, 31 Jan 1970 23:00:00 GMT
- return WeekDays[WeekDay(t)] + ', '
- + TwoDigitString(DateFromTime(t)) + ' '
- + Months[MonthFromTime(t)] + ' '
- + YearFromTime(t) + ' '
- + TimeString(t) + ' GMT';
+ return WeekDays[UTC_WEEKDAY(this)] + ', '
+ + TwoDigitString(UTC_DAY(this)) + ' '
+ + Months[UTC_MONTH(this)] + ' '
+ + UTC_YEAR(this) + ' '
+ + TimeStringUTC(this) + ' GMT';
}
// ECMA 262 - B.2.4
function DateGetYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return $NaN;
- return YearFromTime(LocalTimeNoCheck(t)) - 1900;
+ CHECK_DATE(this);
+ return LOCAL_YEAR(this) - 1900;
}
// ECMA 262 - B.2.5
function DateSetYear(year) {
- var t = LocalTime(DATE_VALUE(this));
- if (NUMBER_IS_NAN(t)) t = 0;
+ CHECK_DATE(this);
year = ToNumber(year);
- if (NUMBER_IS_NAN(year)) return %_SetValueOf(this, $NaN);
+ if (NUMBER_IS_NAN(year)) return SET_UTC_DATE_VALUE(this, $NaN);
year = (0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
? 1900 + TO_INTEGER(year) : year;
- var day = MakeDay(year, MonthFromTime(t), DateFromTime(t));
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ var t = LOCAL_DATE_VALUE(this);
+ var month, date, time;
+ if (NUMBER_IS_NAN(t)) {
+ month = 0;
+ date = 1;
+ time = 0;
+ } else {
+ month = LOCAL_MONTH(this);
+ date = LOCAL_DAY(this);
+ time = LOCAL_TIME_IN_DAY(this);
+ }
+ var day = MakeDay(year, month, date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, time));
}
@@ -989,7 +758,8 @@ function PadInt(n, digits) {
// ECMA 262 - 15.9.5.43
function DateToISOString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) throw MakeRangeError("invalid_time_value", []);
var year = this.getUTCFullYear();
var year_string;
@@ -1024,34 +794,13 @@ function DateToJSON(key) {
function ResetDateCache() {
-
- // Reset the local_time_offset:
- local_time_offset = %DateLocalTimeOffset();
-
- // Reset the DST offset cache:
- var cache = DST_offset_cache;
- cache.offset = 0;
- cache.start = 0;
- cache.end = -1;
- cache.increment = 0;
- cache.initial_increment = 19 * msPerDay;
-
// Reset the timezone cache:
timezone_cache_time = $NaN;
timezone_cache_timezone = undefined;
- // Reset the ltcache:
- ltcache.key = null;
- ltcache.val = null;
-
- // Reset the ymd_from_time_cache:
- ymd_from_time_cache = [$NaN, $NaN, $NaN];
- ymd_from_time_cached_time = $NaN;
-
// Reset the date cache:
cache = Date_cache;
cache.time = $NaN;
- cache.year = $NaN;
cache.string = null;
}
diff --git a/deps/v8/src/debug-debugger.js b/deps/v8/src/debug-debugger.js
index 91c70a027b4..802f6224c43 100644
--- a/deps/v8/src/debug-debugger.js
+++ b/deps/v8/src/debug-debugger.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -478,7 +478,8 @@ ScriptBreakPoint.prototype.clear = function () {
function UpdateScriptBreakPoints(script) {
for (var i = 0; i < script_break_points.length; i++) {
var break_point = script_break_points[i];
- if ((break_point.type() == Debug.ScriptBreakPointType.ScriptName) &&
+ if ((break_point.type() == Debug.ScriptBreakPointType.ScriptName ||
+ break_point.type() == Debug.ScriptBreakPointType.ScriptRegExp) &&
break_point.matchesScript(script)) {
break_point.set(script);
}
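
The loop now re-binds regexp-typed break points as well as name-typed ones when a script is (re)compiled. The widened match, reduced to a sketch (BreakPoint and MatchesScript are illustrative, not the debugger API):

    #include <regex>
    #include <string>

    enum class BreakPointType { ScriptName, ScriptRegExp };

    struct BreakPoint {
      BreakPointType type;
      std::string pattern;  // exact script name, or a regular expression source
    };

    // Match by exact name, or, for regexp break points, by searching the name.
    static bool MatchesScript(const BreakPoint& bp, const std::string& script_name) {
      if (bp.type == BreakPointType::ScriptName) return bp.pattern == script_name;
      return std::regex_search(script_name, std::regex(bp.pattern));
    }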
diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc
index 55ecc71dab1..d069a4507ed 100644
--- a/deps/v8/src/deoptimizer.cc
+++ b/deps/v8/src/deoptimizer.cc
@@ -170,8 +170,16 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
deoptimizer->output_[frame_index - 1]->GetFrameType() ==
StackFrame::ARGUMENTS_ADAPTOR;
- DeoptimizedFrameInfo* info =
- new DeoptimizedFrameInfo(deoptimizer, frame_index, has_arguments_adaptor);
+ int construct_offset = has_arguments_adaptor ? 2 : 1;
+ bool has_construct_stub =
+ frame_index >= construct_offset &&
+ deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
+ StackFrame::CONSTRUCT;
+
+ DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
+ frame_index,
+ has_arguments_adaptor,
+ has_construct_stub);
isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
// Get the "simulated" top and size for the requested frame.
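
The new has_construct_stub test looks one frame below the inspected frame, or two below when an arguments adaptor frame sits in between. The offset logic on its own (frame kinds simplified, names invented):

    #include <vector>

    enum class FrameKind { JS, ARGUMENTS_ADAPTOR, CONSTRUCT };

    // Skip the adaptor frame, if any, then check whether the frame below is a
    // construct stub frame.
    static bool PrecededByConstructStub(const std::vector<FrameKind>& output,
                                        int frame_index,
                                        bool has_arguments_adaptor) {
      int construct_offset = has_arguments_adaptor ? 2 : 1;
      return frame_index >= construct_offset &&
             output[frame_index - construct_offset] == FrameKind::CONSTRUCT;
    }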
@@ -570,6 +578,9 @@ void Deoptimizer::DoComputeOutputFrames() {
case Translation::ARGUMENTS_ADAPTOR_FRAME:
DoComputeArgumentsAdaptorFrame(&iterator, i);
break;
+ case Translation::CONSTRUCT_STUB_FRAME:
+ DoComputeConstructStubFrame(&iterator, i);
+ break;
default:
UNREACHABLE();
break;
@@ -686,6 +697,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::BEGIN:
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE();
return;
@@ -873,6 +885,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
case Translation::BEGIN:
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE(); // Malformed input.
return false;
@@ -1206,7 +1219,8 @@ FrameDescription::FrameDescription(uint32_t frame_size,
function_(function),
top_(kZapUint32),
pc_(kZapUint32),
- fp_(kZapUint32) {
+ fp_(kZapUint32),
+ context_(kZapUint32) {
// Zap all the registers.
for (int r = 0; r < Register::kNumRegisters; r++) {
SetRegister(r, kZapUint32);
@@ -1320,6 +1334,13 @@ Handle<ByteArray> TranslationBuffer::CreateByteArray() {
}
+void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
+ buffer_->Add(CONSTRUCT_STUB_FRAME);
+ buffer_->Add(literal_id);
+ buffer_->Add(height);
+}
+
+
void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
buffer_->Add(literal_id);
@@ -1402,6 +1423,7 @@ int Translation::NumberOfOperandsFor(Opcode opcode) {
return 1;
case BEGIN:
case ARGUMENTS_ADAPTOR_FRAME:
+ case CONSTRUCT_STUB_FRAME:
return 2;
case JS_FRAME:
return 3;
@@ -1421,6 +1443,8 @@ const char* Translation::StringFor(Opcode opcode) {
return "JS_FRAME";
case ARGUMENTS_ADAPTOR_FRAME:
return "ARGUMENTS_ADAPTOR_FRAME";
+ case CONSTRUCT_STUB_FRAME:
+ return "CONSTRUCT_STUB_FRAME";
case REGISTER:
return "REGISTER";
case INT32_REGISTER:
@@ -1476,6 +1500,7 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
case Translation::BEGIN:
case Translation::JS_FRAME:
case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
// Peeled off before getting here.
break;
@@ -1598,10 +1623,13 @@ Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
#ifdef ENABLE_DEBUGGER_SUPPORT
-DeoptimizedFrameInfo::DeoptimizedFrameInfo(
- Deoptimizer* deoptimizer, int frame_index, bool has_arguments_adaptor) {
+DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
+ int frame_index,
+ bool has_arguments_adaptor,
+ bool has_construct_stub) {
FrameDescription* output_frame = deoptimizer->output_[frame_index];
- SetFunction(output_frame->GetFunction());
+ function_ = output_frame->GetFunction();
+ has_construct_stub_ = has_construct_stub;
expression_count_ = output_frame->GetExpressionCount();
expression_stack_ = new Object*[expression_count_];
// Get the source position using the unoptimized code.
diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h
index 68bc48d5cbb..7699222b766 100644
--- a/deps/v8/src/deoptimizer.h
+++ b/deps/v8/src/deoptimizer.h
@@ -283,6 +283,8 @@ class Deoptimizer : public Malloced {
void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
int frame_index);
+ void DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index);
void DoTranslateCommand(TranslationIterator* iterator,
int frame_index,
unsigned output_offset);
@@ -431,6 +433,9 @@ class FrameDescription {
intptr_t GetFp() const { return fp_; }
void SetFp(intptr_t fp) { fp_ = fp; }
+ intptr_t GetContext() const { return context_; }
+ void SetContext(intptr_t context) { context_ = context; }
+
Smi* GetState() const { return state_; }
void SetState(Smi* state) { state_ = state; }
@@ -492,6 +497,7 @@ class FrameDescription {
intptr_t top_;
intptr_t pc_;
intptr_t fp_;
+ intptr_t context_;
StackFrame::Type type_;
Smi* state_;
#ifdef DEBUG
@@ -556,6 +562,7 @@ class Translation BASE_EMBEDDED {
enum Opcode {
BEGIN,
JS_FRAME,
+ CONSTRUCT_STUB_FRAME,
ARGUMENTS_ADAPTOR_FRAME,
REGISTER,
INT32_REGISTER,
@@ -584,6 +591,7 @@ class Translation BASE_EMBEDDED {
// Commands.
void BeginJSFrame(int node_id, int literal_id, unsigned height);
void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
+ void BeginConstructStubFrame(int literal_id, unsigned height);
void StoreRegister(Register reg);
void StoreInt32Register(Register reg);
void StoreDoubleRegister(DoubleRegister reg);
@@ -716,7 +724,8 @@ class DeoptimizedFrameInfo : public Malloced {
public:
DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
int frame_index,
- bool has_arguments_adaptor);
+ bool has_arguments_adaptor,
+ bool has_construct_stub);
virtual ~DeoptimizedFrameInfo();
// GC support.
@@ -733,6 +742,12 @@ class DeoptimizedFrameInfo : public Malloced {
return function_;
}
+  // Check if this frame is preceded by a construct stub frame. The bottom-most
+  // inlined frame might still be called by an uninlined construct stub.
+ bool HasConstructStub() {
+ return has_construct_stub_;
+ }
+
// Get an incoming argument.
Object* GetParameter(int index) {
ASSERT(0 <= index && index < parameters_count());
@@ -750,11 +765,6 @@ class DeoptimizedFrameInfo : public Malloced {
}
private:
- // Set the frame function.
- void SetFunction(JSFunction* function) {
- function_ = function;
- }
-
// Set an incoming argument.
void SetParameter(int index, Object* obj) {
ASSERT(0 <= index && index < parameters_count());
@@ -768,6 +778,7 @@ class DeoptimizedFrameInfo : public Malloced {
}
JSFunction* function_;
+ bool has_construct_stub_;
int parameters_count_;
int expression_count_;
Object** parameters_;
diff --git a/deps/v8/src/elements.cc b/deps/v8/src/elements.cc
index d951b0ee5f3..331f6bc4b49 100644
--- a/deps/v8/src/elements.cc
+++ b/deps/v8/src/elements.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -59,6 +59,53 @@ namespace v8 {
namespace internal {
+// First argument in list is the accessor class, the second argument is the
+// accessor ElementsKind, and the third is the backing store class. Use the
+// fast element handler for smi-only arrays. The implementation is currently
+// identical. Note that the order must match that of the ElementsKind enum for
+// the |accessor_array[]| below to work.
+#define ELEMENTS_LIST(V) \
+ V(FastObjectElementsAccessor, FAST_SMI_ONLY_ELEMENTS, FixedArray) \
+ V(FastObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
+ V(FastDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, FixedDoubleArray) \
+ V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, \
+ SeededNumberDictionary) \
+ V(NonStrictArgumentsElementsAccessor, NON_STRICT_ARGUMENTS_ELEMENTS, \
+ FixedArray) \
+ V(ExternalByteElementsAccessor, EXTERNAL_BYTE_ELEMENTS, \
+ ExternalByteArray) \
+ V(ExternalUnsignedByteElementsAccessor, \
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS, ExternalUnsignedByteArray) \
+ V(ExternalShortElementsAccessor, EXTERNAL_SHORT_ELEMENTS, \
+ ExternalShortArray) \
+ V(ExternalUnsignedShortElementsAccessor, \
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS, ExternalUnsignedShortArray) \
+ V(ExternalIntElementsAccessor, EXTERNAL_INT_ELEMENTS, \
+ ExternalIntArray) \
+ V(ExternalUnsignedIntElementsAccessor, \
+ EXTERNAL_UNSIGNED_INT_ELEMENTS, ExternalUnsignedIntArray) \
+ V(ExternalFloatElementsAccessor, \
+ EXTERNAL_FLOAT_ELEMENTS, ExternalFloatArray) \
+ V(ExternalDoubleElementsAccessor, \
+ EXTERNAL_DOUBLE_ELEMENTS, ExternalDoubleArray) \
+ V(PixelElementsAccessor, EXTERNAL_PIXEL_ELEMENTS, ExternalPixelArray)
+
+
+template<ElementsKind Kind> class ElementsKindTraits {
+ public:
+ typedef FixedArrayBase BackingStore;
+};
+
+#define ELEMENTS_TRAITS(Class, KindParam, Store) \
+template<> class ElementsKindTraits<KindParam> { \
+ public: \
+ static const ElementsKind Kind = KindParam; \
+ typedef Store BackingStore; \
+};
+ELEMENTS_LIST(ELEMENTS_TRAITS)
+#undef ELEMENTS_TRAITS
+
+
ElementsAccessor** ElementsAccessor::elements_accessors_;
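
ELEMENTS_LIST is an X-macro: the single list is expanded several ways further down (a struct of handler singletons, their initializers, and the kind-indexed lookup array), so adding a kind in one place updates all of them, and the STATIC_ASSERT on the array size catches a mismatch with the enum. A compilable toy version of the same trick, with made-up handler names:

    #include <cstdio>

    // One list, expanded several ways through the macro parameter V.
    #define KIND_LIST(V)        \
      V(FastHandler, FAST_KIND) \
      V(SlowHandler, SLOW_KIND)

    struct FastHandler { const char* name; };
    struct SlowHandler { const char* name; };

    int main() {
      // Expansion 1: a struct with one member per kind.
      static struct {
    #define HANDLER_FIELD(Class, Kind) Class Kind##_handler;
        KIND_LIST(HANDLER_FIELD)
    #undef HANDLER_FIELD
      } handlers = {
        // Expansion 2: matching initializers (here, just the kind's name).
    #define HANDLER_INIT(Class, Kind) {#Kind},
        KIND_LIST(HANDLER_INIT)
    #undef HANDLER_INIT
      };
      // Expansion 3: a lookup table whose order follows the list.
      static const char* table[] = {
    #define HANDLER_ENTRY(Class, Kind) handlers.Kind##_handler.name,
        KIND_LIST(HANDLER_ENTRY)
    #undef HANDLER_ENTRY
      };
      for (const char* name : table) std::printf("%s\n", name);
    }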
@@ -84,6 +131,140 @@ static Failure* ThrowArrayLengthRangeError(Heap* heap) {
}
+void CopyObjectToObjectElements(AssertNoAllocation* no_gc,
+ FixedArray* from_obj,
+ ElementsKind from_kind,
+ uint32_t from_start,
+ FixedArray* to_obj,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ ASSERT(to_obj->map() != HEAP->fixed_cow_array_map());
+ ASSERT(from_kind == FAST_ELEMENTS || from_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ if (copy_size == -1) {
+ copy_size = Min(from_obj->length() - from_start,
+ to_obj->length() - to_start);
+ }
+ ASSERT(((copy_size + static_cast<int>(to_start)) <= to_obj->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_obj->length()));
+ if (copy_size == 0) return;
+ Address to = to_obj->address() + FixedArray::kHeaderSize;
+ Address from = from_obj->address() + FixedArray::kHeaderSize;
+ CopyWords(reinterpret_cast<Object**>(to) + to_start,
+ reinterpret_cast<Object**>(from) + from_start,
+ copy_size);
+ if (from_kind == FAST_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ Heap* heap = from_obj->GetHeap();
+ WriteBarrierMode mode = to_obj->GetWriteBarrierMode(*no_gc);
+ if (mode == UPDATE_WRITE_BARRIER) {
+ heap->RecordWrites(to_obj->address(),
+ to_obj->OffsetOfElementAt(to_start),
+ copy_size);
+ }
+ heap->incremental_marking()->RecordWrites(to_obj);
+ }
+}
+
+
+
+
+static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
+ uint32_t from_start,
+ FixedArray* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ ASSERT(to != from);
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(copy_size == -1 ||
+ (copy_size + static_cast<int>(to_start)) <= to->length());
+ WriteBarrierMode mode = to_kind == FAST_ELEMENTS
+ ? UPDATE_WRITE_BARRIER
+ : SKIP_WRITE_BARRIER;
+ uint32_t copy_limit = (copy_size == -1)
+ ? to->length()
+ : Min(to_start + copy_size, static_cast<uint32_t>(to->length()));
+ for (int i = 0; i < from->Capacity(); ++i) {
+ Object* key = from->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t entry = static_cast<uint32_t>(key->Number());
+ if (entry >= to_start && entry < copy_limit) {
+ Object* value = from->ValueAt(i);
+ ASSERT(to_kind == FAST_ELEMENTS || value->IsSmi());
+ to->set(entry, value, mode);
+ }
+ }
+ }
+}
+
+
+MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
+ FixedDoubleArray* from_obj,
+ uint32_t from_start,
+ FixedArray* to_obj,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ if (copy_size == -1) {
+ copy_size = Min(from_obj->length() - from_start,
+ to_obj->length() - to_start);
+ }
+ ASSERT(((copy_size + static_cast<int>(to_start)) <= to_obj->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_obj->length()));
+ if (copy_size == 0) return from_obj;
+ for (int i = 0; i < copy_size; ++i) {
+ if (to_kind == FAST_SMI_ONLY_ELEMENTS) {
+ UNIMPLEMENTED();
+ return Failure::Exception();
+ } else {
+ MaybeObject* maybe_value = from_obj->get(i + from_start);
+ Object* value;
+ ASSERT(to_kind == FAST_ELEMENTS);
+      // Because the FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS transition allocates
+      // HeapObjects iteratively, the allocation must succeed within a single
+      // GC cycle, otherwise the retry after the GC will also fail. To ensure
+      // that no GC is triggered, allocate HeapNumbers from old space if they
+      // can't be taken from new space.
+ if (!maybe_value->ToObject(&value)) {
+ ASSERT(maybe_value->IsRetryAfterGC() || maybe_value->IsOutOfMemory());
+ Heap* heap = from_obj->GetHeap();
+ MaybeObject* maybe_value_object =
+ heap->AllocateHeapNumber(from_obj->get_scalar(i + from_start),
+ TENURED);
+ if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
+ }
+ to_obj->set(i + to_start, value, UPDATE_WRITE_BARRIER);
+ }
+ }
+ return to_obj;
+}
+
+
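
The comment in CopyDoubleToObjectElements above is the constraint that shapes the loop: boxing must not trigger a GC halfway through, so a HeapNumber that cannot come from new space is allocated tenured rather than retried after a collection. The fallback, with dummy allocators standing in for the heap calls (and leaks ignored):

    #include <cstdio>
    #include <optional>

    struct HeapNumber { double value; };

    // Dummy allocators; the real calls are AllocateHeapNumber and its TENURED
    // variant. New space is simply pretended to be full here.
    static std::optional<HeapNumber*> TryAllocateInNewSpace(double) {
      return std::nullopt;
    }
    static HeapNumber* AllocateTenured(double v) { return new HeapNumber{v}; }

    // Box one double without ever falling back to "collect and retry".
    static HeapNumber* BoxWithoutGC(double v) {
      if (std::optional<HeapNumber*> young = TryAllocateInNewSpace(v)) return *young;
      return AllocateTenured(v);
    }

    int main() { std::printf("%g\n", BoxWithoutGC(0.5)->value); }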
+static void CopyDoubleToDoubleElements(FixedDoubleArray* from_obj,
+ uint32_t from_start,
+ FixedDoubleArray* to_obj,
+ uint32_t to_start,
+ int copy_size) {
+ if (copy_size == -1) {
+ copy_size = Min(from_obj->length() - from_start,
+ to_obj->length() - to_start);
+ }
+ ASSERT(((copy_size + static_cast<int>(to_start)) <= to_obj->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from_obj->length()));
+ if (copy_size == 0) return;
+ Address to = to_obj->address() + FixedDoubleArray::kHeaderSize;
+ Address from = from_obj->address() + FixedDoubleArray::kHeaderSize;
+ to += kDoubleSize * to_start;
+ from += kDoubleSize * from_start;
+ int words_per_double = (kDoubleSize / kPointerSize);
+ CopyWords(reinterpret_cast<Object**>(to),
+ reinterpret_cast<Object**>(from),
+ words_per_double * copy_size);
+}
+
+
// Base class for element handler implementations. Contains the
// the common logic for objects with different ElementsKinds.
// Subclasses must specialize method for which the element
@@ -101,37 +282,67 @@ static Failure* ThrowArrayLengthRangeError(Heap* heap) {
// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
// specialization of SomeElementsAccessor methods).
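
For reference, the CRTP-plus-traits shape in isolation: the base class looks up the backing-store type from the kind's traits, performs the cast once in the virtual entry point, and then calls a statically bound GetImpl on the subclass. All names below are invented for the sketch:

    #include <cstdio>
    #include <vector>

    enum Kind { FAST, KIND_COUNT };

    // Traits map a kind to its backing-store type (cf. ElementsKindTraits).
    template <Kind K> struct KindTraits;
    template <> struct KindTraits<FAST> { using BackingStore = std::vector<int>; };

    struct Accessor {
      virtual ~Accessor() = default;
      virtual int Get(void* backing_store, unsigned key) = 0;
    };

    // CRTP base: one virtual hop, then static dispatch into the subclass.
    template <class Sub, class Traits>
    struct AccessorBase : Accessor {
      using BackingStore = typename Traits::BackingStore;
      int Get(void* backing_store, unsigned key) override {
        return Sub::GetImpl(*static_cast<BackingStore*>(backing_store), key);
      }
    };

    struct FastAccessor : AccessorBase<FastAccessor, KindTraits<FAST>> {
      static int GetImpl(const std::vector<int>& store, unsigned key) {
        return key < store.size() ? store[key] : -1;  // -1 plays "the hole"
      }
    };

    int main() {
      std::vector<int> store = {10, 20, 30};
      FastAccessor accessor;
      Accessor* generic = &accessor;
      std::printf("%d %d\n", generic->Get(&store, 1), generic->Get(&store, 9));
    }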
-template <typename ElementsAccessorSubclass, typename BackingStoreClass>
+template <typename ElementsAccessorSubclass,
+ typename ElementsTraitsParam>
class ElementsAccessorBase : public ElementsAccessor {
protected:
- ElementsAccessorBase() { }
- virtual MaybeObject* Get(FixedArrayBase* backing_store,
+ explicit ElementsAccessorBase(const char* name)
+ : ElementsAccessor(name) { }
+
+ typedef ElementsTraitsParam ElementsTraits;
+ typedef typename ElementsTraitsParam::BackingStore BackingStore;
+
+ virtual ElementsKind kind() const { return ElementsTraits::Kind; }
+
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ BackingStore* backing_store) {
+ MaybeObject* element =
+ ElementsAccessorSubclass::GetImpl(receiver, holder, key, backing_store);
+ return !element->IsTheHole();
+ }
+
+ virtual bool HasElement(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store) {
+ if (backing_store == NULL) {
+ backing_store = holder->elements();
+ }
+ return ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, BackingStore::cast(backing_store));
+ }
+
+ virtual MaybeObject* Get(Object* receiver,
+ JSObject* holder,
uint32_t key,
- JSObject* obj,
- Object* receiver) {
+ FixedArrayBase* backing_store) {
+ if (backing_store == NULL) {
+ backing_store = holder->elements();
+ }
return ElementsAccessorSubclass::GetImpl(
- BackingStoreClass::cast(backing_store), key, obj, receiver);
+ receiver, holder, key, BackingStore::cast(backing_store));
}
- static MaybeObject* GetImpl(BackingStoreClass* backing_store,
- uint32_t key,
+ static MaybeObject* GetImpl(Object* receiver,
JSObject* obj,
- Object* receiver) {
+ uint32_t key,
+ BackingStore* backing_store) {
return (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store))
? backing_store->get(key)
: backing_store->GetHeap()->the_hole_value();
}
- virtual MaybeObject* SetLength(JSObject* obj,
+ virtual MaybeObject* SetLength(JSArray* array,
Object* length) {
- ASSERT(obj->IsJSArray());
return ElementsAccessorSubclass::SetLengthImpl(
- BackingStoreClass::cast(obj->elements()), obj, length);
+ array, length, BackingStore::cast(array->elements()));
}
- static MaybeObject* SetLengthImpl(BackingStoreClass* backing_store,
- JSObject* obj,
- Object* length);
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ BackingStore* backing_store);
virtual MaybeObject* SetCapacityAndLength(JSArray* array,
int capacity,
@@ -153,10 +364,34 @@ class ElementsAccessorBase : public ElementsAccessor {
uint32_t key,
JSReceiver::DeleteMode mode) = 0;
- virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
- FixedArray* to,
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ UNREACHABLE();
+ return NULL;
+ }
+
+ virtual MaybeObject* CopyElements(JSObject* from_holder,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size,
+ FixedArrayBase* from) {
+ if (from == NULL) {
+ from = from_holder->elements();
+ }
+ return ElementsAccessorSubclass::CopyElementsImpl(
+ from, from_start, to, to_kind, to_start, copy_size);
+ }
+
+ virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
JSObject* holder,
- Object* receiver) {
+ FixedArray* to,
+ FixedArrayBase* from) {
int len0 = to->length();
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
@@ -165,7 +400,10 @@ class ElementsAccessorBase : public ElementsAccessor {
}
}
#endif
- BackingStoreClass* backing_store = BackingStoreClass::cast(from);
+ if (from == NULL) {
+ from = holder->elements();
+ }
+ BackingStore* backing_store = BackingStore::cast(from);
uint32_t len1 = ElementsAccessorSubclass::GetCapacityImpl(backing_store);
// Optimize if 'other' is empty.
@@ -173,15 +411,15 @@ class ElementsAccessorBase : public ElementsAccessor {
if (len1 == 0) return to;
// Compute how many elements are not in other.
- int extra = 0;
+ uint32_t extra = 0;
for (uint32_t y = 0; y < len1; y++) {
- if (ElementsAccessorSubclass::HasElementAtIndexImpl(
- backing_store, y, holder, receiver)) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ if (ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, backing_store)) {
MaybeObject* maybe_value =
- ElementsAccessorSubclass::GetImpl(backing_store, key,
- holder, receiver);
+ ElementsAccessorSubclass::GetImpl(receiver, holder,
+ key, backing_store);
Object* value;
if (!maybe_value->ToObject(&value)) return maybe_value;
ASSERT(!value->IsTheHole());
@@ -210,15 +448,15 @@ class ElementsAccessorBase : public ElementsAccessor {
}
}
// Fill in the extra values.
- int index = 0;
+ uint32_t index = 0;
for (uint32_t y = 0; y < len1; y++) {
- if (ElementsAccessorSubclass::HasElementAtIndexImpl(
- backing_store, y, holder, receiver)) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ if (ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, backing_store)) {
MaybeObject* maybe_value =
- ElementsAccessorSubclass::GetImpl(backing_store, key,
- holder, receiver);
+ ElementsAccessorSubclass::GetImpl(receiver, holder,
+ key, backing_store);
Object* value;
if (!maybe_value->ToObject(&value)) return maybe_value;
if (!value->IsTheHole() && !HasKey(to, value)) {
@@ -232,43 +470,24 @@ class ElementsAccessorBase : public ElementsAccessor {
}
protected:
- static uint32_t GetCapacityImpl(BackingStoreClass* backing_store) {
+ static uint32_t GetCapacityImpl(BackingStore* backing_store) {
return backing_store->length();
}
virtual uint32_t GetCapacity(FixedArrayBase* backing_store) {
return ElementsAccessorSubclass::GetCapacityImpl(
- BackingStoreClass::cast(backing_store));
- }
-
- static bool HasElementAtIndexImpl(BackingStoreClass* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, index);
- MaybeObject* element =
- ElementsAccessorSubclass::GetImpl(backing_store, key, holder, receiver);
- return !element->IsTheHole();
- }
-
- virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- return ElementsAccessorSubclass::HasElementAtIndexImpl(
- BackingStoreClass::cast(backing_store), index, holder, receiver);
+ BackingStore::cast(backing_store));
}
- static uint32_t GetKeyForIndexImpl(BackingStoreClass* backing_store,
+ static uint32_t GetKeyForIndexImpl(BackingStore* backing_store,
uint32_t index) {
return index;
}
virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
- uint32_t index) {
+ uint32_t index) {
return ElementsAccessorSubclass::GetKeyForIndexImpl(
- BackingStoreClass::cast(backing_store), index);
+ BackingStore::cast(backing_store), index);
}
private:
@@ -278,12 +497,18 @@ class ElementsAccessorBase : public ElementsAccessor {
// Super class for all fast element arrays.
template<typename FastElementsAccessorSubclass,
- typename BackingStore,
+ typename KindTraits,
int ElementSize>
class FastElementsAccessor
- : public ElementsAccessorBase<FastElementsAccessorSubclass, BackingStore> {
+ : public ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits> {
+ public:
+ explicit FastElementsAccessor(const char* name)
+ : ElementsAccessorBase<FastElementsAccessorSubclass,
+ KindTraits>(name) {}
protected:
- friend class ElementsAccessorBase<FastElementsAccessorSubclass, BackingStore>;
+ friend class ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits>;
+
+ typedef typename KindTraits::BackingStore BackingStore;
// Adjusts the length of the fast backing store or returns the new length or
// undefined in case conversion to a slow backing store should be performed.
@@ -338,9 +563,14 @@ class FastElementsAccessor
class FastObjectElementsAccessor
: public FastElementsAccessor<FastObjectElementsAccessor,
- FixedArray,
+ ElementsKindTraits<FAST_ELEMENTS>,
kPointerSize> {
public:
+ explicit FastObjectElementsAccessor(const char* name)
+ : FastElementsAccessor<FastObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS>,
+ kPointerSize>(name) {}
+
static MaybeObject* DeleteCommon(JSObject* obj,
uint32_t key) {
ASSERT(obj->HasFastElements() ||
@@ -387,6 +617,28 @@ class FastObjectElementsAccessor
return heap->true_value();
}
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS: {
+ AssertNoAllocation no_gc;
+ CopyObjectToObjectElements(
+ &no_gc, FixedArray::cast(from), ElementsTraits::Kind, from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ return from;
+ }
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
+
+
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
@@ -401,7 +653,7 @@ class FastObjectElementsAccessor
protected:
friend class FastElementsAccessor<FastObjectElementsAccessor,
- FixedArray,
+ ElementsKindTraits<FAST_ELEMENTS>,
kPointerSize>;
virtual MaybeObject* Delete(JSObject* obj,
@@ -414,8 +666,14 @@ class FastObjectElementsAccessor
class FastDoubleElementsAccessor
: public FastElementsAccessor<FastDoubleElementsAccessor,
- FixedDoubleArray,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
kDoubleSize> {
+ public:
+ explicit FastDoubleElementsAccessor(const char* name)
+ : FastElementsAccessor<FastDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ kDoubleSize>(name) {}
+
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
@@ -424,11 +682,34 @@ class FastDoubleElementsAccessor
protected:
friend class ElementsAccessorBase<FastDoubleElementsAccessor,
- FixedDoubleArray>;
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
friend class FastElementsAccessor<FastDoubleElementsAccessor,
- FixedDoubleArray,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
kDoubleSize>;
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS:
+ return CopyDoubleToObjectElements(
+ FixedDoubleArray::cast(from), from_start, FixedArray::cast(to),
+ to_kind, to_start, copy_size);
+ case FAST_DOUBLE_ELEMENTS:
+ CopyDoubleToDoubleElements(FixedDoubleArray::cast(from), from_start,
+ FixedDoubleArray::cast(to),
+ to_start, copy_size);
+ return from;
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
+
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
JSReceiver::DeleteMode mode) {
@@ -441,38 +722,45 @@ class FastDoubleElementsAccessor
return obj->GetHeap()->true_value();
}
- static bool HasElementAtIndexImpl(FixedDoubleArray* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- return !backing_store->is_the_hole(index);
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedDoubleArray* backing_store) {
+ return !backing_store->is_the_hole(key);
}
};
// Super class for all external element arrays.
template<typename ExternalElementsAccessorSubclass,
- typename ExternalArray>
+ ElementsKind Kind>
class ExternalElementsAccessor
: public ElementsAccessorBase<ExternalElementsAccessorSubclass,
- ExternalArray> {
+ ElementsKindTraits<Kind> > {
+ public:
+ explicit ExternalElementsAccessor(const char* name)
+ : ElementsAccessorBase<ExternalElementsAccessorSubclass,
+ ElementsKindTraits<Kind> >(name) {}
+
protected:
+ typedef typename ElementsKindTraits<Kind>::BackingStore BackingStore;
+
friend class ElementsAccessorBase<ExternalElementsAccessorSubclass,
- ExternalArray>;
+ ElementsKindTraits<Kind> >;
- static MaybeObject* GetImpl(ExternalArray* backing_store,
- uint32_t key,
+ static MaybeObject* GetImpl(Object* receiver,
JSObject* obj,
- Object* receiver) {
+ uint32_t key,
+ BackingStore* backing_store) {
return
key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store)
? backing_store->get(key)
: backing_store->GetHeap()->undefined_value();
}
- static MaybeObject* SetLengthImpl(ExternalArray* backing_store,
- JSObject* obj,
- Object* length) {
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ BackingStore* backing_store) {
// External arrays do not support changing their length.
UNREACHABLE();
return obj;
@@ -484,67 +772,116 @@ class ExternalElementsAccessor
// External arrays always ignore deletes.
return obj->GetHeap()->true_value();
}
+
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ BackingStore* backing_store) {
+ uint32_t capacity =
+ ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store);
+ return key < capacity;
+ }
};
class ExternalByteElementsAccessor
: public ExternalElementsAccessor<ExternalByteElementsAccessor,
- ExternalByteArray> {
+ EXTERNAL_BYTE_ELEMENTS> {
+ public:
+ explicit ExternalByteElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalByteElementsAccessor,
+ EXTERNAL_BYTE_ELEMENTS>(name) {}
};
class ExternalUnsignedByteElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
- ExternalUnsignedByteArray> {
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedByteElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS>(name) {}
};
class ExternalShortElementsAccessor
: public ExternalElementsAccessor<ExternalShortElementsAccessor,
- ExternalShortArray> {
+ EXTERNAL_SHORT_ELEMENTS> {
+ public:
+ explicit ExternalShortElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalShortElementsAccessor,
+ EXTERNAL_SHORT_ELEMENTS>(name) {}
};
class ExternalUnsignedShortElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
- ExternalUnsignedShortArray> {
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedShortElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS>(name) {}
};
class ExternalIntElementsAccessor
: public ExternalElementsAccessor<ExternalIntElementsAccessor,
- ExternalIntArray> {
+ EXTERNAL_INT_ELEMENTS> {
+ public:
+ explicit ExternalIntElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalIntElementsAccessor,
+ EXTERNAL_INT_ELEMENTS>(name) {}
};
class ExternalUnsignedIntElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
- ExternalUnsignedIntArray> {
+ EXTERNAL_UNSIGNED_INT_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedIntElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
+ EXTERNAL_UNSIGNED_INT_ELEMENTS>(name) {}
};
class ExternalFloatElementsAccessor
: public ExternalElementsAccessor<ExternalFloatElementsAccessor,
- ExternalFloatArray> {
+ EXTERNAL_FLOAT_ELEMENTS> {
+ public:
+ explicit ExternalFloatElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalFloatElementsAccessor,
+ EXTERNAL_FLOAT_ELEMENTS>(name) {}
};
class ExternalDoubleElementsAccessor
: public ExternalElementsAccessor<ExternalDoubleElementsAccessor,
- ExternalDoubleArray> {
+ EXTERNAL_DOUBLE_ELEMENTS> {
+ public:
+ explicit ExternalDoubleElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalDoubleElementsAccessor,
+ EXTERNAL_DOUBLE_ELEMENTS>(name) {}
};
class PixelElementsAccessor
: public ExternalElementsAccessor<PixelElementsAccessor,
- ExternalPixelArray> {
+ EXTERNAL_PIXEL_ELEMENTS> {
+ public:
+ explicit PixelElementsAccessor(const char* name)
+ : ExternalElementsAccessor<PixelElementsAccessor,
+ EXTERNAL_PIXEL_ELEMENTS>(name) {}
};
class DictionaryElementsAccessor
: public ElementsAccessorBase<DictionaryElementsAccessor,
- SeededNumberDictionary> {
+ ElementsKindTraits<DICTIONARY_ELEMENTS> > {
public:
+ explicit DictionaryElementsAccessor(const char* name)
+ : ElementsAccessorBase<DictionaryElementsAccessor,
+ ElementsKindTraits<DICTIONARY_ELEMENTS> >(name) {}
+
// Adjusts the length of the dictionary backing store and returns the new
// length according to ES5 section 15.4.5.2 behavior.
static MaybeObject* SetLengthWithoutNormalize(SeededNumberDictionary* dict,
@@ -647,9 +984,29 @@ class DictionaryElementsAccessor
return heap->true_value();
}
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS:
+ CopyDictionaryToObjectElements(
+ SeededNumberDictionary::cast(from), from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ return from;
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
+
+
protected:
friend class ElementsAccessorBase<DictionaryElementsAccessor,
- SeededNumberDictionary>;
+ ElementsKindTraits<DICTIONARY_ELEMENTS> >;
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
@@ -657,10 +1014,10 @@ class DictionaryElementsAccessor
return DeleteCommon(obj, key, mode);
}
- static MaybeObject* GetImpl(SeededNumberDictionary* backing_store,
- uint32_t key,
+ static MaybeObject* GetImpl(Object* receiver,
JSObject* obj,
- Object* receiver) {
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
int entry = backing_store->FindEntry(key);
if (entry != SeededNumberDictionary::kNotFound) {
Object* element = backing_store->ValueAt(entry);
@@ -677,6 +1034,14 @@ class DictionaryElementsAccessor
return obj->GetHeap()->the_hole_value();
}
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
+ return backing_store->FindEntry(key) !=
+ SeededNumberDictionary::kNotFound;
+ }
+
static uint32_t GetKeyForIndexImpl(SeededNumberDictionary* dict,
uint32_t index) {
Object* key = dict->KeyAt(index);
@@ -685,18 +1050,24 @@ class DictionaryElementsAccessor
};
-class NonStrictArgumentsElementsAccessor
- : public ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
- FixedArray> {
+class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> > {
+ public:
+ explicit NonStrictArgumentsElementsAccessor(const char* name)
+ : ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> >(name) {}
protected:
- friend class ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
- FixedArray>;
+ friend class ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> >;
- static MaybeObject* GetImpl(FixedArray* parameter_map,
- uint32_t key,
+ static MaybeObject* GetImpl(Object* receiver,
JSObject* obj,
- Object* receiver) {
- Object* probe = GetParameterMapArg(parameter_map, key);
+ uint32_t key,
+ FixedArray* parameter_map) {
+ Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
Context* context = Context::cast(parameter_map->get(0));
int context_index = Smi::cast(probe)->value();
@@ -706,7 +1077,7 @@ class NonStrictArgumentsElementsAccessor
// Object is not mapped, defer to the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
MaybeObject* maybe_result = ElementsAccessor::ForArray(arguments)->Get(
- arguments, key, obj, receiver);
+ receiver, obj, key, arguments);
Object* result;
if (!maybe_result->ToObject(&result)) return maybe_result;
// Elements of the arguments object in slow mode might be slow aliases.
@@ -722,9 +1093,9 @@ class NonStrictArgumentsElementsAccessor
}
}
- static MaybeObject* SetLengthImpl(FixedArray* parameter_map,
- JSObject* obj,
- Object* length) {
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ FixedArray* parameter_map) {
// TODO(mstarzinger): This was never implemented but will be used once we
// correctly implement [[DefineOwnProperty]] on arrays.
UNIMPLEMENTED();
@@ -735,7 +1106,7 @@ class NonStrictArgumentsElementsAccessor
uint32_t key,
JSReceiver::DeleteMode mode) {
FixedArray* parameter_map = FixedArray::cast(obj->elements());
- Object* probe = GetParameterMapArg(parameter_map, key);
+ Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
// TODO(kmillikin): We could check if this was the last aliased
// parameter, and revert to normal elements in that case. That
@@ -752,6 +1123,19 @@ class NonStrictArgumentsElementsAccessor
return obj->GetHeap()->true_value();
}
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ FixedArray* parameter_map = FixedArray::cast(from);
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
+ return accessor->CopyElements(NULL, from_start, to, to_kind,
+ to_start, copy_size, arguments);
+ }
+
static uint32_t GetCapacityImpl(FixedArray* parameter_map) {
FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
return Max(static_cast<uint32_t>(parameter_map->length() - 2),
@@ -763,24 +1147,27 @@ class NonStrictArgumentsElementsAccessor
return index;
}
- static bool HasElementAtIndexImpl(FixedArray* parameter_map,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- Object* probe = GetParameterMapArg(parameter_map, index);
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArray* parameter_map) {
+ Object* probe = GetParameterMapArg(holder, parameter_map, key);
if (!probe->IsTheHole()) {
return true;
} else {
FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
- return !accessor->Get(arguments, index, holder, receiver)->IsTheHole();
+ return !accessor->Get(receiver, holder, key, arguments)->IsTheHole();
}
}
private:
- static Object* GetParameterMapArg(FixedArray* parameter_map,
+ static Object* GetParameterMapArg(JSObject* holder,
+ FixedArray* parameter_map,
uint32_t key) {
- uint32_t length = parameter_map->length();
+ uint32_t length = holder->IsJSArray()
+ ? Smi::cast(JSArray::cast(holder)->length())->value()
+ : parameter_map->length();
return key < (length - 2 )
? parameter_map->get(key + 2)
: parameter_map->GetHeap()->the_hole_value();
@@ -822,45 +1209,22 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
void ElementsAccessor::InitializeOncePerProcess() {
- // First argument in list is the accessor class, the second argument is can
- // be any arbitrary unique identifier, in this case chosen to be the
- // corresponding enum. Use the fast element handler for smi-only arrays.
- // The implementation is currently identical. Note that the order must match
- // that of the ElementsKind enum for the |accessor_array[]| below to work.
-#define ELEMENTS_LIST(V) \
- V(FastObjectElementsAccessor, FAST_SMI_ONLY_ELEMENTS) \
- V(FastObjectElementsAccessor, FAST_ELEMENTS) \
- V(FastDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS) \
- V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS) \
- V(NonStrictArgumentsElementsAccessor, NON_STRICT_ARGUMENTS_ELEMENTS) \
- V(ExternalByteElementsAccessor, EXTERNAL_BYTE_ELEMENTS) \
- V(ExternalUnsignedByteElementsAccessor, EXTERNAL_UNSIGNED_BYTE_ELEMENTS) \
- V(ExternalShortElementsAccessor, EXTERNAL_SHORT_ELEMENTS) \
- V(ExternalUnsignedShortElementsAccessor, EXTERNAL_UNSIGNED_SHORT_ELEMENTS) \
- V(ExternalIntElementsAccessor, EXTERNAL_INT_ELEMENTS) \
- V(ExternalUnsignedIntElementsAccessor, EXTERNAL_UNSIGNED_INT_ELEMENTS) \
- V(ExternalFloatElementsAccessor, EXTERNAL_FLOAT_ELEMENTS) \
- V(ExternalDoubleElementsAccessor, EXTERNAL_DOUBLE_ELEMENTS) \
- V(PixelElementsAccessor, EXTERNAL_PIXEL_ELEMENTS)
-
static struct ConcreteElementsAccessors {
-#define ACCESSOR_STRUCT(Class, Name) Class* Name##_handler;
+#define ACCESSOR_STRUCT(Class, Kind, Store) Class* Kind##_handler;
ELEMENTS_LIST(ACCESSOR_STRUCT)
#undef ACCESSOR_STRUCT
} element_accessors = {
-#define ACCESSOR_INIT(Class, Name) new Class(),
+#define ACCESSOR_INIT(Class, Kind, Store) new Class(#Kind),
ELEMENTS_LIST(ACCESSOR_INIT)
#undef ACCESSOR_INIT
};
static ElementsAccessor* accessor_array[] = {
-#define ACCESSOR_ARRAY(Class, Name) element_accessors.Name##_handler,
+#define ACCESSOR_ARRAY(Class, Kind, Store) element_accessors.Kind##_handler,
ELEMENTS_LIST(ACCESSOR_ARRAY)
#undef ACCESSOR_ARRAY
};
-#undef ELEMENTS_LIST
-
STATIC_ASSERT((sizeof(accessor_array) / sizeof(*accessor_array)) ==
kElementsKindCount);
@@ -868,11 +1232,12 @@ void ElementsAccessor::InitializeOncePerProcess() {
}
-template <typename ElementsAccessorSubclass, typename BackingStoreClass>
-MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass, BackingStoreClass>::
- SetLengthImpl(BackingStoreClass* backing_store,
- JSObject* obj,
- Object* length) {
+template <typename ElementsAccessorSubclass, typename ElementsKindTraits>
+MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass,
+ ElementsKindTraits>::
+ SetLengthImpl(JSObject* obj,
+ Object* length,
+ typename ElementsKindTraits::BackingStore* backing_store) {
JSArray* array = JSArray::cast(obj);
// Fast case: The new length fits into a Smi.
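For readers tracking the macro rework in elements.cc above: ELEMENTS_LIST is now a three-argument X-macro (accessor class, ElementsKind, backing-store type), and every accessor is constructed with the stringified kind as its debug name. The toy sketch below is not part of the patch; it only reproduces the expansion pattern with stand-in names so the #Kind stringification and Kind##_handler token pasting are easy to see.

#include <cstdio>

struct ToyAccessor {
  explicit ToyAccessor(const char* name) : name_(name) {}
  const char* name_;
};

// Mirrors the (Class, Kind, Store) shape of the reworked ELEMENTS_LIST.
#define TOY_ELEMENTS_LIST(V)                       \
  V(ToyAccessor, TOY_FAST_ELEMENTS, int)           \
  V(ToyAccessor, TOY_DICTIONARY_ELEMENTS, double)

int main() {
  struct {
#define TOY_STRUCT(Class, Kind, Store) Class* Kind##_handler;
    TOY_ELEMENTS_LIST(TOY_STRUCT)
#undef TOY_STRUCT
  } accessors = {
#define TOY_INIT(Class, Kind, Store) new Class(#Kind),
    TOY_ELEMENTS_LIST(TOY_INIT)
#undef TOY_INIT
  };
  // The accessor's name comes straight from the stringified kind.
  std::printf("%s\n", accessors.TOY_FAST_ELEMENTS_handler->name_);
  return 0;
}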
diff --git a/deps/v8/src/elements.h b/deps/v8/src/elements.h
index a2a184d52c7..5b5be23b100 100644
--- a/deps/v8/src/elements.h
+++ b/deps/v8/src/elements.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,8 @@
#define V8_ELEMENTS_H_
#include "objects.h"
+#include "heap.h"
+#include "isolate.h"
namespace v8 {
namespace internal {
@@ -37,19 +39,38 @@ namespace internal {
// ElementsKinds.
class ElementsAccessor {
public:
- ElementsAccessor() { }
+ explicit ElementsAccessor(const char* name) : name_(name) { }
virtual ~ElementsAccessor() { }
- virtual MaybeObject* Get(FixedArrayBase* backing_store,
- uint32_t key,
+
+ virtual ElementsKind kind() const = 0;
+ const char* name() const { return name_; }
+
+ // Returns true if a holder contains an element with the specified key
+ // without iterating up the prototype chain. The caller can optionally pass
+ // in the backing store to use for the check, which must be compatible with
+ // the ElementsKind of the ElementsAccessor. If backing_store is NULL, the
+ // holder->elements() is used as the backing store.
+ virtual bool HasElement(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
+
+ // Returns the element with the specified key or undefined if there is no such
+ // element. This method doesn't iterate up the prototype chain. The caller
+ // can optionally pass in the backing store to use for the check, which must
+ // be compatible with the ElementsKind of the ElementsAccessor. If
+ // backing_store is NULL, the holder->elements() is used as the backing store.
+ virtual MaybeObject* Get(Object* receiver,
JSObject* holder,
- Object* receiver) = 0;
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
// Modifies the length data property as specified for JSArrays and resizes the
// underlying backing store accordingly. The method honors the semantics of
// changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that
// have non-deletable elements can only be shrunk to the size of highest
// element that is non-deletable.
- virtual MaybeObject* SetLength(JSObject* holder,
+ virtual MaybeObject* SetLength(JSArray* holder,
Object* new_length) = 0;
// Modifies both the length and capacity of a JSArray, resizing the underlying
@@ -62,14 +83,34 @@ class ElementsAccessor {
int capacity,
int length) = 0;
+ // Deletes an element in an object, returning a new elements backing store.
virtual MaybeObject* Delete(JSObject* holder,
uint32_t key,
JSReceiver::DeleteMode mode) = 0;
- virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
- FixedArray* to,
+ // Copy elements from one backing store to another. Typically, callers specify
+ // the source JSObject or JSArray in source_holder. If the holder's backing
+ // store is available, it can be passed in source and source_holder is
+ // ignored.
+ virtual MaybeObject* CopyElements(JSObject* source_holder,
+ uint32_t source_start,
+ FixedArrayBase* destination,
+ ElementsKind destination_kind,
+ uint32_t destination_start,
+ int copy_size,
+ FixedArrayBase* source = NULL) = 0;
+
+ MaybeObject* CopyElements(JSObject* from_holder,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ FixedArrayBase* from = NULL) {
+ return CopyElements(from_holder, 0, to, to_kind, 0, -1, from);
+ }
+
+ virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
JSObject* holder,
- Object* receiver) = 0;
+ FixedArray* to,
+ FixedArrayBase* from = NULL) = 0;
// Returns a shared ElementsAccessor for the specified ElementsKind.
static ElementsAccessor* ForKind(ElementsKind elements_kind) {
@@ -86,28 +127,35 @@ class ElementsAccessor {
virtual uint32_t GetCapacity(FixedArrayBase* backing_store) = 0;
- virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) = 0;
-
- // Element handlers distinguish between indexes and keys when the manipulate
+ // Element handlers distinguish between indexes and keys when they manipulate
// elements. Indexes refer to elements in terms of their location in the
- // underlying storage's backing store representation, and are between 0
+ // underlying storage's backing store representation, and are between 0 and
// GetCapacity. Keys refer to elements in terms of the value that would be
- // specific in JavaScript to access the element. In most implementations, keys
- // are equivalent to indexes, and GetKeyForIndex returns the same value it is
- // passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps the
- // index to a key using the KeyAt method on the NumberDictionary.
+ // specified in JavaScript to access the element. In most implementations,
+ // keys are equivalent to indexes, and GetKeyForIndex returns the same value
+ // it is passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps
+ // the index to a key using the KeyAt method on the NumberDictionary.
virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
uint32_t index) = 0;
private:
static ElementsAccessor** elements_accessors_;
+ const char* name_;
DISALLOW_COPY_AND_ASSIGN(ElementsAccessor);
};
+
+void CopyObjectToObjectElements(AssertNoAllocation* no_gc,
+ FixedArray* from_obj,
+ ElementsKind from_kind,
+ uint32_t from_start,
+ FixedArray* to_obj,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size);
+
+
} } // namespace v8::internal
#endif // V8_ELEMENTS_H_
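A minimal usage sketch for the reworked interface above, not part of the patch. It assumes compilation inside v8::internal against this header, and that the usual JSObject::GetElementsKind() and HeapObject::GetHeap() helpers from objects.h are available; LookupDemo, receiver, holder and key are hypothetical names. The argument order is the new one: receiver, holder, key, then an optional backing store.

static MaybeObject* LookupDemo(Object* receiver,
                               JSObject* holder,
                               uint32_t key) {
  // Pick the accessor that matches the holder's elements kind.
  ElementsAccessor* accessor =
      ElementsAccessor::ForKind(holder->GetElementsKind());
  // With no explicit backing store, holder->elements() is used internally.
  if (!accessor->HasElement(receiver, holder, key)) {
    return holder->GetHeap()->undefined_value();
  }
  return accessor->Get(receiver, holder, key);
}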
diff --git a/deps/v8/src/execution.cc b/deps/v8/src/execution.cc
index 00806a7ce27..443d4b8d0f2 100644
--- a/deps/v8/src/execution.cc
+++ b/deps/v8/src/execution.cc
@@ -376,6 +376,12 @@ void StackGuard::DisableInterrupts() {
}
+bool StackGuard::ShouldPostponeInterrupts() {
+ ExecutionAccess access(isolate_);
+ return should_postpone_interrupts(access);
+}
+
+
bool StackGuard::IsInterrupted() {
ExecutionAccess access(isolate_);
return (thread_local_.interrupt_flags_ & INTERRUPT) != 0;
@@ -872,9 +878,11 @@ void Execution::ProcessDebugMessages(bool debug_command_only) {
#endif
-MaybeObject* Execution::HandleStackGuardInterrupt() {
- Isolate* isolate = Isolate::Current();
+MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) {
StackGuard* stack_guard = isolate->stack_guard();
+ if (stack_guard->ShouldPostponeInterrupts()) {
+ return isolate->heap()->undefined_value();
+ }
if (stack_guard->IsGCRequest()) {
isolate->heap()->CollectAllGarbage(false, "StackGuard GC request");
diff --git a/deps/v8/src/execution.h b/deps/v8/src/execution.h
index 014736ee882..01e4b9da4f7 100644
--- a/deps/v8/src/execution.h
+++ b/deps/v8/src/execution.h
@@ -45,6 +45,10 @@ enum InterruptFlag {
GC_REQUEST = 1 << 6
};
+
+class Isolate;
+
+
class Execution : public AllStatic {
public:
// Call a function, the caller supplies a receiver and an array
@@ -141,7 +145,8 @@ class Execution : public AllStatic {
// If the stack guard is triggered, but it is not an actual
// stack overflow, then handle the interruption accordingly.
- MUST_USE_RESULT static MaybeObject* HandleStackGuardInterrupt();
+ MUST_USE_RESULT static MaybeObject* HandleStackGuardInterrupt(
+ Isolate* isolate);
// Get a function delegate (or undefined) for the given non-function
// object. Used for support calling objects as functions.
@@ -158,7 +163,6 @@ class Execution : public AllStatic {
class ExecutionAccess;
-class Isolate;
// StackGuard contains the handling of the limits that are used to limit the
@@ -222,6 +226,7 @@ class StackGuard {
Address address_of_real_jslimit() {
return reinterpret_cast<Address>(&thread_local_.real_jslimit_);
}
+ bool ShouldPostponeInterrupts();
private:
StackGuard();
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h
index 2968445d877..ac30b2941d1 100644
--- a/deps/v8/src/flag-definitions.h
+++ b/deps/v8/src/flag-definitions.h
@@ -148,6 +148,7 @@ DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
"crankshaft harvests type feedback from stub cache")
DEFINE_bool(hydrogen_stats, false, "print statistics for hydrogen")
DEFINE_bool(trace_hydrogen, false, "trace generated hydrogen to file")
+DEFINE_string(trace_phase, "Z", "trace generated IR for specified phases")
DEFINE_bool(trace_inlining, false, "trace inlining decisions")
DEFINE_bool(trace_alloc, false, "trace register allocator")
DEFINE_bool(trace_all_uses, false, "trace all use positions")
@@ -167,30 +168,37 @@ DEFINE_bool(use_osr, true, "use on-stack replacement")
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
+DEFINE_bool(inline_construct, false, "inline constructor calls")
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
-DEFINE_bool(optimize_for_in, false,
+DEFINE_bool(optimize_for_in, true,
"optimize functions containing for-in loops")
// Experimental profiler changes.
DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
-DEFINE_int(frame_count, 2, "number of stack frames inspected by the profiler")
+DEFINE_int(frame_count, 1, "number of stack frames inspected by the profiler")
DEFINE_bool(self_optimization, false,
"primitive functions trigger their own optimization")
+DEFINE_bool(direct_self_opt, false,
+ "call recompile stub directly when self-optimizing")
+DEFINE_bool(retry_self_opt, false, "re-try self-optimization if it failed")
DEFINE_bool(count_based_interrupts, false,
"trigger profiler ticks based on counting instead of timing")
DEFINE_bool(interrupt_at_exit, false,
"insert an interrupt check at function exit")
DEFINE_bool(weighted_back_edges, false,
"weight back edges by jump distance for interrupt triggering")
-DEFINE_int(interrupt_budget, 10000,
+DEFINE_int(interrupt_budget, 5900,
"execution budget before interrupt is triggered")
-DEFINE_int(type_info_threshold, 0,
+DEFINE_int(type_info_threshold, 40,
"percentage of ICs that must have type info to allow optimization")
+DEFINE_int(self_opt_count, 130, "call count before self-optimization")
DEFINE_implication(experimental_profiler, watch_ic_patching)
DEFINE_implication(experimental_profiler, self_optimization)
+// Not implying direct_self_opt here because it seems to be a bad idea.
+DEFINE_implication(experimental_profiler, retry_self_opt)
DEFINE_implication(experimental_profiler, count_based_interrupts)
DEFINE_implication(experimental_profiler, interrupt_at_exit)
DEFINE_implication(experimental_profiler, weighted_back_edges)
@@ -485,6 +493,11 @@ DEFINE_bool(print_global_handles, false, "report global handles after GC")
// ic.cc
DEFINE_bool(trace_ic, false, "trace inline cache state transitions")
+// interface.cc
+DEFINE_bool(print_interfaces, false, "print interfaces")
+DEFINE_bool(print_interface_details, false, "print interface inference details")
+DEFINE_int(print_interface_depth, 5, "depth for printing interfaces")
+
// objects.cc
DEFINE_bool(trace_normalization,
false,
@@ -562,6 +575,13 @@ DEFINE_bool(trace_elements_transitions, false, "trace elements transitions")
// code-stubs.cc
DEFINE_bool(print_code_stubs, false, "print code stubs")
+DEFINE_bool(test_secondary_stub_cache,
+ false,
+ "test secondary stub cache by disabling the primary one")
+
+DEFINE_bool(test_primary_stub_cache,
+ false,
+ "test primary stub cache by disabling the secondary one")
// codegen-ia32.cc / codegen-arm.cc
DEFINE_bool(print_code, false, "print generated code")
diff --git a/deps/v8/src/frames-inl.h b/deps/v8/src/frames-inl.h
index 010233a0fa2..27a526cef10 100644
--- a/deps/v8/src/frames-inl.h
+++ b/deps/v8/src/frames-inl.h
@@ -191,7 +191,7 @@ inline bool StandardFrame::IsArgumentsAdaptorFrame(Address fp) {
inline bool StandardFrame::IsConstructFrame(Address fp) {
Object* marker =
Memory::Object_at(fp + StandardFrameConstants::kMarkerOffset);
- return marker == Smi::FromInt(CONSTRUCT);
+ return marker == Smi::FromInt(StackFrame::CONSTRUCT);
}
diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc
index 40df12c4372..be537c96e92 100644
--- a/deps/v8/src/frames.cc
+++ b/deps/v8/src/frames.cc
@@ -41,6 +41,22 @@
namespace v8 {
namespace internal {
+
+static ReturnAddressLocationResolver return_address_location_resolver = NULL;
+
+
+// Resolves pc_address through the resolution address function if one is set.
+static inline Address* ResolveReturnAddressLocation(Address* pc_address) {
+ if (return_address_location_resolver == NULL) {
+ return pc_address;
+ } else {
+ return reinterpret_cast<Address*>(
+ return_address_location_resolver(
+ reinterpret_cast<uintptr_t>(pc_address)));
+ }
+}
+
+
// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
@@ -155,8 +171,8 @@ void StackFrameIterator::Reset() {
ASSERT(fp_ != NULL);
state.fp = fp_;
state.sp = sp_;
- state.pc_address =
- reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_));
+ state.pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_)));
type = StackFrame::ComputeType(isolate(), &state);
}
if (SingletonFor(type) == NULL) return;
@@ -414,6 +430,13 @@ void StackFrame::IteratePc(ObjectVisitor* v,
}
+void StackFrame::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver) {
+ ASSERT(return_address_location_resolver == NULL);
+ return_address_location_resolver = resolver;
+}
+
+
StackFrame::Type StackFrame::ComputeType(Isolate* isolate, State* state) {
ASSERT(state->fp != NULL);
if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
@@ -488,8 +511,8 @@ void ExitFrame::ComputeCallerState(State* state) const {
// Set up the caller state.
state->sp = caller_sp();
state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
- state->pc_address
- = reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset);
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
}
@@ -523,7 +546,8 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
void ExitFrame::FillState(Address fp, Address sp, State* state) {
state->sp = sp;
state->fp = fp;
- state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(sp - 1 * kPointerSize));
}
@@ -558,7 +582,8 @@ int StandardFrame::ComputeExpressionsCount() const {
void StandardFrame::ComputeCallerState(State* state) const {
state->sp = caller_sp();
state->fp = caller_fp();
- state->pc_address = reinterpret_cast<Address*>(ComputePCAddress(fp()));
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(ComputePCAddress(fp())));
}
@@ -818,14 +843,11 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
// We create the summary in reverse order because the frames
// in the deoptimization translation are ordered bottom-to-top.
+ bool is_constructor = IsConstructor();
int i = jsframe_count;
while (i > 0) {
opcode = static_cast<Translation::Opcode>(it.Next());
if (opcode == Translation::JS_FRAME) {
- // We don't inline constructor calls, so only the first, outermost
- // frame can be a constructor frame in case of inlining.
- bool is_constructor = (i == jsframe_count) && IsConstructor();
-
i--;
int ast_id = it.Next();
int function_id = it.Next();
@@ -875,11 +897,18 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
frames->Add(summary);
+ is_constructor = false;
+ } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
+ // The next encountered JS_FRAME will be marked as a constructor call.
+ it.Skip(Translation::NumberOfOperandsFor(opcode));
+ ASSERT(!is_constructor);
+ is_constructor = true;
} else {
// Skip over operands to advance to the next opcode.
it.Skip(Translation::NumberOfOperandsFor(opcode));
}
}
+ ASSERT(!is_constructor);
}
diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h
index e550f765ca6..90715551970 100644
--- a/deps/v8/src/frames.h
+++ b/deps/v8/src/frames.h
@@ -241,6 +241,11 @@ class StackFrame BASE_EMBEDDED {
virtual void Iterate(ObjectVisitor* v) const = 0;
static void IteratePc(ObjectVisitor* v, Address* pc_address, Code* holder);
+ // Sets a callback function for return-address rewriting profilers
+ // to resolve the location of a return address to the location of the
+ // profiler's stashed return address.
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver);
// Printing support.
enum PrintMode { OVERVIEW, DETAILS };
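The resolver hook introduced in frames.cc/frames.h above targets profilers that rewrite return addresses on the stack: the callback receives the location of a return address and must return the location where the original return address can be found. The sketch below is not part of the patch; the side table is a hypothetical structure such a profiler would maintain, and unrewritten locations simply resolve to themselves.

#include <stdint.h>
#include <map>

// Hypothetical bookkeeping: maps a rewritten return-address slot to the slot
// where the profiler stashed the original return address.
static std::map<uintptr_t, uintptr_t> stashed_return_address_slots;

static uintptr_t DemoReturnAddressResolver(uintptr_t return_addr_location) {
  std::map<uintptr_t, uintptr_t>::const_iterator it =
      stashed_return_address_slots.find(return_addr_location);
  // Frames that were never rewritten resolve to themselves, which matches the
  // default behaviour when no resolver is registered.
  return it == stashed_return_address_slots.end() ? return_addr_location
                                                  : it->second;
}

// Registered once, before any stack walking:
//   StackFrame::SetReturnAddressLocationResolver(DemoReturnAddressResolver);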
diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc
index 96395427403..f77c82df512 100644
--- a/deps/v8/src/full-codegen.cc
+++ b/deps/v8/src/full-codegen.cc
@@ -55,10 +55,22 @@ void BreakableStatementChecker::VisitVariableDeclaration(
VariableDeclaration* decl) {
}
+void BreakableStatementChecker::VisitFunctionDeclaration(
+ FunctionDeclaration* decl) {
+}
+
void BreakableStatementChecker::VisitModuleDeclaration(
ModuleDeclaration* decl) {
}
+void BreakableStatementChecker::VisitImportDeclaration(
+ ImportDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitExportDeclaration(
+ ExportDeclaration* decl) {
+}
+
void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}
@@ -569,29 +581,28 @@ void FullCodeGenerator::VisitDeclarations(
isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
int length = declarations->length();
for (int j = 0, i = 0; i < length; i++) {
- VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
- if (decl != NULL) {
- Variable* var = decl->proxy()->var();
-
- if (var->IsUnallocated()) {
- array->set(j++, *(var->name()));
- if (decl->fun() == NULL) {
- if (var->binding_needs_init()) {
- // In case this binding needs initialization use the hole.
- array->set_the_hole(j++);
- } else {
- array->set_undefined(j++);
- }
+ Declaration* decl = declarations->at(i);
+ Variable* var = decl->proxy()->var();
+
+ if (var->IsUnallocated()) {
+ array->set(j++, *(var->name()));
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ if (fun_decl == NULL) {
+ if (var->binding_needs_init()) {
+ // In case this binding needs initialization use the hole.
+ array->set_the_hole(j++);
} else {
- Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(decl->fun(), script());
- // Check for stack-overflow exception.
- if (function.is_null()) {
- SetStackOverflow();
- return;
- }
- array->set(j++, *function);
+ array->set_undefined(j++);
+ }
+ } else {
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(fun_decl->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) {
+ SetStackOverflow();
+ return;
}
+ array->set(j++, *function);
}
}
}
@@ -605,11 +616,26 @@ void FullCodeGenerator::VisitDeclarations(
void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) {
// TODO(rossberg)
}
@@ -1133,6 +1159,10 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
Label test, body;
Iteration loop_statement(this, stmt);
+
+ // Set statement position for a break slot before entering the for-body.
+ SetStatementPosition(stmt);
+
if (stmt->init() != NULL) {
Visit(stmt->init());
}
@@ -1147,7 +1177,6 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
__ bind(loop_statement.continue_label());
- SetStatementPosition(stmt);
if (stmt->next() != NULL) {
Visit(stmt->next());
}
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index 0ffb8852729..25e3dba124d 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -437,6 +437,9 @@ class FullCodeGenerator: public AstVisitor {
// the offset of the start of the table.
unsigned EmitStackCheckTable();
+ void EmitProfilingCounterDecrement(int delta);
+ void EmitProfilingCounterReset();
+
// Platform-specific return sequence
void EmitReturnSequence();
diff --git a/deps/v8/src/global-handles.cc b/deps/v8/src/global-handles.cc
index 471f5a336c5..9c0ad45179d 100644
--- a/deps/v8/src/global-handles.cc
+++ b/deps/v8/src/global-handles.cc
@@ -384,6 +384,7 @@ GlobalHandles::GlobalHandles(Isolate* isolate)
: isolate_(isolate),
number_of_weak_handles_(0),
number_of_global_object_weak_handles_(0),
+ number_of_global_handles_(0),
first_block_(NULL),
first_used_block_(NULL),
first_free_(NULL),
@@ -403,6 +404,7 @@ GlobalHandles::~GlobalHandles() {
Handle<Object> GlobalHandles::Create(Object* value) {
isolate_->counters()->global_handles()->Increment();
+ number_of_global_handles_++;
if (first_free_ == NULL) {
first_block_ = new NodeBlock(first_block_);
first_block_->PutNodesOnFreeList(&first_free_);
@@ -423,6 +425,7 @@ Handle<Object> GlobalHandles::Create(Object* value) {
void GlobalHandles::Destroy(Object** location) {
isolate_->counters()->global_handles()->Decrement();
+ number_of_global_handles_--;
if (location == NULL) return;
Node::FromLocation(location)->Release(this);
}
diff --git a/deps/v8/src/global-handles.h b/deps/v8/src/global-handles.h
index 153d4dac1a2..ddf5fe29c38 100644
--- a/deps/v8/src/global-handles.h
+++ b/deps/v8/src/global-handles.h
@@ -143,6 +143,11 @@ class GlobalHandles {
return number_of_global_object_weak_handles_;
}
+ // Returns the current number of handles to global objects.
+ int NumberOfGlobalHandles() {
+ return number_of_global_handles_;
+ }
+
// Clear the weakness of a global handle.
void ClearWeakness(Object** location);
@@ -248,6 +253,9 @@ class GlobalHandles {
// number_of_weak_handles_.
int number_of_global_object_weak_handles_;
+ // Field always containing the number of handles to global objects.
+ int number_of_global_handles_;
+
// List of all allocated node blocks.
NodeBlock* first_block_;
diff --git a/deps/v8/src/globals.h b/deps/v8/src/globals.h
index 9f137808021..e53cc81d6c2 100644
--- a/deps/v8/src/globals.h
+++ b/deps/v8/src/globals.h
@@ -175,27 +175,27 @@ typedef byte* Address;
// than defining __STDC_CONSTANT_MACROS before including <stdint.h>, and it
// works on compilers that don't have it (like MSVC).
#if V8_HOST_ARCH_64_BIT
-#ifdef _MSC_VER
+#if defined(_MSC_VER)
#define V8_UINT64_C(x) (x ## UI64)
#define V8_INT64_C(x) (x ## I64)
#define V8_INTPTR_C(x) (x ## I64)
#define V8_PTR_PREFIX "ll"
-#else // _MSC_VER
+#elif defined(__MINGW64__)
+#define V8_UINT64_C(x) (x ## ULL)
+#define V8_INT64_C(x) (x ## LL)
+#define V8_INTPTR_C(x) (x ## LL)
+#define V8_PTR_PREFIX "I64"
+#else
#define V8_UINT64_C(x) (x ## UL)
#define V8_INT64_C(x) (x ## L)
#define V8_INTPTR_C(x) (x ## L)
#define V8_PTR_PREFIX "l"
-#endif // _MSC_VER
+#endif
#else // V8_HOST_ARCH_64_BIT
#define V8_INTPTR_C(x) (x)
#define V8_PTR_PREFIX ""
#endif // V8_HOST_ARCH_64_BIT
-#ifdef __MINGW64__
-#undef V8_PTR_PREFIX
-#define V8_PTR_PREFIX "I64"
-#endif // __MINGW64__
-
// The following macro works on both 32 and 64-bit platforms.
// Usage: instead of writing 0x1234567890123456
// write V8_2PART_UINT64_C(0x12345678,90123456);
diff --git a/deps/v8/src/heap-inl.h b/deps/v8/src/heap-inl.h
index 81ed448a17f..706d2886b9f 100644
--- a/deps/v8/src/heap-inl.h
+++ b/deps/v8/src/heap-inl.h
@@ -32,6 +32,7 @@
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
+#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"
@@ -658,15 +659,15 @@ double TranscendentalCache::SubCache::Calculate(double input) {
case ATAN:
return atan(input);
case COS:
- return cos(input);
+ return fast_cos(input);
case EXP:
return exp(input);
case LOG:
- return log(input);
+ return fast_log(input);
case SIN:
- return sin(input);
+ return fast_sin(input);
case TAN:
- return tan(input);
+ return fast_tan(input);
default:
return 0.0; // Never happens.
}
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index e0b1e50f186..da98239db1e 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -499,7 +499,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
if (collector == MARK_COMPACTOR &&
- !mark_compact_collector()->PreciseSweepingRequired() &&
+ !mark_compact_collector()->abort_incremental_marking_ &&
!incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() &&
FLAG_incremental_marking_steps) {
@@ -578,6 +578,17 @@ static void VerifySymbolTable() {
}
+static bool AbortIncrementalMarkingAndCollectGarbage(
+ Heap* heap,
+ AllocationSpace space,
+ const char* gc_reason = NULL) {
+ heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
+ bool result = heap->CollectGarbage(space, gc_reason);
+ heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
+ return result;
+}
+
+
void Heap::ReserveSpace(
int new_space_size,
int pointer_space_size,
@@ -604,28 +615,28 @@ void Heap::ReserveSpace(
gc_performed = true;
}
if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
- Heap::CollectGarbage(OLD_POINTER_SPACE,
- "failed to reserve space in the old pointer space");
+ AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE,
+ "failed to reserve space in the old pointer space");
gc_performed = true;
}
if (!(old_data_space->ReserveSpace(data_space_size))) {
- Heap::CollectGarbage(OLD_DATA_SPACE,
- "failed to reserve space in the old data space");
+ AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE,
+ "failed to reserve space in the old data space");
gc_performed = true;
}
if (!(code_space->ReserveSpace(code_space_size))) {
- Heap::CollectGarbage(CODE_SPACE,
- "failed to reserve space in the code space");
+ AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
+ "failed to reserve space in the code space");
gc_performed = true;
}
if (!(map_space->ReserveSpace(map_space_size))) {
- Heap::CollectGarbage(MAP_SPACE,
- "failed to reserve space in the map space");
+ AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
+ "failed to reserve space in the map space");
gc_performed = true;
}
if (!(cell_space->ReserveSpace(cell_space_size))) {
- Heap::CollectGarbage(CELL_SPACE,
- "failed to reserve space in the cell space");
+ AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
+ "failed to reserve space in the cell space");
gc_performed = true;
}
// We add a slack-factor of 2 in order to have space for a series of
@@ -637,8 +648,8 @@ void Heap::ReserveSpace(
large_object_size += cell_space_size + map_space_size + code_space_size +
data_space_size + pointer_space_size;
if (!(lo_space->ReserveSpace(large_object_size))) {
- Heap::CollectGarbage(LO_SPACE,
- "failed to reserve space in the large object space");
+ AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
+ "failed to reserve space in the large object space");
gc_performed = true;
}
}
@@ -1909,11 +1920,10 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
MaybeObject* Heap::AllocateCodeCache() {
- Object* result;
- { MaybeObject* maybe_result = AllocateStruct(CODE_CACHE_TYPE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ CodeCache* code_cache;
+ { MaybeObject* maybe_code_cache = AllocateStruct(CODE_CACHE_TYPE);
+ if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache;
}
- CodeCache* code_cache = CodeCache::cast(result);
code_cache->set_default_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
code_cache->set_normal_type_cache(undefined_value(), SKIP_WRITE_BARRIER);
return code_cache;
@@ -1926,22 +1936,20 @@ MaybeObject* Heap::AllocatePolymorphicCodeCache() {
MaybeObject* Heap::AllocateAccessorPair() {
- Object* result;
- { MaybeObject* maybe_result = AllocateStruct(ACCESSOR_PAIR_TYPE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ AccessorPair* accessors;
+ { MaybeObject* maybe_accessors = AllocateStruct(ACCESSOR_PAIR_TYPE);
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
}
- AccessorPair* accessors = AccessorPair::cast(result);
- // Later we will have to distinguish between undefined and the hole...
- // accessors->set_getter(the_hole_value(), SKIP_WRITE_BARRIER);
- // accessors->set_setter(the_hole_value(), SKIP_WRITE_BARRIER);
+ accessors->set_getter(the_hole_value(), SKIP_WRITE_BARRIER);
+ accessors->set_setter(the_hole_value(), SKIP_WRITE_BARRIER);
return accessors;
}
MaybeObject* Heap::AllocateTypeFeedbackInfo() {
TypeFeedbackInfo* info;
- { MaybeObject* maybe_result = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
- if (!maybe_result->To(&info)) return maybe_result;
+ { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
+ if (!maybe_info->To(&info)) return maybe_info;
}
info->set_ic_total_count(0);
info->set_ic_with_typeinfo_count(0);
@@ -1953,8 +1961,8 @@ MaybeObject* Heap::AllocateTypeFeedbackInfo() {
MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) {
AliasedArgumentsEntry* entry;
- { MaybeObject* maybe_result = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE);
- if (!maybe_result->To(&entry)) return maybe_result;
+ { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE);
+ if (!maybe_entry->To(&entry)) return maybe_entry;
}
entry->set_aliased_context_slot(aliased_context_slot);
return entry;
@@ -6921,14 +6929,18 @@ void Heap::FreeQueuedChunks() {
// pieces and initialize size, owner and flags field of every piece.
// If FromAnyPointerAddress encounters a slot that belongs to one of
// these smaller pieces it will treat it as a slot on a normal Page.
+ Address chunk_end = chunk->address() + chunk->size();
MemoryChunk* inner = MemoryChunk::FromAddress(
chunk->address() + Page::kPageSize);
- MemoryChunk* inner_last = MemoryChunk::FromAddress(
- chunk->address() + chunk->size() - 1);
+ MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1);
while (inner <= inner_last) {
// Size of a large chunk is always a multiple of
// OS::AllocateAlignment() so there is always
// enough space for a fake MemoryChunk header.
+ Address area_end = Min(inner->address() + Page::kPageSize, chunk_end);
+ // Guard against overflow.
+ if (area_end < inner->address()) area_end = chunk_end;
+ inner->SetArea(inner->address(), area_end);
inner->set_size(Page::kPageSize);
inner->set_owner(lo_space());
inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index 70c3146be20..df3717e4fae 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -150,7 +150,8 @@ namespace internal {
V(Script, empty_script, EmptyScript) \
V(Smi, real_stack_limit, RealStackLimit) \
V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
- V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)
+ V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
+ V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -1040,8 +1041,14 @@ class Heap {
const char* gc_reason = NULL);
static const int kNoGCFlags = 0;
- static const int kMakeHeapIterableMask = 1;
+ static const int kSweepPreciselyMask = 1;
static const int kReduceMemoryFootprintMask = 2;
+ static const int kAbortIncrementalMarkingMask = 4;
+
+ // Making the heap iterable requires us to sweep precisely and abort any
+ // incremental marking as well.
+ static const int kMakeHeapIterableMask =
+ kSweepPreciselyMask | kAbortIncrementalMarkingMask;
// Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
// non-zero, then the slower precise sweeper is used, which leaves the heap
@@ -1341,6 +1348,10 @@ class Heap {
return old_gen_allocation_limit_ - PromotedTotalSize();
}
+ inline intptr_t OldGenerationCapacityAvailable() {
+ return max_old_generation_size_ - PromotedTotalSize();
+ }
+
static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
static const intptr_t kMinimumAllocationLimit =
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
@@ -1567,6 +1578,11 @@ class Heap {
set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}
+ void SetConstructStubDeoptPCOffset(int pc_offset) {
+ ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
private:
Heap();
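One detail worth calling out in the heap.h hunk above: the GC flags form a bitmask, and kMakeHeapIterableMask is no longer its own bit but the union of precise sweeping and aborting incremental marking, so any caller that asks for an iterable heap now also aborts in-progress incremental marking. A trivial self-contained check of that composition, using copies of the constants from the diff (not part of the patch):

#include <cassert>

int main() {
  // Copies of the Heap constants introduced above, for illustration only.
  const int kSweepPreciselyMask = 1;
  const int kReduceMemoryFootprintMask = 2;
  const int kAbortIncrementalMarkingMask = 4;
  const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  assert(kMakeHeapIterableMask == 5);  // 1 | 4
  assert((kMakeHeapIterableMask & kAbortIncrementalMarkingMask) != 0);
  assert((kMakeHeapIterableMask & kReduceMemoryFootprintMask) == 0);
  return 0;
}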
diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc
index 6cd9998b670..f7391dd33d0 100644
--- a/deps/v8/src/hydrogen-instructions.cc
+++ b/deps/v8/src/hydrogen-instructions.cc
@@ -285,6 +285,14 @@ HUseListNode* HUseListNode::tail() {
}
+bool HValue::CheckUsesForFlag(Flag f) {
+ for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(f)) return false;
+ }
+ return true;
+}
+
+
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
Advance();
}
@@ -495,9 +503,9 @@ void HValue::RegisterUse(int index, HValue* new_value) {
}
-void HValue::AddNewRange(Range* r) {
- if (!HasRange()) ComputeInitialRange();
- if (!HasRange()) range_ = new Range();
+void HValue::AddNewRange(Range* r, Zone* zone) {
+ if (!HasRange()) ComputeInitialRange(zone);
+ if (!HasRange()) range_ = new(zone) Range();
ASSERT(HasRange());
r->StackUpon(range_);
range_ = r;
@@ -511,9 +519,9 @@ void HValue::RemoveLastAddedRange() {
}
-void HValue::ComputeInitialRange() {
+void HValue::ComputeInitialRange(Zone* zone) {
ASSERT(!HasRange());
- range_ = InferRange();
+ range_ = InferRange(zone);
ASSERT(HasRange());
}
@@ -831,12 +839,12 @@ void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
HValue* HConstant::Canonicalize() {
- return HasNoUses() && !IsBlockEntry() ? NULL : this;
+ return HasNoUses() ? NULL : this;
}
HValue* HTypeof::Canonicalize() {
- return HasNoUses() && !IsBlockEntry() ? NULL : this;
+ return HasNoUses() ? NULL : this;
}
@@ -858,6 +866,20 @@ HValue* HBitwise::Canonicalize() {
}
+HValue* HAdd::Canonicalize() {
+ if (!representation().IsInteger32()) return this;
+ if (CheckUsesForFlag(kTruncatingToInt32)) ClearFlag(kCanOverflow);
+ return this;
+}
+
+
+HValue* HSub::Canonicalize() {
+ if (!representation().IsInteger32()) return this;
+ if (CheckUsesForFlag(kTruncatingToInt32)) ClearFlag(kCanOverflow);
+ return this;
+}
+
+
HValue* HChange::Canonicalize() {
return (from().Equals(to())) ? value() : this;
}
@@ -986,15 +1008,15 @@ void HInstanceOf::PrintDataTo(StringStream* stream) {
}
-Range* HValue::InferRange() {
+Range* HValue::InferRange(Zone* zone) {
// Untagged integer32 cannot be -0, all other representations can.
- Range* result = new Range();
+ Range* result = new(zone) Range();
result->set_can_be_minus_zero(!representation().IsInteger32());
return result;
}
-Range* HChange::InferRange() {
+Range* HChange::InferRange(Zone* zone) {
Range* input_range = value()->range();
if (from().IsInteger32() &&
to().IsTagged() &&
@@ -1002,46 +1024,46 @@ Range* HChange::InferRange() {
set_type(HType::Smi());
}
Range* result = (input_range != NULL)
- ? input_range->Copy()
- : HValue::InferRange();
+ ? input_range->Copy(zone)
+ : HValue::InferRange(zone);
if (to().IsInteger32()) result->set_can_be_minus_zero(false);
return result;
}
-Range* HConstant::InferRange() {
+Range* HConstant::InferRange(Zone* zone) {
if (has_int32_value_) {
- Range* result = new Range(int32_value_, int32_value_);
+ Range* result = new(zone) Range(int32_value_, int32_value_);
result->set_can_be_minus_zero(false);
return result;
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HPhi::InferRange() {
+Range* HPhi::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
if (block()->IsLoopHeader()) {
- Range* range = new Range(kMinInt, kMaxInt);
+ Range* range = new(zone) Range(kMinInt, kMaxInt);
return range;
} else {
- Range* range = OperandAt(0)->range()->Copy();
+ Range* range = OperandAt(0)->range()->Copy(zone);
for (int i = 1; i < OperandCount(); ++i) {
range->Union(OperandAt(i)->range());
}
return range;
}
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HAdd::InferRange() {
+Range* HAdd::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->AddAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
@@ -1049,32 +1071,32 @@ Range* HAdd::InferRange() {
res->set_can_be_minus_zero(m0);
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HSub::InferRange() {
+Range* HSub::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->SubAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
res->set_can_be_minus_zero(a->CanBeMinusZero() && b->CanBeZero());
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HMul::InferRange() {
+Range* HMul::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->MulAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
@@ -1083,14 +1105,14 @@ Range* HMul::InferRange() {
res->set_can_be_minus_zero(m0);
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HDiv::InferRange() {
+Range* HDiv::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
- Range* result = new Range();
+ Range* result = new(zone) Range();
if (left()->range()->CanBeMinusZero()) {
result->set_can_be_minus_zero(true);
}
@@ -1108,15 +1130,15 @@ Range* HDiv::InferRange() {
}
return result;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HMod::InferRange() {
+Range* HMod::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
- Range* result = new Range();
+ Range* result = new(zone) Range();
if (a->CanBeMinusZero() || a->CanBeNegative()) {
result->set_can_be_minus_zero(true);
}
@@ -1125,7 +1147,7 @@ Range* HMod::InferRange() {
}
return result;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
@@ -1324,40 +1346,41 @@ void HBinaryOperation::PrintDataTo(StringStream* stream) {
}
-Range* HBitwise::InferRange() {
- if (op() == Token::BIT_XOR) return HValue::InferRange();
+Range* HBitwise::InferRange(Zone* zone) {
+ if (op() == Token::BIT_XOR) return HValue::InferRange(zone);
+ const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
int32_t left_mask = (left()->range() != NULL)
? left()->range()->Mask()
- : 0xffffffff;
+ : kDefaultMask;
int32_t right_mask = (right()->range() != NULL)
? right()->range()->Mask()
- : 0xffffffff;
+ : kDefaultMask;
int32_t result_mask = (op() == Token::BIT_AND)
? left_mask & right_mask
: left_mask | right_mask;
return (result_mask >= 0)
- ? new Range(0, result_mask)
- : HValue::InferRange();
+ ? new(zone) Range(0, result_mask)
+ : HValue::InferRange(zone);
}
-Range* HSar::InferRange() {
+Range* HSar::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Sar(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HShr::InferRange() {
+Range* HShr::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
@@ -1365,53 +1388,54 @@ Range* HShr::InferRange() {
if (left()->range()->CanBeNegative()) {
// Only compute bounds if the result always fits into an int32.
return (shift_count >= 1)
- ? new Range(0, static_cast<uint32_t>(0xffffffff) >> shift_count)
- : new Range();
+ ? new(zone) Range(0,
+ static_cast<uint32_t>(0xffffffff) >> shift_count)
+ : new(zone) Range();
} else {
// For positive inputs we can use the >> operator.
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Sar(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HShl::InferRange() {
+Range* HShl::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Shl(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HLoadKeyedSpecializedArrayElement::InferRange() {
+Range* HLoadKeyedSpecializedArrayElement::InferRange(Zone* zone) {
switch (elements_kind()) {
case EXTERNAL_PIXEL_ELEMENTS:
- return new Range(0, 255);
+ return new(zone) Range(0, 255);
case EXTERNAL_BYTE_ELEMENTS:
- return new Range(-128, 127);
+ return new(zone) Range(-128, 127);
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- return new Range(0, 255);
+ return new(zone) Range(0, 255);
case EXTERNAL_SHORT_ELEMENTS:
- return new Range(-32768, 32767);
+ return new(zone) Range(-32768, 32767);
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- return new Range(0, 65535);
+ return new(zone) Range(0, 65535);
default:
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
@@ -1456,7 +1480,22 @@ void HGoto::PrintDataTo(StringStream* stream) {
void HCompareIDAndBranch::SetInputRepresentation(Representation r) {
input_representation_ = r;
if (r.IsDouble()) {
- SetFlag(kDeoptimizeOnUndefined);
+ // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
+ // and !=) have special handling of undefined, e.g. undefined == undefined
+ // is 'true'. Relational comparisons have a different semantic, first
+ // calling ToPrimitive() on their arguments. The standard Crankshaft
+ // tagged-to-double conversion to ensure the HCompareIDAndBranch's inputs
+ // are doubles caused 'undefined' to be converted to NaN. That's compatible
+ // out-of-the box with ordered relational comparisons (<, >, <=,
+ // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
+ // it is not consistent with the spec. For example, it would cause undefined
+ // == undefined (should be true) to be evaluated as NaN == NaN
+ // (false). Therefore, any comparisons other than ordered relational
+ // comparisons must cause a deopt when one of their arguments is undefined.
+ // See also v8:1434
+ if (!Token::IsOrderedRelationalCompareOp(token_)) {
+ SetFlag(kDeoptimizeOnUndefined);
+ }
} else {
ASSERT(r.IsInteger32());
}
@@ -1923,6 +1962,11 @@ HType HStringCharFromCode::CalculateInferredType() {
}
+HType HAllocateObject::CalculateInferredType() {
+ return HType::JSObject();
+}
+
+
HType HFastLiteral::CalculateInferredType() {
// TODO(mstarzinger): Be smarter, could also be JSArray here.
return HType::JSObject();
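The comment added to HCompareIDAndBranch::SetInputRepresentation above rests on a single numeric fact: converting undefined to a double yields NaN, and NaN compares unequal to everything including itself, so equality on doubles cannot model undefined == undefined, while the ordered relational comparisons (already false for undefined operands in JavaScript) come out right. A small self-contained illustration, not part of the patch:

#include <cassert>
#include <limits>

int main() {
  // What the tagged-to-double conversion makes of 'undefined'.
  double undefined_as_double = std::numeric_limits<double>::quiet_NaN();

  // JavaScript: undefined == undefined -> true.   Doubles: NaN == NaN -> false.
  assert(!(undefined_as_double == undefined_as_double));

  // JavaScript: undefined <  undefined -> false.  Doubles: NaN <  NaN -> false.
  assert(!(undefined_as_double < undefined_as_double));
  // JavaScript: undefined <= undefined -> false.  Doubles: NaN <= NaN -> false.
  assert(!(undefined_as_double <= undefined_as_double));
  return 0;
}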
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index 92645a2b317..5733e51affc 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -62,6 +62,7 @@ class LChunkBuilder;
V(AbnormalExit) \
V(AccessArgumentsAt) \
V(Add) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -173,7 +174,6 @@ class LChunkBuilder;
V(ThisFunction) \
V(Throw) \
V(ToFastProperties) \
- V(ToInt32) \
V(TransitionElementsKind) \
V(Typeof) \
V(TypeofIsAndBranch) \
@@ -184,7 +184,8 @@ class LChunkBuilder;
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
- V(LoadFieldByIndex)
+ V(LoadFieldByIndex) \
+ V(DateField)
#define GVN_FLAG_LIST(V) \
V(Calls) \
@@ -235,10 +236,14 @@ class Range: public ZoneObject {
int32_t upper() const { return upper_; }
int32_t lower() const { return lower_; }
Range* next() const { return next_; }
- Range* CopyClearLower() const { return new Range(kMinInt, upper_); }
- Range* CopyClearUpper() const { return new Range(lower_, kMaxInt); }
- Range* Copy() const {
- Range* result = new Range(lower_, upper_);
+ Range* CopyClearLower(Zone* zone) const {
+ return new(zone) Range(kMinInt, upper_);
+ }
+ Range* CopyClearUpper(Zone* zone) const {
+ return new(zone) Range(lower_, kMaxInt);
+ }
+ Range* Copy(Zone* zone) const {
+ Range* result = new(zone) Range(lower_, upper_);
result->set_can_be_minus_zero(CanBeMinusZero());
return result;
}
@@ -640,6 +645,9 @@ class HValue: public ZoneObject {
void ClearFlag(Flag f) { flags_ &= ~(1 << f); }
bool CheckFlag(Flag f) const { return (flags_ & (1 << f)) != 0; }
+ // Returns true if the flag specified is set for all uses, false otherwise.
+ bool CheckUsesForFlag(Flag f);
+
GVNFlagSet gvn_flags() const { return gvn_flags_; }
void SetGVNFlag(GVNFlag f) { gvn_flags_.Add(f); }
void ClearGVNFlag(GVNFlag f) { gvn_flags_.Remove(f); }
@@ -682,9 +690,9 @@ class HValue: public ZoneObject {
Range* range() const { return range_; }
bool HasRange() const { return range_ != NULL; }
- void AddNewRange(Range* r);
+ void AddNewRange(Range* r, Zone* zone);
void RemoveLastAddedRange();
- void ComputeInitialRange();
+ void ComputeInitialRange(Zone* zone);
// Representation helpers.
virtual Representation RequiredInputRepresentation(int index) = 0;
@@ -729,7 +737,7 @@ class HValue: public ZoneObject {
return false;
}
virtual void RepresentationChanged(Representation to) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual void DeleteFromGraph() = 0;
virtual void InternalSetOperandAt(int index, HValue* value) = 0;
void clear_block() {
@@ -819,6 +827,8 @@ class HInstruction: public HValue {
bool has_position() const { return position_ != RelocInfo::kNoPosition; }
void set_position(int position) { position_ = position; }
+ bool CanTruncateToInt32() const { return CheckFlag(kTruncatingToInt32); }
+
virtual LInstruction* CompileToLithium(LChunkBuilder* builder) = 0;
#ifdef DEBUG
@@ -1116,10 +1126,6 @@ class HUnaryOperation: public HTemplateInstruction<1> {
return reinterpret_cast<HUnaryOperation*>(value);
}
- virtual bool CanTruncateToInt32() const {
- return CheckFlag(kTruncatingToInt32);
- }
-
HValue* value() { return OperandAt(0); }
virtual void PrintDataTo(StringStream* stream);
};
@@ -1207,7 +1213,7 @@ class HChange: public HUnaryOperation {
return from();
}
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual void PrintDataTo(StringStream* stream);
@@ -1237,37 +1243,6 @@ class HClampToUint8: public HUnaryOperation {
};
-class HToInt32: public HUnaryOperation {
- public:
- explicit HToInt32(HValue* value)
- : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- }
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::None();
- }
-
- virtual bool CanTruncateToInt32() const {
- return true;
- }
-
- virtual HValue* Canonicalize() {
- if (value()->representation().IsInteger32()) {
- return value();
- } else {
- return this;
- }
- }
-
- DECLARE_CONCRETE_INSTRUCTION(ToInt32)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-};
-
-
class HSimulate: public HInstruction {
public:
HSimulate(int ast_id, int pop_count)
@@ -1376,11 +1351,13 @@ class HEnterInlined: public HTemplateInstruction<0> {
HEnterInlined(Handle<JSFunction> closure,
int arguments_count,
FunctionLiteral* function,
- CallKind call_kind)
+ CallKind call_kind,
+ bool is_construct)
: closure_(closure),
arguments_count_(arguments_count),
function_(function),
- call_kind_(call_kind) {
+ call_kind_(call_kind),
+ is_construct_(is_construct) {
}
virtual void PrintDataTo(StringStream* stream);
@@ -1389,6 +1366,7 @@ class HEnterInlined: public HTemplateInstruction<0> {
int arguments_count() const { return arguments_count_; }
FunctionLiteral* function() const { return function_; }
CallKind call_kind() const { return call_kind_; }
+ bool is_construct() const { return is_construct_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -1401,6 +1379,7 @@ class HEnterInlined: public HTemplateInstruction<0> {
int arguments_count_;
FunctionLiteral* function_;
CallKind call_kind_;
+ bool is_construct_;
};
@@ -1908,6 +1887,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
set_representation(Representation::Double());
break;
default:
@@ -1938,6 +1918,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
return Representation::Double();
case kMathAbs:
return representation();
@@ -2294,7 +2275,7 @@ class HPhi: public HValue {
return Representation::None();
}
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual Representation RequiredInputRepresentation(int index) {
return representation();
}
@@ -2472,7 +2453,7 @@ class HConstant: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(Constant)
protected:
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual bool DataEquals(HValue* other) {
HConstant* other_constant = HConstant::cast(other);
@@ -3145,12 +3126,14 @@ class HAdd: public HArithmeticBinaryOperation {
virtual HType CalculateInferredType();
+ virtual HValue* Canonicalize();
+
DECLARE_CONCRETE_INSTRUCTION(Add)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3163,6 +3146,8 @@ class HSub: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+ virtual HValue* Canonicalize();
+
static HInstruction* NewHSub(Zone* zone,
HValue* context,
HValue* left,
@@ -3173,7 +3158,7 @@ class HSub: public HArithmeticBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3201,7 +3186,7 @@ class HMul: public HArithmeticBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3234,7 +3219,7 @@ class HMod: public HArithmeticBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3248,7 +3233,6 @@ class HDiv: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
-
static HInstruction* NewHDiv(Zone* zone,
HValue* context,
HValue* left,
@@ -3259,7 +3243,7 @@ class HDiv: public HArithmeticBinaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3291,7 +3275,7 @@ class HBitwise: public HBitwiseBinaryOperation {
return op() == HBitwise::cast(other)->op();
}
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
private:
Token::Value op_;
@@ -3303,7 +3287,7 @@ class HShl: public HBitwiseBinaryOperation {
HShl(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
static HInstruction* NewHShl(Zone* zone,
HValue* context,
@@ -3322,7 +3306,7 @@ class HShr: public HBitwiseBinaryOperation {
HShr(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
static HInstruction* NewHShr(Zone* zone,
HValue* context,
@@ -3341,7 +3325,7 @@ class HSar: public HBitwiseBinaryOperation {
HSar(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
static HInstruction* NewHSar(Zone* zone,
HValue* context,
@@ -3931,7 +3915,7 @@ class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
HValue* key() { return OperandAt(1); }
ElementsKind elements_kind() const { return elements_kind_; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement)
@@ -4299,8 +4283,8 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange() {
- return new Range(0, String::kMaxUC16CharCode);
+ virtual Range* InferRange(Zone* zone) {
+ return new(zone) Range(0, String::kMaxUC16CharCode);
}
};
@@ -4352,12 +4336,35 @@ class HStringLength: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange() {
- return new Range(0, String::kMaxLength);
+ virtual Range* InferRange(Zone* zone) {
+ return new(zone) Range(0, String::kMaxLength);
}
};
+class HAllocateObject: public HTemplateInstruction<1> {
+ public:
+ HAllocateObject(HValue* context, Handle<JSFunction> constructor)
+ : constructor_(constructor) {
+ SetOperandAt(0, context);
+ set_representation(Representation::Tagged());
+ }
+
+ HValue* context() { return OperandAt(0); }
+ Handle<JSFunction> constructor() { return constructor_; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+ virtual HType CalculateInferredType();
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject)
+
+ private:
+ Handle<JSFunction> constructor_;
+};
+
+
template <int V>
class HMaterializedLiteral: public HTemplateInstruction<V> {
public:
@@ -4596,6 +4603,26 @@ class HValueOf: public HUnaryOperation {
};
+class HDateField: public HUnaryOperation {
+ public:
+ HDateField(HValue* date, Smi* index)
+ : HUnaryOperation(date), index_(index) {
+ set_representation(Representation::Tagged());
+ }
+
+ Smi* index() const { return index_; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField)
+
+ private:
+ Smi* index_;
+};
+
+
class HDeleteProperty: public HBinaryOperation {
public:
HDeleteProperty(HValue* context, HValue* obj, HValue* key)
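
The InferRange(Zone* zone) overloads introduced above move Range objects from plain heap allocation (new Range(...)) to zone allocation (new(zone) Range(...)), so every range built during a compilation phase is released together with its zone. Below is a minimal standalone sketch of that placement-new pattern; the Zone class here is a toy stand-in, not V8's.

#include <cstddef>
#include <cstdio>
#include <vector>

class Zone {
 public:
  ~Zone() {
    // Releasing the zone frees every object allocated from it at once.
    for (size_t i = 0; i < blocks_.size(); ++i) delete[] blocks_[i];
  }
  void* Allocate(size_t size) {
    char* block = new char[size];  // a real arena carves from large segments
    blocks_.push_back(block);
    return block;
  }
 private:
  std::vector<char*> blocks_;
};

// Overload selected by the 'new(zone) Range(...)' spelling used in the patch.
void* operator new(size_t size, Zone* zone) { return zone->Allocate(size); }

struct Range {
  Range(int lower, int upper) : lower_(lower), upper_(upper) {}
  int lower_;
  int upper_;
};

int main() {
  Zone zone;
  Range* r = new(&zone) Range(0, 0xFFFF);
  std::printf("[%d,%d]\n", r->lower_, r->upper_);
  return 0;  // no individual delete: the Zone destructor reclaims everything
}
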
diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc
index 4908586f7c7..11b11571790 100644
--- a/deps/v8/src/hydrogen.cc
+++ b/deps/v8/src/hydrogen.cc
@@ -600,7 +600,7 @@ HConstant* HGraph::GetConstantHole() {
HGraphBuilder::HGraphBuilder(CompilationInfo* info,
TypeFeedbackOracle* oracle)
: function_state_(NULL),
- initial_function_state_(this, info, oracle, false),
+ initial_function_state_(this, info, oracle, NORMAL_RETURN),
ast_context_(NULL),
break_scope_(NULL),
graph_(NULL),
@@ -730,7 +730,7 @@ HBasicBlock* HGraph::CreateBasicBlock() {
void HGraph::Canonicalize() {
if (!FLAG_use_canonicalizing) return;
- HPhase phase("Canonicalize", this);
+ HPhase phase("H_Canonicalize", this);
for (int i = 0; i < blocks()->length(); ++i) {
HInstruction* instr = blocks()->at(i)->first();
while (instr != NULL) {
@@ -743,7 +743,7 @@ void HGraph::Canonicalize() {
void HGraph::OrderBlocks() {
- HPhase phase("Block ordering");
+ HPhase phase("H_Block ordering");
BitVector visited(blocks_.length(), zone());
ZoneList<HBasicBlock*> reverse_result(8);
@@ -805,7 +805,7 @@ void HGraph::Postorder(HBasicBlock* block,
void HGraph::AssignDominators() {
- HPhase phase("Assign dominators", this);
+ HPhase phase("H_Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
HBasicBlock* block = blocks_[i];
if (block->IsLoopHeader()) {
@@ -824,7 +824,7 @@ void HGraph::AssignDominators() {
// Mark all blocks that are dominated by an unconditional soft deoptimize to
// prevent code motion across those blocks.
void HGraph::PropagateDeoptimizingMark() {
- HPhase phase("Propagate deoptimizing mark", this);
+ HPhase phase("H_Propagate deoptimizing mark", this);
MarkAsDeoptimizingRecursively(entry_block());
}
@@ -837,7 +837,7 @@ void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
}
void HGraph::EliminateRedundantPhis() {
- HPhase phase("Redundant phi elimination", this);
+ HPhase phase("H_Redundant phi elimination", this);
// Worklist of phis that can potentially be eliminated. Initialized with
// all phi nodes. When elimination of a phi node modifies another phi node
@@ -871,7 +871,7 @@ void HGraph::EliminateRedundantPhis() {
void HGraph::EliminateUnreachablePhis() {
- HPhase phase("Unreachable phi elimination", this);
+ HPhase phase("H_Unreachable phi elimination", this);
// Initialize worklist.
ZoneList<HPhi*> phi_list(blocks_.length());
@@ -979,7 +979,8 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
class HRangeAnalysis BASE_EMBEDDED {
public:
- explicit HRangeAnalysis(HGraph* graph) : graph_(graph), changed_ranges_(16) {}
+ explicit HRangeAnalysis(HGraph* graph) :
+ graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }
void Analyze();
@@ -993,6 +994,7 @@ class HRangeAnalysis BASE_EMBEDDED {
void AddRange(HValue* value, Range* range);
HGraph* graph_;
+ Zone* zone_;
ZoneList<HValue*> changed_ranges_;
};
@@ -1008,7 +1010,7 @@ void HRangeAnalysis::TraceRange(const char* msg, ...) {
void HRangeAnalysis::Analyze() {
- HPhase phase("Range analysis", graph_);
+ HPhase phase("H_Range analysis", graph_);
Analyze(graph_->entry_block());
}
@@ -1079,14 +1081,14 @@ void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
if (op == Token::EQ || op == Token::EQ_STRICT) {
// The same range has to apply for value.
- new_range = range->Copy();
+ new_range = range->Copy(zone_);
} else if (op == Token::LT || op == Token::LTE) {
- new_range = range->CopyClearLower();
+ new_range = range->CopyClearLower(zone_);
if (op == Token::LT) {
new_range->AddConstant(-1);
}
} else if (op == Token::GT || op == Token::GTE) {
- new_range = range->CopyClearUpper();
+ new_range = range->CopyClearUpper(zone_);
if (op == Token::GT) {
new_range->AddConstant(1);
}
@@ -1101,7 +1103,7 @@ void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
void HRangeAnalysis::InferRange(HValue* value) {
ASSERT(!value->HasRange());
if (!value->representation().IsNone()) {
- value->ComputeInitialRange();
+ value->ComputeInitialRange(zone_);
Range* range = value->range();
TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
value->id(),
@@ -1122,7 +1124,7 @@ void HRangeAnalysis::RollBackTo(int index) {
void HRangeAnalysis::AddRange(HValue* value, Range* range) {
Range* original_range = value->range();
- value->AddNewRange(range);
+ value->AddNewRange(range, zone_);
changed_ranges_.Add(value);
Range* new_range = value->range();
TraceRange("Updated range of %d set to [%d,%d]\n",
@@ -1483,6 +1485,11 @@ void HGlobalValueNumberer::ComputeBlockSideEffects() {
GVNFlagSet side_effects;
while (instr != NULL) {
side_effects.Add(instr->ChangesFlags());
+ if (instr->IsSoftDeoptimize()) {
+ block_side_effects_[id].RemoveAll();
+ side_effects.RemoveAll();
+ break;
+ }
instr = instr->next();
}
block_side_effects_[id].Add(side_effects);
@@ -1829,7 +1836,7 @@ Representation HInferRepresentation::TryChange(HValue* value) {
void HInferRepresentation::Analyze() {
- HPhase phase("Infer representations", graph_);
+ HPhase phase("H_Infer representations", graph_);
// (1) Initialize bit vectors and count real uses. Each phi gets a
// bit-vector of length <number of phis>.
@@ -1908,7 +1915,7 @@ void HInferRepresentation::Analyze() {
void HGraph::InitializeInferredTypes() {
- HPhase phase("Inferring types", this);
+ HPhase phase("H_Inferring types", this);
InitializeInferredTypes(0, this->blocks_.length() - 1);
}
@@ -2045,8 +2052,7 @@ void HGraph::InsertRepresentationChangesForValue(HValue* value) {
void HGraph::InsertRepresentationChanges() {
- HPhase phase("Insert representation changes", this);
-
+ HPhase phase("H_Representation changes", this);
// Compute truncation flag for phis: Initially assume that all
// int32-phis allow truncation and iteratively remove the ones that
@@ -2065,13 +2071,9 @@ void HGraph::InsertRepresentationChanges() {
for (int i = 0; i < phi_list()->length(); i++) {
HPhi* phi = phi_list()->at(i);
if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
- for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
- HValue* use = it.value();
- if (!use->CheckFlag(HValue::kTruncatingToInt32)) {
- phi->ClearFlag(HValue::kTruncatingToInt32);
- change = true;
- break;
- }
+ if (!phi->CheckUsesForFlag(HValue::kTruncatingToInt32)) {
+ phi->ClearFlag(HValue::kTruncatingToInt32);
+ change = true;
}
}
}
@@ -2106,7 +2108,7 @@ void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
void HGraph::MarkDeoptimizeOnUndefined() {
- HPhase phase("MarkDeoptimizeOnUndefined", this);
+ HPhase phase("H_MarkDeoptimizeOnUndefined", this);
// Compute DeoptimizeOnUndefined flag for phis.
// Any phi that can reach a use with DeoptimizeOnUndefined set must
// have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
@@ -2156,12 +2158,12 @@ void HGraph::ComputeMinusZeroChecks() {
FunctionState::FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- bool drop_extra)
+ ReturnHandlingFlag return_handling)
: owner_(owner),
compilation_info_(info),
oracle_(oracle),
call_context_(NULL),
- drop_extra_(drop_extra),
+ return_handling_(return_handling),
function_return_(NULL),
test_context_(NULL),
outer_(owner->function_state()) {
@@ -2204,7 +2206,7 @@ AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
for_typeof_(false) {
owner->set_ast_context(this); // Push.
#ifdef DEBUG
- ASSERT(!owner->environment()->is_arguments_adaptor());
+ ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
original_length_ = owner->environment()->length();
#endif
}
@@ -2219,7 +2221,7 @@ EffectContext::~EffectContext() {
ASSERT(owner()->HasStackOverflow() ||
owner()->current_block() == NULL ||
(owner()->environment()->length() == original_length_ &&
- !owner()->environment()->is_arguments_adaptor()));
+ owner()->environment()->frame_type() == JS_FUNCTION));
}
@@ -2227,7 +2229,7 @@ ValueContext::~ValueContext() {
ASSERT(owner()->HasStackOverflow() ||
owner()->current_block() == NULL ||
(owner()->environment()->length() == original_length_ + 1 &&
- !owner()->environment()->is_arguments_adaptor()));
+ owner()->environment()->frame_type() == JS_FUNCTION));
}
@@ -2432,7 +2434,7 @@ HGraph* HGraphBuilder::CreateGraph() {
if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
{
- HPhase phase("Block building");
+ HPhase phase("H_Block building");
current_block_ = graph()->entry_block();
Scope* scope = info()->scope();
@@ -2466,7 +2468,7 @@ HGraph* HGraphBuilder::CreateGraph() {
// Handle implicit declaration of the function name in named function
// expressions before other declarations.
if (scope->is_function_scope() && scope->function() != NULL) {
- HandleVariableDeclaration(scope->function(), CONST, NULL, NULL);
+ HandleDeclaration(scope->function(), CONST, NULL, NULL);
}
VisitDeclarations(scope->declarations());
AddSimulate(AstNode::kDeclarationsId);
@@ -2517,7 +2519,7 @@ HGraph* HGraphBuilder::CreateGraph() {
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
- HPhase phase("Global value numbering", graph());
+ HPhase phase("H_Global value numbering", graph());
HGlobalValueNumberer gvn(graph(), info());
bool removed_side_effects = gvn.Analyze();
// Trigger a second analysis pass to further eliminate duplicate values that
@@ -2550,7 +2552,7 @@ HGraph* HGraphBuilder::CreateGraph() {
void HGraph::ReplaceCheckedValues() {
- HPhase phase("Replace checked values", this);
+ HPhase phase("H_Replace checked values", this);
for (int i = 0; i < blocks()->length(); ++i) {
HInstruction* instr = blocks()->at(i)->first();
while (instr != NULL) {
@@ -2590,8 +2592,8 @@ void HGraphBuilder::PushAndAdd(HInstruction* instr) {
}
-template <int V>
-HInstruction* HGraphBuilder::PreProcessCall(HCall<V>* call) {
+template <class Instruction>
+HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
int count = call->argument_count();
ZoneList<HValue*> arguments(count);
for (int i = 0; i < count; ++i) {
@@ -2819,7 +2821,38 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
CHECK_ALIVE(VisitForValue(stmt->expression()));
HValue* result = environment()->Pop();
current_block()->FinishExit(new(zone()) HReturn(result));
- set_current_block(NULL);
+ } else if (function_state()->is_construct()) {
+ // Return from an inlined construct call. In a test context the return
+ // value will always evaluate to true; in a value context the return value
+ // needs to be a JSObject.
+ if (context->IsTest()) {
+ TestContext* test = TestContext::cast(context);
+ CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ current_block()->Goto(test->if_true(), function_state()->drop_extra());
+ } else if (context->IsEffect()) {
+ CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ current_block()->Goto(function_return(), function_state()->drop_extra());
+ } else {
+ ASSERT(context->IsValue());
+ CHECK_ALIVE(VisitForValue(stmt->expression()));
+ HValue* return_value = Pop();
+ HValue* receiver = environment()->Lookup(0);
+ HHasInstanceTypeAndBranch* typecheck =
+ new(zone()) HHasInstanceTypeAndBranch(return_value,
+ FIRST_SPEC_OBJECT_TYPE,
+ LAST_SPEC_OBJECT_TYPE);
+ HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
+ HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
+ typecheck->SetSuccessorAt(0, if_spec_object);
+ typecheck->SetSuccessorAt(1, not_spec_object);
+ current_block()->Finish(typecheck);
+ if_spec_object->AddLeaveInlined(return_value,
+ function_return(),
+ function_state()->drop_extra());
+ not_spec_object->AddLeaveInlined(receiver,
+ function_return(),
+ function_state()->drop_extra());
+ }
} else {
// Return from an inlined function, visit the subexpression in the
// expression context of the call.
@@ -2834,13 +2867,13 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
- HValue* return_value = environment()->Pop();
+ HValue* return_value = Pop();
current_block()->AddLeaveInlined(return_value,
function_return(),
function_state()->drop_extra());
}
- set_current_block(NULL);
}
+ set_current_block(NULL);
}
@@ -3075,7 +3108,6 @@ void HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
}
}
-
AddSimulate(osr_entry_id);
AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
HContext* context = new(zone()) HContext;
@@ -3246,6 +3278,10 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
return Bailout("ForInStatement optimization is disabled");
}
+ if (!oracle()->IsForInFastCase(stmt)) {
+ return Bailout("ForInStatement is not fast case");
+ }
+
if (!stmt->each()->IsVariableProxy() ||
!stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
return Bailout("ForInStatement with non-local each variable");
@@ -3256,10 +3292,8 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
CHECK_ALIVE(VisitForValue(stmt->enumerable()));
HValue* enumerable = Top(); // Leave enumerable at the top.
- HValue* context = environment()->LookupContext();
-
HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
- context, enumerable));
+ environment()->LookupContext(), enumerable));
AddSimulate(stmt->PrepareId());
HInstruction* array = AddInstruction(
@@ -3336,9 +3370,11 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
set_current_block(body_exit);
HValue* current_index = Pop();
- PushAndAdd(
- new(zone()) HAdd(context, current_index, graph()->GetConstant1()));
-
+ HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
+ current_index,
+ graph()->GetConstant1());
+ new_index->AssumeRepresentation(Representation::Integer32());
+ PushAndAdd(new_index);
body_exit = current_block();
}
@@ -4486,9 +4522,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
ASSERT(val != NULL);
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS: {
- HClampToUint8* clamp = new(zone()) HClampToUint8(val);
- AddInstruction(clamp);
- val = clamp;
+ val = AddInstruction(new(zone()) HClampToUint8(val));
break;
}
case EXTERNAL_BYTE_ELEMENTS:
@@ -4497,9 +4531,13 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
- HToInt32* floor_val = new(zone()) HToInt32(val);
- AddInstruction(floor_val);
- val = floor_val;
+ if (!val->representation().IsInteger32()) {
+ val = AddInstruction(new(zone()) HChange(
+ val,
+ Representation::Integer32(),
+ true, // Truncate to int32.
+ false)); // Don't deoptimize undefined (irrelevant here).
+ }
break;
}
case EXTERNAL_FLOAT_ELEMENTS:
@@ -4516,6 +4554,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
return new(zone()) HStoreKeyedSpecializedArrayElement(
external_elements, checked_key, val, elements_kind);
} else {
+ ASSERT(val == NULL);
return new(zone()) HLoadKeyedSpecializedArrayElement(
external_elements, checked_key, elements_kind);
}
@@ -5002,7 +5041,7 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
PrintF("Trying to inline the polymorphic call to %s\n",
*name->ToCString());
}
- if (FLAG_polymorphic_inlining && TryInline(expr)) {
+ if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
// Trying to inline will signal that we should bailout from the
// entire compilation by setting stack overflow on the visitor.
if (HasStackOverflow()) return;
@@ -5072,19 +5111,18 @@ void HGraphBuilder::TraceInline(Handle<JSFunction> target,
}
-bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
+bool HGraphBuilder::TryInline(CallKind call_kind,
+ Handle<JSFunction> target,
+ ZoneList<Expression*>* arguments,
+ HValue* receiver,
+ int ast_id,
+ int return_id,
+ ReturnHandlingFlag return_handling) {
if (!FLAG_use_inlining) return false;
- // The function call we are inlining is a method call if the call
- // is a property call.
- CallKind call_kind = (expr->expression()->AsProperty() == NULL)
- ? CALL_AS_FUNCTION
- : CALL_AS_METHOD;
-
// Precondition: call is monomorphic and we have found a target with the
// appropriate arity.
Handle<JSFunction> caller = info()->closure();
- Handle<JSFunction> target = expr->target();
Handle<SharedFunctionInfo> target_shared(target->shared());
// Do a quick check on source code length to avoid parsing large
@@ -5132,7 +5170,7 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, "inline depth limit reached");
return false;
}
- if (!env->outer()->is_arguments_adaptor()) {
+ if (env->outer()->frame_type() == JS_FUNCTION) {
current_level++;
}
env = env->outer();
@@ -5240,16 +5278,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
isolate());
// The function state is new-allocated because we need to delete it
// in two different places.
- FunctionState* target_state =
- new FunctionState(this, &target_info, &target_oracle, drop_extra);
+ FunctionState* target_state = new FunctionState(
+ this, &target_info, &target_oracle, return_handling);
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner_env =
environment()->CopyForInlining(target,
- expr->arguments()->length(),
+ arguments->length(),
function,
undefined,
- call_kind);
+ call_kind,
+ function_state()->is_construct());
#ifdef V8_TARGET_ARCH_IA32
// IA32 only, overwrite the caller's context in the deoptimization
// environment with the correct one.
@@ -5261,14 +5300,13 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
AddInstruction(context);
inner_env->BindContext(context);
#endif
- HBasicBlock* body_entry = CreateBasicBlock(inner_env);
- current_block()->Goto(body_entry);
- body_entry->SetJoinId(expr->ReturnId());
- set_current_block(body_entry);
+ AddSimulate(return_id);
+ current_block()->UpdateEnvironment(inner_env);
AddInstruction(new(zone()) HEnterInlined(target,
- expr->arguments()->length(),
+ arguments->length(),
function,
- call_kind));
+ call_kind,
+ function_state()->is_construct()));
VisitDeclarations(target_info.scope()->declarations());
VisitStatements(function->body());
if (HasStackOverflow()) {
@@ -5287,32 +5325,27 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, NULL);
if (current_block() != NULL) {
- // Add a return of undefined if control can fall off the body. In a
- // test context, undefined is false.
- if (inlined_test_context() == NULL) {
+ // Add a default return value (i.e. undefined for normal calls or the newly
+ // allocated receiver for construct calls) if control can fall off the
+ // body. In a test context, undefined is false and any JSObject is true.
+ if (call_context()->IsValue()) {
ASSERT(function_return() != NULL);
- ASSERT(call_context()->IsEffect() || call_context()->IsValue());
- if (call_context()->IsEffect()) {
- current_block()->Goto(function_return(), drop_extra);
- } else {
- current_block()->AddLeaveInlined(undefined,
- function_return(),
- drop_extra);
- }
+ HValue* return_value = function_state()->is_construct()
+ ? receiver
+ : undefined;
+ current_block()->AddLeaveInlined(return_value,
+ function_return(),
+ function_state()->drop_extra());
+ } else if (call_context()->IsEffect()) {
+ ASSERT(function_return() != NULL);
+ current_block()->Goto(function_return(), function_state()->drop_extra());
} else {
- // The graph builder assumes control can reach both branches of a
- // test, so we materialize the undefined value and test it rather than
- // simply jumping to the false target.
- //
- // TODO(3168478): refactor to avoid this.
ASSERT(call_context()->IsTest());
- HBasicBlock* empty_true = graph()->CreateBasicBlock();
- HBasicBlock* empty_false = graph()->CreateBasicBlock();
- HBranch* test = new(zone()) HBranch(undefined, empty_true, empty_false);
- current_block()->Finish(test);
-
- empty_true->Goto(inlined_test_context()->if_true(), drop_extra);
- empty_false->Goto(inlined_test_context()->if_false(), drop_extra);
+ ASSERT(inlined_test_context() != NULL);
+ HBasicBlock* target = function_state()->is_construct()
+ ? inlined_test_context()->if_true()
+ : inlined_test_context()->if_false();
+ current_block()->Goto(target, function_state()->drop_extra());
}
}
@@ -5328,12 +5361,12 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
// Forward to the real test context.
if (if_true->HasPredecessor()) {
- if_true->SetJoinId(expr->id());
+ if_true->SetJoinId(ast_id);
HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
if_true->Goto(true_target, function_state()->drop_extra());
}
if (if_false->HasPredecessor()) {
- if_false->SetJoinId(expr->id());
+ if_false->SetJoinId(ast_id);
HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
if_false->Goto(false_target, function_state()->drop_extra());
}
@@ -5341,7 +5374,7 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
return true;
} else if (function_return()->HasPredecessor()) {
- function_return()->SetJoinId(expr->id());
+ function_return()->SetJoinId(ast_id);
set_current_block(function_return());
} else {
set_current_block(NULL);
@@ -5351,6 +5384,34 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
+bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
+ // The function call we are inlining is a method call if the call
+ // is a property call.
+ CallKind call_kind = (expr->expression()->AsProperty() == NULL)
+ ? CALL_AS_FUNCTION
+ : CALL_AS_METHOD;
+
+ return TryInline(call_kind,
+ expr->target(),
+ expr->arguments(),
+ NULL,
+ expr->id(),
+ expr->ReturnId(),
+ drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
+}
+
+
+bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
+ return TryInline(CALL_AS_FUNCTION,
+ expr->target(),
+ expr->arguments(),
+ receiver,
+ expr->id(),
+ expr->ReturnId(),
+ CONSTRUCT_CALL_RETURN);
+}
+
+
bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
@@ -5361,6 +5422,7 @@ bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
if (expr->arguments()->length() == 1) {
HValue* argument = Pop();
HValue* context = environment()->LookupContext();
@@ -5421,6 +5483,7 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
HValue* argument = Pop();
@@ -5680,7 +5743,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
} else {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
- if (TryInline(expr)) return;
+ if (TryInlineCall(expr)) return;
call = PreProcessCall(
new(zone()) HCallConstantFunction(expr->target(),
argument_count));
@@ -5744,7 +5807,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
}
return;
}
- if (TryInline(expr)) return;
+ if (TryInlineCall(expr)) return;
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
} else {
@@ -5780,7 +5843,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
return;
}
- if (TryInline(expr, true)) { // Drop function from environment.
+ if (TryInlineCall(expr, true)) { // Drop function from environment.
return;
} else {
call = PreProcessCall(new(zone()) HInvokeFunction(context,
@@ -5810,25 +5873,72 @@ void HGraphBuilder::VisitCall(Call* expr) {
}
+// Checks whether allocation using the given constructor can be inlined.
+static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
+ return constructor->has_initial_map() &&
+ constructor->initial_map()->instance_type() == JS_OBJECT_TYPE;
+}
+
+
void HGraphBuilder::VisitCallNew(CallNew* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- // The constructor function is also used as the receiver argument to the
- // JS construct call builtin.
- HValue* constructor = NULL;
- CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
- CHECK_ALIVE(VisitArgumentList(expr->arguments()));
-
+ expr->RecordTypeFeedback(oracle());
+ int argument_count = expr->arguments()->length() + 1; // Plus constructor.
HValue* context = environment()->LookupContext();
- // The constructor is both an operand to the instruction and an argument
- // to the construct call.
- int arg_count = expr->arguments()->length() + 1; // Plus constructor.
- HCallNew* call = new(zone()) HCallNew(context, constructor, arg_count);
- call->set_position(expr->position());
- Drop(arg_count);
- return ast_context()->ReturnInstruction(call, expr->id());
+ if (FLAG_inline_construct &&
+ expr->IsMonomorphic() &&
+ IsAllocationInlineable(expr->target())) {
+ // The constructor function is on the stack in the unoptimized code
+ // during evaluation of the arguments.
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ HValue* function = Top();
+ CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ Handle<JSFunction> constructor = expr->target();
+ HValue* check = AddInstruction(
+ new(zone()) HCheckFunction(function, constructor));
+
+ // Force completion of inobject slack tracking before generating
+ // allocation code to finalize instance size.
+ if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
+ constructor->shared()->CompleteInobjectSlackTracking();
+ }
+
+ // Replace the constructor function with a newly allocated receiver.
+ HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
+ // Index of the receiver from the top of the expression stack.
+ const int receiver_index = argument_count - 1;
+ AddInstruction(receiver);
+ ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
+ environment()->SetExpressionStackAt(receiver_index, receiver);
+
+ if (TryInlineConstruct(expr, receiver)) return;
+
+ // TODO(mstarzinger): For now we remove the previous HAllocateObject and
+ // add HPushArgument for the arguments in case inlining failed. What we
+ // actually should do is emit HInvokeFunction on the constructor instead
+ // of using HCallNew as a fallback.
+ receiver->DeleteAndReplaceWith(NULL);
+ check->DeleteAndReplaceWith(NULL);
+ environment()->SetExpressionStackAt(receiver_index, function);
+ HInstruction* call = PreProcessCall(
+ new(zone()) HCallNew(context, function, argument_count));
+ call->set_position(expr->position());
+ return ast_context()->ReturnInstruction(call, expr->id());
+ } else {
+ // The constructor function is both an operand to the instruction and an
+ // argument to the construct call.
+ HValue* constructor = NULL;
+ CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
+ CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+ HInstruction* call =
+ new(zone()) HCallNew(context, constructor, argument_count);
+ Drop(argument_count);
+ call->set_position(expr->position());
+ return ast_context()->ReturnInstruction(call, expr->id());
+ }
}
@@ -6544,15 +6654,6 @@ static bool IsLiteralCompareNil(HValue* left,
}
-static bool IsLiteralCompareBool(HValue* left,
- Token::Value op,
- HValue* right) {
- return op == Token::EQ_STRICT &&
- ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
- (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
-}
-
-
void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
@@ -6600,12 +6701,6 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
}
- if (IsLiteralCompareBool(left, op, right)) {
- HCompareObjectEqAndBranch* result =
- new(zone()) HCompareObjectEqAndBranch(left, right);
- result->set_position(expr->position());
- return ast_context()->ReturnControl(result, expr->id());
- }
if (op == Token::INSTANCEOF) {
// Check to see if the rhs of the instanceof is a global function not
@@ -6733,20 +6828,16 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
}
-void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
- UNREACHABLE();
-}
-
void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
int length = declarations->length();
int global_count = 0;
for (int i = 0; i < declarations->length(); i++) {
- VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
- if (decl == NULL) continue;
- HandleVariableDeclaration(decl->proxy(),
- decl->mode(),
- decl->fun(),
- &global_count);
+ Declaration* decl = declarations->at(i);
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ HandleDeclaration(decl->proxy(),
+ decl->mode(),
+ fun_decl != NULL ? fun_decl->fun() : NULL,
+ &global_count);
}
// Batch declare global functions and variables.
@@ -6754,13 +6845,13 @@ void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
Handle<FixedArray> array =
isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
for (int j = 0, i = 0; i < length; i++) {
- VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
- if (decl == NULL) continue;
+ Declaration* decl = declarations->at(i);
Variable* var = decl->proxy()->var();
if (var->IsUnallocated()) {
array->set(j++, *(var->name()));
- if (decl->fun() == NULL) {
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ if (fun_decl == NULL) {
if (var->binding_needs_init()) {
// In case this binding needs initialization use the hole.
array->set_the_hole(j++);
@@ -6769,7 +6860,7 @@ void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
}
} else {
Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(decl->fun(), info()->script());
+ Compiler::BuildFunctionInfo(fun_decl->fun(), info()->script());
// Check for stack-overflow exception.
if (function.is_null()) {
SetStackOverflow();
@@ -6791,10 +6882,10 @@ void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
}
-void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* function,
+ int* global_count) {
Variable* var = proxy->var();
bool binding_needs_init =
(mode == CONST || mode == CONST_HARMONY || mode == LET);
@@ -6830,8 +6921,28 @@ void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
}
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
- // TODO(rossberg)
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* decl) {
+ UNREACHABLE();
}
@@ -6953,10 +7064,11 @@ void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
ASSERT(call->arguments()->length() == 0);
if (function_state()->outer() != NULL) {
- // We are generating graph for inlined function. Currently
- // constructor inlining is not supported and we can just return
- // false from %_IsConstructCall().
- return ast_context()->ReturnValue(graph()->GetConstantFalse());
+ // We are generating the graph for an inlined function.
+ HValue* value = function_state()->is_construct()
+ ? graph()->GetConstantTrue()
+ : graph()->GetConstantFalse();
+ return ast_context()->ReturnValue(value);
} else {
return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
call->id());
@@ -7010,6 +7122,17 @@ void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
}
+void HGraphBuilder::GenerateDateField(CallRuntime* call) {
+ ASSERT(call->arguments()->length() == 2);
+ ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
+ CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ HValue* date = Pop();
+ HDateField* result = new(zone()) HDateField(date, index);
+ return ast_context()->ReturnInstruction(result, call->id());
+}
+
+
void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
ASSERT(call->arguments()->length() == 2);
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
@@ -7344,14 +7467,14 @@ HEnvironment::HEnvironment(HEnvironment* outer,
: closure_(closure),
values_(0),
assigned_variables_(4),
+ frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
outer_(outer),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber),
- arguments_adaptor_(false) {
+ ast_id_(AstNode::kNoNumber) {
Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
}
@@ -7359,31 +7482,32 @@ HEnvironment::HEnvironment(HEnvironment* outer,
HEnvironment::HEnvironment(const HEnvironment* other)
: values_(0),
assigned_variables_(0),
+ frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
outer_(NULL),
pop_count_(0),
push_count_(0),
- ast_id_(other->ast_id()),
- arguments_adaptor_(false) {
+ ast_id_(other->ast_id()) {
Initialize(other);
}
HEnvironment::HEnvironment(HEnvironment* outer,
Handle<JSFunction> closure,
+ FrameType frame_type,
int arguments)
: closure_(closure),
values_(arguments),
assigned_variables_(0),
+ frame_type_(frame_type),
parameter_count_(arguments),
local_count_(0),
outer_(outer),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber),
- arguments_adaptor_(true) {
+ ast_id_(AstNode::kNoNumber) {
}
@@ -7404,13 +7528,13 @@ void HEnvironment::Initialize(const HEnvironment* other) {
closure_ = other->closure();
values_.AddAll(other->values_);
assigned_variables_.AddAll(other->assigned_variables_);
+ frame_type_ = other->frame_type_;
parameter_count_ = other->parameter_count_;
local_count_ = other->local_count_;
if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
pop_count_ = other->pop_count_;
push_count_ = other->push_count_;
ast_id_ = other->ast_id_;
- arguments_adaptor_ = other->arguments_adaptor_;
}
@@ -7511,13 +7635,28 @@ HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
}
+HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
+ Handle<JSFunction> target,
+ FrameType frame_type,
+ int arguments) const {
+ HEnvironment* new_env = new(closure()->GetIsolate()->zone())
+ HEnvironment(outer, target, frame_type, arguments + 1);
+ for (int i = 0; i <= arguments; ++i) { // Include receiver.
+ new_env->Push(ExpressionStackAt(arguments - i));
+ }
+ new_env->ClearHistory();
+ return new_env;
+}
+
+
HEnvironment* HEnvironment::CopyForInlining(
Handle<JSFunction> target,
int arguments,
FunctionLiteral* function,
HConstant* undefined,
- CallKind call_kind) const {
- ASSERT(!is_arguments_adaptor());
+ CallKind call_kind,
+ bool is_construct) const {
+ ASSERT(frame_type() == JS_FUNCTION);
Zone* zone = closure()->GetIsolate()->zone();
@@ -7528,13 +7667,16 @@ HEnvironment* HEnvironment::CopyForInlining(
outer->Drop(arguments + 1); // Including receiver.
outer->ClearHistory();
+ if (is_construct) {
+ // Create artificial constructor stub environment. The receiver should
+ // actually be the constructor function, but we pass the newly allocated
+ // object instead; DoComputeConstructStubFrame() relies on that.
+ outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
+ }
+
if (arity != arguments) {
// Create artificial arguments adaptation environment.
- outer = new(zone) HEnvironment(outer, target, arguments + 1);
- for (int i = 0; i <= arguments; ++i) { // Include receiver.
- outer->Push(ExpressionStackAt(arguments - i));
- }
- outer->ClearHistory();
+ outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
}
HEnvironment* inner =
@@ -7549,7 +7691,7 @@ HEnvironment* HEnvironment::CopyForInlining(
// builtin function, pass undefined as the receiver for function
// calls (instead of the global receiver).
if ((target->shared()->native() || !function->is_classic_mode()) &&
- call_kind == CALL_AS_FUNCTION) {
+ call_kind == CALL_AS_FUNCTION && !is_construct) {
inner->SetValueAt(0, undefined);
}
inner->SetValueAt(arity + 1, LookupContext());
@@ -7892,7 +8034,10 @@ void HPhase::End() const {
HStatistics::Instance()->SaveTiming(name_, end - start_, size);
}
- if (FLAG_trace_hydrogen) {
+ // Produce trace output only if the flag is set and the first letter of the
+ // phase name is listed in the command line parameter FLAG_trace_phase.
+ if (FLAG_trace_hydrogen &&
+ OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) {
if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
if (allocator_ != NULL) {
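
The renamed phases (the new "H_" prefixes) and the changed HPhase::End() above work together: trace output is produced only when the first letter of the phase name occurs in the FLAG_trace_phase string. A standalone sketch of that first-letter filter, with the flag value hard-coded for illustration, is:

#include <cstdio>
#include <cstring>

static bool ShouldTracePhase(const char* trace_phase, const char* name) {
  // Matches on the first character only, which is why every hydrogen phase
  // name in this patch gains an "H_" prefix ('H' selects hydrogen phases).
  return std::strchr(trace_phase, name[0]) != NULL;
}

int main() {
  const char* trace_phase = "H";  // hypothetical --trace-phase value
  std::printf("%d\n", ShouldTracePhase(trace_phase, "H_Range analysis"));  // 1
  std::printf("%d\n", ShouldTracePhase(trace_phase, "L_Allocate"));        // 0
  return 0;
}
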
diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h
index 65aa346cbe4..b0d67ebb66c 100644
--- a/deps/v8/src/hydrogen.h
+++ b/deps/v8/src/hydrogen.h
@@ -361,19 +361,19 @@ class HGraph: public ZoneObject {
Zone* HBasicBlock::zone() { return graph_->zone(); }
+// Type of stack frame an environment might refer to.
+enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };
+
+
class HEnvironment: public ZoneObject {
public:
HEnvironment(HEnvironment* outer,
Scope* scope,
Handle<JSFunction> closure);
- bool is_arguments_adaptor() const {
- return arguments_adaptor_;
- }
-
HEnvironment* DiscardInlined(bool drop_extra) {
- HEnvironment* outer = outer_->is_arguments_adaptor() ?
- outer_->outer_ : outer_;
+ HEnvironment* outer = outer_;
+ while (outer->frame_type() != JS_FUNCTION) outer = outer->outer_;
if (drop_extra) outer->Drop(1);
return outer;
}
@@ -384,6 +384,7 @@ class HEnvironment: public ZoneObject {
const ZoneList<int>* assigned_variables() const {
return &assigned_variables_;
}
+ FrameType frame_type() const { return frame_type_; }
int parameter_count() const { return parameter_count_; }
int specials_count() const { return specials_count_; }
int local_count() const { return local_count_; }
@@ -469,7 +470,8 @@ class HEnvironment: public ZoneObject {
int arguments,
FunctionLiteral* function,
HConstant* undefined,
- CallKind call_kind) const;
+ CallKind call_kind,
+ bool is_construct) const;
void AddIncomingEdge(HBasicBlock* block, HEnvironment* other);
@@ -490,9 +492,17 @@ class HEnvironment: public ZoneObject {
private:
explicit HEnvironment(const HEnvironment* other);
- // Create an argument adaptor environment.
- HEnvironment(HEnvironment* outer, Handle<JSFunction> closure, int arguments);
+ HEnvironment(HEnvironment* outer,
+ Handle<JSFunction> closure,
+ FrameType frame_type,
+ int arguments);
+ // Create an artificial stub environment (e.g. for argument adaptor or
+ // constructor stub).
+ HEnvironment* CreateStubEnvironment(HEnvironment* outer,
+ Handle<JSFunction> target,
+ FrameType frame_type,
+ int arguments) const;
// True if index is included in the expression stack part of the environment.
bool HasExpressionAt(int index) const;
@@ -515,6 +525,7 @@ class HEnvironment: public ZoneObject {
// Value array [parameters] [specials] [locals] [temporaries].
ZoneList<HValue*> values_;
ZoneList<int> assigned_variables_;
+ FrameType frame_type_;
int parameter_count_;
int specials_count_;
int local_count_;
@@ -522,7 +533,6 @@ class HEnvironment: public ZoneObject {
int pop_count_;
int push_count_;
int ast_id_;
- bool arguments_adaptor_;
};
@@ -650,18 +660,26 @@ class TestContext: public AstContext {
};
+enum ReturnHandlingFlag {
+ NORMAL_RETURN,
+ DROP_EXTRA_ON_RETURN,
+ CONSTRUCT_CALL_RETURN
+};
+
+
class FunctionState {
public:
FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- bool drop_extra);
+ ReturnHandlingFlag return_handling);
~FunctionState();
CompilationInfo* compilation_info() { return compilation_info_; }
TypeFeedbackOracle* oracle() { return oracle_; }
AstContext* call_context() { return call_context_; }
- bool drop_extra() { return drop_extra_; }
+ bool drop_extra() { return return_handling_ == DROP_EXTRA_ON_RETURN; }
+ bool is_construct() { return return_handling_ == CONSTRUCT_CALL_RETURN; }
HBasicBlock* function_return() { return function_return_; }
TestContext* test_context() { return test_context_; }
void ClearInlinedTestContext() {
@@ -681,11 +699,13 @@ class FunctionState {
// inlined. NULL when not inlining.
AstContext* call_context_;
- // Indicate if we have to drop an extra value from the environment on
- // return from inlined functions.
- bool drop_extra_;
+ // Indicate whether we have to perform special handling on return from
+ // inlined functions.
+ // - DROP_EXTRA_ON_RETURN: Drop an extra value from the environment.
+ // - CONSTRUCT_CALL_RETURN: Either use allocated receiver or return value.
+ ReturnHandlingFlag return_handling_;
- // When inlining in an effect of value context, this is the return block.
+ // When inlining in an effect or value context, this is the return block.
// It is NULL otherwise. When inlining in a test context, there are a
// pair of return blocks in the context. When not inlining, there is no
// local return point.
@@ -825,7 +845,6 @@ class HGraphBuilder: public AstVisitor {
CompilationInfo* info() const {
return function_state()->compilation_info();
}
-
AstContext* call_context() const {
return function_state()->call_context();
}
@@ -851,10 +870,10 @@ class HGraphBuilder: public AstVisitor {
INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
#undef INLINE_FUNCTION_GENERATOR_DECLARATION
- void HandleVariableDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function,
- int* global_count);
+ void HandleDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* function,
+ int* global_count);
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
@@ -922,7 +941,7 @@ class HGraphBuilder: public AstVisitor {
// Remove the arguments from the bailout environment and emit instructions
// to push them as outgoing parameters.
- template <int V> HInstruction* PreProcessCall(HCall<V>* call);
+ template <class Instruction> HInstruction* PreProcessCall(Instruction* call);
void TraceRepresentation(Token::Value op,
TypeInfo info,
@@ -954,11 +973,20 @@ class HGraphBuilder: public AstVisitor {
// Try to optimize fun.apply(receiver, arguments) pattern.
bool TryCallApply(Call* expr);
- bool TryInline(Call* expr, bool drop_extra = false);
+ bool TryInline(CallKind call_kind,
+ Handle<JSFunction> target,
+ ZoneList<Expression*>* arguments,
+ HValue* receiver,
+ int ast_id,
+ int return_id,
+ ReturnHandlingFlag return_handling);
+
+ bool TryInlineCall(Call* expr, bool drop_extra = false);
+ bool TryInlineConstruct(CallNew* expr, HValue* receiver);
bool TryInlineBuiltinMethodCall(Call* expr,
- HValue* receiver,
- Handle<Map> receiver_map,
- CheckType check_type);
+ HValue* receiver,
+ Handle<Map> receiver_map,
+ CheckType check_type);
bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);
// If --trace-inlining, print a line of the inlining trace. Inlining
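
The hydrogen.h changes above replace the boolean drop_extra_ with the three-valued ReturnHandlingFlag so that construct-call returns can be told apart from normal and drop-extra returns. A compilable, stripped-down sketch of the enum and its two predicates, with none of the surrounding builder state, is:

#include <cassert>

enum ReturnHandlingFlag {
  NORMAL_RETURN,
  DROP_EXTRA_ON_RETURN,
  CONSTRUCT_CALL_RETURN
};

class FunctionState {
 public:
  explicit FunctionState(ReturnHandlingFlag return_handling)
      : return_handling_(return_handling) {}
  // One field now answers both questions the old bool could not distinguish.
  bool drop_extra() const { return return_handling_ == DROP_EXTRA_ON_RETURN; }
  bool is_construct() const { return return_handling_ == CONSTRUCT_CALL_RETURN; }
 private:
  ReturnHandlingFlag return_handling_;
};

int main() {
  FunctionState construct_state(CONSTRUCT_CALL_RETURN);
  assert(construct_state.is_construct() && !construct_state.drop_extra());
  FunctionState call_state(DROP_EXTRA_ON_RETURN);
  assert(call_state.drop_extra() && !call_state.is_construct());
  return 0;
}
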
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index fdf21e5429c..a5d42cfbe4c 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -324,6 +324,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -1639,7 +1644,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ call(edx);
+ // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Leave frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ ret(0);
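
Both hunks above record masm->pc_offset() immediately after a call so the deoptimizer can later locate the return address inside the construct stub or arguments adaptor frame. A toy sketch of that bookkeeping, using an invented ToyAssembler rather than V8's MacroAssembler, is:

#include <cstdio>
#include <vector>

// Toy "assembler" that only tracks how many bytes have been emitted.
struct ToyAssembler {
  std::vector<unsigned char> code;
  int pc_offset() const { return static_cast<int>(code.size()); }
  void EmitCall() {                      // call rel32: opcode + 4-byte offset
    code.push_back(0xE8);
    for (int i = 0; i < 4; ++i) code.push_back(0x00);
  }
  void EmitRet() { code.push_back(0xC3); }
};

int main() {
  ToyAssembler masm;
  masm.EmitCall();
  // Recorded right after the call, i.e. the offset of the instruction the
  // callee returns to -- the value the deoptimizer needs to find the frame.
  int construct_stub_deopt_pc_offset = masm.pc_offset();
  masm.EmitRet();
  std::printf("deopt pc offset = %d\n", construct_stub_deopt_pc_offset);
  return 0;
}
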
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index 2d078cdc06a..d3e2a919cad 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -2510,7 +2510,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ fld_d(Operand(esp, 0));
__ add(esp, Immediate(kDoubleSize));
}
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ mov(Operand(ecx, 0), ebx);
__ mov(Operand(ecx, kIntSize), edx);
__ mov(Operand(ecx, 2 * kIntSize), eax);
@@ -2526,7 +2526,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ sub(esp, Immediate(kDoubleSize));
__ movdbl(Operand(esp, 0), xmm1);
__ fld_d(Operand(esp, 0));
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ fstp_d(Operand(esp, 0));
__ movdbl(xmm1, Operand(esp, 0));
__ add(esp, Immediate(kDoubleSize));
@@ -2578,14 +2578,15 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
}
-void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
+void TranscendentalCacheStub::GenerateOperation(
+ MacroAssembler* masm, TranscendentalCache::Type type) {
// Only free register is edi.
// Input value is on FP stack, and also in ebx/edx.
// Input value is possibly in xmm1.
// Address of result (a newly allocated HeapNumber) may be in eax.
- if (type_ == TranscendentalCache::SIN ||
- type_ == TranscendentalCache::COS ||
- type_ == TranscendentalCache::TAN) {
+ if (type == TranscendentalCache::SIN ||
+ type == TranscendentalCache::COS ||
+ type == TranscendentalCache::TAN) {
// Both fsin and fcos require arguments in the range +/-2^63 and
// return NaN for infinities and NaN. They can share all code except
// the actual fsin/fcos operation.
@@ -2649,7 +2650,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// FPU Stack: input % 2*pi
__ bind(&in_range);
- switch (type_) {
+ switch (type) {
case TranscendentalCache::SIN:
__ fsin();
break;
@@ -2667,7 +2668,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
}
__ bind(&done);
} else {
- ASSERT(type_ == TranscendentalCache::LOG);
+ ASSERT(type == TranscendentalCache::LOG);
__ fldln2();
__ fxch();
__ fyl2x();
@@ -6154,7 +6155,6 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// ebx: instance type
// Calculate length of sub string using the smi values.
- Label result_longer_than_two;
__ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
__ JumpIfNotSmi(ecx, &runtime);
__ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
@@ -6167,43 +6167,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
__ bind(&not_original_string);
- // Special handling of sub-strings of length 1 and 2. One character strings
- // are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache.
- __ cmp(ecx, Immediate(Smi::FromInt(2)));
- __ j(greater, &result_longer_than_two);
- __ j(less, &runtime);
-
- // Sub string of length 2 requested.
- // eax: string
- // ebx: instance type
- // ecx: sub string length (smi, value is 2)
- // edx: from index (smi)
- __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
-
- // Get the two characters forming the sub string.
- __ SmiUntag(edx); // From index is no longer smi.
- __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
- __ movzx_b(ecx,
- FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
-
- // Try to lookup two character string in symbol table.
- Label combine_two_char, save_two_char;
- StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, ebx, ecx, eax, edx, edi, &combine_two_char, &save_two_char);
- __ IncrementCounter(counters->sub_string_native(), 1);
- __ ret(3 * kPointerSize);
-
- __ bind(&combine_two_char);
- __ shl(ecx, kBitsPerByte);
- __ or_(ebx, ecx);
- __ bind(&save_two_char);
- __ AllocateAsciiString(eax, 2, ecx, edx, &runtime);
- __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
- __ IncrementCounter(counters->sub_string_native(), 1);
- __ ret(3 * kPointerSize);
- __ bind(&result_longer_than_two);
// eax: string
// ebx: instance type
// ecx: sub string length (smi)
@@ -6270,11 +6234,11 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
__ bind(&set_slice_header);
- __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
__ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
- __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
__ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+ __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
+ __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
@@ -6498,7 +6462,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
__ mov_b(scratch, Operand(left, index, times_1, 0));
__ cmpb(scratch, Operand(right, index, times_1, 0));
__ j(not_equal, chars_not_equal, chars_not_equal_near);
- __ add(index, Immediate(1));
+ __ inc(index);
__ j(not_zero, &loop);
}
@@ -6572,16 +6536,16 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ mov(ecx, edx);
__ and_(ecx, eax);
__ JumpIfSmi(ecx, &generic_stub, Label::kNear);
__ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined1, Label::kNear);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or SS2 or CMOV is unsupported.
@@ -6607,14 +6571,28 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ mov(ecx, Immediate(Smi::FromInt(-1)));
__ cmov(below, eax, ecx);
__ ret(0);
-
- __ bind(&unordered);
}
+ __ bind(&unordered);
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
__ bind(&generic_stub);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ j(not_equal, &miss);
+ __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ j(equal, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -6667,9 +6645,10 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
- ASSERT(GetCondition() == equal);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = edx;
Register right = eax;
@@ -6708,25 +6687,33 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ bind(&not_same);
// Check that both strings are symbols. If they are, we're done
- // because we already know they are not identical.
- Label do_compare;
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp1, tmp2);
- __ test(tmp1, Immediate(kIsSymbolMask));
- __ j(zero, &do_compare, Label::kNear);
- // Make sure eax is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(eax));
- __ ret(0);
+ // because we already know they are not identical. But in the case of
+ // non-equality compare, we still need to determine the order.
+ if (equality) {
+ Label do_compare;
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp1, tmp2);
+ __ test(tmp1, Immediate(kIsSymbolMask));
+ __ j(zero, &do_compare, Label::kNear);
+ // Make sure eax is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(eax));
+ __ ret(0);
+ __ bind(&do_compare);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ bind(&do_compare);
__ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
@@ -6734,7 +6721,11 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ push(left);
__ push(right);
__ push(tmp1);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
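
ICCompareStub::GenerateStrings above now serves ordered relational operators as well as equality, choosing the runtime fallback by the token kind. A standalone sketch of that dispatch, with a local Token enum standing in for V8's, is:

#include <cstdio>

enum Token { EQ, EQ_STRICT, LT, LTE, GT, GTE };

static bool IsEqualityOp(Token op) { return op == EQ || op == EQ_STRICT; }

// Equality compares can stop at string equality; ordered compares need the
// full lexicographic comparison.
static const char* RuntimeFallback(Token op) {
  return IsEqualityOp(op) ? "Runtime::kStringEquals" : "Runtime::kStringCompare";
}

int main() {
  std::printf("%s\n", RuntimeFallback(EQ_STRICT));  // Runtime::kStringEquals
  std::printf("%s\n", RuntimeFallback(LT));         // Runtime::kStringCompare
  return 0;
}
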
@@ -6823,7 +6814,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// Compute the masked index: (hash + i + i * i) & mask.
Register index = r0;
@@ -6849,11 +6840,17 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ cmp(entity_name, Handle<String>(name));
__ j(equal, miss);
+ Label the_hole;
+ // Check for the hole and skip.
+ __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
+ __ j(equal, &the_hole, Label::kNear);
+
// Check if the entry name is not a symbol.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
kIsSymbolMask);
__ j(zero, miss);
+ __ bind(&the_hole);
}
StringDictionaryLookupStub stub(properties,
diff --git a/deps/v8/src/ia32/code-stubs-ia32.h b/deps/v8/src/ia32/code-stubs-ia32.h
index 4d23c3a174c..803a711de96 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.h
+++ b/deps/v8/src/ia32/code-stubs-ia32.h
@@ -49,6 +49,8 @@ class TranscendentalCacheStub: public CodeStub {
ArgumentType argument_type)
: type_(type), argument_type_(argument_type) {}
void Generate(MacroAssembler* masm);
+ static void GenerateOperation(MacroAssembler* masm,
+ TranscendentalCache::Type type);
private:
TranscendentalCache::Type type_;
ArgumentType argument_type_;
@@ -56,7 +58,6 @@ class TranscendentalCacheStub: public CodeStub {
Major MajorKey() { return TranscendentalCache; }
int MinorKey() { return type_ | argument_type_; }
Runtime::FunctionId RuntimeFunction();
- void GenerateOperation(MacroAssembler* masm);
};
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index 3e085a245dc..de6901f9cdb 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -30,6 +30,7 @@
#if defined(V8_TARGET_ARCH_IA32)
#include "codegen.h"
+#include "heap.h"
#include "macro-assembler.h"
namespace v8 {
@@ -55,6 +56,53 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
#define __ masm.
+
+TranscendentalFunction CreateTranscendentalFunction(
+ TranscendentalCache::Type type) {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ if (buffer == NULL) {
+ // Fallback to library function if function cannot be created.
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ }
+
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // esp[1 * kPointerSize]: raw double input
+ // esp[0 * kPointerSize]: return address
+ // Move double input into registers.
+
+ __ push(ebx);
+ __ push(edx);
+ __ push(edi);
+ __ fld_d(Operand(esp, 4 * kPointerSize));
+ __ mov(ebx, Operand(esp, 4 * kPointerSize));
+ __ mov(edx, Operand(esp, 5 * kPointerSize));
+ TranscendentalCacheStub::GenerateOperation(&masm, type);
+ // The return value is expected to be on ST(0) of the FPU stack.
+ __ pop(edi);
+ __ pop(edx);
+ __ pop(ebx);
+ __ Ret();
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<TranscendentalFunction>(buffer);
+}
+
+
static void MemCopyWrapper(void* dest, const void* src, size_t size) {
memcpy(dest, src, size);
}
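
For readers skimming the new CreateTranscendentalFunction above: it assembles a specialized sin/cos/tan/log routine into an executable buffer and falls back to the C library when that buffer cannot be allocated. Below is a minimal standalone C++ sketch of that generate-or-fall-back shape; the helper and enum names are illustrative, not V8's.

#include <math.h>
#include <stddef.h>

typedef double (*TranscendentalFunction)(double);

enum TranscendentalType { SIN, COS, TAN, LOG };

// Hypothetical stand-in for the JIT step; returning NULL forces the fallback,
// just as a failed OS::Allocate does in the diff.
static TranscendentalFunction TryGenerateSpecialized(TranscendentalType type) {
  (void)type;
  return NULL;
}

TranscendentalFunction CreateTranscendentalSketch(TranscendentalType type) {
  TranscendentalFunction generated = TryGenerateSpecialized(type);
  if (generated != NULL) return generated;
  // Fall back to the C library, mirroring the switch in the diff.
  switch (type) {
    case SIN: return &sin;
    case COS: return &cos;
    case TAN: return &tan;
    default:  return &log;
  }
}

int main(void) {
  return CreateTranscendentalSketch(SIN)(0.0) == 0.0 ? 0 : 1;
}
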
diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc
index 0c552d7d01b..11de1c4a788 100644
--- a/deps/v8/src/ia32/deoptimizer-ia32.cc
+++ b/deps/v8/src/ia32/deoptimizer-ia32.cc
@@ -206,7 +206,7 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
static const byte kJnsInstruction = 0x79;
-static const byte kJnsOffset = 0x11;
+static const byte kJnsOffset = 0x13;
static const byte kJaeInstruction = 0x73;
static const byte kJaeOffset = 0x07;
static const byte kCallInstruction = 0xe8;
@@ -219,8 +219,8 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(check_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(check_code->entry(),
+ Assembler::target_address_at(call_target_address));
// The stack check code matches the pattern:
//
// cmp esp, <limit>
@@ -239,13 +239,13 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// ok:
if (FLAG_count_based_interrupts) {
- ASSERT(*(call_target_address - 3) == kJnsInstruction);
- ASSERT(*(call_target_address - 2) == kJnsOffset);
+ ASSERT_EQ(*(call_target_address - 3), kJnsInstruction);
+ ASSERT_EQ(*(call_target_address - 2), kJnsOffset);
} else {
- ASSERT(*(call_target_address - 3) == kJaeInstruction);
- ASSERT(*(call_target_address - 2) == kJaeOffset);
+ ASSERT_EQ(*(call_target_address - 3), kJaeInstruction);
+ ASSERT_EQ(*(call_target_address - 2), kJaeOffset);
}
- ASSERT(*(call_target_address - 1) == kCallInstruction);
+ ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
*(call_target_address - 3) = kNopByteOne;
*(call_target_address - 2) = kNopByteTwo;
Assembler::set_target_address_at(call_target_address,
@@ -261,14 +261,14 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(replacement_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(replacement_code->entry(),
+ Assembler::target_address_at(call_target_address));
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
// restore the conditional branch.
- ASSERT(*(call_target_address - 3) == kNopByteOne &&
- *(call_target_address - 2) == kNopByteTwo &&
- *(call_target_address - 1) == kCallInstruction);
+ ASSERT_EQ(*(call_target_address - 3), kNopByteOne);
+ ASSERT_EQ(*(call_target_address - 2), kNopByteTwo);
+ ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
if (FLAG_count_based_interrupts) {
*(call_target_address - 3) = kJnsInstruction;
*(call_target_address - 2) = kJnsOffset;
@@ -467,7 +467,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
- unsigned input_frame_size = input_->GetFrameSize();
unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Allocate and store the output frame description.
@@ -489,16 +488,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Compute the incoming parameter translation.
int parameter_count = height;
unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
}
- input_offset -= (parameter_count * kPointerSize);
// Read caller's PC from the previous frame.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t callers_pc = output_[frame_index - 1]->GetPc();
output_frame->SetFrameSlot(output_offset, callers_pc);
if (FLAG_trace_deopt) {
@@ -508,7 +504,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Read caller's FP from the previous frame, and set this frame's FP.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t value = output_[frame_index - 1]->GetFp();
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
@@ -520,7 +515,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// A marker value is used in place of the context.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t context = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
output_frame->SetFrameSlot(output_offset, context);
@@ -531,7 +525,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(function);
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -541,7 +534,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Number of incoming arguments.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -561,6 +553,110 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 6 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // A construct stub frame cannot be the topmost or the bottommost frame.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context is taken from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
int node_id = iterator->Next();
@@ -672,6 +768,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
value = reinterpret_cast<uint32_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
+ output_frame->SetContext(value);
if (is_topmost) output_frame->SetRegister(esi.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
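
The new DoComputeConstructStubFrame above reserves fixed_frame_size = 6 * kPointerSize and then fills exactly six fixed slots beneath the translated constructor arguments. The sketch below is an editorial aid, not V8 code; it only names the slots in the order the function emits them (output_offset shrinks one pointer per slot).

#include <stdio.h>

enum ConstructStubSlot {
  kCallersPc,          // return address read from the previous output frame
  kCallersFp,          // saved frame pointer; also becomes this frame's FP
  kContext,            // context taken from the previous frame
  kConstructMarker,    // Smi(StackFrame::CONSTRUCT) in place of a function
  kArgumentCount,      // Smi(height - 1)
  kAllocatedReceiver,  // the object allocated for 'new', copied from the top
  kFixedSlotCount      // == 6, matching fixed_frame_size = 6 * kPointerSize
};

int main() {
  printf("fixed construct stub slots: %d\n", (int)kFixedSlotCount);
  return 0;
}
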
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index ee1462567c6..b42ce95f8db 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -127,28 +127,6 @@ void FullCodeGenerator::Generate() {
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
- // We can optionally optimize based on counters rather than statistical
- // sampling.
- if (info->ShouldSelfOptimize()) {
- if (FLAG_trace_opt_verbose) {
- PrintF("[adding self-optimization header to %s]\n",
- *info->function()->debug_name()->ToCString());
- }
- has_self_optimization_header_ = true;
- MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
- Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
- JSGlobalPropertyCell* cell;
- if (maybe_cell->To(&cell)) {
- __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
- Immediate(Smi::FromInt(1)));
- Handle<Code> compile_stub(
- isolate()->builtins()->builtin(Builtins::kLazyRecompile));
- STATIC_ASSERT(kSmiTag == 0);
- __ j(zero, compile_stub);
- ASSERT(masm_->pc_offset() == self_optimization_header_size());
- }
- }
-
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -330,6 +308,25 @@ void FullCodeGenerator::ClearAccumulator() {
}
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ mov(ebx, Immediate(profiling_counter_));
+ __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(delta)));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing: if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ __ mov(ebx, Immediate(profiling_counter_));
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(reset_value)));
+}
+
+
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
@@ -342,15 +339,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(127, Max(1, distance / 100));
}
- if (Serializer::enabled()) {
- __ mov(ebx, Immediate(profiling_counter_));
- __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(Smi::FromInt(weight)));
- } else {
- // This version is slightly faster, but not snapshot safe.
- __ sub(Operand::Cell(profiling_counter_),
- Immediate(Smi::FromInt(weight)));
- }
+ EmitProfilingCounterDecrement(weight);
__ j(positive, &ok, Label::kNear);
InterruptStub stub;
__ CallStub(&stub);
@@ -379,15 +368,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
__ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
if (FLAG_count_based_interrupts) {
- // Reset the countdown.
- if (Serializer::enabled()) {
- __ mov(ebx, Immediate(profiling_counter_));
- __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(Smi::FromInt(FLAG_interrupt_budget)));
- } else {
- __ mov(Operand::Cell(profiling_counter_),
- Immediate(Smi::FromInt(FLAG_interrupt_budget)));
- }
+ EmitProfilingCounterReset();
}
__ bind(&ok);
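
The EmitProfilingCounterDecrement/EmitProfilingCounterReset helpers factored out above implement a simple budgeted counter: each back edge subtracts a distance-based weight, and once the counter drops to zero the interrupt stub runs and the counter is reset. A rough standalone C++ model of that scheme follows; the struct and the printf stand-in for InterruptStub are assumptions.

#include <algorithm>
#include <stdio.h>

struct ProfilingCounter {
  int value;
  int budget;

  void OnBackEdge(int distance) {
    // Same weighting as the diff: Min(127, Max(1, distance / 100)).
    int weight = std::min(127, std::max(1, distance / 100));
    value -= weight;                     // EmitProfilingCounterDecrement
    if (value <= 0) {                    // __ j(positive, &ok) falls through
      printf("interrupt: stack check / optimization opportunity\n");
      value = budget;                    // EmitProfilingCounterReset
    }
  }
};

int main() {
  ProfilingCounter counter = {300, 300};
  for (int i = 0; i < 50; ++i) counter.OnBackEdge(1000);
  return 0;
}
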
@@ -410,37 +391,28 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(eax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
- if (FLAG_interrupt_at_exit) {
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
- if (FLAG_weighted_back_edges) {
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
int distance = masm_->pc_offset();
weight = Min(127, Max(1, distance / 100));
}
- if (Serializer::enabled()) {
- __ mov(ebx, Immediate(profiling_counter_));
- __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(Smi::FromInt(weight)));
- } else {
- // This version is slightly faster, but not snapshot safe.
- __ sub(Operand::Cell(profiling_counter_),
- Immediate(Smi::FromInt(weight)));
- }
+ EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
__ push(eax);
- InterruptStub stub;
- __ CallStub(&stub);
- __ pop(eax);
- // Reset the countdown.
- if (Serializer::enabled()) {
- __ mov(ebx, Immediate(profiling_counter_));
- __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
} else {
- __ mov(Operand::Cell(profiling_counter_),
- Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+ InterruptStub stub;
+ __ CallStub(&stub);
}
+ __ pop(eax);
+ EmitProfilingCounterReset();
__ bind(&ok);
}
#ifdef DEBUG
@@ -1033,6 +1005,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register eax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(ebx, cell);
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+
__ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
__ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1509,10 +1491,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
case ObjectLiteral::Property::GETTER:
__ push(Operand(esp, 0)); // Duplicate receiver.
VisitForStackValue(key);
- __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- VisitForStackValue(value);
+ if (property->kind() == ObjectLiteral::Property::GETTER) {
+ VisitForStackValue(value);
+ __ push(Immediate(isolate()->factory()->null_value()));
+ } else {
+ __ push(Immediate(isolate()->factory()->null_value()));
+ VisitForStackValue(value);
+ }
__ push(Immediate(Smi::FromInt(NONE)));
__ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
break;
@@ -2400,6 +2385,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
CallConstructStub stub(flags);
__ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(eax);
}
@@ -2947,6 +2933,48 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = eax;
+ Register result = eax;
+ Register scratch = ecx;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand::StaticVariable(stamp));
+ __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(Operand(esp, 0), object);
+ __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+ context()->Plug(result);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
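
The new EmitDateField fast path relies on a per-isolate date cache stamp: when the JSDate's stamp matches the isolate's, a cached field can be read straight from the object; otherwise the C function is called to recompute it. A simplified, self-contained C++ model of that check is sketched below; the field layout, names, and the placeholder computation are illustrative only.

#include <stdio.h>

struct DateCache { int stamp; };

struct JSDate {
  double value;          // ms since epoch (field index 0, never cached)
  int cache_stamp;       // copy of DateCache::stamp when fields were computed
  int cached_fields[4];  // e.g. year, month, day, weekday
};

static int GetDateFieldSlow(JSDate* date, int index, DateCache* cache) {
  // Stand-in for the get_date_field C function: recompute, refresh, restamp.
  date->cached_fields[index - 1] = 2012;
  date->cache_stamp = cache->stamp;
  return date->cached_fields[index - 1];
}

int GetDateField(JSDate* date, int index, DateCache* cache) {
  if (index == 0) return static_cast<int>(date->value);
  if (date->cache_stamp == cache->stamp) {
    return date->cached_fields[index - 1];  // fast path: cache still valid
  }
  return GetDateFieldSlow(date, index, cache);  // runtime call in the diff
}

int main() {
  DateCache cache = {1};
  JSDate date = {0.0, 0, {0, 0, 0, 0}};
  printf("year: %d\n", GetDateField(&date, 1, &cache));
  return 0;
}
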
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index fec330849a0..a656175af8f 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -67,7 +67,7 @@ class SafepointGenerator : public CallWrapper {
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope(SSE2);
@@ -394,10 +394,18 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- if (environment->is_arguments_adaptor()) {
- translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
- } else {
- translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
}
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
@@ -550,7 +558,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(
int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
- if (!e->is_arguments_adaptor()) {
+ if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
@@ -1265,6 +1273,7 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
Register result = ToRegister(instr->result());
Register map = ToRegister(instr->TempAt(0));
ASSERT(input.is(result));
+
Label done;
// If the object is a smi return the object.
__ JumpIfSmi(input, &done, Label::kNear);
@@ -1278,6 +1287,43 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(eax));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand::StaticVariable(stamp));
+ __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(Operand(esp, 0), object);
+ __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->Equals(instr->result()));
@@ -3042,16 +3088,64 @@ void LCodeGen::DoPower(LPower* instr) {
void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
+
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
ASSERT(ToRegister(instr->InputAt(0)).is(eax));
+ // Assert that the register size is indeed the size of each seed.
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
- __ PrepareCallCFunction(1, ebx);
__ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
- __ mov(Operand(esp, 0), eax);
- __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ mov(ebx, FieldOperand(eax, kRandomSeedOffset));
+ // ebx: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ mov(ecx, FieldOperand(ebx, ByteArray::kHeaderSize));
+ // If state[0] == 0, call runtime to initialize seeds.
+ __ test(ecx, ecx);
+ __ j(zero, deferred->entry());
+ // Load state[1].
+ __ mov(eax, FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize));
+ // ecx: state[0]
+ // eax: state[1]
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ movzx_w(edx, ecx);
+ __ imul(edx, edx, 18273);
+ __ shr(ecx, 16);
+ __ add(ecx, edx);
+ // Save state[0].
+ __ mov(FieldOperand(ebx, ByteArray::kHeaderSize), ecx);
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ movzx_w(edx, eax);
+ __ imul(edx, edx, 36969);
+ __ shr(eax, 16);
+ __ add(eax, edx);
+ // Save state[1].
+ __ mov(FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize), eax);
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ shl(ecx, 14);
+ __ and_(eax, Immediate(0x3FFFF));
+ __ add(eax, ecx);
+ __ bind(deferred->exit());
// Convert 32 random bits in eax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
@@ -3064,6 +3158,14 @@ void LCodeGen::DoRandom(LRandom* instr) {
}
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, ebx);
+ __ mov(Operand(esp, 0), eax);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in eax.
+}
+
+
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(instr->value()->Equals(instr->result()));
XMMRegister input_reg = ToDoubleRegister(instr->value());
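
The inlined fast path in DoRandom above is a multiply-with-carry generator; the constants 18273 and 36969 and the combining step come straight from the diff. A standalone C++ model is below; the seeds are arbitrary examples (state[0] == 0 would take the deferred path in V8 to initialize the seeds).

#include <stdint.h>
#include <stdio.h>

static uint32_t NextRandom(uint32_t* state0, uint32_t* state1) {
  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
  *state0 = 18273 * (*state0 & 0xFFFF) + (*state0 >> 16);
  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
  *state1 = 36969 * (*state1 & 0xFFFF) + (*state1 >> 16);
  // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
  return (*state0 << 14) + (*state1 & 0x3FFFF);
}

int main() {
  uint32_t s0 = 123456789u, s1 = 362436069u;
  for (int i = 0; i < 3; ++i) printf("%u\n", (unsigned)NextRandom(&s0, &s1));
  return 0;
}
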
@@ -4194,6 +4296,94 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
}
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ no_reg,
+ scratch,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(scratch, constructor);
+ __ mov(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
+
+ if (FLAG_debug_code) {
+ __ AbortIfSmi(map);
+ __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
+ instance_size >> kPointerSizeLog2);
+ __ Assert(equal, "Unexpected instance size");
+ __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
+ initial_map->pre_allocated_property_fields());
+ __ Assert(equal, "Unexpected pre-allocated property fields count");
+ __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
+ initial_map->unused_property_fields());
+ __ Assert(equal, "Unexpected unused property fields count");
+ __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
+ initial_map->inobject_properties());
+ __ Assert(equal, "Unexpected in-object property fields count");
+ }
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ mov(FieldOperand(result, JSObject::kMapOffset), map);
+ __ mov(scratch, factory()->empty_fixed_array());
+ __ mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
+ __ mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
+ if (initial_map->inobject_properties() != 0) {
+ __ mov(scratch, factory()->undefined_value());
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ mov(FieldOperand(result, property_offset), scratch);
+ }
+ }
+
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ Set(result, Immediate(0));
+
+ PushSafepointRegistersScope scope(this);
+ __ PushHeapObject(constructor);
+ CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr, instr->context());
+ __ StoreToSafepointRegisterSlot(result, eax);
+}
+
+
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
Heap* heap = isolate()->heap();
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h
index d86d48cd8cd..481a2ae7a5a 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.h
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.h
@@ -105,8 +105,10 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc
index 32704b04e56..223fde2be58 100644
--- a/deps/v8/src/ia32/lithium-ia32.cc
+++ b/deps/v8/src/ia32/lithium-ia32.cc
@@ -388,7 +388,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -551,7 +551,7 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
chunk_ = new(zone()) LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -1005,11 +1005,12 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result =
new(zone()) LEnvironment(hydrogen_env->closure(),
- hydrogen_env->is_arguments_adaptor(),
+ hydrogen_env->frame_type(),
ast_id,
hydrogen_env->parameter_count(),
argument_count_,
@@ -1031,7 +1032,7 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
result->AddValue(op, value->representation());
}
- if (!hydrogen_env->is_arguments_adaptor()) {
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
*argument_index_accumulator = argument_index;
}
@@ -1194,7 +1195,7 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
input);
return DefineSameAsFirst(result);
- } else if (op == kMathSin || op == kMathCos) {
+ } else if (op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* context = UseFixed(instr->context(), esi);
LOperand* input = UseFixedDouble(instr->value(), xmm1);
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
@@ -1646,6 +1647,14 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
}
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* date = UseFixed(instr->value(), eax);
+ LDateField* result =
+ new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
+ return MarkAsCall(DefineFixed(result, eax), instr);
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
return AssignEnvironment(new(zone()) LBoundsCheck(
UseRegisterOrConstantAtStart(instr->index()),
@@ -1815,34 +1824,6 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
-
- LInstruction* result;
- if (input_rep.IsDouble()) {
- LOperand* reg = UseRegister(value);
- LOperand* temp_reg =
- CpuFeatures::IsSupported(SSE3) ? NULL : TempRegister();
- result = DefineAsRegister(new(zone()) LDoubleToI(reg, temp_reg));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* reg = UseRegister(value);
- // Register allocator doesn't (yet) support allocation of double
- // temps. Reserve xmm1 explicitly.
- LOperand* xmm_temp =
- CpuFeatures::IsSupported(SSE3) ? NULL : FixedTemp(xmm1);
- result = DefineSameAsFirst(new(zone()) LTaggedToI(reg, xmm_temp));
- }
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
return new(zone()) LReturn(UseFixed(instr->value(), eax));
}
@@ -2213,6 +2194,14 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
}
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* temp = TempRegister();
+ LAllocateObject* result = new(zone()) LAllocateObject(context, temp);
+ return AssignPointerMap(DefineAsRegister(result));
+}
+
+
LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
LOperand* context = UseFixed(instr->context(), esi);
return MarkAsCall(
@@ -2385,7 +2374,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h
index b879fc13ccf..dd41bfbc77c 100644
--- a/deps/v8/src/ia32/lithium-ia32.h
+++ b/deps/v8/src/ia32/lithium-ia32.h
@@ -43,6 +43,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -171,7 +172,8 @@ class LCodeGen;
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
- V(LoadFieldByIndex)
+ V(LoadFieldByIndex) \
+ V(DateField)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -1001,6 +1003,24 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
};
+class LDateField: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LDateField(LOperand* date, LOperand* temp, Smi* index)
+ : index_(index) {
+ inputs_[0] = date;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(DateField)
+
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 2, 0> {
public:
LThrow(LOperand* context, LOperand* value) {
@@ -1995,6 +2015,20 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
};
+class LAllocateObject: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LAllocateObject(LOperand* context, LOperand* temp) {
+ inputs_[0] = context;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+
+ LOperand* context() { return inputs_[0]; }
+};
+
+
class LFastLiteral: public LTemplateInstruction<1, 1, 0> {
public:
explicit LFastLiteral(LOperand* context) {
@@ -2307,7 +2341,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- isolate_(graph->isolate()),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2337,7 +2371,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2446,7 +2480,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
- Isolate* isolate_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
index e613a06c3c5..2c9b60c8687 100644
--- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1085,7 +1085,7 @@ int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
int delta = code_handle->address() - re_code->address();
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index 56484af3099..de71818c986 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -44,19 +44,30 @@ static void ProbeTable(Isolate* isolate,
Code::Flags flags,
StubCache::Table table,
Register name,
+ Register receiver,
+ // The offset register holds the cache entry number, pointer-size scaled.
Register offset,
Register extra) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
Label miss;
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ lea(offset, Operand(offset, offset, times_2, 0));
+
if (extra.is_valid()) {
// Get the code entry from the cache.
- __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
// Check that the key in the entry matches the name.
- __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
+ __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
+ __ j(not_equal, &miss);
+
+ // Check the map matches.
+ __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
+ __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);
// Check that the flags match what we're looking for.
@@ -65,6 +76,14 @@ static void ProbeTable(Isolate* isolate,
__ cmp(offset, flags);
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Jump to the first instruction in the code stub.
__ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(extra);
@@ -75,11 +94,19 @@ static void ProbeTable(Isolate* isolate,
__ push(offset);
// Check that the key in the entry matches the name.
- __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
+ __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
+ __ j(not_equal, &miss);
+
+ // Check the map matches.
+ __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
+ __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);
+ // Restore offset register.
+ __ mov(offset, Operand(esp, 0));
+
// Get the code entry from the cache.
- __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
// Check that the flags match what we're looking for.
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
@@ -87,9 +114,17 @@ static void ProbeTable(Isolate* isolate,
__ cmp(offset, flags);
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Restore offset and re-load code entry from cache.
__ pop(offset);
- __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
// Jump to the first instruction in the code stub.
__ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -159,12 +194,13 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Label miss;
- // Assert that code is valid. The shifting code relies on the entry size
- // being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Assert that code is valid. The multiplying code relies on the entry size
+ // being 12.
+ ASSERT(sizeof(Entry) == 12);
// Assert the flags do not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -176,37 +212,51 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!extra.is(name));
ASSERT(!extra.is(scratch));
- // Assert scratch and extra registers are valid, and extra2 is unused.
+ // Assert scratch and extra registers are valid, and extra2/3 are unused.
ASSERT(!scratch.is(no_reg));
ASSERT(extra2.is(no_reg));
+ ASSERT(extra3.is(no_reg));
+
+ Register offset = scratch;
+ scratch = no_reg;
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
// Get the map of the receiver and compute the hash.
- __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
- __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
- __ xor_(scratch, flags);
- __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
+ __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(offset, flags);
+ // We mask out the last two bits because they are not part of the hash and
+ // they are always 01 for maps. Also in the two 'and' instructions below.
+ __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ // ProbeTable expects the offset to be pointer scaled, which it is, because
+ // the heap object tag size is 2 and the pointer size log 2 is also 2.
+ ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
// Probe the primary table.
- ProbeTable(isolate(), masm, flags, kPrimary, name, scratch, extra);
+ ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
// Primary miss: Compute hash for secondary probe.
- __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
- __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
- __ xor_(scratch, flags);
- __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
- __ sub(scratch, name);
- __ add(scratch, Immediate(flags));
- __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
+ __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
+ __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(offset, flags);
+ __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ __ sub(offset, name);
+ __ add(offset, Immediate(flags));
+ __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
// Probe the secondary table.
- ProbeTable(isolate(), masm, flags, kSecondary, name, scratch, extra);
+ ProbeTable(
+ isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
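
The reworked probe above hashes the name and the receiver map into a pointer-scaled table offset and, on a primary miss, derives a secondary offset from it. A small C++ sketch of the two hash computations follows; the table sizes used here are assumptions (V8 defines the real ones in stub-cache.h), and offsets stay pointer scaled because kHeapObjectTagSize == kPointerSizeLog2 == 2.

#include <stdint.h>
#include <stdio.h>

static const uint32_t kPrimaryTableSize = 2048;
static const uint32_t kSecondaryTableSize = 512;
static const uint32_t kHeapObjectTagSize = 2;

uint32_t PrimaryOffset(uint32_t name_hash, uint32_t map_bits, uint32_t flags) {
  uint32_t offset = name_hash + map_bits;
  offset ^= flags;
  return offset & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}

uint32_t SecondaryOffset(uint32_t primary, uint32_t name_bits, uint32_t flags) {
  uint32_t offset = primary - name_bits + flags;
  return offset & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}

int main() {
  uint32_t primary = PrimaryOffset(0x1234u, 0xbeef0001u, 0x42u);
  // Each entry now holds three words (name, code, map), so ProbeTable scales
  // the pointer-scaled offset by 3: lea offset, [offset + offset*2].
  printf("primary entry byte offset: %u\n", (unsigned)(primary * 3));
  printf("secondary offset: %u\n",
         (unsigned)SecondaryOffset(primary, 0x08012340u, 0x42u));
  return 0;
}
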
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index 9bea13b9484..c2ee45ec1e5 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -2482,9 +2482,21 @@ CompareIC::State CompareIC::TargetState(State state,
case UNINITIALIZED:
if (x->IsSmi() && y->IsSmi()) return SMIS;
if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
- if (!Token::IsEqualityOp(op_)) return GENERIC;
- if (x->IsSymbol() && y->IsSymbol()) return SYMBOLS;
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ // Ordered comparisons treat undefined as NaN, so the
+ // HEAP_NUMBER stub will do the right thing.
+ if ((x->IsNumber() && y->IsUndefined()) ||
+ (y->IsNumber() && x->IsUndefined())) {
+ return HEAP_NUMBERS;
+ }
+ }
+ if (x->IsSymbol() && y->IsSymbol()) {
+ // We compare symbols as strings if we need to determine
+ // the order in a non-equality compare.
+ return Token::IsEqualityOp(op_) ? SYMBOLS : STRINGS;
+ }
if (x->IsString() && y->IsString()) return STRINGS;
+ if (!Token::IsEqualityOp(op_)) return GENERIC;
if (x->IsJSObject() && y->IsJSObject()) {
if (Handle<JSObject>::cast(x)->map() ==
Handle<JSObject>::cast(y)->map() &&
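
The CompareIC change above routes number-vs-undefined ordered comparisons to the HEAP_NUMBERS stub because undefined converts to NaN, and every ordered comparison involving NaN is false. A tiny C++ check of that property:

#include <assert.h>
#include <math.h>

int main() {
  // Undefined converts to NaN in ordered relational comparisons, and every
  // ordered comparison involving NaN is false, so the heap-number stub
  // already gives the correct answer.
  double undefined_as_nan = NAN;
  assert(!(1.0 < undefined_as_nan));
  assert(!(undefined_as_nan < 1.0));
  assert(!(undefined_as_nan <= undefined_as_nan));
  return 0;
}
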
diff --git a/deps/v8/src/interface.cc b/deps/v8/src/interface.cc
new file mode 100644
index 00000000000..e344b861504
--- /dev/null
+++ b/deps/v8/src/interface.cc
@@ -0,0 +1,226 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "interface.h"
+
+namespace v8 {
+namespace internal {
+
+static bool Match(void* key1, void* key2) {
+ String* name1 = *static_cast<String**>(key1);
+ String* name2 = *static_cast<String**>(key2);
+ ASSERT(name1->IsSymbol());
+ ASSERT(name2->IsSymbol());
+ return name1 == name2;
+}
+
+
+Interface* Interface::Lookup(Handle<String> name) {
+ ASSERT(IsModule());
+ ZoneHashMap* map = Chase()->exports_;
+ if (map == NULL) return NULL;
+ ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false);
+ if (p == NULL) return NULL;
+ ASSERT(*static_cast<String**>(p->key) == *name);
+ ASSERT(p->value != NULL);
+ return static_cast<Interface*>(p->value);
+}
+
+
+#ifdef DEBUG
+// Current nesting depth for debug output.
+class Nesting {
+ public:
+ Nesting() { current_ += 2; }
+ ~Nesting() { current_ -= 2; }
+ static int current() { return current_; }
+ private:
+ static int current_;
+};
+
+int Nesting::current_ = 0;
+#endif
+
+
+void Interface::DoAdd(
+ void* name, uint32_t hash, Interface* interface, bool* ok) {
+ MakeModule(ok);
+ if (!*ok) return;
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*s# Adding...\n", Nesting::current(), "");
+ PrintF("%*sthis = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*s%s : ", Nesting::current(), "",
+ (*reinterpret_cast<String**>(name))->ToAsciiArray());
+ interface->Print(Nesting::current());
+ }
+#endif
+
+ ZoneHashMap** map = &Chase()->exports_;
+ if (*map == NULL) *map = new ZoneHashMap(Match, 8);
+
+ ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen());
+ if (p == NULL) {
+ // The interface is already frozen and does not have the name; that's an error.
+ *ok = false;
+ } else if (p->value == NULL) {
+ p->value = interface;
+ } else {
+#ifdef DEBUG
+ Nesting nested;
+#endif
+ reinterpret_cast<Interface*>(p->value)->Unify(interface, ok);
+ }
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*sthis' = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*s# Added.\n", Nesting::current(), "");
+ }
+#endif
+}
+
+
+void Interface::Unify(Interface* that, bool* ok) {
+ if (this->forward_) return this->Chase()->Unify(that, ok);
+ if (that->forward_) return this->Unify(that->Chase(), ok);
+ ASSERT(this->forward_ == NULL);
+ ASSERT(that->forward_ == NULL);
+
+ *ok = true;
+ if (this == that) return;
+ if (this->IsValue()) return that->MakeValue(ok);
+ if (that->IsValue()) return this->MakeValue(ok);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*s# Unifying...\n", Nesting::current(), "");
+ PrintF("%*sthis = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*sthat = ", Nesting::current(), "");
+ that->Print(Nesting::current());
+ }
+#endif
+
+ // Merge the smaller interface into the larger, for performance.
+ if (this->exports_ != NULL && (that->exports_ == NULL ||
+ this->exports_->occupancy() >= that->exports_->occupancy())) {
+ this->DoUnify(that, ok);
+ } else {
+ that->DoUnify(this, ok);
+ }
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*sthis' = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*sthat' = ", Nesting::current(), "");
+ that->Print(Nesting::current());
+ PrintF("%*s# Unified.\n", Nesting::current(), "");
+ }
+#endif
+}
+
+
+void Interface::DoUnify(Interface* that, bool* ok) {
+ ASSERT(this->forward_ == NULL);
+ ASSERT(that->forward_ == NULL);
+ ASSERT(!this->IsValue());
+ ASSERT(!that->IsValue());
+ ASSERT(*ok);
+
+#ifdef DEBUG
+ Nesting nested;
+#endif
+
+ // Try to merge all members from that into this.
+ ZoneHashMap* map = that->exports_;
+ if (map != NULL) {
+ for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+ this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), ok);
+ if (!*ok) return;
+ }
+ }
+
+ // If the unified interface is larger than 'that', then 'this' had members
+ // which 'that' did not have. If 'that' was frozen, that is an error.
+ int this_size = this->exports_ == NULL ? 0 : this->exports_->occupancy();
+ int that_size = map == NULL ? 0 : map->occupancy();
+ if (that->IsFrozen() && this_size > that_size) {
+ *ok = false;
+ return;
+ }
+
+ // Merge interfaces.
+ this->flags_ |= that->flags_;
+ that->forward_ = this;
+}
+
+
+#ifdef DEBUG
+void Interface::Print(int n) {
+ int n0 = n > 0 ? n : 0;
+
+ if (FLAG_print_interface_details) {
+ PrintF("%p", static_cast<void*>(this));
+ for (Interface* link = this->forward_; link != NULL; link = link->forward_)
+ PrintF("->%p", static_cast<void*>(link));
+ PrintF(" ");
+ }
+
+ if (IsUnknown()) {
+ PrintF("unknown\n");
+ } else if (IsValue()) {
+ PrintF("value\n");
+ } else if (IsModule()) {
+ PrintF("module %s{", IsFrozen() ? "" : "(unresolved) ");
+ ZoneHashMap* map = Chase()->exports_;
+ if (map == NULL || map->occupancy() == 0) {
+ PrintF("}\n");
+ } else if (n < 0 || n0 >= 2 * FLAG_print_interface_depth) {
+ // Avoid infinite recursion on cyclic types.
+ PrintF("...}\n");
+ } else {
+ PrintF("\n");
+ for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+ String* name = *static_cast<String**>(p->key);
+ Interface* interface = static_cast<Interface*>(p->value);
+ PrintF("%*s%s : ", n0 + 2, "", name->ToAsciiArray());
+ interface->Print(n0 + 2);
+ }
+ PrintF("%*s}\n", n0, "");
+ }
+ }
+}
+#endif
+
+} } // namespace v8::internal
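
Interface::Chase() above is the find step of a union-find structure with on-the-fly path compression over the forward_ links, and Unify() merges the smaller export map into the larger one. A standalone C++ sketch of the chase-and-compress step:

#include <stdio.h>

// Follow forward links to the representative and compress the starting
// node's link on the way out, as Interface::Chase() does.
struct Node {
  Node* forward;  // NULL when this node is the representative
};

Node* Chase(Node* node) {
  Node* result = node;
  while (result->forward != NULL) result = result->forward;
  if (result != node) node->forward = result;  // on-the-fly path compression
  return result;
}

int main() {
  Node a = {NULL};
  Node b = {&a};
  Node c = {&b};
  printf("%d\n", Chase(&c) == &a);  // 1: a is the representative
  printf("%d\n", c.forward == &a);  // 1: c now points straight at a
  return 0;
}
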
diff --git a/deps/v8/src/interface.h b/deps/v8/src/interface.h
new file mode 100644
index 00000000000..c2991cbd637
--- /dev/null
+++ b/deps/v8/src/interface.h
@@ -0,0 +1,156 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_INTERFACE_H_
+#define V8_INTERFACE_H_
+
+#include "zone-inl.h" // For operator new.
+
+namespace v8 {
+namespace internal {
+
+
+// This class implements the following abstract grammar of interfaces
+// (i.e. module types):
+// interface ::= UNDETERMINED | VALUE | MODULE(exports)
+// exports ::= {name : interface, ...}
+// A frozen module type is one that is fully determined. Unification does not
+// allow adding additional exports to frozen interfaces.
+// Otherwise, unifying modules merges their exports.
+// Undetermined types are unification variables that can be unified freely.
+
+class Interface : public ZoneObject {
+ public:
+ // ---------------------------------------------------------------------------
+ // Factory methods.
+
+ static Interface* NewValue() {
+ static Interface value_interface(VALUE + FROZEN); // Cached.
+ return &value_interface;
+ }
+
+ static Interface* NewUnknown() {
+ return new Interface(NONE);
+ }
+
+ static Interface* NewModule() {
+ return new Interface(MODULE);
+ }
+
+ // ---------------------------------------------------------------------------
+ // Mutators.
+
+ // Add a name to the list of exports. If it already exists, unify it with
+ // 'interface'; otherwise insert it, unless this interface is frozen.
+ void Add(Handle<String> name, Interface* interface, bool* ok) {
+ DoAdd(name.location(), name->Hash(), interface, ok);
+ }
+
+ // Unify with another interface. If successful, both interface objects will
+ // represent the same type, and changes to one are reflected in the other.
+ void Unify(Interface* that, bool* ok);
+
+ // Determine this interface to be a value interface.
+ void MakeValue(bool* ok) {
+ *ok = !IsModule();
+ if (*ok) Chase()->flags_ |= VALUE;
+ }
+
+ // Determine this interface to be a module interface.
+ void MakeModule(bool* ok) {
+ *ok = !IsValue();
+ if (*ok) Chase()->flags_ |= MODULE;
+ }
+
+ // Do not allow any further refinements, directly or through unification.
+ void Freeze(bool* ok) {
+ *ok = IsValue() || IsModule();
+ if (*ok) Chase()->flags_ |= FROZEN;
+ }
+
+ // ---------------------------------------------------------------------------
+ // Accessors.
+
+ // Look up an exported name. Returns NULL if not (yet) defined.
+ Interface* Lookup(Handle<String> name);
+
+ // Check whether this is still a fully undetermined type.
+ bool IsUnknown() { return Chase()->flags_ == NONE; }
+
+ // Check whether this is a value type.
+ bool IsValue() { return Chase()->flags_ & VALUE; }
+
+ // Check whether this is a module type.
+ bool IsModule() { return Chase()->flags_ & MODULE; }
+
+ // Check whether this is closed (i.e. fully determined).
+ bool IsFrozen() { return Chase()->flags_ & FROZEN; }
+
+ // ---------------------------------------------------------------------------
+ // Debugging.
+#ifdef DEBUG
+ void Print(int n = 0); // n = indentation; n < 0 => don't print recursively
+#endif
+
+ // ---------------------------------------------------------------------------
+ // Implementation.
+ private:
+ enum Flags { // All flags are monotonic
+ NONE = 0,
+ VALUE = 1, // This type describes a value
+ MODULE = 2, // This type describes a module
+ FROZEN = 4 // This type is fully determined
+ };
+
+ int flags_;
+ Interface* forward_; // Unification link
+ ZoneHashMap* exports_; // Module exports and their types (allocated lazily)
+
+ explicit Interface(int flags)
+ : flags_(flags),
+ forward_(NULL),
+ exports_(NULL) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Creating %p\n", static_cast<void*>(this));
+#endif
+ }
+
+ Interface* Chase() {
+ Interface* result = this;
+ while (result->forward_ != NULL) result = result->forward_;
+ if (result != this) forward_ = result; // On-the-fly path compression.
+ return result;
+ }
+
+ void DoAdd(void* name, uint32_t hash, Interface* interface, bool* ok);
+ void DoUnify(Interface* that, bool* ok);
+};
+
+} } // namespace v8::internal
+
+#endif // V8_INTERFACE_H_
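
The Chase() helper above is the standard union-find "find" with on-the-fly path compression, and Unify/DoUnify link two interfaces so that they end up sharing one representative whose flags and exports are merged. A minimal standalone sketch of the same scheme (illustrative names, no zone allocation, and no frozen-export check; not V8 code):

#include <cstdio>
#include <map>
#include <string>

// Sketch of the unification scheme behind v8::internal::Interface (not V8
// code). Flags are monotonic: once VALUE, MODULE or FROZEN is set it stays.
struct Iface {
  enum { NONE = 0, VALUE = 1, MODULE = 2, FROZEN = 4 };
  int flags = NONE;
  Iface* forward = nullptr;                // Unification link.
  std::map<std::string, Iface*> exports;   // Exports, for module types.

  // Union-find "find": follow forward links and compress the path.
  Iface* Chase() {
    Iface* result = this;
    while (result->forward != nullptr) result = result->forward;
    if (result != this) forward = result;  // On-the-fly path compression.
    return result;
  }

  // Unify two interfaces; afterwards both share one representative.
  bool Unify(Iface* that) {
    Iface* a = Chase();
    Iface* b = that->Chase();
    if (a == b) return true;
    if ((a->flags & VALUE) && (b->flags & MODULE)) return false;
    if ((a->flags & MODULE) && (b->flags & VALUE)) return false;
    b->flags |= a->flags;                  // Merge flags (monotonic).
    b->exports.insert(a->exports.begin(), a->exports.end());
    a->forward = b;                        // Link a's class into b's.
    return true;
  }
};

int main() {
  Iface m1, m2, v;
  m1.flags = Iface::MODULE;
  m2.flags = Iface::MODULE;
  v.flags = Iface::VALUE | Iface::FROZEN;
  std::printf("module/module: %s\n", m1.Unify(&m2) ? "ok" : "error");
  std::printf("module/value:  %s\n", m1.Unify(&v) ? "ok" : "error");
  return 0;
}

Because the flags are monotonic, a merge can only ever add VALUE, MODULE or FROZEN to the representative, which is why the header can report failures through a simple bool* ok out-parameter.
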
diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc
index 128136faa0b..3dfcbb5a3fd 100644
--- a/deps/v8/src/isolate.cc
+++ b/deps/v8/src/isolate.cc
@@ -1486,6 +1486,7 @@ Isolate::Isolate()
has_installed_extensions_(false),
string_tracker_(NULL),
regexp_stack_(NULL),
+ date_cache_(NULL),
embedder_data_(NULL),
context_exit_happened_(false) {
TRACE_ISOLATE(constructor);
@@ -1618,6 +1619,9 @@ Isolate::~Isolate() {
delete unicode_cache_;
unicode_cache_ = NULL;
+ delete date_cache_;
+ date_cache_ = NULL;
+
delete regexp_stack_;
regexp_stack_ = NULL;
@@ -1782,6 +1786,7 @@ bool Isolate::Init(Deserializer* des) {
stub_cache_ = new StubCache(this);
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
+ date_cache_ = new DateCache();
// Enable logging before setting up the heap
logger_->SetUp();
@@ -1836,13 +1841,12 @@ bool Isolate::Init(Deserializer* des) {
#ifdef ENABLE_DEBUGGER_SUPPORT
debug_->SetUp(create_heap_objects);
#endif
- stub_cache_->Initialize(create_heap_objects);
// If we are deserializing, read the state into the now-empty heap.
if (des != NULL) {
des->Deserialize();
- stub_cache_->Initialize(true);
}
+ stub_cache_->Initialize();
// Finish initialization of ThreadLocal after deserialization is done.
clear_pending_exception();
diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h
index 71fe8838f87..2c2618a1f01 100644
--- a/deps/v8/src/isolate.h
+++ b/deps/v8/src/isolate.h
@@ -36,6 +36,7 @@
#include "contexts.h"
#include "execution.h"
#include "frames.h"
+#include "date.h"
#include "global-handles.h"
#include "handles.h"
#include "hashmap.h"
@@ -1017,6 +1018,17 @@ class Isolate {
return OS::TimeCurrentMillis() - time_millis_at_init_;
}
+ DateCache* date_cache() {
+ return date_cache_;
+ }
+
+ void set_date_cache(DateCache* date_cache) {
+ if (date_cache != date_cache_) {
+ delete date_cache_;
+ }
+ date_cache_ = date_cache;
+ }
+
private:
Isolate();
@@ -1184,6 +1196,9 @@ class Isolate {
unibrow::Mapping<unibrow::Ecma262Canonicalize>
regexp_macro_assembler_canonicalize_;
RegExpStack* regexp_stack_;
+
+ DateCache* date_cache_;
+
unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
void* embedder_data_;
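
The new date_cache()/set_date_cache() pair above makes the isolate the owner of its DateCache: the setter frees the previous cache unless the caller hands the same pointer back. The pattern in isolation, with illustrative names (not the V8 API):

#include <cstdio>

struct DateCacheLike { /* cached time-zone and date-component data */ };

class Owner {
 public:
  ~Owner() { delete cache_; }
  DateCacheLike* cache() const { return cache_; }
  // Replace the owned cache; the old one is freed unless it is reused.
  void set_cache(DateCacheLike* cache) {
    if (cache != cache_) delete cache_;
    cache_ = cache;
  }
 private:
  DateCacheLike* cache_ = nullptr;
};

int main() {
  Owner owner;
  owner.set_cache(new DateCacheLike());   // Install a cache.
  owner.set_cache(new DateCacheLike());   // The old cache is deleted first.
  std::printf("cache=%p\n", static_cast<void*>(owner.cache()));
  return 0;
}
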
diff --git a/deps/v8/src/jsregexp.cc b/deps/v8/src/jsregexp.cc
index 82b495819e9..7e695bb2214 100644
--- a/deps/v8/src/jsregexp.cc
+++ b/deps/v8/src/jsregexp.cc
@@ -175,7 +175,8 @@ Handle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp,
case JSRegExp::IRREGEXP: {
Handle<Object> result =
IrregexpExec(regexp, subject, index, last_match_info);
- ASSERT(!result.is_null() || Isolate::Current()->has_pending_exception());
+ ASSERT(!result.is_null() ||
+ regexp->GetIsolate()->has_pending_exception());
return result;
}
default:
@@ -527,6 +528,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
Handle<String> subject,
int previous_index,
Handle<JSArray> last_match_info) {
+ Isolate* isolate = jsregexp->GetIsolate();
ASSERT_EQ(jsregexp->TypeTag(), JSRegExp::IRREGEXP);
// Prepare space for the return values.
@@ -542,11 +544,11 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject);
if (required_registers < 0) {
// Compiling failed with an exception.
- ASSERT(Isolate::Current()->has_pending_exception());
+ ASSERT(isolate->has_pending_exception());
return Handle<Object>::null();
}
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
IrregexpResult res = RegExpImpl::IrregexpExecOnce(
jsregexp, subject, previous_index, Vector<int>(registers.vector(),
@@ -568,11 +570,11 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
return last_match_info;
}
if (res == RE_EXCEPTION) {
- ASSERT(Isolate::Current()->has_pending_exception());
+ ASSERT(isolate->has_pending_exception());
return Handle<Object>::null();
}
ASSERT(res == RE_FAILURE);
- return Isolate::Current()->factory()->null_value();
+ return isolate->factory()->null_value();
}
diff --git a/deps/v8/src/jsregexp.h b/deps/v8/src/jsregexp.h
index 42c76fbd676..8875de9eb23 100644
--- a/deps/v8/src/jsregexp.h
+++ b/deps/v8/src/jsregexp.h
@@ -1466,12 +1466,12 @@ class RegExpEngine: public AllStatic {
class OffsetsVector {
public:
- explicit inline OffsetsVector(int num_registers)
+ inline OffsetsVector(int num_registers, Isolate* isolate)
: offsets_vector_length_(num_registers) {
if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
vector_ = NewArray<int>(offsets_vector_length_);
} else {
- vector_ = Isolate::Current()->jsregexp_static_offsets_vector();
+ vector_ = isolate->jsregexp_static_offsets_vector();
}
}
inline ~OffsetsVector() {
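
The jsregexp changes above all push in one direction: rather than looking up the thread-local Isolate::Current() inside helpers, the isolate is fetched once from an object already at hand (jsregexp->GetIsolate()) and threaded through explicitly, and OffsetsVector now takes it as a constructor argument so it can borrow the isolate's static offsets buffer without a TLS lookup. A rough standalone sketch of that constructor shape (illustrative types and sizes, not the real OffsetsVector):

#include <cstdio>

// Stand-in for the per-isolate state that used to be found via a global.
struct IsolateLike {
  static const int kStaticSize = 50;
  int static_offsets_vector[kStaticSize];
};

// Scratch buffer that borrows the isolate's static vector when it is large
// enough and heap-allocates otherwise; the isolate is passed in explicitly.
class ScratchOffsets {
 public:
  ScratchOffsets(int n, IsolateLike* isolate)
      : owned_(n > IsolateLike::kStaticSize),
        vector_(owned_ ? new int[n] : isolate->static_offsets_vector) {}
  ~ScratchOffsets() { if (owned_) delete[] vector_; }
  int* vector() { return vector_; }

 private:
  bool owned_;
  int* vector_;
};

int main() {
  IsolateLike isolate;
  ScratchOffsets small(8, &isolate);    // Borrows the static buffer.
  ScratchOffsets large(200, &isolate);  // Falls back to the heap.
  std::printf("%p %p\n", static_cast<void*>(small.vector()),
              static_cast<void*>(large.vector()));
  return 0;
}
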
diff --git a/deps/v8/src/lithium-allocator.cc b/deps/v8/src/lithium-allocator.cc
index 20003f05cb6..83805dc729a 100644
--- a/deps/v8/src/lithium-allocator.cc
+++ b/deps/v8/src/lithium-allocator.cc
@@ -1105,7 +1105,7 @@ bool LAllocator::Allocate(LChunk* chunk) {
void LAllocator::MeetRegisterConstraints() {
- HPhase phase("Register constraints", chunk_);
+ HPhase phase("L_Register constraints", chunk_);
first_artificial_register_ = next_virtual_register_;
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
for (int i = 0; i < blocks->length(); ++i) {
@@ -1117,7 +1117,7 @@ void LAllocator::MeetRegisterConstraints() {
void LAllocator::ResolvePhis() {
- HPhase phase("Resolve phis", chunk_);
+ HPhase phase("L_Resolve phis", chunk_);
// Process the blocks in reverse order.
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1207,7 +1207,7 @@ HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
void LAllocator::ConnectRanges() {
- HPhase phase("Connect ranges", this);
+ HPhase phase("L_Connect ranges", this);
for (int i = 0; i < live_ranges()->length(); ++i) {
LiveRange* first_range = live_ranges()->at(i);
if (first_range == NULL || first_range->parent() != NULL) continue;
@@ -1247,7 +1247,7 @@ bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
void LAllocator::ResolveControlFlow() {
- HPhase phase("Resolve control flow", this);
+ HPhase phase("L_Resolve control flow", this);
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
for (int block_id = 1; block_id < blocks->length(); ++block_id) {
HBasicBlock* block = blocks->at(block_id);
@@ -1268,7 +1268,7 @@ void LAllocator::ResolveControlFlow() {
void LAllocator::BuildLiveRanges() {
- HPhase phase("Build live ranges", this);
+ HPhase phase("L_Build live ranges", this);
InitializeLivenessAnalysis();
// Process the blocks in reverse order.
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1373,7 +1373,7 @@ bool LAllocator::SafePointsAreInOrder() const {
void LAllocator::PopulatePointerMaps() {
- HPhase phase("Populate pointer maps", this);
+ HPhase phase("L_Populate pointer maps", this);
const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
ASSERT(SafePointsAreInOrder());
@@ -1492,14 +1492,14 @@ void LAllocator::ProcessOsrEntry() {
void LAllocator::AllocateGeneralRegisters() {
- HPhase phase("Allocate general registers", this);
+ HPhase phase("L_Allocate general registers", this);
num_registers_ = Register::kNumAllocatableRegisters;
AllocateRegisters();
}
void LAllocator::AllocateDoubleRegisters() {
- HPhase phase("Allocate double registers", this);
+ HPhase phase("L_Allocate double registers", this);
num_registers_ = DoubleRegister::kNumAllocatableRegisters;
mode_ = DOUBLE_REGISTERS;
AllocateRegisters();
diff --git a/deps/v8/src/lithium.h b/deps/v8/src/lithium.h
index 474e555fec8..ec7269509f4 100644
--- a/deps/v8/src/lithium.h
+++ b/deps/v8/src/lithium.h
@@ -438,14 +438,14 @@ class LPointerMap: public ZoneObject {
class LEnvironment: public ZoneObject {
public:
LEnvironment(Handle<JSFunction> closure,
- bool is_arguments_adaptor,
+ FrameType frame_type,
int ast_id,
int parameter_count,
int argument_count,
int value_count,
LEnvironment* outer)
: closure_(closure),
- is_arguments_adaptor_(is_arguments_adaptor),
+ frame_type_(frame_type),
arguments_stack_height_(argument_count),
deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
translation_index_(-1),
@@ -459,6 +459,7 @@ class LEnvironment: public ZoneObject {
outer_(outer) { }
Handle<JSFunction> closure() const { return closure_; }
+ FrameType frame_type() const { return frame_type_; }
int arguments_stack_height() const { return arguments_stack_height_; }
int deoptimization_index() const { return deoptimization_index_; }
int translation_index() const { return translation_index_; }
@@ -503,11 +504,9 @@ class LEnvironment: public ZoneObject {
void PrintTo(StringStream* stream);
- bool is_arguments_adaptor() const { return is_arguments_adaptor_; }
-
private:
Handle<JSFunction> closure_;
- bool is_arguments_adaptor_;
+ FrameType frame_type_;
int arguments_stack_height_;
int deoptimization_index_;
int translation_index_;
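
Replacing the is_arguments_adaptor boolean with a FrameType lets the deoptimizer describe a third kind of synthesized frame, the construct stub frame, and the code generators now switch over it when writing deoptimization translations (see the WriteTranslation hunk in lithium-codegen-mips.cc further down). The shape of that dispatch, reduced to a standalone sketch with the enumerator names taken from the diff and the translation calls stubbed out as strings:

#include <cstdio>

// Frame kinds recorded per LEnvironment (enumerator names as in the diff).
enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };

// Sketch of the dispatch that replaced the old boolean test when writing
// deoptimization translations; the real code calls Translation methods.
const char* BeginFrame(FrameType type) {
  switch (type) {
    case JS_FUNCTION:       return "BeginJSFrame";
    case JS_CONSTRUCT:      return "BeginConstructStubFrame";
    case ARGUMENTS_ADAPTOR: return "BeginArgumentsAdaptorFrame";
  }
  return "unreachable";
}

int main() {
  std::printf("%s\n", BeginFrame(JS_CONSTRUCT));
  return 0;
}
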
diff --git a/deps/v8/src/macros.py b/deps/v8/src/macros.py
index 8e9c62dbe58..93287ae3d44 100644
--- a/deps/v8/src/macros.py
+++ b/deps/v8/src/macros.py
@@ -164,16 +164,36 @@ const MAX_TIME_BEFORE_UTC = 8640002592000000;
# Gets the value of a Date object. If arg is not a Date object
# a type error is thrown.
-macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
-macro DAY(time) = ($floor(time / 86400000));
-macro NAN_OR_DATE_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : DateFromTime(time));
-macro HOUR_FROM_TIME(time) = (Modulo($floor(time / 3600000), 24));
-macro MIN_FROM_TIME(time) = (Modulo($floor(time / 60000), 60));
-macro NAN_OR_MIN_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MIN_FROM_TIME(time));
-macro SEC_FROM_TIME(time) = (Modulo($floor(time / 1000), 60));
-macro NAN_OR_SEC_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : SEC_FROM_TIME(time));
-macro MS_FROM_TIME(time) = (Modulo(time, 1000));
-macro NAN_OR_MS_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MS_FROM_TIME(time));
+macro CHECK_DATE(arg) = if (%_ClassOf(arg) !== 'Date') ThrowDateTypeError();
+macro LOCAL_DATE_VALUE(arg) = (%_DateField(arg, 0) + %_DateField(arg, 21));
+macro UTC_DATE_VALUE(arg) = (%_DateField(arg, 0));
+
+macro LOCAL_YEAR(arg) = (%_DateField(arg, 1));
+macro LOCAL_MONTH(arg) = (%_DateField(arg, 2));
+macro LOCAL_DAY(arg) = (%_DateField(arg, 3));
+macro LOCAL_WEEKDAY(arg) = (%_DateField(arg, 4));
+macro LOCAL_HOUR(arg) = (%_DateField(arg, 5));
+macro LOCAL_MIN(arg) = (%_DateField(arg, 6));
+macro LOCAL_SEC(arg) = (%_DateField(arg, 7));
+macro LOCAL_MS(arg) = (%_DateField(arg, 8));
+macro LOCAL_DAYS(arg) = (%_DateField(arg, 9));
+macro LOCAL_TIME_IN_DAY(arg) = (%_DateField(arg, 10));
+
+macro UTC_YEAR(arg) = (%_DateField(arg, 11));
+macro UTC_MONTH(arg) = (%_DateField(arg, 12));
+macro UTC_DAY(arg) = (%_DateField(arg, 13));
+macro UTC_WEEKDAY(arg) = (%_DateField(arg, 14));
+macro UTC_HOUR(arg) = (%_DateField(arg, 15));
+macro UTC_MIN(arg) = (%_DateField(arg, 16));
+macro UTC_SEC(arg) = (%_DateField(arg, 17));
+macro UTC_MS(arg) = (%_DateField(arg, 18));
+macro UTC_DAYS(arg) = (%_DateField(arg, 19));
+macro UTC_TIME_IN_DAY(arg) = (%_DateField(arg, 20));
+
+macro TIMEZONE_OFFSET(arg) = (%_DateField(arg, 21));
+
+macro SET_UTC_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 1));
+macro SET_LOCAL_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 0));
# Last input and last subject of regexp matches.
macro LAST_SUBJECT(array) = ((array)[1]);
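
The removed macros computed date components directly from a time value in milliseconds, while the new %_DateField macros read the same components from fields cached on the Date object by the native DateCache (see the date.h include added to isolate.h above). For reference, the arithmetic that DAY, HOUR_FROM_TIME, MIN_FROM_TIME, SEC_FROM_TIME and MS_FROM_TIME encoded is, in plain C++ (a standalone illustration, not V8 code):

#include <cmath>
#include <cstdio>

// Millisecond-based date components, mirroring the removed macros.js macros.
// Modulo is the floored modulo used by the JS builtins (result is >= 0).
static double Modulo(double a, double b) {
  double r = std::fmod(a, b);
  return r < 0 ? r + b : r;
}

static double Day(double t)  { return std::floor(t / 86400000.0); }  // ms/day
static double Hour(double t) { return Modulo(std::floor(t / 3600000.0), 24); }
static double Min(double t)  { return Modulo(std::floor(t / 60000.0), 60); }
static double Sec(double t)  { return Modulo(std::floor(t / 1000.0), 60); }
static double Ms(double t)   { return Modulo(t, 1000); }

int main() {
  double t = 1331625996000.0;  // An arbitrary UTC time in ms since the epoch.
  std::printf("day=%.0f %02.0f:%02.0f:%02.0f.%03.0f\n",
              Day(t), Hour(t), Min(t), Sec(t), Ms(t));
  return 0;
}
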
diff --git a/deps/v8/src/mark-compact-inl.h b/deps/v8/src/mark-compact-inl.h
index fd25a403cae..c9ed66f6bf1 100644
--- a/deps/v8/src/mark-compact-inl.h
+++ b/deps/v8/src/mark-compact-inl.h
@@ -45,8 +45,10 @@ MarkBit Marking::MarkBitFrom(Address addr) {
void MarkCompactCollector::SetFlags(int flags) {
- sweep_precisely_ = ((flags & Heap::kMakeHeapIterableMask) != 0);
+ sweep_precisely_ = ((flags & Heap::kSweepPreciselyMask) != 0);
reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
+ abort_incremental_marking_ =
+ ((flags & Heap::kAbortIncrementalMarkingMask) != 0);
}
diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc
index 7c59c0471e4..94f10919a7a 100644
--- a/deps/v8/src/mark-compact.cc
+++ b/deps/v8/src/mark-compact.cc
@@ -686,8 +686,8 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
}
#endif
- // Clear marking bits for precise sweeping to collect all garbage.
- if (was_marked_incrementally_ && PreciseSweepingRequired()) {
+ // Clear marking bits if incremental marking is aborted.
+ if (was_marked_incrementally_ && abort_incremental_marking_) {
heap()->incremental_marking()->Abort();
ClearMarkbits();
AbortCompaction();
diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h
index dc4bcee259d..442ad1d98c3 100644
--- a/deps/v8/src/mark-compact.h
+++ b/deps/v8/src/mark-compact.h
@@ -420,14 +420,9 @@ class MarkCompactCollector {
// Pointer to member function, used in IterateLiveObjects.
typedef int (MarkCompactCollector::*LiveObjectCallback)(HeapObject* obj);
- // Set the global force_compaction flag, it must be called before Prepare
- // to take effect.
+ // Set the global flags; this must be called before Prepare to take effect.
inline void SetFlags(int flags);
- inline bool PreciseSweepingRequired() {
- return sweep_precisely_;
- }
-
static void Initialize();
void CollectEvacuationCandidates(PagedSpace* space);
@@ -579,6 +574,8 @@ class MarkCompactCollector {
bool reduce_memory_footprint_;
+ bool abort_incremental_marking_;
+
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
bool compacting_;
diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js
index e641133dda5..a3adcf8634d 100644
--- a/deps/v8/src/messages.js
+++ b/deps/v8/src/messages.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -210,7 +210,7 @@ function FormatMessage(message) {
"no_input_to_regexp", ["No input to ", "%0"],
"invalid_json", ["String '", "%0", "' is not valid JSON"],
"circular_structure", ["Converting circular structure to JSON"],
- "obj_ctor_property_non_object", ["Object.", "%0", " called on non-object"],
+ "called_on_non_object", ["%0", " called on non-object"],
"called_on_null_or_undefined", ["%0", " called on null or undefined"],
"array_indexof_not_defined", ["Array.getIndexOf: Argument undefined"],
"object_not_extensible", ["Can't add property ", "%0", ", object is not extensible"],
@@ -246,6 +246,8 @@ function FormatMessage(message) {
"cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"],
"redef_external_array_element", ["Cannot redefine a property of an object with external array elements"],
"harmony_const_assign", ["Assignment to constant variable."],
+ "invalid_module_path", ["Module does not export '", "%0", "', or export is not itself a module"],
+ "module_type_error", ["Module '", "%0", "' used improperly"],
];
var messages = { __proto__ : null };
for (var i = 0; i < messagesDictionary.length; i += 2) {
@@ -533,6 +535,13 @@ function ScriptNameOrSourceURL() {
if (this.name) {
return this.name;
}
+
+ // The result is cached because, on long scripts, searching for the
+ // sourceURL takes noticeable time.
+ if (this.hasCachedNameOrSourceURL)
+ return this.cachedNameOrSourceURL;
+ this.hasCachedNameOrSourceURL = true;
+
// TODO(608): the spaces in a regexp below had to be escaped as \040
// because this file is being processed by js2c whose handling of spaces
// in regexps is broken. Also, ['"] are excluded from allowed URLs to
@@ -541,6 +550,7 @@ function ScriptNameOrSourceURL() {
// the scanner/parser.
var source = ToString(this.source);
var sourceUrlPos = %StringIndexOf(source, "sourceURL=", 0);
+ this.cachedNameOrSourceURL = this.name;
if (sourceUrlPos > 4) {
var sourceUrlPattern =
/\/\/@[\040\t]sourceURL=[\040\t]*([^\s\'\"]*)[\040\t]*$/gm;
@@ -551,15 +561,17 @@ function ScriptNameOrSourceURL() {
var match =
%_RegExpExec(sourceUrlPattern, source, sourceUrlPos - 4, matchInfo);
if (match) {
- return SubString(source, matchInfo[CAPTURE(2)], matchInfo[CAPTURE(3)]);
+ this.cachedNameOrSourceURL =
+ SubString(source, matchInfo[CAPTURE(2)], matchInfo[CAPTURE(3)]);
}
}
- return this.name;
+ return this.cachedNameOrSourceURL;
}
SetUpLockedPrototype(Script,
- $Array("source", "name", "line_ends", "line_offset", "column_offset"),
+ $Array("source", "name", "line_ends", "line_offset", "column_offset",
+ "cachedNameOrSourceURL", "hasCachedNameOrSourceURL" ),
$Array(
"lineFromPosition", ScriptLineFromPosition,
"locationFromPosition", ScriptLocationFromPosition,
@@ -759,8 +771,7 @@ function DefineOneShotAccessor(obj, name, fun) {
hasBeenSet = true;
value = v;
};
- %DefineOrRedefineAccessorProperty(obj, name, GETTER, getter, DONT_ENUM);
- %DefineOrRedefineAccessorProperty(obj, name, SETTER, setter, DONT_ENUM);
+ %DefineOrRedefineAccessorProperty(obj, name, getter, setter, DONT_ENUM);
}
function CallSite(receiver, fun, pos) {
@@ -1190,9 +1201,8 @@ function ErrorToStringDetectCycle(error) {
}
function ErrorToString() {
- if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
- throw MakeTypeError("called_on_null_or_undefined",
- ["Error.prototype.toString"]);
+ if (!IS_SPEC_OBJECT(this)) {
+ throw MakeTypeError("called_on_non_object", ["Error.prototype.toString"]);
}
try {
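
The ScriptNameOrSourceURL() change above is a plain memoization: the regexp search for a //@ sourceURL= comment now runs at most once per script, and the result (or the script's own name) is stored in the new cachedNameOrSourceURL/hasCachedNameOrSourceURL fields. The same pattern in a small standalone C++ sketch, with a simplified string scan instead of the regexp:

#include <cstdio>
#include <string>
#include <utility>

// Memoize an expensive lookup: compute once, then return the cached result.
class ScriptLike {
 public:
  explicit ScriptLike(std::string source) : source_(std::move(source)) {}

  const std::string& NameOrSourceURL() {
    if (has_cached_) return cached_;   // Fast path after the first call.
    has_cached_ = true;
    cached_ = "<anonymous>";           // Stand-in for falling back to name.
    const std::string marker = "sourceURL=";
    size_t pos = source_.find(marker); // The expensive scan, done once.
    if (pos != std::string::npos) {
      size_t start = pos + marker.size();
      size_t end = source_.find_first_of(" \t\n", start);
      size_t len = (end == std::string::npos) ? end : end - start;
      cached_ = source_.substr(start, len);
    }
    return cached_;
  }

 private:
  std::string source_;
  std::string cached_;
  bool has_cached_ = false;
};

int main() {
  ScriptLike script("var x = 1;\n//@ sourceURL=app.js\n");
  std::printf("%s\n", script.NameOrSourceURL().c_str());  // Prints app.js
  return 0;
}
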
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index cc11235824e..09a99243872 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -1008,6 +1008,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -1730,8 +1735,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&too_few);
EnterArgumentsAdaptorFrame(masm);
- // TODO(MIPS): Optimize these loops.
-
// Calculate copy start address into a0 and copy end address is fp.
// a0: actual number of arguments as a smi
// a1: function
@@ -1753,9 +1756,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label copy;
__ bind(&copy);
__ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
- __ push(t0);
+ __ Subu(sp, sp, kPointerSize);
__ Subu(a0, a0, kPointerSize);
- __ Branch(&copy, ne, a0, Operand(t3));
+ __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
+ __ sw(t0, MemOperand(sp)); // In the delay slot.
// Fill the remaining expected arguments with undefined.
// a1: function
@@ -1768,8 +1772,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label fill;
__ bind(&fill);
- __ push(t0);
- __ Branch(&fill, ne, sp, Operand(a2));
+ __ Subu(sp, sp, kPointerSize);
+ __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
+ __ sw(t0, MemOperand(sp));
}
// Call the entry point.
@@ -1777,7 +1782,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Call(a3);
+ // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ Ret();
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index de6fb953c47..3eaa5249181 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -5363,17 +5363,19 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Branch(&done, eq, a3, Operand(at));
- // MegamorphicSentinel is an immortal immovable object (undefined) so no
- // write-barrier is needed.
- __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
- __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
- __ Branch(&done);
+ __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at));
// An uninitialized cache is patched with the function.
+ // Store a1 in the delay slot. This may or may not get overwritten depending
+ // on the result of the comparison.
__ sw(a1, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
// No need for a write barrier here - cells are rescanned.
+ // MegamorphicSentinel is an immortal immovable object (undefined) so no
+ // write-barrier is needed.
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+
__ bind(&done);
}
@@ -6149,8 +6151,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&sliced_string);
// Sliced string. Fetch parent and correct start index by offset.
- __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
+ __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ sra(t0, t0, 1); // Add offset to index.
__ Addu(a3, a3, t0);
// Update instance type.
@@ -6188,8 +6190,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
__ bind(&set_slice_header);
__ sll(a3, a3, 1);
- __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
+ __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ jmp(&return_v0);
__ bind(&copy_routine);
@@ -6783,15 +6785,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ And(a2, a1, Operand(a0));
__ JumpIfSmi(a2, &generic_stub);
__ GetObjectType(a0, a2, a2);
- __ Branch(&miss, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ Branch(&maybe_undefined1, ne, a2, Operand(HEAP_NUMBER_TYPE));
__ GetObjectType(a1, a2, a2);
- __ Branch(&miss, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or FPU is unsupported.
@@ -6823,14 +6825,29 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ bind(&fpu_lt);
__ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(LESS)); // In delay slot.
-
- __ bind(&unordered);
}
+ __ bind(&unordered);
+
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
__ bind(&generic_stub);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ Branch(&miss, ne, a0, Operand(at));
+ __ GetObjectType(a1, a2, a2);
+ __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ Branch(&unordered, eq, a1, Operand(at));
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -7070,7 +7087,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// scratch0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -7099,9 +7116,15 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ Branch(done, eq, entity_name, Operand(tmp));
if (i != kInlinedProbes - 1) {
+ // Load the hole ready for use below:
+ __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
// Stop if found the property.
__ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
+ Label the_hole;
+ __ Branch(&the_hole, eq, entity_name, Operand(tmp));
+
// Check if the entry name is not a symbol.
__ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
__ lbu(entity_name,
@@ -7109,6 +7132,8 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ And(scratch0, entity_name, Operand(kIsSymbolMask));
__ Branch(miss, eq, scratch0, Operand(zero_reg));
+ __ bind(&the_hole);
+
// Restore the properties.
__ lw(properties,
FieldMemOperand(receiver, JSObject::kPropertiesOffset));
diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc
index d7bddaf1252..8cbb771952d 100644
--- a/deps/v8/src/mips/codegen-mips.cc
+++ b/deps/v8/src/mips/codegen-mips.cc
@@ -37,6 +37,19 @@ namespace internal {
#define __ ACCESS_MASM(masm)
+TranscendentalFunction CreateTranscendentalFunction(
+ TranscendentalCache::Type type) {
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ return NULL;
+}
+
+
// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
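
CreateTranscendentalFunction above maps a TranscendentalCache type to the matching libm routine so the MIPS port can call sin, cos, tan and log through a single function-pointer type. A trimmed-down illustration of the dispatch and of how such a pointer is used (the enum and names here are stand-ins):

#include <cmath>
#include <cstdio>

// One function-pointer type for all cached transcendental routines.
typedef double (*TranscendentalFunction)(double);

enum TranscendentalType { SIN, COS, TAN, LOG };  // Stand-in for the cache enum.

TranscendentalFunction CreateTranscendental(TranscendentalType type) {
  switch (type) {
    case SIN: return [](double x) { return std::sin(x); };
    case COS: return [](double x) { return std::cos(x); };
    case TAN: return [](double x) { return std::tan(x); };
    case LOG: return [](double x) { return std::log(x); };
  }
  return nullptr;  // Mirrors the UNIMPLEMENTED() default above.
}

int main() {
  TranscendentalFunction f = CreateTranscendental(LOG);
  std::printf("log(e) ~= %f\n", f(2.718281828459045));  // Prints ~1.000000
  return 0;
}
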
diff --git a/deps/v8/src/mips/deoptimizer-mips.cc b/deps/v8/src/mips/deoptimizer-mips.cc
index 78720f4403d..611fbaaf965 100644
--- a/deps/v8/src/mips/deoptimizer-mips.cc
+++ b/deps/v8/src/mips/deoptimizer-mips.cc
@@ -355,7 +355,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
- unsigned input_frame_size = input_->GetFrameSize();
unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Allocate and store the output frame description.
@@ -377,16 +376,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Compute the incoming parameter translation.
int parameter_count = height;
unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
}
- input_offset -= (parameter_count * kPointerSize);
// Read caller's PC from the previous frame.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t callers_pc = output_[frame_index - 1]->GetPc();
output_frame->SetFrameSlot(output_offset, callers_pc);
if (FLAG_trace_deopt) {
@@ -396,7 +392,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Read caller's FP from the previous frame, and set this frame's FP.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t value = output_[frame_index - 1]->GetFp();
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
@@ -408,7 +403,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// A marker value is used in place of the context.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t context = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
output_frame->SetFrameSlot(output_offset, context);
@@ -419,7 +413,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(function);
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -429,7 +422,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Number of incoming arguments.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -449,6 +441,119 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 7 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // Constructor function being invoked by the stub.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32/arm code, but relies on register names
// (fp, sp) and how the frame is laid out.
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
@@ -561,9 +666,8 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
- if (is_topmost) {
- output_frame->SetRegister(cp.code(), value);
- }
+ output_frame->SetContext(value);
+ if (is_topmost) output_frame->SetRegister(cp.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
top_address + output_offset, output_offset, value);
@@ -837,7 +941,7 @@ void Deoptimizer::EntryGenerator::Generate() {
// Maximum size of a table entry generated below.
-const int Deoptimizer::table_entry_size_ = 12 * Assembler::kInstrSize;
+const int Deoptimizer::table_entry_size_ = 9 * Assembler::kInstrSize;
void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
@@ -851,29 +955,20 @@ void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
__ bind(&start);
if (type() != EAGER) {
// Emulate ia32 like call by pushing return address to stack.
- __ addiu(sp, sp, -3 * kPointerSize);
- __ sw(ra, MemOperand(sp, 2 * kPointerSize));
- } else {
__ addiu(sp, sp, -2 * kPointerSize);
+ __ sw(ra, MemOperand(sp, 1 * kPointerSize));
+ } else {
+ __ addiu(sp, sp, -1 * kPointerSize);
}
- // Using ori makes sure only one instruction is generated. This will work
- // as long as the number of deopt entries is below 2^16.
- __ ori(at, zero_reg, i);
- __ sw(at, MemOperand(sp, kPointerSize));
- __ sw(ra, MemOperand(sp, 0));
- // This branch instruction only jumps over one instruction, and that is
- // executed in the delay slot. The result is that execution is linear but
- // the ra register is updated.
- __ bal(1);
// Jump over the remaining deopt entries (including this one).
- // Only include the remaining part of the current entry in the calculation.
+ // This code is always reached by calling Jump, which puts the target (label
+ // start) into t9.
const int remaining_entries = (count() - i) * table_entry_size_;
- const int cur_size = masm()->SizeOfCodeGeneratedSince(&start);
- // ra points to the instruction after the delay slot. Adjust by 4.
- __ Addu(at, ra, remaining_entries - cur_size - Assembler::kInstrSize);
- __ lw(ra, MemOperand(sp, 0));
- __ jr(at); // Expose delay slot.
- __ addiu(sp, sp, kPointerSize); // In delay slot.
+ __ Addu(t9, t9, remaining_entries);
+ // 'at' was clobbered so we can only load the current entry value here.
+ __ li(at, i);
+ __ jr(t9); // Expose delay slot.
+ __ sw(at, MemOperand(sp, 0 * kPointerSize)); // In the delay slot.
// Pad the rest of the code.
while (table_entry_size_ > (masm()->SizeOfCodeGeneratedSince(&start))) {
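
DoComputeConstructStubFrame above materializes a synthetic construct-stub frame during deoptimization: the translated parameters go in first, followed by seven fixed slots written from the highest address down to the frame top, ending exactly at output_offset == 0 (the final ASSERT). The worked example below replays that slot layout with labels instead of machine words; kPointerSize and the slot order come from the code above, everything else is illustrative:

#include <cstdio>
#include <vector>

// Standalone sketch of the slot layout DoComputeConstructStubFrame writes,
// for a frame with `height` translated parameter slots.
int main() {
  const int kPointerSize = 4;            // MIPS32, as in the code above.
  const unsigned height = 3;             // Receiver + 2 arguments (example).
  const unsigned fixed_frame_size = 7 * kPointerSize;
  const unsigned frame_size = height * kPointerSize + fixed_frame_size;

  std::vector<const char*> slot(frame_size / kPointerSize);
  unsigned offset = frame_size;
  auto store = [&](const char* what) {
    offset -= kPointerSize;
    slot[offset / kPointerSize] = what;
  };
  for (unsigned i = 0; i < height; ++i) store("translated parameter");
  store("caller's pc");
  store("caller's fp");                  // The new frame's fp points here.
  store("context (from previous frame)");
  store("StackFrame::CONSTRUCT marker");
  store("argc as Smi (height - 1)");
  store("constructor function");
  store("allocated receiver");           // Ends with offset == 0 (asserted).

  for (unsigned i = slot.size(); i-- > 0;)
    std::printf("[top + %2u] %s\n", i * kPointerSize, slot[i]);
  return 0;
}
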
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index c8239e32461..c5ef2ccbf74 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -120,7 +120,7 @@ class JumpPatchSite BASE_EMBEDDED {
int FullCodeGenerator::self_optimization_header_size() {
- return 0; // TODO(jkummerow): determine correct value.
+ return 11 * Instruction::kInstrSize;
}
@@ -164,7 +164,7 @@ void FullCodeGenerator::Generate() {
Handle<Code> compile_stub(
isolate()->builtins()->builtin(Builtins::kLazyRecompile));
__ Jump(compile_stub, RelocInfo::CODE_TARGET, eq, a3, Operand(zero_reg));
- ASSERT(masm_->pc_offset() == self_optimization_header_size());
+ ASSERT_EQ(masm_->pc_offset(), self_optimization_header_size());
}
}
@@ -952,7 +952,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Register null_value = t1;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ Branch(&exit, eq, a0, Operand(null_value));
-
+ PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
+ __ mov(a0, v0);
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(a0, &convert);
@@ -975,44 +976,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
- Label next;
- // Preload a couple of values used in the loop.
- Register empty_fixed_array_value = t2;
- __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = t3;
- __ LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- __ mov(a1, a0);
- __ bind(&next);
-
- // Check that there are no elements. Register a1 contains the
- // current JS object we've reached through the prototype chain.
- __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ Branch(&call_runtime, ne, a2, Operand(empty_fixed_array_value));
-
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in a2 for the subsequent
- // prototype load.
- __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
- __ lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOrBitField3Offset));
- __ JumpIfSmi(a3, &call_runtime);
-
- // Check that there is an enum cache in the non-empty instance
- // descriptors (a3). This is the case if the next enumeration
- // index field does not contain a smi.
- __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset));
- __ JumpIfSmi(a3, &call_runtime);
-
- // For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- __ Branch(&check_prototype, eq, a1, Operand(a0));
- __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset));
- __ Branch(&call_runtime, ne, a3, Operand(empty_fixed_array_value));
-
- // Load the prototype from the map and loop if non-null.
- __ bind(&check_prototype);
- __ lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset));
- __ Branch(&next, ne, a1, Operand(null_value));
+ __ CheckEnumCache(null_value, &call_runtime);
// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
@@ -1051,6 +1015,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register v0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(a1, cell);
+ __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+ __ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));
+
__ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1064,6 +1038,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(a1, a0); // Fixed array length (as smi) and initial index.
// Generate code for doing the condition check.
+ PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
// Load the current count to a0, load the length to a1.
__ lw(a0, MemOperand(sp, 0 * kPointerSize));
@@ -1108,7 +1083,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(result_register(), a3);
// Perform the assignment as if via '='.
{ EffectContext context(this);
- EmitAssignment(stmt->each(), stmt->AssignmentId());
+ EmitAssignment(stmt->each());
}
// Generate code for the body of the loop.
@@ -1129,6 +1104,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Drop(5);
// Exit and decrement the loop depth.
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1534,11 +1510,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ lw(a0, MemOperand(sp));
__ push(a0);
VisitForStackValue(key);
- __ li(a1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- __ push(a1);
- VisitForStackValue(value);
+ if (property->kind() == ObjectLiteral::Property::GETTER) {
+ VisitForStackValue(value);
+ __ LoadRoot(a1, Heap::kNullValueRootIndex);
+ __ push(a1);
+ } else {
+ __ LoadRoot(a1, Heap::kNullValueRootIndex);
+ __ push(a1);
+ VisitForStackValue(value);
+ }
__ li(a0, Operand(Smi::FromInt(NONE)));
__ push(a0);
__ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
@@ -1899,7 +1879,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1951,7 +1931,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
break;
}
}
- PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(v0);
}
@@ -2444,6 +2423,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
CallConstructStub stub(flags);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
}
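
The long block removed from VisitForInStatement walked the prototype chain by hand, checking that every object has no elements, that its descriptors carry an enum cache, and that the cache is empty for everything but the receiver; that walk now lives behind the single __ CheckEnumCache() call (the same helper the new LForInPrepareMap code uses later in this patch). Roughly, the test the removed assembly performed looks like this in a standalone sketch (simplified object model, not V8's):

#include <cstdio>

struct MapLike;
struct ObjectLike {
  bool has_no_elements;      // elements == the empty fixed array.
  MapLike* map;
};
struct MapLike {
  bool has_enum_cache;       // Descriptor array carries an enum cache.
  bool enum_cache_is_empty;  // Cache bridge cache == empty fixed array.
  ObjectLike* prototype;     // Null at the end of the chain.
};

// Returns true when for-in can iterate straight out of the enum cache.
bool CanUseEnumCache(const ObjectLike* receiver) {
  for (const ObjectLike* current = receiver; current != nullptr;
       current = current->map->prototype) {
    if (!current->has_no_elements) return false;       // Elements present.
    if (!current->map->has_enum_cache) return false;   // No enum cache.
    // Every object except the receiver itself must have an empty cache.
    if (current != receiver && !current->map->enum_cache_is_empty)
      return false;
  }
  return true;
}

int main() {
  MapLike proto_map{true, true, nullptr};
  ObjectLike proto{true, &proto_map};
  MapLike receiver_map{true, false, &proto};
  ObjectLike receiver{true, &receiver_map};
  std::printf("fast path: %s\n", CanUseEnumCache(&receiver) ? "yes" : "no");
  return 0;
}
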
diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc
index c3cdb4cd388..b6f019f4785 100644
--- a/deps/v8/src/mips/ic-mips.cc
+++ b/deps/v8/src/mips/ic-mips.cc
@@ -401,7 +401,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
// If the stub cache probing failed, the receiver might be a value.
// For value objects, we use the map of the prototype objects for
@@ -437,7 +437,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Probe the stub cache for the value object.
__ bind(&probe);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
__ bind(&miss);
}
@@ -702,7 +702,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// Probe the stub cache.
Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a0, a2, a3, t0, t1);
+ masm, flags, a0, a2, a3, t0, t1, t2);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1513,7 +1513,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
Code::Flags flags =
Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1571,7 +1571,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// Check that the array has fast properties, otherwise the length
// property might have been redefined.
- // TODO(mstarzinger): Port this check to MIPS.
+ __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
+ __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
+ __ LoadRoot(at, Heap::kHashTableMapRootIndex);
+ __ Branch(&miss, eq, scratch, Operand(at));
// Check that value is a smi.
__ JumpIfNotSmi(value, &miss);
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index 252e8f4478b..d0531ec71ae 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -62,7 +62,7 @@ class SafepointGenerator : public CallWrapper {
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope(FPU);
@@ -447,10 +447,18 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- if (environment->is_arguments_adaptor()) {
- translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
- } else {
- translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
}
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
@@ -580,7 +588,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
- if (!e->is_arguments_adaptor()) {
+ if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
@@ -3125,14 +3133,63 @@ void LCodeGen::DoPower(LPower* instr) {
void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
+
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(f0));
ASSERT(ToRegister(instr->InputAt(0)).is(a0));
- __ PrepareCallCFunction(1, a1);
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
+
__ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
- __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ lw(a2, FieldMemOperand(a0, kRandomSeedOffset));
+ // a2: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ lw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
+ __ Branch(deferred->entry(), eq, a1, Operand(zero_reg));
+ // Load state[1].
+ __ lw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));
+ // a1: state[0].
+ // a0: state[1].
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ And(a3, a1, Operand(0xFFFF));
+ __ li(t0, Operand(18273));
+ __ mul(a3, a3, t0);
+ __ srl(a1, a1, 16);
+ __ Addu(a1, a3, a1);
+ // Save state[0].
+ __ sw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ And(a3, a0, Operand(0xFFFF));
+ __ li(t0, Operand(36969));
+ __ mul(a3, a3, t0);
+ __ srl(a0, a0, 16);
+ __ Addu(a0, a3, a0);
+ // Save state[1].
+ __ sw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ And(a0, a0, Operand(0x3FFFF));
+ __ sll(a1, a1, 14);
+ __ Addu(v0, a0, a1);
+
+ __ bind(deferred->exit());
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
__ li(a2, Operand(0x41300000));
@@ -3144,6 +3201,12 @@ void LCodeGen::DoRandom(LRandom* instr) {
__ sub_d(f0, f12, f14);
}
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, scratch0());
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in v0.
+}
+
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
@@ -4222,6 +4285,80 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
}
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->TempAt(1));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ scratch,
+ scratch2,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(map, constructor);
+ __ lw(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ sw(map, FieldMemOperand(result, JSObject::kMapOffset));
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ sw(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
+ __ sw(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ sw(scratch, FieldMemOperand(result, property_offset));
+ }
+ }
+
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ mov(result, zero_reg);
+
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ LoadHeapObject(a0, constructor);
+ __ push(a0);
+ CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ __ StoreToSafepointRegisterSlot(v0, result);
+}
+
+
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Heap* heap = isolate()->heap();
ElementsKind boilerplate_elements_kind =
@@ -4790,6 +4927,89 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
}
+void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
+ Register result = ToRegister(instr->result());
+ Register object = ToRegister(instr->object());
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), object, Operand(at));
+
+ Register null_value = t1;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), object, Operand(null_value));
+
+ __ And(at, object, kSmiTagMask);
+ DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
+
+ STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+ __ GetObjectType(object, a1, a1);
+ DeoptimizeIf(le, instr->environment(), a1, Operand(LAST_JS_PROXY_TYPE));
+
+ Label use_cache, call_runtime;
+ ASSERT(object.is(a0));
+ __ CheckEnumCache(null_value, &call_runtime);
+
+ __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset));
+ __ Branch(&use_cache);
+
+ // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
+ __ push(object);
+ CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
+
+ __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
+ ASSERT(result.is(v0));
+ __ LoadRoot(at, Heap::kMetaMapRootIndex);
+ DeoptimizeIf(ne, instr->environment(), a1, Operand(at));
+ __ bind(&use_cache);
+}
+
+
+void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
+ Register map = ToRegister(instr->map());
+ Register result = ToRegister(instr->result());
+ __ LoadInstanceDescriptors(map, result);
+ __ lw(result,
+ FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ __ lw(result,
+ FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
+ DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
+}
+
+
+void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
+ Register object = ToRegister(instr->value());
+ Register map = ToRegister(instr->map());
+ __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
+ DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0()));
+}
+
+
+void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
+ Register object = ToRegister(instr->object());
+ Register index = ToRegister(instr->index());
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+
+ Label out_of_object, done;
+ __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
+ __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize); // In delay slot.
+
+ STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
+ __ Addu(scratch, object, scratch);
+ __ lw(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
+
+ __ Branch(&done);
+
+ __ bind(&out_of_object);
+ __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+ // The index equals the negated out-of-object property index plus 1.
+ __ Subu(scratch, result, scratch);
+ __ lw(result, FieldMemOperand(scratch,
+ FixedArray::kHeaderSize - kPointerSize));
+ __ bind(&done);
+}
+
+
#undef __
} } // namespace v8::internal
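
The DoRandom fast path added in this file inlines the runtime's two-state multiply-with-carry generator and only defers to the C fallback (DoDeferredRandom) while state[0] is still zero. The recurrence, written out as portable C++ (a sketch; in V8 the two 32-bit seeds live in the global context's random-seed ByteArray):

#include <cstdint>
#include <cstdio>

// Two-state multiply-with-carry generator matching the inlined fast path:
//   state0 = 18273 * (state0 & 0xFFFF) + (state0 >> 16)
//   state1 = 36969 * (state1 & 0xFFFF) + (state1 >> 16)
//   bits   = (state0 << 14) + (state1 & 0x3FFFF)
struct RandomState {
  uint32_t state0;
  uint32_t state1;
};

uint32_t NextRandomBits(RandomState* s) {
  s->state0 = 18273 * (s->state0 & 0xFFFF) + (s->state0 >> 16);
  s->state1 = 36969 * (s->state1 & 0xFFFF) + (s->state1 >> 16);
  return (s->state0 << 14) + (s->state1 & 0x3FFFF);
}

int main() {
  RandomState s = {1, 2};  // Any non-zero state0; zero takes the slow path.
  for (int i = 0; i < 3; ++i) {
    std::printf("0x%08x\n", NextRandomBits(&s));
  }
  return 0;
}

The generated code then places those 32 bits in the low mantissa bits of a double whose high word is 0x41300000 (1.0 x 2^20) and subtracts 1.0 x 2^20, which yields bits * 2^-32, a value in [0, 1).
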
diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h
index 513992c67aa..b5082561e00 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.h
+++ b/deps/v8/src/mips/lithium-codegen-mips.h
@@ -110,8 +110,10 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc
index c534abcb65d..1c4e1da3fbc 100644
--- a/deps/v8/src/mips/lithium-mips.cc
+++ b/deps/v8/src/mips/lithium-mips.cc
@@ -440,7 +440,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -476,7 +476,7 @@ void LChunk::MarkEmptyBlocks() {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -551,8 +551,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -582,14 +582,14 @@ void LChunkBuilder::Abort(const char* format, ...) {
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- DoubleRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ DoubleRegister::ToAllocationIndex(reg));
}
@@ -604,29 +604,29 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
- LUnallocated::USED_AT_START));
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
LUnallocated::USED_AT_START));
}
@@ -662,7 +662,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
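
The recurring change throughout this file replaces plain `new` with `new(zone())`, i.e. placement allocation of lithium objects into the compiler's zone (arena), so everything built for one compilation is released in a single step when the zone is torn down instead of being deleted individually. A simplified, self-contained sketch of the pattern follows; the toy bump-pointer Zone here is an assumption for illustration, not V8's actual Zone class (alignment handling is simplified).

#include <cassert>
#include <cstddef>
#include <cstdio>
#include <vector>

// Toy bump-pointer arena: hands out slices of one buffer and frees them
// all at once when the Zone is destroyed.
class Zone {
 public:
  explicit Zone(size_t capacity) : buffer_(capacity), used_(0) {}
  void* Allocate(size_t size) {
    size = (size + 7) & ~static_cast<size_t>(7);  // 8-byte align
    assert(used_ + size <= buffer_.size());
    void* result = buffer_.data() + used_;
    used_ += size;
    return result;
  }
 private:
  std::vector<char> buffer_;
  size_t used_;
};

// Base class whose operator new routes allocation into a Zone, mirroring
// the new(zone()) LFoo(...) calls in the hunks above.
class ZoneObject {
 public:
  void* operator new(size_t size, Zone* zone) { return zone->Allocate(size); }
  void operator delete(void*, Zone*) {}  // matching form, used only on throw
  void operator delete(void*) {}         // storage is owned by the Zone
};

class LUnallocatedSketch : public ZoneObject {
 public:
  explicit LUnallocatedSketch(int policy) : policy_(policy) {}
  int policy() const { return policy_; }
 private:
  int policy_;
};

int main() {
  Zone zone(1024);
  // Mirrors "new(zone()) LUnallocated(...)": no delete, the storage
  // lives exactly as long as the zone does.
  LUnallocatedSketch* op = new (&zone) LUnallocatedSketch(7);
  std::printf("policy = %d\n", op->policy());
  return 0;
}
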
@@ -688,21 +688,24 @@ LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr, int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -784,13 +787,14 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
operand->set_virtual_register(allocator_->GetVirtualRegister());
if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
@@ -812,17 +816,17 @@ LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -834,7 +838,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -868,7 +872,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineAsRegister(new LShiftI(op, left, right, does_deopt));
+ DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -881,7 +885,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineAsRegister(result);
}
@@ -899,7 +903,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, a1);
LOperand* right_operand = UseFixed(right, a0);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -995,15 +1000,17 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- hydrogen_env->is_arguments_adaptor(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1013,14 +1020,14 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument(argument_index++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
- if (!hydrogen_env->is_arguments_adaptor()) {
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
*argument_index_accumulator = argument_index;
}
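
The CreateEnvironment hunk also swaps the boolean is_arguments_adaptor() for a frame_type() query, so an environment can describe more than two kinds of frame; only JS_FUNCTION frames advance the outgoing argument index. A hedged sketch of that generalization is below; the enumerator names other than JS_FUNCTION and ARGUMENTS_ADAPTOR are assumptions, not necessarily the set V8 defines.

#include <cstdio>

// Assumed frame kinds; the real enum lives in V8's deoptimizer headers.
enum FrameType { JS_FUNCTION, ARGUMENTS_ADAPTOR, JS_CONSTRUCT };

// Mirrors the check in the hunk above: only real JS function frames
// contribute to (and consume) the pushed-argument index.
bool TracksArgumentIndex(FrameType type) { return type == JS_FUNCTION; }

int main() {
  std::printf("JS_FUNCTION tracks args: %d, adaptor tracks args: %d\n",
              TracksArgumentIndex(JS_FUNCTION),
              TracksArgumentIndex(ARGUMENTS_ADAPTOR));
  return 0;
}
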
@@ -1029,7 +1036,7 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
@@ -1039,10 +1046,10 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
}
- LBranch* result = new LBranch(UseRegister(value));
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
// Tagged values that are not known smis or booleans require a
// deoptimization environment.
Representation rep = value->representation();
@@ -1058,24 +1065,25 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LCmpMapAndBranch(value, temp);
+ return new(zone()) LCmpMapAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(UseRegister(length->value())));
+ return DefineAsRegister(
+ new(zone()) LArgumentsLength(UseRegister(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstanceOf* result =
- new LInstanceOf(UseFixed(instr->left(), a0),
- UseFixed(instr->right(), a1));
+ new(zone()) LInstanceOf(UseFixed(instr->left(), a0),
+ UseFixed(instr->right(), a1));
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1083,7 +1091,8 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), a0), FixedTemp(t0));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), a0),
+ FixedTemp(t0));
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1093,10 +1102,10 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* receiver = UseFixed(instr->receiver(), a0);
LOperand* length = UseFixed(instr->length(), a2);
LOperand* elements = UseFixed(instr->elements(), a3);
- LApplyArguments* result = new LApplyArguments(function,
- receiver,
- length,
- elements);
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
+ receiver,
+ length,
+ elements);
return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1104,75 +1113,77 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = Use(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
}
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
- return MarkAsCall(new LDeclareGlobals, instr);
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalObject(context));
+ return DefineAsRegister(new(zone()) LGlobalObject(context));
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, v0), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), a1);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), f4);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, NULL);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
return MarkAsCall(DefineFixedDouble(result, f4), instr);
} else if (op == kMathPowHalf) {
// Input cannot be the same as the result.
// See lithium-codegen-mips.cc::DoMathPowHalf.
LOperand* input = UseFixedDouble(instr->value(), f8);
LOperand* temp = FixedTemp(f6);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
return DefineFixedDouble(result, f4);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
@@ -1194,32 +1205,32 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
argument_count_ -= instr->argument_count();
LOperand* key = UseFixed(instr->key(), a2);
- return MarkAsCall(DefineFixed(new LCallKeyed(key), v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), v0), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, v0), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, v0), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, v0), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), a1);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1227,13 +1238,14 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* function = UseFixed(instr->function(), a1);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallFunction(function), v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), v0),
+ instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, v0), instr);
}
@@ -1259,7 +1271,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineAsRegister(new LBitI(left, right));
+ return DefineAsRegister(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1267,7 +1279,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
}
@@ -1276,7 +1288,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
- return DefineAsRegister(new LBitNotI(UseRegisterAtStart(instr->value())));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1292,7 +1305,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* dividend = UseFixed(instr->left(), a0);
LOperand* divisor = UseFixed(instr->right(), a1);
return AssignEnvironment(AssignPointerMap(
- DefineFixed(new LDivI(dividend, divisor), v0)));
+ DefineFixed(new(zone()) LDivI(dividend, divisor), v0)));
} else {
return DoArithmeticT(Token::DIV, instr);
}
@@ -1308,15 +1321,15 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- mod = new LModI(value, UseOrConstant(instr->right()));
+ mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
} else {
LOperand* dividend = UseRegister(instr->left());
LOperand* divisor = UseRegister(instr->right());
- mod = new LModI(dividend,
- divisor,
- TempRegister(),
- FixedTemp(f20),
- FixedTemp(f22));
+ mod = new(zone()) LModI(dividend,
+ divisor,
+ TempRegister(),
+ FixedTemp(f20),
+ FixedTemp(f22));
}
if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
@@ -1334,7 +1347,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), f2);
LOperand* right = UseFixedDouble(instr->right(), f4);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, f2), instr);
}
}
@@ -1355,7 +1368,7 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
} else {
left = UseRegisterAtStart(instr->LeastConstantOperand());
}
- LMulI* mul = new LMulI(left, right, temp);
+ LMulI* mul = new(zone()) LMulI(left, right, temp);
if (instr->CheckFlag(HValue::kCanOverflow) ||
instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
AssignEnvironment(mul);
@@ -1377,7 +1390,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineAsRegister(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1397,7 +1410,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineAsRegister(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1422,7 +1435,7 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
LOperand* right = exponent_type.IsDouble() ?
UseFixedDouble(instr->right(), f4) :
UseFixed(instr->right(), a2);
- LPower* result = new LPower(left, right);
+ LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, f0),
instr,
CAN_DEOPTIMIZE_EAGERLY);
@@ -1433,7 +1446,7 @@ LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
ASSERT(instr->representation().IsDouble());
ASSERT(instr->global_object()->representation().IsTagged());
LOperand* global_object = UseFixed(instr->global_object(), a0);
- LRandom* result = new LRandom(global_object);
+ LRandom* result = new(zone()) LRandom(global_object);
return MarkAsCall(DefineFixedDouble(result, f0), instr);
}
@@ -1444,7 +1457,7 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1457,14 +1470,14 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseRegisterOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1473,47 +1486,50 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LCmpConstantEqAndBranch(
+ UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
+ temp);
}
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
- return new LIsStringAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
+ temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ return new(zone()) LIsUndetectableAndBranch(
+ UseRegisterAtStart(instr->value()), TempRegister());
}
@@ -1523,7 +1539,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LStringCompareAndBranch* result = new LStringCompareAndBranch(left, right);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
return MarkAsCall(result, instr);
}
@@ -1531,7 +1548,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1540,14 +1558,14 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(
+ return new(zone()) LHasCachedArrayIndexAndBranch(
UseRegisterAtStart(instr->value()));
}
@@ -1555,40 +1573,41 @@ LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LClassOfTestAndBranch(UseRegister(instr->value()),
- TempRegister());
+ return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
+ TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object, TempRegister());
+ LValueOf* result = new(zone()) LValueOf(object, TempRegister());
return DefineAsRegister(result);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- UseRegister(instr->length())));
+ LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* length = UseRegister(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1601,7 +1620,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), a0);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1624,7 +1643,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
@@ -1632,14 +1651,17 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
if (!needs_check) {
- res = DefineAsRegister(new LSmiUntag(value, needs_check));
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
: NULL;
LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(f22)
: NULL;
- res = DefineSameAsFirst(new LTaggedToI(value, temp1, temp2, temp3));
+ res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
+ temp1,
+ temp2,
+ temp3));
res = AssignEnvironment(res);
}
return res;
@@ -1653,16 +1675,15 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that the temp and result_temp registers are
// different.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp1, temp2);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
Define(result, result_temp);
return AssignPointerMap(result);
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- LDoubleToI* res =
- new LDoubleToI(value,
- TempRegister(),
- instr->CanTruncateToInt32() ? TempRegister() : NULL);
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
+ LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
return AssignEnvironment(DefineAsRegister(res));
}
} else if (from.IsInteger32()) {
@@ -1670,15 +1691,15 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
HValue* val = instr->value();
LOperand* value = UseRegisterAtStart(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineAsRegister(new LSmiTag(value));
+ return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
} else {
ASSERT(to.IsDouble());
LOperand* value = Use(instr->value());
- return DefineAsRegister(new LInteger32ToDouble(value));
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
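
In the DoChange hunk, a tagged-to-int32 change only needs LSmiUntag when the input is statically known to be a smi; otherwise LTaggedToI is emitted with temp registers and a deopt environment to handle the heap-number path. A standalone sketch of the 32-bit smi encoding that motivates this split, assuming the usual one-bit smi tag of 0 (illustrative constants, not V8's headers):

#include <cassert>
#include <cstdint>
#include <cstdio>

// 32-bit smi encoding assumed here: low bit 0 means "small integer",
// with the payload stored in the upper 31 bits.
const int32_t kSmiTag = 0;
const int32_t kSmiTagSize = 1;
const int32_t kSmiTagMask = (1 << kSmiTagSize) - 1;

bool IsSmi(int32_t tagged) { return (tagged & kSmiTagMask) == kSmiTag; }

// Fast path (LSmiUntag): a plain arithmetic shift, no checks needed.
int32_t SmiUntag(int32_t tagged) { return tagged >> kSmiTagSize; }

// Slow-path stand-in (LTaggedToI): check the tag first and bail out --
// the real code deoptimizes or converts the heap number instead.
bool TaggedToInt32(int32_t tagged, int32_t* out) {
  if (!IsSmi(tagged)) return false;  // would deopt / take the number path
  *out = SmiUntag(tagged);
  return true;
}

int main() {
  int32_t v = 0;
  assert(TaggedToInt32(42 << kSmiTagSize, &v) && v == 42);
  assert(!TaggedToInt32((42 << kSmiTagSize) | 1, &v));
  std::printf("smi untag ok\n");
  return 0;
}
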
@@ -1688,13 +1709,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckInstanceType(value);
+ LInstruction* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
@@ -1702,26 +1723,26 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new LCheckPrototypeMaps(temp1, temp2);
+ LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMap(value);
return AssignEnvironment(result);
}
@@ -1732,57 +1753,32 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
// Revisit this decision, here and 8 lines below.
- return DefineAsRegister(new LClampDToUint8(reg, FixedTemp(f22)));
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
} else if (input_rep.IsInteger32()) {
- return DefineAsRegister(new LClampIToUint8(reg));
+ return DefineAsRegister(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve f22 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg, FixedTemp(f22));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
return AssignEnvironment(DefineAsRegister(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LDoubleToI* res = new LDoubleToI(reg, temp1, temp2);
- return AssignEnvironment(DefineAsRegister(res));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LOperand* temp3 = FixedTemp(f22);
- LTaggedToI* res = new LTaggedToI(reg, temp1, temp2, temp3);
- return AssignEnvironment(DefineSameAsFirst(res));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), v0));
+ return new(zone()) LReturn(UseFixed(instr->value(), v0));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
- return DefineAsRegister(new LConstantD);
+ return DefineAsRegister(new(zone()) LConstantD);
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1791,7 +1787,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1800,7 +1796,7 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), a0);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1810,8 +1806,8 @@ LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
// Use a temp to check the value in the cell in the case where we perform
// a hole check.
return instr->RequiresHoleCheck()
- ? AssignEnvironment(new LStoreGlobalCell(value, TempRegister()))
- : new LStoreGlobalCell(value, NULL);
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
@@ -1819,14 +1815,15 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), a1);
LOperand* value = UseFixed(instr->value(), a0);
LStoreGlobalGeneric* result =
- new LStoreGlobalGeneric(global_object, value);
+ new(zone()) LStoreGlobalGeneric(global_object, value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- LInstruction* result = DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1841,14 +1838,14 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
context = UseRegister(instr->context());
value = UseRegister(instr->value());
}
- LInstruction* result = new LStoreContextSlot(context, value);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value);
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
return DefineAsRegister(
- new LLoadNamedField(UseRegisterAtStart(instr->object())));
+ new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
}
@@ -1857,11 +1854,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), a0);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, v0), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1869,7 +1868,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), a0);
- LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), v0);
+ LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), v0);
return MarkAsCall(result, instr);
}
@@ -1877,20 +1876,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1900,7 +1899,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
if (instr->RequiresHoleCheck()) AssignEnvironment(result);
return DefineAsRegister(result);
}
@@ -1913,7 +1912,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseTempRegister(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1933,7 +1932,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1947,7 +1946,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* key = UseFixed(instr->key(), a0);
LInstruction* result =
- DefineFixed(new LLoadKeyedGeneric(object, key), v0);
+ DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), v0);
return MarkAsCall(result, instr);
}
@@ -1966,7 +1965,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastElement(obj, key, val);
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1980,7 +1979,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
@@ -2007,9 +2006,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -2022,7 +2021,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+ return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
}
@@ -2033,14 +2032,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, NULL);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), a0);
LOperand* fixed_object_reg = FixedTemp(a2);
LOperand* new_map_reg = FixedTemp(a3);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, v0), instr);
}
}
@@ -2057,7 +2058,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new LStoreNamedField(obj, val);
+ return new(zone()) LStoreNamedField(obj, val);
}
@@ -2065,7 +2066,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* obj = UseFixed(instr->object(), a1);
LOperand* val = UseFixed(instr->value(), a0);
- LInstruction* result = new LStoreNamedGeneric(obj, val);
+ LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
return MarkAsCall(result, instr);
}
@@ -2073,60 +2074,68 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), v0),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new(zone()) LAllocateObject(
+ TempRegister(), TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFastLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LOperand* object = UseFixed(instr->object(), a0);
LOperand* key = UseFixed(instr->key(), a1);
- LDeleteProperty* result = new LDeleteProperty(object, key);
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -2134,13 +2143,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2150,13 +2159,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, v0), instr);
}
@@ -2173,32 +2182,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), a0);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseFixed(instr->value(), a0));
+ LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), a0));
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2221,7 +2231,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new LLazyBailout;
+ LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2235,10 +2245,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2250,7 +2260,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2268,9 +2279,37 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseRegisterAtStart(instr->key());
LOperand* object = UseRegisterAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
+LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
+ LOperand* object = UseFixed(instr->enumerable(), a0);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
+ return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
+ LOperand* map = UseRegister(instr->map());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LForInCacheArray(map)));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* map = UseRegisterAtStart(instr->map());
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
+ LOperand* object = UseRegister(instr->object());
+ LOperand* index = UseRegister(instr->index());
+ return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
+}
+
+
} } // namespace v8::internal
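
The four new builder methods above (DoForInPrepareMap, DoForInCacheArray, DoCheckMapValue, DoLoadFieldByIndex) are the pieces of the for-in fast path: validate the receiver's map and enum cache, pull the cached key array out of its descriptors, then re-check the map on every iteration before loading each field by its cached index. The following is a conceptual, self-contained model of that control flow only; the toy types stand in for V8's Map and FixedArray and their names are illustrative.

#include <cstdio>
#include <string>
#include <vector>

// Toy stand-ins for heap objects; only what the control flow needs.
struct Map { std::vector<std::string> enum_cache; };  // cached own keys
struct JSObject { const Map* map; };

void SlowPathEnumerate(const JSObject&) { std::puts("(runtime enumeration)"); }

void ForInSketch(const JSObject& obj) {
  const Map* map = obj.map;                                 // ForInPrepareMap
  const std::vector<std::string>& keys = map->enum_cache;   // ForInCacheArray
  for (size_t i = 0; i < keys.size(); ++i) {
    if (obj.map != map) {          // CheckMapValue: fall back (deopt) if the
      SlowPathEnumerate(obj);      // object's shape changed mid-loop
      return;
    }
    // LoadFieldByIndex would fetch the property value by its cached index;
    // this model only has the key to show.
    std::printf("key: %s\n", keys[i].c_str());
  }
}

int main() {
  Map m{{"x", "y"}};
  JSObject o{&m};
  ForInSketch(o);
  return 0;
}
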
diff --git a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h
index f4c3c212764..2128ce3e9eb 100644
--- a/deps/v8/src/mips/lithium-mips.h
+++ b/deps/v8/src/mips/lithium-mips.h
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -172,7 +173,11 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
- V(ValueOf)
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -1917,6 +1922,18 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
+ public:
+ LAllocateObject(LOperand* temp1, LOperand* temp2) {
+ temps_[0] = temp1;
+ temps_[1] = temp2;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
@@ -2064,6 +2081,62 @@ class LIn: public LTemplateInstruction<1, 2, 0> {
};
+class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInPrepareMap(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
+};
+
+
+class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInCacheArray(LOperand* map) {
+ inputs_[0] = map;
+ }
+
+ LOperand* map() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
+
+ int idx() {
+ return HForInCacheArray::cast(this->hydrogen_value())->idx();
+ }
+};
+
+
+class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
+ public:
+ LCheckMapValue(LOperand* value, LOperand* map) {
+ inputs_[0] = value;
+ inputs_[1] = map;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* map() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
+};
+
+
+class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadFieldByIndex(LOperand* object, LOperand* index) {
+ inputs_[0] = object;
+ inputs_[1] = index;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+ LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
+};
+
+
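
The new instruction classes above inherit from LTemplateInstruction<R, I, T>, whose template arguments give the number of results, inputs and temps the instruction embeds (for example, LCheckMapValue is <0, 2, 0>: no result, two inputs, no temps). A stripped-down illustration of that shape, assuming simple fixed-size storage rather than V8's actual container types:

#include <cstdio>

struct LOperandSketch { int id; };

// R = results, I = inputs, T = temps. Sizing arrays with "N ? N : 1"
// sidesteps zero-length arrays while keeping the indices meaningful.
template <int R, int I, int T>
class LTemplateInstructionSketch {
 protected:
  LOperandSketch* results_[R ? R : 1];
  LOperandSketch* inputs_[I ? I : 1];
  LOperandSketch* temps_[T ? T : 1];
};

// Counterpart of LCheckMapValue: zero results, two inputs, zero temps.
class LCheckMapValueSketch : public LTemplateInstructionSketch<0, 2, 0> {
 public:
  LCheckMapValueSketch(LOperandSketch* value, LOperandSketch* map) {
    inputs_[0] = value;
    inputs_[1] = map;
  }
  LOperandSketch* value() { return inputs_[0]; }
  LOperandSketch* map() { return inputs_[1]; }
};

int main() {
  LOperandSketch v{1}, m{2};
  LCheckMapValueSketch instr(&v, &m);
  std::printf("inputs: %d %d\n", instr.value()->id, instr.map()->id);
  return 0;
}
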
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@@ -2131,6 +2204,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2160,6 +2234,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2265,6 +2340,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index 7a733bca5b4..77d03b55549 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -5093,6 +5093,48 @@ void MacroAssembler::LoadInstanceDescriptors(Register map,
}
+void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
+ Label next;
+ // Preload a couple of values used in the loop.
+ Register empty_fixed_array_value = t2;
+ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = t3;
+ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ mov(a1, a0);
+ bind(&next);
+
+ // Check that there are no elements. Register a1 contains the
+ // current JS object we've reached through the prototype chain.
+ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
+ Branch(call_runtime, ne, a2, Operand(empty_fixed_array_value));
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in a2 for the subsequent
+ // prototype load.
+ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
+ lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOrBitField3Offset));
+ JumpIfSmi(a3, call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (a3). This is the case if the next enumeration
+ // index field does not contain a smi.
+ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset));
+ JumpIfSmi(a3, call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ Label check_prototype;
+ Branch(&check_prototype, eq, a1, Operand(a0));
+ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ Branch(call_runtime, ne, a3, Operand(empty_fixed_array_value));
+
+ // Load the prototype from the map and loop if non-null.
+ bind(&check_prototype);
+ lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset));
+ Branch(&next, ne, a1, Operand(null_value));
+}
+
+
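The hand-written MIPS sequence above implements the fast-path check used by the new for-in support: walk the prototype chain and bail out to the runtime unless every object has empty elements, non-empty instance descriptors with an enum cache, and (for every object except the receiver itself) an empty cache. A minimal, self-contained C++ sketch of the same control flow, using stand-in types rather than the real V8 classes:

    #include <cstdio>

    // Stand-in for the state the assembly reads; not the real V8 object model.
    struct ObjectModel {
      bool has_elements;       // JSObject elements != empty_fixed_array
      bool has_descriptors;    // instance descriptors slot is not a Smi
      bool has_enum_cache;     // enumeration-index field holds a cache bridge
      bool enum_cache_empty;   // bridge cache == empty_fixed_array
      ObjectModel* prototype;  // nullptr plays the role of null_value
    };

    bool CheckEnumCache(ObjectModel* receiver) {
      for (ObjectModel* current = receiver; current != nullptr;
           current = current->prototype) {
        if (current->has_elements) return false;      // -> call_runtime
        if (!current->has_descriptors) return false;  // -> call_runtime
        if (!current->has_enum_cache) return false;   // -> call_runtime
        // Every object but the receiver must have an empty cache.
        if (current != receiver && !current->enum_cache_empty) return false;
      }
      return true;  // safe to take the fast for-in path
    }

    int main() {
      ObjectModel proto = {false, true, true, true, nullptr};
      ObjectModel obj   = {false, true, true, false, &proto};
      printf("%s\n", CheckEnumCache(&obj) ? "fast path" : "runtime");  // fast path
      return 0;
    }

The receiver's own cache may be non-empty; that is exactly the cache the fast for-in path goes on to reuse.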
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
ASSERT(!output_reg.is(input_reg));
Label done;
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index 56a3433b86f..6ae8657e1e6 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -1353,6 +1353,10 @@ class MacroAssembler: public Assembler {
Register value,
Register scratch);
+ // Expects object in a0 and returns map with validated enum cache
+ // in a0. Assumes that any other register can be used as a scratch.
+ void CheckEnumCache(Register null_value, Label* call_runtime);
+
private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
index cb210fed04f..330ff2b8d1e 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -1056,7 +1056,7 @@ int RegExpMacroAssemblerMIPS::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid.
int delta = code_handle->address() - re_code->address();
diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc
index b9ab2422aed..fde5ba994c1 100644
--- a/deps/v8/src/mips/stub-cache-mips.cc
+++ b/deps/v8/src/mips/stub-cache-mips.cc
@@ -43,51 +43,74 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // Number of the cache entry, not scaled.
Register offset,
Register scratch,
- Register scratch2) {
+ Register scratch2,
+ Register offset_scratch) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
+ uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
// Check the relative positions of the address fields.
ASSERT(value_off_addr > key_off_addr);
ASSERT((value_off_addr - key_off_addr) % 4 == 0);
ASSERT((value_off_addr - key_off_addr) < (256 * 4));
+ ASSERT(map_off_addr > key_off_addr);
+ ASSERT((map_off_addr - key_off_addr) % 4 == 0);
+ ASSERT((map_off_addr - key_off_addr) < (256 * 4));
Label miss;
- Register offsets_base_addr = scratch;
+ Register base_addr = scratch;
+ scratch = no_reg;
+
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ sll(offset_scratch, offset, 1);
+ __ Addu(offset_scratch, offset_scratch, offset);
+
+ // Calculate the base address of the entry.
+ __ li(base_addr, Operand(key_offset));
+ __ sll(at, offset_scratch, kPointerSizeLog2);
+ __ Addu(base_addr, base_addr, at);
// Check that the key in the entry matches the name.
- __ li(offsets_base_addr, Operand(key_offset));
- __ sll(scratch2, offset, 1);
- __ addu(scratch2, offsets_base_addr, scratch2);
- __ lw(scratch2, MemOperand(scratch2));
- __ Branch(&miss, ne, name, Operand(scratch2));
+ __ lw(at, MemOperand(base_addr, 0));
+ __ Branch(&miss, ne, name, Operand(at));
+
+ // Check the map matches.
+ __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
+ __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ Branch(&miss, ne, at, Operand(scratch2));
// Get the code entry from the cache.
- __ Addu(offsets_base_addr, offsets_base_addr,
- Operand(value_off_addr - key_off_addr));
- __ sll(scratch2, offset, 1);
- __ addu(scratch2, offsets_base_addr, scratch2);
- __ lw(scratch2, MemOperand(scratch2));
+ Register code = scratch2;
+ scratch2 = no_reg;
+ __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
// Check that the flags match what we're looking for.
- __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
- __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
- __ Branch(&miss, ne, scratch2, Operand(flags));
+ Register flags_reg = base_addr;
+ base_addr = no_reg;
+ __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
+ __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
+ __ Branch(&miss, ne, flags_reg, Operand(flags));
- // Re-load code entry from cache.
- __ sll(offset, offset, 1);
- __ addu(offset, offset, offsets_base_addr);
- __ lw(offset, MemOperand(offset));
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
// Jump to the first instruction in the code stub.
- __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(offset);
+ __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
// Miss: fall through.
__ bind(&miss);
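The rewritten ProbeTable reflects a stub-cache entry growing from two words (name, code) to three (name, code, map), which is why the unscaled index is multiplied by 3 before being shifted into a byte offset and why the map is read at a fixed displacement from the key. A small stand-alone model of that layout arithmetic, with illustrative constants only:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Toy model of one stub-cache entry after this patch: three 32-bit words,
    // which is where the "entry size being 12" assert and the multiply-by-3
    // in ProbeTable come from. Values below are illustrative, not V8's.
    struct Entry {
      uint32_t key;    // symbol (name)
      uint32_t value;  // code object
      uint32_t map;    // receiver map
    };

    int main() {
      const int kPointerSizeLog2 = 2;  // 32-bit pointers
      uint32_t index = 5;              // unscaled entry number from the hash

      // The sll/Addu/sll sequence above: index * 3, then scaled to bytes.
      uint32_t byte_offset = ((index << 1) + index) << kPointerSizeLog2;

      printf("sizeof(Entry) = %u\n", static_cast<unsigned>(sizeof(Entry)));  // 12
      printf("entry %u starts at byte %u\n", static_cast<unsigned>(index),
             static_cast<unsigned>(byte_offset));                            // 60
      printf("map field lives at key address + %u\n",
             static_cast<unsigned>(offsetof(Entry, map)));                   // 8
      return 0;
    }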
@@ -157,13 +180,14 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 12.
+ ASSERT(sizeof(Entry) == 12);
// Make sure the flags does not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -179,39 +203,66 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!extra2.is(scratch));
ASSERT(!extra2.is(extra));
- // Check scratch, extra and extra2 registers are valid.
+ // Check register validity.
ASSERT(!scratch.is(no_reg));
ASSERT(!extra.is(no_reg));
ASSERT(!extra2.is(no_reg));
+ ASSERT(!extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
+ extra2, extra3);
// Check that the receiver isn't a smi.
- __ JumpIfSmi(receiver, &miss, t0);
+ __ JumpIfSmi(receiver, &miss);
// Get the map of the receiver and compute the hash.
__ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
- __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ Addu(scratch, scratch, Operand(t8));
- __ Xor(scratch, scratch, Operand(flags));
- __ And(scratch,
- scratch,
- Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+ __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ Addu(scratch, scratch, at);
+ uint32_t mask = kPrimaryTableSize - 1;
+ // We shift out the last two bits because they are not part of the hash and
+ // they are always 01 for maps.
+ __ srl(scratch, scratch, kHeapObjectTagSize);
+ __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
+ __ And(scratch, scratch, Operand(mask));
// Probe the primary table.
- ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kPrimary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Primary miss: Compute hash for secondary probe.
- __ Subu(scratch, scratch, Operand(name));
- __ Addu(scratch, scratch, Operand(flags));
- __ And(scratch,
- scratch,
- Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
+ __ srl(at, name, kHeapObjectTagSize);
+ __ Subu(scratch, scratch, at);
+ uint32_t mask2 = kSecondaryTableSize - 1;
+ __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
+ __ And(scratch, scratch, Operand(mask2));
// Probe the secondary table.
- ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kSecondary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
+ extra2, extra3);
}
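GenerateProbe now mixes the receiver's map into the primary hash and folds the code flags in after shifting out the heap-object tag bits. The snippet below recomputes both probe indices with plain integers; the table sizes and input words are assumptions chosen only to show the arithmetic, not V8's actual constants.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kHeapObjectTagSize = 2;
      const uint32_t kPrimaryTableSize = 2048;   // assumed power of two
      const uint32_t kSecondaryTableSize = 512;  // assumed power of two
      uint32_t name_hash = 0x1234abcd;  // contents of String::kHashFieldOffset
      uint32_t map_word  = 0x0badf00d;  // address of the receiver's map
      uint32_t name_word = 0x00c0ffee;  // address of the name symbol
      uint32_t flags     = 0x00000180;  // Code::Flags with type bits cleared

      // Primary probe: add the map to the name hash, drop the tag bits,
      // fold in the flags, mask to the table size.
      uint32_t primary = (((name_hash + map_word) >> kHeapObjectTagSize) ^
                          (flags >> kHeapObjectTagSize)) &
                         (kPrimaryTableSize - 1);

      // Secondary probe: start from the primary index, subtract the name,
      // add the flags back in, mask to the smaller table.
      uint32_t secondary = (primary - (name_word >> kHeapObjectTagSize) +
                            (flags >> kHeapObjectTagSize)) &
                           (kSecondaryTableSize - 1);

      printf("primary=%u secondary=%u\n",
             static_cast<unsigned>(primary), static_cast<unsigned>(secondary));
      return 0;
    }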
diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc
index 7aef91215d3..16e03e7548f 100644
--- a/deps/v8/src/objects-debug.cc
+++ b/deps/v8/src/objects-debug.cc
@@ -138,6 +138,9 @@ void HeapObject::HeapObjectVerify() {
case JS_VALUE_TYPE:
JSValue::cast(this)->JSValueVerify();
break;
+ case JS_DATE_TYPE:
+ JSDate::cast(this)->JSDateVerify();
+ break;
case JS_FUNCTION_TYPE:
JSFunction::cast(this)->JSFunctionVerify();
break;
@@ -371,6 +374,53 @@ void JSValue::JSValueVerify() {
}
+void JSDate::JSDateVerify() {
+ if (value()->IsHeapObject()) {
+ VerifyHeapPointer(value());
+ }
+ CHECK(value()->IsUndefined() || value()->IsSmi() || value()->IsHeapNumber());
+ CHECK(year()->IsUndefined() || year()->IsSmi() || year()->IsNaN());
+ CHECK(month()->IsUndefined() || month()->IsSmi() || month()->IsNaN());
+ CHECK(day()->IsUndefined() || day()->IsSmi() || day()->IsNaN());
+ CHECK(weekday()->IsUndefined() || weekday()->IsSmi() || weekday()->IsNaN());
+ CHECK(hour()->IsUndefined() || hour()->IsSmi() || hour()->IsNaN());
+ CHECK(min()->IsUndefined() || min()->IsSmi() || min()->IsNaN());
+ CHECK(sec()->IsUndefined() || sec()->IsSmi() || sec()->IsNaN());
+ CHECK(cache_stamp()->IsUndefined() ||
+ cache_stamp()->IsSmi() ||
+ cache_stamp()->IsNaN());
+
+ if (month()->IsSmi()) {
+ int month = Smi::cast(this->month())->value();
+ CHECK(0 <= month && month <= 11);
+ }
+ if (day()->IsSmi()) {
+ int day = Smi::cast(this->day())->value();
+ CHECK(1 <= day && day <= 31);
+ }
+ if (hour()->IsSmi()) {
+ int hour = Smi::cast(this->hour())->value();
+ CHECK(0 <= hour && hour <= 23);
+ }
+ if (min()->IsSmi()) {
+ int min = Smi::cast(this->min())->value();
+ CHECK(0 <= min && min <= 59);
+ }
+ if (sec()->IsSmi()) {
+ int sec = Smi::cast(this->sec())->value();
+ CHECK(0 <= sec && sec <= 59);
+ }
+ if (weekday()->IsSmi()) {
+ int weekday = Smi::cast(this->weekday())->value();
+ CHECK(0 <= weekday && weekday <= 6);
+ }
+ if (cache_stamp()->IsSmi()) {
+ CHECK(Smi::cast(cache_stamp())->value() <=
+ Smi::cast(Isolate::Current()->date_cache()->stamp())->value());
+ }
+}
+
+
void JSMessageObject::JSMessageObjectVerify() {
CHECK(IsJSMessageObject());
CHECK(type()->IsString());
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index bc6217ba5eb..d0e9bf82bac 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -605,6 +605,7 @@ TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
+TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
@@ -800,6 +801,11 @@ double Object::Number() {
}
+bool Object::IsNaN() {
+ return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
+}
+
+
MaybeObject* Object::ToSmi() {
if (IsSmi()) return this;
if (IsHeapNumber()) {
@@ -1425,6 +1431,8 @@ int JSObject::GetHeaderSize() {
return JSFunction::kSize;
case JS_VALUE_TYPE:
return JSValue::kSize;
+ case JS_DATE_TYPE:
+ return JSDate::kSize;
case JS_ARRAY_TYPE:
return JSArray::kSize;
case JS_WEAK_MAP_TYPE:
@@ -1988,7 +1996,8 @@ AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
bool DescriptorArray::IsProperty(int descriptor_number) {
- return IsRealProperty(GetType(descriptor_number));
+ Entry entry(this, descriptor_number);
+ return IsPropertyDescriptor(&entry);
}
@@ -4118,6 +4127,24 @@ JSValue* JSValue::cast(Object* obj) {
}
+ACCESSORS(JSDate, value, Object, kValueOffset)
+ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
+ACCESSORS(JSDate, year, Object, kYearOffset)
+ACCESSORS(JSDate, month, Object, kMonthOffset)
+ACCESSORS(JSDate, day, Object, kDayOffset)
+ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
+ACCESSORS(JSDate, hour, Object, kHourOffset)
+ACCESSORS(JSDate, min, Object, kMinOffset)
+ACCESSORS(JSDate, sec, Object, kSecOffset)
+
+
+JSDate* JSDate::cast(Object* obj) {
+ ASSERT(obj->IsJSDate());
+ ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
+ return reinterpret_cast<JSDate*>(obj);
+}
+
+
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
diff --git a/deps/v8/src/objects-printer.cc b/deps/v8/src/objects-printer.cc
index d5c02f4321a..38e61386b27 100644
--- a/deps/v8/src/objects-printer.cc
+++ b/deps/v8/src/objects-printer.cc
@@ -151,6 +151,9 @@ void HeapObject::HeapObjectPrint(FILE* out) {
PrintF(out, "Value wrapper around:");
JSValue::cast(this)->value()->Print(out);
break;
+ case JS_DATE_TYPE:
+ JSDate::cast(this)->value()->Print(out);
+ break;
case CODE_TYPE:
Code::cast(this)->CodePrint(out);
break;
@@ -660,6 +663,30 @@ char* String::ToAsciiArray() {
}
+static const char* const weekdays[] = {
+ "???", "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"
+};
+
+void JSDate::JSDatePrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSDate");
+ PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - value = ");
+ value()->Print(out);
+ if (!year()->IsSmi()) {
+ PrintF(out, " - time = NaN\n");
+ } else {
+ PrintF(out, " - time = %s %04d/%02d/%02d %02d:%02d:%02d\n",
+ weekdays[weekday()->IsSmi() ? Smi::cast(weekday())->value() + 1 : 0],
+ year()->IsSmi() ? Smi::cast(year())->value() : -1,
+ month()->IsSmi() ? Smi::cast(month())->value() : -1,
+ day()->IsSmi() ? Smi::cast(day())->value() : -1,
+ hour()->IsSmi() ? Smi::cast(hour())->value() : -1,
+ min()->IsSmi() ? Smi::cast(min())->value() : -1,
+ sec()->IsSmi() ? Smi::cast(sec())->value() : -1);
+ }
+}
+
+
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
diff --git a/deps/v8/src/objects-visiting.cc b/deps/v8/src/objects-visiting.cc
index 9ca102b2fda..c7c8a87895a 100644
--- a/deps/v8/src/objects-visiting.cc
+++ b/deps/v8/src/objects-visiting.cc
@@ -134,6 +134,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
+ case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 8941151c0cc..e0a95372b48 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -33,6 +33,7 @@
#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "date.h"
#include "elements.h"
#include "execution.h"
#include "full-codegen.h"
@@ -56,50 +57,8 @@ namespace v8 {
namespace internal {
void PrintElementsKind(FILE* out, ElementsKind kind) {
- switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- PrintF(out, "FAST_SMI_ONLY_ELEMENTS");
- break;
- case FAST_ELEMENTS:
- PrintF(out, "FAST_ELEMENTS");
- break;
- case FAST_DOUBLE_ELEMENTS:
- PrintF(out, "FAST_DOUBLE_ELEMENTS");
- break;
- case DICTIONARY_ELEMENTS:
- PrintF(out, "DICTIONARY_ELEMENTS");
- break;
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS");
- break;
- case EXTERNAL_BYTE_ELEMENTS:
- PrintF(out, "EXTERNAL_BYTE_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS");
- break;
- case EXTERNAL_SHORT_ELEMENTS:
- PrintF(out, "EXTERNAL_SHORT_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS");
- break;
- case EXTERNAL_INT_ELEMENTS:
- PrintF(out, "EXTERNAL_INT_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS");
- break;
- case EXTERNAL_FLOAT_ELEMENTS:
- PrintF(out, "EXTERNAL_FLOAT_ELEMENTS");
- break;
- case EXTERNAL_DOUBLE_ELEMENTS:
- PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
- break;
- }
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
+ PrintF(out, "%s", accessor->name());
}
@@ -733,10 +692,7 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
if (js_object->elements() != heap->empty_fixed_array()) {
MaybeObject* result = js_object->GetElementsAccessor()->Get(
- js_object->elements(),
- index,
- js_object,
- receiver);
+ receiver, js_object, index);
if (result != heap->the_hole_value()) return result;
}
}
@@ -1112,7 +1068,7 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) {
switch (map()->instance_type()) {
case JS_ARRAY_TYPE: {
double length = JSArray::cast(this)->length()->Number();
- accumulator->Add("<JS array[%u]>", static_cast<uint32_t>(length));
+ accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
break;
}
case JS_WEAK_MAP_TYPE: {
@@ -1383,6 +1339,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
+ case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_SET_TYPE:
case JS_MAP_TYPE:
@@ -4367,7 +4324,7 @@ void JSObject::LookupCallback(String* name, LookupResult* result) {
static bool UpdateGetterSetterInDictionary(
SeededNumberDictionary* dictionary,
uint32_t index,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes) {
int entry = dictionary->FindEntry(index);
@@ -4381,7 +4338,7 @@ static bool UpdateGetterSetterInDictionary(
dictionary->DetailsAtPut(entry,
PropertyDetails(attributes, CALLBACKS, index));
}
- AccessorPair::cast(result)->set(is_getter, fun);
+ AccessorPair::cast(result)->set(component, fun);
return true;
}
}
@@ -4390,7 +4347,7 @@ static bool UpdateGetterSetterInDictionary(
MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes) {
switch (GetElementsKind()) {
@@ -4412,7 +4369,7 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
case DICTIONARY_ELEMENTS:
if (UpdateGetterSetterInDictionary(element_dictionary(),
index,
- is_getter,
+ component,
fun,
attributes)) {
return GetHeap()->undefined_value();
@@ -4433,7 +4390,7 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
SeededNumberDictionary::cast(arguments);
if (UpdateGetterSetterInDictionary(dictionary,
index,
- is_getter,
+ component,
fun,
attributes)) {
return GetHeap()->undefined_value();
@@ -4448,14 +4405,14 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
{ MaybeObject* maybe_accessors = GetHeap()->AllocateAccessorPair();
if (!maybe_accessors->To(&accessors)) return maybe_accessors;
}
- accessors->set(is_getter, fun);
+ accessors->set(component, fun);
return SetElementCallback(index, accessors, attributes);
}
MaybeObject* JSObject::DefinePropertyAccessor(String* name,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes) {
// Lookup the name.
@@ -4473,7 +4430,7 @@ MaybeObject* JSObject::DefinePropertyAccessor(String* name,
AccessorPair::cast(obj)->CopyWithoutTransitions();
if (!maybe_copy->To(&copy)) return maybe_copy;
}
- copy->set(is_getter, fun);
+ copy->set(component, fun);
// Use set to update attributes.
return SetPropertyCallback(name, copy, attributes);
}
@@ -4484,7 +4441,7 @@ MaybeObject* JSObject::DefinePropertyAccessor(String* name,
{ MaybeObject* maybe_accessors = GetHeap()->AllocateAccessorPair();
if (!maybe_accessors->To(&accessors)) return maybe_accessors;
}
- accessors->set(is_getter, fun);
+ accessors->set(component, fun);
return SetPropertyCallback(name, accessors, attributes);
}
@@ -4593,7 +4550,7 @@ MaybeObject* JSObject::SetPropertyCallback(String* name,
}
MaybeObject* JSObject::DefineAccessor(String* name,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes) {
ASSERT(fun->IsSpecFunction() || fun->IsUndefined());
@@ -4609,7 +4566,7 @@ MaybeObject* JSObject::DefineAccessor(String* name,
Object* proto = GetPrototype();
if (proto->IsNull()) return this;
ASSERT(proto->IsJSGlobalObject());
- return JSObject::cast(proto)->DefineAccessor(name, is_getter,
+ return JSObject::cast(proto)->DefineAccessor(name, component,
fun, attributes);
}
@@ -4624,8 +4581,8 @@ MaybeObject* JSObject::DefineAccessor(String* name,
uint32_t index = 0;
return name->AsArrayIndex(&index) ?
- DefineElementAccessor(index, is_getter, fun, attributes) :
- DefinePropertyAccessor(name, is_getter, fun, attributes);
+ DefineElementAccessor(index, component, fun, attributes) :
+ DefinePropertyAccessor(name, component, fun, attributes);
}
@@ -4711,7 +4668,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
}
-Object* JSObject::LookupAccessor(String* name, bool is_getter) {
+Object* JSObject::LookupAccessor(String* name, AccessorComponent component) {
Heap* heap = GetHeap();
// Make sure that the top context does not change when doing callbacks or
@@ -4737,12 +4694,9 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
int entry = dictionary->FindEntry(index);
if (entry != SeededNumberDictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- if (element->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(element);
- return is_getter ? accessors->getter() : accessors->setter();
- }
+ if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
+ element->IsAccessorPair()) {
+ return AccessorPair::cast(element)->SafeGet(component);
}
}
}
@@ -4758,8 +4712,7 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
if (result.type() == CALLBACKS) {
Object* obj = result.GetCallbackObject();
if (obj->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(obj);
- return is_getter ? accessors->getter() : accessors->setter();
+ return AccessorPair::cast(obj)->SafeGet(component);
}
}
}
@@ -5597,7 +5550,7 @@ MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
ElementsAccessor* accessor = array->GetElementsAccessor();
MaybeObject* maybe_result =
- accessor->AddElementsToFixedArray(array->elements(), this, array, array);
+ accessor->AddElementsToFixedArray(array, array, this);
FixedArray* result;
if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
@@ -5615,7 +5568,7 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
MaybeObject* maybe_result =
- accessor->AddElementsToFixedArray(other, this, NULL, NULL);
+ accessor->AddElementsToFixedArray(NULL, NULL, this, other);
FixedArray* result;
if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
@@ -5768,9 +5721,8 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
ASSERT(descriptor->GetDetails().type() != NULL_DESCRIPTOR);
// Ensure the key is a symbol.
- Object* result;
{ MaybeObject* maybe_result = descriptor->KeyToSymbol();
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ if (maybe_result->IsFailure()) return maybe_result;
}
int new_size = 0;
@@ -5805,9 +5757,7 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
DescriptorArray* new_descriptors;
{ MaybeObject* maybe_result = Allocate(new_size);
- if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
- return maybe_result;
- }
+ if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
DescriptorArray::WhitenessWitness witness(new_descriptors);
@@ -5857,27 +5807,18 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
MaybeObject* DescriptorArray::RemoveTransitions() {
- // Remove all transitions and null descriptors. Return a copy of the array
- // with all transitions removed, or a Failure object if the new array could
- // not be allocated.
-
- // Compute the size of the map transition entries to be removed.
+ // Allocate the new descriptor array.
int new_number_of_descriptors = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
if (IsProperty(i)) new_number_of_descriptors++;
}
-
- // Allocate the new descriptor array.
DescriptorArray* new_descriptors;
{ MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
- if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
- return maybe_result;
- }
+ if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
- DescriptorArray::WhitenessWitness witness(new_descriptors);
-
// Copy the content.
+ DescriptorArray::WhitenessWitness witness(new_descriptors);
int next_descriptor = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
if (IsProperty(i)) {
@@ -6008,6 +5949,12 @@ MaybeObject* AccessorPair::CopyWithoutTransitions() {
}
+Object* AccessorPair::SafeGet(AccessorComponent component) {
+ Object* accessor = get(component);
+ return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
+}
+
+
MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count,
PretenureFlag pretenure) {
ASSERT(deopt_entry_count > 0);
@@ -6862,10 +6809,21 @@ void String::WriteToFlat(String* src,
// Left hand side is longer. Recurse over right.
if (to > boundary) {
String* second = cons_string->second();
- WriteToFlat(second,
- sink + boundary - from,
- 0,
+ // When repeatedly appending to a string, we get a cons string that
+ // is unbalanced to the left, a list, essentially. We inline the
+ // common case of sequential ascii right child.
+ if (to - boundary == 1) {
+ sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
+ } else if (second->IsSeqAsciiString()) {
+ CopyChars(sink + boundary - from,
+ SeqAsciiString::cast(second)->GetChars(),
to - boundary);
+ } else {
+ WriteToFlat(second,
+ sink + boundary - from,
+ 0,
+ to - boundary);
+ }
to = boundary;
}
source = first;
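The new branch above exists because repeated string appends build a cons chain that leans entirely to the left, so recursing into every right child is wasteful when that child is already a flat ASCII string. A simplified, self-contained sketch of the fast path (it assumes the append-chain shape and flat right children; the real code also special-cases a single trailing character and handles other string representations):

    #include <cstdio>
    #include <cstring>
    #include <string>

    // Stand-in for a cons/leaf string; not V8's String hierarchy.
    struct Str {
      std::string flat;  // used when this node is a leaf
      Str* first;        // set on cons nodes
      Str* second;
      size_t length() const {
        return first ? first->length() + second->length() : flat.size();
      }
    };

    void WriteToFlat(const Str* source, char* sink, size_t from, size_t to) {
      while (source->first != nullptr) {      // walk down the left spine
        size_t boundary = source->first->length();
        if (to > boundary) {                  // right child overlaps the range
          const Str* second = source->second;
          if (second->first == nullptr) {     // flat right child: copy directly
            memcpy(sink + boundary - from, second->flat.data(), to - boundary);
          } else {
            WriteToFlat(second, sink + boundary - from, 0, to - boundary);
          }
          to = boundary;
        }
        source = source->first;
      }
      memcpy(sink, source->flat.data() + from, to - from);
    }

    int main() {
      Str a{"for", nullptr, nullptr};
      Str b{"-in", nullptr, nullptr};
      Str c{" cache", nullptr, nullptr};
      Str ab{"", &a, &b};                     // "for" + "-in"
      Str abc{"", &ab, &c};                   // ("for" + "-in") + " cache"
      char buf[16] = {};
      WriteToFlat(&abc, buf, 0, abc.length());
      printf("%s\n", buf);                    // for-in cache
      return 0;
    }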
@@ -7567,11 +7525,10 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
// Copy the map so this does not affect unrelated functions.
// Remove map transitions because they point to maps with a
// different prototype.
- Object* new_object;
+ Map* new_map;
{ MaybeObject* maybe_new_map = map()->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_object)) return maybe_new_map;
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
}
- Map* new_map = Map::cast(new_object);
Heap* heap = new_map->GetHeap();
set_map(new_map);
new_map->set_constructor(value);
@@ -7628,12 +7585,12 @@ Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
MaybeObject* Oddball::Initialize(const char* to_string,
Object* to_number,
byte kind) {
- Object* symbol;
+ String* symbol;
{ MaybeObject* maybe_symbol =
Isolate::Current()->heap()->LookupAsciiSymbol(to_string);
- if (!maybe_symbol->ToObject(&symbol)) return maybe_symbol;
+ if (!maybe_symbol->To(&symbol)) return maybe_symbol;
}
- set_to_string(String::cast(symbol));
+ set_to_string(symbol);
set_to_number(to_number);
set_kind(kind);
return this;
@@ -7869,9 +7826,7 @@ void SharedFunctionInfo::DisableOptimization() {
code()->set_optimizable(false);
}
if (FLAG_trace_opt) {
- PrintF("[disabled optimization for: ");
- DebugName()->ShortPrint();
- PrintF("]\n");
+ PrintF("[disabled optimization for %s]\n", *DebugName()->ToCString());
}
}
@@ -8257,9 +8212,15 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
break;
}
- case Translation::ARGUMENTS_ADAPTOR_FRAME: {
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME: {
+ int function_id = iterator.Next();
+ JSFunction* function =
+ JSFunction::cast(LiteralArray()->get(function_id));
unsigned height = iterator.Next();
- PrintF(out, "{arguments adaptor, height=%d}", height);
+ PrintF(out, "{function=");
+ function->PrintName(out);
+ PrintF(out, ", height=%u}", height);
break;
}
@@ -8495,43 +8456,6 @@ void Code::Disassemble(const char* name, FILE* out) {
#endif // ENABLE_DISASSEMBLER
-static void CopyFastElementsToFast(FixedArray* source,
- FixedArray* destination,
- WriteBarrierMode mode) {
- int count = source->length();
- int copy_size = Min(count, destination->length());
- if (mode == SKIP_WRITE_BARRIER ||
- !Page::FromAddress(destination->address())->IsFlagSet(
- MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING)) {
- Address to = destination->address() + FixedArray::kHeaderSize;
- Address from = source->address() + FixedArray::kHeaderSize;
- memcpy(reinterpret_cast<void*>(to),
- reinterpret_cast<void*>(from),
- kPointerSize * copy_size);
- } else {
- for (int i = 0; i < copy_size; ++i) {
- destination->set(i, source->get(i), mode);
- }
- }
-}
-
-
-static void CopySlowElementsToFast(SeededNumberDictionary* source,
- FixedArray* destination,
- WriteBarrierMode mode) {
- int destination_length = destination->length();
- for (int i = 0; i < source->Capacity(); ++i) {
- Object* key = source->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t entry = static_cast<uint32_t>(key->Number());
- if (entry < static_cast<uint32_t>(destination_length)) {
- destination->set(entry, source->ValueAt(i), mode);
- }
- }
- }
-}
-
-
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
int capacity,
int length,
@@ -8541,17 +8465,14 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
ASSERT(!HasExternalArrayElements());
// Allocate a new fast elements backing store.
- FixedArray* new_elements = NULL;
- { Object* object;
- MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe->ToObject(&object)) return maybe;
- new_elements = FixedArray::cast(object);
+ FixedArray* new_elements;
+ { MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
+ if (!maybe->To(&new_elements)) return maybe;
}
// Find the new map to use for this object if there is a map change.
Map* new_map = NULL;
if (elements()->map() != heap->non_strict_arguments_elements_map()) {
- Object* object;
// The resized array has FAST_SMI_ONLY_ELEMENTS if the capacity mode forces
// it, or if it's allowed and the old elements array contained only SMIs.
bool has_fast_smi_only_elements =
@@ -8563,85 +8484,22 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
? FAST_SMI_ONLY_ELEMENTS
: FAST_ELEMENTS;
MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
- if (!maybe->ToObject(&object)) return maybe;
- new_map = Map::cast(object);
+ if (!maybe->To(&new_map)) return maybe;
}
FixedArrayBase* old_elements_raw = elements();
ElementsKind elements_kind = GetElementsKind();
- switch (elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode(new_elements->GetWriteBarrierMode(no_gc));
- CopyFastElementsToFast(FixedArray::cast(old_elements_raw),
- new_elements, mode);
- set_map_and_elements(new_map, new_elements);
- break;
- }
- case DICTIONARY_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
- CopySlowElementsToFast(SeededNumberDictionary::cast(old_elements_raw),
- new_elements,
- mode);
- set_map_and_elements(new_map, new_elements);
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
- // The object's map and the parameter map are unchanged, the unaliased
- // arguments are copied to the new backing store.
- FixedArray* parameter_map = FixedArray::cast(old_elements_raw);
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- CopySlowElementsToFast(SeededNumberDictionary::cast(arguments),
- new_elements,
- mode);
- } else {
- CopyFastElementsToFast(arguments, new_elements, mode);
- }
- parameter_map->set(1, new_elements);
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- FixedDoubleArray* old_elements = FixedDoubleArray::cast(old_elements_raw);
- uint32_t old_length = static_cast<uint32_t>(old_elements->length());
- // Fill out the new array with this content and array holes.
- for (uint32_t i = 0; i < old_length; i++) {
- if (!old_elements->is_the_hole(i)) {
- Object* obj;
- // Objects must be allocated in the old object space, since the
- // overall number of HeapNumbers needed for the conversion might
- // exceed the capacity of new space, and we would fail repeatedly
- // trying to convert the FixedDoubleArray.
- MaybeObject* maybe_value_object =
- GetHeap()->AllocateHeapNumber(old_elements->get_scalar(i),
- TENURED);
- if (!maybe_value_object->ToObject(&obj)) return maybe_value_object;
- // Force write barrier. It's not worth trying to exploit
- // elems->GetWriteBarrierMode(), since it requires an
- // AssertNoAllocation stack object that would have to be positioned
- // after the HeapNumber allocation anyway.
- new_elements->set(i, obj, UPDATE_WRITE_BARRIER);
- }
- }
- set_map(new_map);
- set_elements(new_elements);
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case EXTERNAL_PIXEL_ELEMENTS:
- UNREACHABLE();
- break;
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
+ ElementsKind to_kind = (elements_kind == FAST_SMI_ONLY_ELEMENTS)
+ ? FAST_SMI_ONLY_ELEMENTS
+ : FAST_ELEMENTS;
+ // int copy_size = Min(old_elements_raw->length(), new_elements->length());
+ accessor->CopyElements(this, new_elements, to_kind);
+ if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ set_map_and_elements(new_map, new_elements);
+ } else {
+ FixedArray* parameter_map = FixedArray::cast(old_elements_raw);
+ parameter_map->set(1, new_elements);
}
if (FLAG_trace_elements_transitions) {
@@ -8665,18 +8523,17 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
- Object* obj;
+ FixedDoubleArray* elems;
{ MaybeObject* maybe_obj =
heap->AllocateUninitializedFixedDoubleArray(capacity);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ if (!maybe_obj->To(&elems)) return maybe_obj;
}
- FixedDoubleArray* elems = FixedDoubleArray::cast(obj);
+ Map* new_map;
{ MaybeObject* maybe_obj =
GetElementsTransitionMap(heap->isolate(), FAST_DOUBLE_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ if (!maybe_obj->To(&new_map)) return maybe_obj;
}
- Map* new_map = Map::cast(obj);
FixedArrayBase* old_elements = elements();
ElementsKind elements_kind(GetElementsKind());
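A pattern that recurs throughout this patch, including the two allocations just above, is replacing the untyped MaybeObject::ToObject-plus-cast idiom with the typed To(&x) form. A stand-alone toy showing why the typed form removes the intermediate Object* and the explicit cast; the types and failure convention here are simplifications, not the real V8 classes:

    #include <cstdio>

    struct Object { virtual ~Object() {} };
    struct Map : Object { int id = 42; };

    struct MaybeObject {
      Object* obj;  // nullptr stands in for an allocation failure
      bool ToObject(Object** out) {
        if (!obj) return false;
        *out = obj;
        return true;
      }
      template <typename T>
      bool To(T** out) {
        if (!obj) return false;
        *out = static_cast<T*>(obj);
        return true;
      }
    };

    int main() {
      Map m;
      MaybeObject maybe{&m};

      // Old style: untyped extraction followed by an explicit cast.
      Object* raw;
      if (!maybe.ToObject(&raw)) return 1;
      Map* old_way = static_cast<Map*>(raw);

      // New style: extraction and typing in one step.
      Map* new_way;
      if (!maybe.To(&new_way)) return 1;

      printf("%d %d\n", old_way->id, new_way->id);  // 42 42
      return 0;
    }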
@@ -8728,11 +8585,8 @@ MaybeObject* JSArray::Initialize(int capacity) {
if (capacity == 0) {
new_elements = heap->empty_fixed_array();
} else {
- Object* obj;
- { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- new_elements = FixedArray::cast(obj);
+ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
+ if (!maybe_obj->To(&new_elements)) return maybe_obj;
}
set_elements(new_elements);
return this;
@@ -8792,7 +8646,7 @@ MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
// Grow array by factor 2 over and above what we need.
{ MaybeObject* maybe_cache =
GetHeap()->AllocateFixedArray(transitions * 2 * step + header);
- if (!maybe_cache->To<FixedArray>(&new_cache)) return maybe_cache;
+ if (!maybe_cache->To(&new_cache)) return maybe_cache;
}
for (int i = 0; i < capacity * step; i++) {
@@ -8906,78 +8760,6 @@ MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
}
-bool JSObject::HasElementPostInterceptor(JSReceiver* receiver, uint32_t index) {
- switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) {
- return true;
- }
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
- if ((index < length) &&
- !FixedDoubleArray::cast(elements())->is_the_hole(index)) {
- return true;
- }
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS: {
- ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
- if (index < static_cast<uint32_t>(pixels->length())) {
- return true;
- }
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- ExternalArray* array = ExternalArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- return true;
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (element_dictionary()->FindEntry(index)
- != SeededNumberDictionary::kNotFound) {
- return true;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNREACHABLE();
- break;
- }
-
- // Handle [] on String objects.
- if (this->IsStringObjectWithCharacterAt(index)) return true;
-
- Object* pt = GetPrototype();
- if (pt->IsNull()) return false;
- if (pt->IsJSProxy()) {
- // We need to follow the spec and simulate a call to [[GetOwnProperty]].
- return JSProxy::cast(pt)->GetElementAttributeWithHandler(
- receiver, index) != ABSENT;
- }
- return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
-}
-
-
bool JSObject::HasElementWithInterceptor(JSReceiver* receiver, uint32_t index) {
Isolate* isolate = GetIsolate();
// Make sure that the top context does not change when doing
@@ -9017,7 +8799,21 @@ bool JSObject::HasElementWithInterceptor(JSReceiver* receiver, uint32_t index) {
}
if (!result.IsEmpty()) return true;
}
- return holder_handle->HasElementPostInterceptor(*receiver_handle, index);
+
+ if (holder_handle->GetElementsAccessor()->HasElement(
+ *receiver_handle, *holder_handle, index)) {
+ return true;
+ }
+
+ if (holder_handle->IsStringObjectWithCharacterAt(index)) return true;
+ Object* pt = holder_handle->GetPrototype();
+ if (pt->IsJSProxy()) {
+ // We need to follow the spec and simulate a call to [[GetOwnProperty]].
+ return JSProxy::cast(pt)->GetElementAttributeWithHandler(
+ receiver, index) != ABSENT;
+ }
+ if (pt->IsNull()) return false;
+ return JSObject::cast(pt)->HasElementWithReceiver(*receiver_handle, index);
}
@@ -9127,28 +8923,6 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
-bool JSObject::HasElementInElements(FixedArray* elements,
- ElementsKind kind,
- uint32_t index) {
- ASSERT(kind == FAST_ELEMENTS || kind == DICTIONARY_ELEMENTS);
- if (kind == FAST_ELEMENTS) {
- int length = IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : elements->length();
- if (index < static_cast<uint32_t>(length) &&
- !elements->get(index)->IsTheHole()) {
- return true;
- }
- } else {
- if (SeededNumberDictionary::cast(elements)->FindEntry(index) !=
- SeededNumberDictionary::kNotFound) {
- return true;
- }
- }
- return false;
-}
-
-
bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
@@ -9164,68 +8938,9 @@ bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
return HasElementWithInterceptor(receiver, index);
}
- ElementsKind kind = GetElementsKind();
- switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) return true;
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
- if ((index < length) &&
- !FixedDoubleArray::cast(elements())->is_the_hole(index)) return true;
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS: {
- ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
- if (index < static_cast<uint32_t>(pixels->length())) {
- return true;
- }
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- ExternalArray* array = ExternalArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- return true;
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (element_dictionary()->FindEntry(index)
- != SeededNumberDictionary::kNotFound) {
- return true;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- Object* probe =
- (index < length - 2) ? parameter_map->get(index + 2) : NULL;
- if (probe != NULL && !probe->IsTheHole()) return true;
-
- // Not a mapped parameter, check the arguments.
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS : FAST_ELEMENTS;
- if (HasElementInElements(arguments, kind, index)) return true;
- break;
- }
+ ElementsAccessor* accessor = GetElementsAccessor();
+ if (accessor->HasElement(receiver, this, index)) {
+ return true;
}
// Handle [] on String objects.
@@ -9434,10 +9149,8 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
if (backing_store->map() == GetHeap()->non_strict_arguments_elements_map()) {
backing_store = FixedArray::cast(backing_store->get(1));
} else {
- Object* writable;
MaybeObject* maybe = EnsureWritableFastElements();
- if (!maybe->ToObject(&writable)) return maybe;
- backing_store = FixedArray::cast(writable);
+ if (!maybe->To(&backing_store)) return maybe;
}
uint32_t capacity = static_cast<uint32_t>(backing_store->length());
@@ -9490,10 +9203,11 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
// Change elements kind from SMI_ONLY to generic FAST if necessary.
if (HasFastSmiOnlyElements() && !value->IsSmi()) {
- MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
- FAST_ELEMENTS);
Map* new_map;
- if (!maybe_new_map->To<Map>(&new_map)) return maybe_new_map;
+ { MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
+ FAST_ELEMENTS);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ }
set_map(new_map);
if (FLAG_trace_elements_transitions) {
PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(),
@@ -9502,17 +9216,18 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
// Increase backing store capacity if that's been decided previously.
if (new_capacity != capacity) {
- Object* new_elements;
+ FixedArray* new_elements;
SetFastElementsCapacityMode set_capacity_mode =
value->IsSmi() && HasFastSmiOnlyElements()
? kAllowSmiOnlyElements
: kDontAllowSmiOnlyElements;
- MaybeObject* maybe =
- SetFastElementsCapacityAndLength(new_capacity,
- array_length,
- set_capacity_mode);
- if (!maybe->ToObject(&new_elements)) return maybe;
- FixedArray::cast(new_elements)->set(index, value);
+ { MaybeObject* maybe =
+ SetFastElementsCapacityAndLength(new_capacity,
+ array_length,
+ set_capacity_mode);
+ if (!maybe->To(&new_elements)) return maybe;
+ }
+ new_elements->set(index, value);
return value;
}
// Finally, set the new element and length.
@@ -9612,7 +9327,7 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
FixedArrayBase* new_dictionary;
PropertyDetails details = PropertyDetails(attributes, NORMAL);
MaybeObject* maybe = dictionary->AddNumberEntry(index, value, details);
- if (!maybe->To<FixedArrayBase>(&new_dictionary)) return maybe;
+ if (!maybe->To(&new_dictionary)) return maybe;
if (dictionary != SeededNumberDictionary::cast(new_dictionary)) {
if (is_arguments) {
elements->set(1, new_dictionary);
@@ -10085,10 +9800,9 @@ MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
Heap* heap = holder_handle->GetHeap();
ElementsAccessor* handler = holder_handle->GetElementsAccessor();
- MaybeObject* raw_result = handler->Get(holder_handle->elements(),
- index,
+ MaybeObject* raw_result = handler->Get(*this_handle,
*holder_handle,
- *this_handle);
+ index);
if (raw_result != heap->the_hole_value()) return raw_result;
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -13158,4 +12872,133 @@ int BreakPointInfo::GetBreakPointCount() {
#endif // ENABLE_DEBUGGER_SUPPORT
+MaybeObject* JSDate::GetField(Object* object, Smi* index) {
+ return JSDate::cast(object)->DoGetField(
+ static_cast<FieldIndex>(index->value()));
+}
+
+
+Object* JSDate::DoGetField(FieldIndex index) {
+ ASSERT(index != kDateValue);
+
+ DateCache* date_cache = GetIsolate()->date_cache();
+
+ if (index < kFirstUncachedField) {
+ Object* stamp = cache_stamp();
+ if (stamp != date_cache->stamp() && stamp->IsSmi()) {
+ // Since the stamp is not NaN, the value is also not NaN.
+ int64_t local_time_ms =
+ date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
+ SetLocalFields(local_time_ms, date_cache);
+ }
+ switch (index) {
+ case kYear: return year();
+ case kMonth: return month();
+ case kDay: return day();
+ case kWeekday: return weekday();
+ case kHour: return hour();
+ case kMinute: return min();
+ case kSecond: return sec();
+ default: UNREACHABLE();
+ }
+ }
+
+ if (index >= kFirstUTCField) {
+ return GetUTCField(index, value()->Number(), date_cache);
+ }
+
+ double time = value()->Number();
+ if (isnan(time)) return GetIsolate()->heap()->nan_value();
+
+ int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
+ int days = DateCache::DaysFromTime(local_time_ms);
+
+ if (index == kDays) return Smi::FromInt(days);
+
+ int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
+ if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
+ ASSERT(index == kTimeInDay);
+ return Smi::FromInt(time_in_day_ms);
+}
+
+
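DoGetField only recomputes the cached local fields when the object's cache stamp no longer matches the global DateCache stamp, so a timezone or DST cache reset invalidates every JSDate at once by bumping a single counter. A toy model of that invalidation scheme; the types and the recomputation are stand-ins:

    #include <cstdio>

    struct DateCacheModel {
      int stamp = 1;
      void Reset() { ++stamp; }  // e.g. timezone change
    };

    struct DateModel {
      int cache_stamp = 0;  // plays the role of kInvalidStamp
      int hour = -1;
      int Hour(DateCacheModel* cache) {
        if (cache_stamp != cache->stamp) {  // stale: recompute and re-stamp
          hour = 12;                        // stands in for SetLocalFields()
          cache_stamp = cache->stamp;
        }
        return hour;
      }
    };

    int main() {
      DateCacheModel cache;
      DateModel date;
      printf("%d\n", date.Hour(&cache));  // recomputes
      printf("%d\n", date.Hour(&cache));  // served from the cached field
      cache.Reset();                      // global invalidation
      printf("%d\n", date.Hour(&cache));  // recomputes again
      return 0;
    }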
+Object* JSDate::GetUTCField(FieldIndex index,
+ double value,
+ DateCache* date_cache) {
+ ASSERT(index >= kFirstUTCField);
+
+ if (isnan(value)) return GetIsolate()->heap()->nan_value();
+
+ int64_t time_ms = static_cast<int64_t>(value);
+
+ if (index == kTimezoneOffset) {
+ return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
+ }
+
+ int days = DateCache::DaysFromTime(time_ms);
+
+ if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
+
+ if (index <= kDayUTC) {
+ int year, month, day;
+ date_cache->YearMonthDayFromDays(days, &year, &month, &day);
+ if (index == kYearUTC) return Smi::FromInt(year);
+ if (index == kMonthUTC) return Smi::FromInt(month);
+ ASSERT(index == kDayUTC);
+ return Smi::FromInt(day);
+ }
+
+ int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
+ switch (index) {
+ case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
+ case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
+ case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
+ case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
+ case kDaysUTC: return Smi::FromInt(days);
+ case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
+ default: UNREACHABLE();
+ }
+
+ UNREACHABLE();
+ return NULL;
+}
+
+
+void JSDate::SetValue(Object* value, bool is_value_nan) {
+ set_value(value);
+ if (is_value_nan) {
+ HeapNumber* nan = GetIsolate()->heap()->nan_value();
+ set_cache_stamp(nan, SKIP_WRITE_BARRIER);
+ set_year(nan, SKIP_WRITE_BARRIER);
+ set_month(nan, SKIP_WRITE_BARRIER);
+ set_day(nan, SKIP_WRITE_BARRIER);
+ set_hour(nan, SKIP_WRITE_BARRIER);
+ set_min(nan, SKIP_WRITE_BARRIER);
+ set_sec(nan, SKIP_WRITE_BARRIER);
+ set_weekday(nan, SKIP_WRITE_BARRIER);
+ } else {
+ set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
+ }
+}
+
+
+void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
+ int days = DateCache::DaysFromTime(local_time_ms);
+ int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
+ int year, month, day;
+ date_cache->YearMonthDayFromDays(days, &year, &month, &day);
+ int weekday = date_cache->Weekday(days);
+ int hour = time_in_day_ms / (60 * 60 * 1000);
+ int min = (time_in_day_ms / (60 * 1000)) % 60;
+ int sec = (time_in_day_ms / 1000) % 60;
+ set_cache_stamp(date_cache->stamp());
+ set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
+ set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
+ set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
+ set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
+ set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
+ set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
+ set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
+}
+
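The field extraction in GetUTCField and SetLocalFields above is plain integer arithmetic on the milliseconds-within-a-day value. A worked example with concrete numbers:

    #include <cstdio>

    int main() {
      int time_in_day_ms = 45296789;                  // 12:34:56.789
      int hour = time_in_day_ms / (60 * 60 * 1000);   // 12
      int min  = (time_in_day_ms / (60 * 1000)) % 60; // 34
      int sec  = (time_in_day_ms / 1000) % 60;        // 56
      int ms   = time_in_day_ms % 1000;               // 789
      printf("%02d:%02d:%02d.%03d\n", hour, min, sec, ms);
      return 0;
    }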
} } // namespace v8::internal
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index be75faba65c..7906d14fa31 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -64,6 +64,7 @@
// - JSBuiltinsObject
// - JSGlobalProxy
// - JSValue
+// - JSDate
// - JSMessageObject
// - JSProxy
// - JSFunctionProxy
@@ -300,6 +301,7 @@ const int kVariableSizeSentinel = 0;
V(JS_MESSAGE_OBJECT_TYPE) \
\
V(JS_VALUE_TYPE) \
+ V(JS_DATE_TYPE) \
V(JS_OBJECT_TYPE) \
V(JS_CONTEXT_EXTENSION_OBJECT_TYPE) \
V(JS_GLOBAL_OBJECT_TYPE) \
@@ -619,6 +621,7 @@ enum InstanceType {
JS_PROXY_TYPE, // LAST_JS_PROXY_TYPE
JS_VALUE_TYPE, // FIRST_JS_OBJECT_TYPE
+ JS_DATE_TYPE,
JS_OBJECT_TYPE,
JS_CONTEXT_EXTENSION_OBJECT_TYPE,
JS_GLOBAL_OBJECT_TYPE,
@@ -813,6 +816,7 @@ class MaybeObject BASE_EMBEDDED {
V(Oddball) \
V(SharedFunctionInfo) \
V(JSValue) \
+ V(JSDate) \
V(JSMessageObject) \
V(StringWrapper) \
V(Foreign) \
@@ -854,6 +858,8 @@ class JSReceiver;
class Object : public MaybeObject {
public:
// Type testing.
+ bool IsObject() { return true; }
+
#define IS_TYPE_FUNCTION_DECL(type_) inline bool Is##type_();
OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
@@ -887,6 +893,7 @@ class Object : public MaybeObject {
// Extract the number.
inline double Number();
+ inline bool IsNaN();
// Returns true if the object is of the correct type to be used as a
// implementation of a JSObject's elements.
@@ -1360,6 +1367,13 @@ enum SetPropertyMode {
};
+// Indicator for one component of an AccessorPair.
+enum AccessorComponent {
+ ACCESSOR_GETTER,
+ ACCESSOR_SETTER
+};
+
+
// JSReceiver includes types on which properties can be defined, i.e.,
// JSObject and JSProxy.
class JSReceiver: public HeapObject {
@@ -1612,10 +1626,10 @@ class JSObject: public JSReceiver {
bool continue_search);
MUST_USE_RESULT MaybeObject* DefineAccessor(String* name,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes);
- Object* LookupAccessor(String* name, bool is_getter);
+ Object* LookupAccessor(String* name, AccessorComponent component);
MUST_USE_RESULT MaybeObject* DefineAccessor(AccessorInfo* info);
@@ -1745,7 +1759,6 @@ class JSObject: public JSReceiver {
LocalElementType HasLocalElement(uint32_t index);
bool HasElementWithInterceptor(JSReceiver* receiver, uint32_t index);
- bool HasElementPostInterceptor(JSReceiver* receiver, uint32_t index);
MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
Object* value,
@@ -2145,9 +2158,6 @@ class JSObject: public JSReceiver {
bool ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object);
- bool HasElementInElements(FixedArray* elements,
- ElementsKind kind,
- uint32_t index);
// Returns true if most of the elements backing storage is used.
bool HasDenseElements();
@@ -2166,12 +2176,12 @@ class JSObject: public JSReceiver {
PropertyAttributes attributes);
MUST_USE_RESULT MaybeObject* DefineElementAccessor(
uint32_t index,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes);
MUST_USE_RESULT MaybeObject* DefinePropertyAccessor(
String* name,
- bool is_getter,
+ AccessorComponent component,
Object* fun,
PropertyAttributes attributes);
void LookupInDescriptor(String* name, LookupResult* result);
@@ -2503,8 +2513,8 @@ class DescriptorArray: public FixedArray {
MUST_USE_RESULT MaybeObject* CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag);
- // Remove all transitions. Return a copy of the array with all transitions
- // removed, or a Failure object if the new array could not be allocated.
+ // Return a copy of the array with all transitions and null descriptors
+ // removed. Return a Failure object in case of an allocation failure.
MUST_USE_RESULT MaybeObject* RemoveTransitions();
// Sort the instance descriptors by the hash codes of their keys.
@@ -2589,6 +2599,20 @@ class DescriptorArray: public FixedArray {
static const int kMaxNumberOfDescriptors = 1024 + 512;
private:
+ // An entry in a DescriptorArray, represented as an (array, index) pair.
+ class Entry {
+ public:
+ inline explicit Entry(DescriptorArray* descs, int index) :
+ descs_(descs), index_(index) { }
+
+ inline PropertyType type() { return descs_->GetType(index_); }
+ inline Object* GetCallbackObject() { return descs_->GetValue(index_); }
+
+ private:
+ DescriptorArray* descs_;
+ int index_;
+ };
+
// Conversion from descriptor number to array indices.
static int ToKeyIndex(int descriptor_number) {
return descriptor_number+kFirstIndex;
@@ -4063,6 +4087,9 @@ class TypeFeedbackCells: public FixedArray {
// Casting.
static inline TypeFeedbackCells* cast(Object* obj);
+
+ static const int kForInFastCaseMarker = 0;
+ static const int kForInSlowCaseMarker = 1;
};
@@ -5972,7 +5999,7 @@ class JSBuiltinsObject: public GlobalObject {
};
-// Representation for JS Wrapper objects, String, Number, Boolean, Date, etc.
+// Representation for JS Wrapper objects, String, Number, Boolean, etc.
class JSValue: public JSObject {
public:
// [value]: the object being wrapped.
@@ -6001,6 +6028,106 @@ class JSValue: public JSObject {
};
+class DateCache;
+
+// Representation for JS date objects.
+class JSDate: public JSObject {
+ public:
+ // If one component is NaN, all of them are, indicating a NaN time value.
+ // [value]: the time value.
+ DECL_ACCESSORS(value, Object)
+ // [year]: caches year. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(year, Object)
+ // [month]: caches month. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(month, Object)
+ // [day]: caches day. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(day, Object)
+ // [weekday]: caches day of week. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(weekday, Object)
+ // [hour]: caches hours. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(hour, Object)
+ // [min]: caches minutes. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(min, Object)
+ // [sec]: caches seconds. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(sec, Object)
+ // [cache stamp]: sample of the date cache stamp at the
+ // moment when local fields were cached.
+ DECL_ACCESSORS(cache_stamp, Object)
+
+ // Casting.
+ static inline JSDate* cast(Object* obj);
+
+ // Returns the date field with the specified index.
+ // See FieldIndex for the list of date fields.
+ static MaybeObject* GetField(Object* date, Smi* index);
+
+ void SetValue(Object* value, bool is_value_nan);
+
+
+ // Dispatched behavior.
+#ifdef OBJECT_PRINT
+ inline void JSDatePrint() {
+ JSDatePrint(stdout);
+ }
+ void JSDatePrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void JSDateVerify();
+#endif
+ // The order is important. It must be kept in sync with date macros
+ // in macros.py.
+ enum FieldIndex {
+ kDateValue,
+ kYear,
+ kMonth,
+ kDay,
+ kWeekday,
+ kHour,
+ kMinute,
+ kSecond,
+ kFirstUncachedField,
+ kMillisecond = kFirstUncachedField,
+ kDays,
+ kTimeInDay,
+ kFirstUTCField,
+ kYearUTC = kFirstUTCField,
+ kMonthUTC,
+ kDayUTC,
+ kWeekdayUTC,
+ kHourUTC,
+ kMinuteUTC,
+ kSecondUTC,
+ kMillisecondUTC,
+ kDaysUTC,
+ kTimeInDayUTC,
+ kTimezoneOffset
+ };
+
+ // Layout description.
+ static const int kValueOffset = JSObject::kHeaderSize;
+ static const int kYearOffset = kValueOffset + kPointerSize;
+ static const int kMonthOffset = kYearOffset + kPointerSize;
+ static const int kDayOffset = kMonthOffset + kPointerSize;
+ static const int kWeekdayOffset = kDayOffset + kPointerSize;
+ static const int kHourOffset = kWeekdayOffset + kPointerSize;
+ static const int kMinOffset = kHourOffset + kPointerSize;
+ static const int kSecOffset = kMinOffset + kPointerSize;
+ static const int kCacheStampOffset = kSecOffset + kPointerSize;
+ static const int kSize = kCacheStampOffset + kPointerSize;
+
+ private:
+ inline Object* DoGetField(FieldIndex index);
+
+ Object* GetUTCField(FieldIndex index, double value, DateCache* date_cache);
+
+ // Computes and caches the cacheable fields of the date.
+ inline void SetLocalFields(int64_t local_time_ms, DateCache* date_cache);
+
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSDate);
+};
+
+
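The cached local fields above are only trusted while the sampled cache stamp still matches the current DateCache stamp. As a minimal standalone sketch of that invalidation pattern (hypothetical CachedDate/DateCacheLike types, not V8's JSDate or DateCache):

#include <cstdint>

// Hypothetical stand-ins, not V8's DateCache/JSDate.
struct DateCacheLike {
  int64_t stamp;                       // bumped whenever time zone data changes
};

struct CachedDate {
  double value_ms;                     // time value, ms since the epoch
  int year, month, day;                // cached local-time fields
  int64_t cache_stamp;                 // stamp sampled when the fields were computed

  int LocalYear(const DateCacheLike& cache) {
    if (cache_stamp != cache.stamp) {  // stale sample: recompute and re-stamp
      RecomputeLocalFields(cache);
      cache_stamp = cache.stamp;
    }
    return year;
  }

  void RecomputeLocalFields(const DateCacheLike&) {
    // placeholder: a real implementation would derive year/month/day
    // from value_ms using the cache's time zone information
  }
};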
// Representation of message objects used for error reporting through
// the API. The messages are formatted in JavaScript so this object is
// a real JavaScript object. The information used for formatting the
@@ -7910,15 +8037,27 @@ class AccessorPair: public Struct {
MUST_USE_RESULT MaybeObject* CopyWithoutTransitions();
- // TODO(svenpanne) Evil temporary helper, will vanish soon...
- void set(bool modify_getter, Object* value) {
- if (modify_getter) {
+ Object* get(AccessorComponent component) {
+ ASSERT(component == ACCESSOR_GETTER || component == ACCESSOR_SETTER);
+ return (component == ACCESSOR_GETTER) ? getter() : setter();
+ }
+
+ void set(AccessorComponent component, Object* value) {
+ ASSERT(component == ACCESSOR_GETTER || component == ACCESSOR_SETTER);
+ if (component == ACCESSOR_GETTER) {
set_getter(value);
} else {
set_setter(value);
}
}
+ // Same as get, but returns undefined instead of the hole.
+ Object* SafeGet(AccessorComponent component);
+
+ bool ContainsAccessor() {
+ return IsJSAccessor(getter()) || IsJSAccessor(setter());
+ }
+
#ifdef OBJECT_PRINT
void AccessorPairPrint(FILE* out = stdout);
#endif
@@ -7931,6 +8070,15 @@ class AccessorPair: public Struct {
static const int kSize = kSetterOffset + kPointerSize;
private:
+ // Strangely enough, in addition to functions and harmony proxies, the spec
+ // requires us to consider undefined as a kind of accessor, too:
+ // var obj = {};
+ // Object.defineProperty(obj, "foo", {get: undefined});
+ // assertTrue("foo" in obj);
+ bool IsJSAccessor(Object* obj) {
+ return obj->IsSpecFunction() || obj->IsUndefined();
+ }
+
DISALLOW_IMPLICIT_CONSTRUCTORS(AccessorPair);
};
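For illustration only, the component-based get/set API and the "undefined still counts as an accessor" rule can be mimicked outside V8 roughly as follows (AccessorPairSketch and the variant types are stand-ins, not the real AccessorPair):

#include <variant>

enum AccessorComponent { ACCESSOR_GETTER, ACCESSOR_SETTER };

struct TheHole {};                        // stands in for the_hole_value
struct Undefined {};                      // stands in for undefined_value
using Accessor = std::variant<TheHole, Undefined, void (*)()>;

struct AccessorPairSketch {
  Accessor getter = TheHole{};
  Accessor setter = TheHole{};

  Accessor& component(AccessorComponent c) {
    return c == ACCESSOR_GETTER ? getter : setter;
  }
  // Mirrors the IsJSAccessor() idea: callables qualify, and so does
  // undefined, matching the Object.defineProperty example in the comment.
  static bool IsAccessorLike(const Accessor& a) {
    return !std::holds_alternative<TheHole>(a);
  }
  bool ContainsAccessor() const {
    return IsAccessorLike(getter) || IsAccessorLike(setter);
  }
};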
diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc
index f0556cb3553..ca8cbb9029c 100644
--- a/deps/v8/src/parser.cc
+++ b/deps/v8/src/parser.cc
@@ -757,6 +757,12 @@ void Parser::ReportMessage(const char* type, Vector<const char*> args) {
}
+void Parser::ReportMessage(const char* type, Vector<Handle<String> > args) {
+ Scanner::Location source_location = scanner().location();
+ ReportMessageAt(source_location, type, args);
+}
+
+
void Parser::ReportMessageAt(Scanner::Location source_location,
const char* type,
Vector<const char*> args) {
@@ -1163,6 +1169,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
this_property_assignment_finder.GetThisPropertyAssignments());
}
}
+
return 0;
}
@@ -1184,10 +1191,10 @@ Statement* Parser::ParseModuleElement(ZoneStringList* labels,
switch (peek()) {
case Token::FUNCTION:
- return ParseFunctionDeclaration(ok);
+ return ParseFunctionDeclaration(NULL, ok);
case Token::LET:
case Token::CONST:
- return ParseVariableStatement(kModuleElement, ok);
+ return ParseVariableStatement(kModuleElement, NULL, ok);
case Token::IMPORT:
return ParseImportDeclaration(ok);
case Token::EXPORT:
@@ -1205,7 +1212,7 @@ Statement* Parser::ParseModuleElement(ZoneStringList* labels,
estmt->expression()->AsVariableProxy()->name()->Equals(
isolate()->heap()->module_symbol()) &&
!scanner().literal_contains_escapes()) {
- return ParseModuleDeclaration(ok);
+ return ParseModuleDeclaration(NULL, ok);
}
}
return stmt;
@@ -1214,20 +1221,38 @@ Statement* Parser::ParseModuleElement(ZoneStringList* labels,
}
-Block* Parser::ParseModuleDeclaration(bool* ok) {
+Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
// ModuleDeclaration:
// 'module' Identifier Module
// Create new block with one expected declaration.
Block* block = factory()->NewBlock(NULL, 1, true);
Handle<String> name = ParseIdentifier(CHECK_OK);
- // top_scope_->AddDeclaration(
- // factory()->NewModuleDeclaration(proxy, module, top_scope_));
- VariableProxy* proxy = Declare(name, LET, NULL, true, CHECK_OK);
- Module* module = ParseModule(ok);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module %s...\n", name->ToAsciiArray());
+#endif
+
+ Module* module = ParseModule(CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(name, LET, module->interface());
+ Declaration* declaration =
+ factory()->NewModuleDeclaration(proxy, module, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module %s.\n", name->ToAsciiArray());
+
+ if (FLAG_print_interfaces) {
+ PrintF("module %s : ", name->ToAsciiArray());
+ module->interface()->Print();
+ }
+#endif
+
// TODO(rossberg): Add initialization statement to block.
- USE(proxy);
- USE(module);
+
+ if (names) names->Add(name);
return block;
}
@@ -1235,19 +1260,26 @@ Block* Parser::ParseModuleDeclaration(bool* ok) {
Module* Parser::ParseModule(bool* ok) {
// Module:
// '{' ModuleElement '}'
- // '=' ModulePath
- // 'at' String
+ // '=' ModulePath ';'
+ // 'at' String ';'
switch (peek()) {
case Token::LBRACE:
return ParseModuleLiteral(ok);
- case Token::ASSIGN:
+ case Token::ASSIGN: {
Expect(Token::ASSIGN, CHECK_OK);
- return ParseModulePath(ok);
+ Module* result = ParseModulePath(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+ return result;
+ }
- default:
- return ParseModuleUrl(ok);
+ default: {
+ ExpectContextualKeyword("at", CHECK_OK);
+ Module* result = ParseModuleUrl(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+ return result;
+ }
}
}
@@ -1258,6 +1290,9 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
// Construct block expecting 16 statements.
Block* body = factory()->NewBlock(NULL, 16, false);
+#ifdef DEBUG
+ if (FLAG_print_interface_details) PrintF("# Literal ");
+#endif
Scope* scope = NewScope(top_scope_, MODULE_SCOPE);
Expect(Token::LBRACE, CHECK_OK);
@@ -1283,7 +1318,10 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
Expect(Token::RBRACE, CHECK_OK);
scope->set_end_position(scanner().location().end_pos);
body->set_block_scope(scope);
- return factory()->NewModuleLiteral(body);
+
+ scope->interface()->Freeze(ok);
+  ASSERT(*ok);
+ return factory()->NewModuleLiteral(body, scope->interface());
}
@@ -1293,10 +1331,28 @@ Module* Parser::ParseModulePath(bool* ok) {
// ModulePath '.' Identifier
Module* result = ParseModuleVariable(CHECK_OK);
-
while (Check(Token::PERIOD)) {
Handle<String> name = ParseIdentifierName(CHECK_OK);
- result = factory()->NewModulePath(result, name);
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Path .%s ", name->ToAsciiArray());
+#endif
+ Module* member = factory()->NewModulePath(result, name);
+ result->interface()->Add(name, member->interface(), ok);
+ if (!*ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("PATH TYPE ERROR at '%s'\n", name->ToAsciiArray());
+ PrintF("result: ");
+ result->interface()->Print();
+ PrintF("member: ");
+ member->interface()->Print();
+ }
+#endif
+ ReportMessage("invalid_module_path", Vector<Handle<String> >(&name, 1));
+ return NULL;
+ }
+ result = member;
}
return result;
@@ -1308,39 +1364,167 @@ Module* Parser::ParseModuleVariable(bool* ok) {
// Identifier
Handle<String> name = ParseIdentifier(CHECK_OK);
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module variable %s ", name->ToAsciiArray());
+#endif
VariableProxy* proxy = top_scope_->NewUnresolved(
- factory(), name, scanner().location().beg_pos);
+ factory(), name, scanner().location().beg_pos, Interface::NewModule());
+
return factory()->NewModuleVariable(proxy);
}
Module* Parser::ParseModuleUrl(bool* ok) {
// Module:
- // 'at' String
+ // String
- Expect(Token::IDENTIFIER, CHECK_OK);
- Handle<String> symbol = GetSymbol(CHECK_OK);
- if (!symbol->IsEqualTo(CStrVector("at"))) {
- *ok = false;
- ReportUnexpectedToken(scanner().current_token());
- return NULL;
- }
Expect(Token::STRING, CHECK_OK);
- symbol = GetSymbol(CHECK_OK);
+ Handle<String> symbol = GetSymbol(CHECK_OK);
+ // TODO(ES6): Request JS resource from environment...
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) PrintF("# Url ");
+#endif
return factory()->NewModuleUrl(symbol);
}
+Module* Parser::ParseModuleSpecifier(bool* ok) {
+ // ModuleSpecifier:
+ // String
+ // ModulePath
+
+ if (peek() == Token::STRING) {
+ return ParseModuleUrl(ok);
+ } else {
+ return ParseModulePath(ok);
+ }
+}
+
+
Block* Parser::ParseImportDeclaration(bool* ok) {
- // TODO(rossberg)
- return NULL;
+ // ImportDeclaration:
+ // 'import' IdentifierName (',' IdentifierName)* 'from' ModuleSpecifier ';'
+ //
+ // TODO(ES6): implement destructuring ImportSpecifiers
+
+ Expect(Token::IMPORT, CHECK_OK);
+ ZoneStringList names(1);
+
+ Handle<String> name = ParseIdentifierName(CHECK_OK);
+ names.Add(name);
+ while (peek() == Token::COMMA) {
+ Consume(Token::COMMA);
+ name = ParseIdentifierName(CHECK_OK);
+ names.Add(name);
+ }
+
+ ExpectContextualKeyword("from", CHECK_OK);
+ Module* module = ParseModuleSpecifier(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+
+ // Generate a separate declaration for each identifier.
+ // TODO(ES6): once we implement destructuring, make that one declaration.
+ Block* block = factory()->NewBlock(NULL, 1, true);
+ for (int i = 0; i < names.length(); ++i) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Import %s ", names[i]->ToAsciiArray());
+#endif
+ Interface* interface = Interface::NewUnknown();
+ module->interface()->Add(names[i], interface, ok);
+ if (!*ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("IMPORT TYPE ERROR at '%s'\n", names[i]->ToAsciiArray());
+ PrintF("module: ");
+ module->interface()->Print();
+ }
+#endif
+ ReportMessage("invalid_module_path", Vector<Handle<String> >(&name, 1));
+ return NULL;
+ }
+ VariableProxy* proxy = NewUnresolved(names[i], LET, interface);
+ Declaration* declaration =
+ factory()->NewImportDeclaration(proxy, module, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+ // TODO(rossberg): Add initialization statement to block.
+ }
+
+ return block;
}
-Block* Parser::ParseExportDeclaration(bool* ok) {
- // TODO(rossberg)
- return NULL;
+Statement* Parser::ParseExportDeclaration(bool* ok) {
+ // ExportDeclaration:
+ // 'export' Identifier (',' Identifier)* ';'
+ // 'export' VariableDeclaration
+ // 'export' FunctionDeclaration
+ // 'export' ModuleDeclaration
+ //
+ // TODO(ES6): implement structuring ExportSpecifiers
+
+ Expect(Token::EXPORT, CHECK_OK);
+
+ Statement* result = NULL;
+ ZoneStringList names(1);
+ switch (peek()) {
+ case Token::IDENTIFIER: {
+ Handle<String> name = ParseIdentifier(CHECK_OK);
+ // Handle 'module' as a context-sensitive keyword.
+ if (!name->IsEqualTo(CStrVector("module"))) {
+ names.Add(name);
+ while (peek() == Token::COMMA) {
+ Consume(Token::COMMA);
+ name = ParseIdentifier(CHECK_OK);
+ names.Add(name);
+ }
+ ExpectSemicolon(CHECK_OK);
+ result = factory()->NewEmptyStatement();
+ } else {
+ result = ParseModuleDeclaration(&names, CHECK_OK);
+ }
+ break;
+ }
+
+ case Token::FUNCTION:
+ result = ParseFunctionDeclaration(&names, CHECK_OK);
+ break;
+
+ case Token::VAR:
+ case Token::LET:
+ case Token::CONST:
+ result = ParseVariableStatement(kModuleElement, &names, CHECK_OK);
+ break;
+
+ default:
+ *ok = false;
+ ReportUnexpectedToken(scanner().current_token());
+ return NULL;
+ }
+
+ // Extract declared names into export declarations and interface.
+ Interface* interface = top_scope_->interface();
+ for (int i = 0; i < names.length(); ++i) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Export %s ", names[i]->ToAsciiArray());
+#endif
+ Interface* inner = Interface::NewUnknown();
+ interface->Add(names[i], inner, CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(names[i], LET, inner);
+ USE(proxy);
+ // TODO(rossberg): Rethink whether we actually need to store export
+ // declarations (for compilation?).
+ // ExportDeclaration* declaration =
+ // factory()->NewExportDeclaration(proxy, top_scope_);
+ // top_scope_->AddDeclaration(declaration);
+ }
+
+ ASSERT(result != NULL);
+ return result;
}
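Taken together, the grammar comments in ParseModuleDeclaration, ParseModule, ParseImportDeclaration and ParseExportDeclaration accept source of the following shape. These are illustrative strings only; the exact surface syntax of this early harmony-modules draft may differ in detail.

// Illustrative inputs, one per production from the comments above.
static const char* const kHarmonyModuleExamples[] = {
  "module m { export let x; }",      // 'module' Identifier '{' ModuleElement* '}'
  "module alias = m;",               // Module: '=' ModulePath ';'
  "module lib at \"lib.js\";",       // Module: 'at' String ';'
  "import x, y from m;",             // 'import' names 'from' ModuleSpecifier ';'
  "export x, y;",                    // 'export' Identifier (',' Identifier)* ';'
};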
@@ -1358,10 +1542,10 @@ Statement* Parser::ParseBlockElement(ZoneStringList* labels,
switch (peek()) {
case Token::FUNCTION:
- return ParseFunctionDeclaration(ok);
+ return ParseFunctionDeclaration(NULL, ok);
case Token::LET:
case Token::CONST:
- return ParseVariableStatement(kModuleElement, ok);
+ return ParseVariableStatement(kModuleElement, NULL, ok);
default:
return ParseStatement(labels, ok);
}
@@ -1403,7 +1587,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
case Token::CONST: // fall through
case Token::LET:
case Token::VAR:
- stmt = ParseVariableStatement(kStatement, ok);
+ stmt = ParseVariableStatement(kStatement, NULL, ok);
break;
case Token::SEMICOLON:
@@ -1480,7 +1664,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
*ok = false;
return NULL;
}
- return ParseFunctionDeclaration(ok);
+ return ParseFunctionDeclaration(NULL, ok);
}
case Token::DEBUGGER:
@@ -1497,21 +1681,24 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
}
-VariableProxy* Parser::Declare(Handle<String> name,
- VariableMode mode,
- FunctionLiteral* fun,
- bool resolve,
- bool* ok) {
- Variable* var = NULL;
+VariableProxy* Parser::NewUnresolved(
+ Handle<String> name, VariableMode mode, Interface* interface) {
// If we are inside a function, a declaration of a var/const variable is a
// truly local variable, and the scope of the variable is always the function
// scope.
// Let/const variables in harmony mode are always added to the immediately
// enclosing scope.
- Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
- ? top_scope_ : top_scope_->DeclarationScope();
- InitializationFlag init_flag = (fun != NULL || mode == VAR)
- ? kCreatedInitialized : kNeedsInitialization;
+ return DeclarationScope(mode)->NewUnresolved(
+ factory(), name, scanner().location().beg_pos, interface);
+}
+
+
+void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
+ VariableProxy* proxy = declaration->proxy();
+ Handle<String> name = proxy->name();
+ VariableMode mode = declaration->mode();
+ Scope* declaration_scope = DeclarationScope(mode);
+ Variable* var = NULL;
// If a function scope exists, then we can statically declare this
// variable and also set its mode. In any case, a Declaration node
@@ -1526,12 +1713,14 @@ VariableProxy* Parser::Declare(Handle<String> name,
if (declaration_scope->is_function_scope() ||
declaration_scope->is_strict_or_extended_eval_scope() ||
declaration_scope->is_block_scope() ||
- declaration_scope->is_module_scope()) {
+ declaration_scope->is_module_scope() ||
+ declaration->AsModuleDeclaration() != NULL) {
// Declare the variable in the function scope.
var = declaration_scope->LocalLookup(name);
if (var == NULL) {
// Declare the name.
- var = declaration_scope->DeclareLocal(name, mode, init_flag);
+ var = declaration_scope->DeclareLocal(
+ name, mode, declaration->initialization(), proxy->interface());
} else {
// The name was declared in this scope before; check for conflicting
// re-declarations. We have a conflict if either of the declarations is
@@ -1558,7 +1747,7 @@ VariableProxy* Parser::Declare(Handle<String> name,
Vector<const char*> args(elms, 2);
ReportMessage("redeclaration", args);
*ok = false;
- return NULL;
+ return;
}
const char* type = (var->mode() == VAR)
? "var" : var->is_const_mode() ? "const" : "let";
@@ -1588,10 +1777,7 @@ VariableProxy* Parser::Declare(Handle<String> name,
// semantic issue as long as we keep the source order, but it may be
// a performance issue since it may lead to repeated
// Runtime::DeclareContextSlot() calls.
- VariableProxy* proxy = declaration_scope->NewUnresolved(
- factory(), name, scanner().location().beg_pos);
- declaration_scope->AddDeclaration(
- factory()->NewVariableDeclaration(proxy, mode, fun, top_scope_));
+ declaration_scope->AddDeclaration(declaration);
if ((mode == CONST || mode == CONST_HARMONY) &&
declaration_scope->is_global_scope()) {
@@ -1615,7 +1801,7 @@ VariableProxy* Parser::Declare(Handle<String> name,
mode,
true,
kind,
- init_flag);
+ declaration->initialization());
var->AllocateTo(Variable::LOOKUP, -1);
resolve = true;
}
@@ -1644,9 +1830,30 @@ VariableProxy* Parser::Declare(Handle<String> name,
// initialization code. Thus, inside the 'with' statement, we need
// both access to the static and the dynamic context chain; the
// runtime needs to provide both.
- if (resolve && var != NULL) proxy->BindTo(var);
+ if (resolve && var != NULL) {
+ proxy->BindTo(var);
- return proxy;
+ if (FLAG_harmony_modules) {
+ bool ok;
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Declare %s\n", var->name()->ToAsciiArray());
+#endif
+ proxy->interface()->Unify(var->interface(), &ok);
+ if (!ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("DECLARE TYPE ERROR\n");
+ PrintF("proxy: ");
+ proxy->interface()->Print();
+ PrintF("var: ");
+ var->interface()->Print();
+ }
+#endif
+ ReportMessage("module_type_error", Vector<Handle<String> >(&name, 1));
+ }
+ }
+ }
}
@@ -1673,7 +1880,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// isn't lazily compiled. The extension structures are only
  // accessible while parsing the first time, not when reparsing
// because of lazy compilation.
- top_scope_->DeclarationScope()->ForceEagerCompilation();
+ DeclarationScope(VAR)->ForceEagerCompilation();
// Compute the function template for the native function.
v8::Handle<v8::FunctionTemplate> fun_template =
@@ -1698,16 +1905,19 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// TODO(1240846): It's weird that native function declarations are
// introduced dynamically when we meet their declarations, whereas
// other functions are set up when entering the surrounding scope.
+ VariableProxy* proxy = NewUnresolved(name, VAR);
+ Declaration* declaration =
+ factory()->NewVariableDeclaration(proxy, VAR, top_scope_);
+ Declare(declaration, true, CHECK_OK);
SharedFunctionInfoLiteral* lit =
factory()->NewSharedFunctionInfoLiteral(shared);
- VariableProxy* var = Declare(name, VAR, NULL, true, CHECK_OK);
return factory()->NewExpressionStatement(
factory()->NewAssignment(
- Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
+ Token::INIT_VAR, proxy, lit, RelocInfo::kNoPosition));
}
-Statement* Parser::ParseFunctionDeclaration(bool* ok) {
+Statement* Parser::ParseFunctionDeclaration(ZoneStringList* names, bool* ok) {
// FunctionDeclaration ::
// 'function' Identifier '(' FormalParameterListopt ')' '{' FunctionBody '}'
Expect(Token::FUNCTION, CHECK_OK);
@@ -1724,7 +1934,11 @@ Statement* Parser::ParseFunctionDeclaration(bool* ok) {
  // scope, we treat it as such and introduce the function with its
// initial value upon entering the corresponding scope.
VariableMode mode = is_extended_mode() ? LET : VAR;
- Declare(name, mode, fun, true, CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(name, mode);
+ Declaration* declaration =
+ factory()->NewFunctionDeclaration(proxy, mode, fun, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+ if (names) names->Add(name);
return factory()->NewEmptyStatement();
}
@@ -1791,13 +2005,14 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context,
+ ZoneStringList* names,
bool* ok) {
// VariableStatement ::
// VariableDeclarations ';'
Handle<String> ignore;
Block* result =
- ParseVariableDeclarations(var_context, NULL, &ignore, CHECK_OK);
+ ParseVariableDeclarations(var_context, NULL, names, &ignore, CHECK_OK);
ExpectSemicolon(CHECK_OK);
return result;
}
@@ -1817,6 +2032,7 @@ bool Parser::IsEvalOrArguments(Handle<String> string) {
Block* Parser::ParseVariableDeclarations(
VariableDeclarationContext var_context,
VariableDeclarationProperties* decl_props,
+ ZoneStringList* names,
Handle<String>* out,
bool* ok) {
// VariableDeclarations ::
@@ -1902,8 +2118,8 @@ Block* Parser::ParseVariableDeclarations(
UNREACHABLE(); // by current callers
}
- Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
- ? top_scope_ : top_scope_->DeclarationScope();
+ Scope* declaration_scope = DeclarationScope(mode);
+
// The scope of a var/const declared variable anywhere inside a function
// is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
// transform a source-level var/const declaration into a (Function)
@@ -1950,7 +2166,10 @@ Block* Parser::ParseVariableDeclarations(
// For let/const declarations in harmony mode, we can also immediately
// pre-resolve the proxy because it resides in the same scope as the
// declaration.
- VariableProxy* proxy = Declare(name, mode, NULL, mode != VAR, CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(name, mode);
+ Declaration* declaration =
+ factory()->NewVariableDeclaration(proxy, mode, top_scope_);
+ Declare(declaration, mode != VAR, CHECK_OK);
nvars++;
if (declaration_scope->num_var_or_const() > kMaxNumFunctionLocals) {
ReportMessageAt(scanner().location(), "too_many_variables",
@@ -1958,6 +2177,7 @@ Block* Parser::ParseVariableDeclarations(
*ok = false;
return NULL;
}
+ if (names) names->Add(name);
// Parse initialization expression if present and/or needed. A
// declaration of the form:
@@ -2610,7 +2830,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
if (peek() == Token::VAR || peek() == Token::CONST) {
Handle<String> name;
Block* variable_statement =
- ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK);
+ ParseVariableDeclarations(kForStatement, NULL, NULL, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
@@ -2639,7 +2859,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Handle<String> name;
VariableDeclarationProperties decl_props = kHasNoInitializers;
Block* variable_statement =
- ParseVariableDeclarations(kForStatement, &decl_props, &name, CHECK_OK);
+ ParseVariableDeclarations(kForStatement, &decl_props, NULL, &name,
+ CHECK_OK);
bool accept_IN = !name.is_null() && decl_props != kHasInitializers;
if (peek() == Token::IN && accept_IN) {
// Rewrite a for-in statement of the form
@@ -3387,8 +3608,14 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
case Token::FUTURE_STRICT_RESERVED_WORD: {
Handle<String> name = ParseIdentifier(CHECK_OK);
if (fni_ != NULL) fni_->PushVariableName(name);
+ // The name may refer to a module instance object, so its type is unknown.
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Variable %s ", name->ToAsciiArray());
+#endif
+ Interface* interface = Interface::NewUnknown();
result = top_scope_->NewUnresolved(
- factory(), name, scanner().location().beg_pos);
+ factory(), name, scanner().location().beg_pos, interface);
break;
}
@@ -3672,17 +3899,11 @@ Handle<Object> Parser::GetBoilerplateValue(Expression* expression) {
return isolate()->factory()->undefined_value();
}
-// Defined in ast.cc
-bool IsEqualString(void* first, void* second);
-bool IsEqualNumber(void* first, void* second);
-
-
// Validation per 11.1.5 Object Initialiser
class ObjectLiteralPropertyChecker {
public:
ObjectLiteralPropertyChecker(Parser* parser, LanguageMode language_mode) :
- props(&IsEqualString),
- elems(&IsEqualNumber),
+ props_(Literal::Match),
parser_(parser),
language_mode_(language_mode) {
}
@@ -3711,8 +3932,7 @@ class ObjectLiteralPropertyChecker {
}
}
- HashMap props;
- HashMap elems;
+ HashMap props_;
Parser* parser_;
LanguageMode language_mode_;
};
@@ -3722,44 +3942,9 @@ void ObjectLiteralPropertyChecker::CheckProperty(
ObjectLiteral::Property* property,
Scanner::Location loc,
bool* ok) {
-
ASSERT(property != NULL);
-
- Literal* lit = property->key();
- Handle<Object> handle = lit->handle();
-
- uint32_t hash;
- HashMap* map;
- void* key;
-
- if (handle->IsSymbol()) {
- Handle<String> name(String::cast(*handle));
- if (name->AsArrayIndex(&hash)) {
- Handle<Object> key_handle = FACTORY->NewNumberFromUint(hash);
- key = key_handle.location();
- map = &elems;
- } else {
- key = handle.location();
- hash = name->Hash();
- map = &props;
- }
- } else if (handle->ToArrayIndex(&hash)) {
- key = handle.location();
- map = &elems;
- } else {
- ASSERT(handle->IsNumber());
- double num = handle->Number();
- char arr[100];
- Vector<char> buffer(arr, ARRAY_SIZE(arr));
- const char* str = DoubleToCString(num, buffer);
- Handle<String> name = FACTORY->NewStringFromAscii(CStrVector(str));
- key = name.location();
- hash = name->Hash();
- map = &props;
- }
-
- // Lookup property previously defined, if any.
- HashMap::Entry* entry = map->Lookup(key, hash, true);
+ Literal* literal = property->key();
+ HashMap::Entry* entry = props_.Lookup(literal, literal->Hash(), true);
intptr_t prev = reinterpret_cast<intptr_t> (entry->value);
intptr_t curr = GetPropertyKind(property);
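The rewritten checker keys a single HashMap by the Literal itself (via Literal::Match and the literal's hash) instead of maintaining separate string and number maps. A rough standalone equivalent of that bookkeeping, using the standard library rather than V8's HashMap, with RecordProperty and the kind values being assumptions for illustration:

#include <string>
#include <unordered_map>

// Property kinds as a bitmask, loosely in the spirit of GetPropertyKind();
// the actual kind values and the strict-mode rules are not reproduced here.
enum PropertyKind { kData = 1, kGetter = 2, kSetter = 4 };

// One map keyed by the canonical property key replaces the old separate
// "props"/"elems" maps. Returns false when the same key is seen again with
// a kind it already has (e.g. two getters for "foo").
bool RecordProperty(std::unordered_map<std::string, int>* seen,
                    const std::string& key, PropertyKind kind) {
  int& kinds = (*seen)[key];        // value-initialized to 0 on first use
  const bool duplicate = (kinds & kind) != 0;
  kinds |= kind;
  return !duplicate;
}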
@@ -3984,7 +4169,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
Expression* value = ParseAssignmentExpression(true, CHECK_OK);
ObjectLiteral::Property* property =
- new(zone()) ObjectLiteral::Property(key, value);
+ new(zone()) ObjectLiteral::Property(key, value, isolate());
// Mark top-level object literals that contain function literals and
// pretenure the literal so it can be added as a constant function
@@ -4569,6 +4754,18 @@ void Parser::ExpectSemicolon(bool* ok) {
}
+void Parser::ExpectContextualKeyword(const char* keyword, bool* ok) {
+ Expect(Token::IDENTIFIER, ok);
+ if (!*ok) return;
+ Handle<String> symbol = GetSymbol(ok);
+ if (!*ok) return;
+ if (!symbol->IsEqualTo(CStrVector(keyword))) {
+ *ok = false;
+ ReportUnexpectedToken(scanner().current_token());
+ }
+}
+
+
Literal* Parser::GetLiteralUndefined() {
return factory()->NewLiteral(isolate()->factory()->undefined_value());
}
diff --git a/deps/v8/src/parser.h b/deps/v8/src/parser.h
index 66c801d9817..90ef39983e1 100644
--- a/deps/v8/src/parser.h
+++ b/deps/v8/src/parser.h
@@ -557,6 +557,7 @@ class Parser {
void ReportUnexpectedToken(Token::Value token);
void ReportInvalidPreparseData(Handle<String> name, bool* ok);
void ReportMessage(const char* message, Vector<const char*> args);
+ void ReportMessage(const char* message, Vector<Handle<String> > args);
bool inside_with() const { return top_scope_->inside_with(); }
Scanner& scanner() { return scanner_; }
@@ -566,6 +567,10 @@ class Parser {
ASSERT(top_scope_ != NULL);
return top_scope_->is_extended_mode();
}
+ Scope* DeclarationScope(VariableMode mode) {
+ return (mode == LET || mode == CONST_HARMONY)
+ ? top_scope_ : top_scope_->DeclarationScope();
+ }
// Check if the given string is 'eval' or 'arguments'.
bool IsEvalOrArguments(Handle<String> string);
@@ -577,23 +582,26 @@ class Parser {
void* ParseSourceElements(ZoneList<Statement*>* processor,
int end_token, bool* ok);
Statement* ParseModuleElement(ZoneStringList* labels, bool* ok);
- Block* ParseModuleDeclaration(bool* ok);
+ Block* ParseModuleDeclaration(ZoneStringList* names, bool* ok);
Module* ParseModule(bool* ok);
Module* ParseModuleLiteral(bool* ok);
Module* ParseModulePath(bool* ok);
Module* ParseModuleVariable(bool* ok);
Module* ParseModuleUrl(bool* ok);
+ Module* ParseModuleSpecifier(bool* ok);
Block* ParseImportDeclaration(bool* ok);
- Block* ParseExportDeclaration(bool* ok);
+ Statement* ParseExportDeclaration(bool* ok);
Statement* ParseBlockElement(ZoneStringList* labels, bool* ok);
Statement* ParseStatement(ZoneStringList* labels, bool* ok);
- Statement* ParseFunctionDeclaration(bool* ok);
+ Statement* ParseFunctionDeclaration(ZoneStringList* names, bool* ok);
Statement* ParseNativeDeclaration(bool* ok);
Block* ParseBlock(ZoneStringList* labels, bool* ok);
Block* ParseVariableStatement(VariableDeclarationContext var_context,
+ ZoneStringList* names,
bool* ok);
Block* ParseVariableDeclarations(VariableDeclarationContext var_context,
VariableDeclarationProperties* decl_props,
+ ZoneStringList* names,
Handle<String>* out,
bool* ok);
Statement* ParseExpressionOrLabelledStatement(ZoneStringList* labels,
@@ -696,6 +704,7 @@ class Parser {
void Expect(Token::Value token, bool* ok);
bool Check(Token::Value token);
void ExpectSemicolon(bool* ok);
+ void ExpectContextualKeyword(const char* keyword, bool* ok);
Handle<String> LiteralString(PretenureFlag tenured) {
if (scanner().is_literal_ascii()) {
@@ -756,10 +765,10 @@ class Parser {
void CheckConflictingVarDeclarations(Scope* scope, bool* ok);
// Parser support
- VariableProxy* Declare(Handle<String> name, VariableMode mode,
- FunctionLiteral* fun,
- bool resolve,
- bool* ok);
+ VariableProxy* NewUnresolved(Handle<String> name,
+ VariableMode mode,
+ Interface* interface = Interface::NewValue());
+ void Declare(Declaration* declaration, bool resolve, bool* ok);
bool TargetStackContainsLabel(Handle<String> label);
BreakableStatement* LookupBreakTarget(Handle<String> label, bool* ok);
diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc
index 0da1c08fdd5..cfcbd913fca 100644
--- a/deps/v8/src/platform-linux.cc
+++ b/deps/v8/src/platform-linux.cc
@@ -187,15 +187,15 @@ bool OS::ArmCpuHasFeature(CpuFeature feature) {
 // pair r0, r1 is loaded with 0.0. If -mfloat-abi=hard is passed to GCC then
// calling this will return 1.0 and otherwise 0.0.
static void ArmUsingHardFloatHelper() {
- asm("mov r0, #0");
+ asm("mov r0, #0":::"r0");
#if defined(__VFP_FP__) && !defined(__SOFTFP__)
// Load 0x3ff00000 into r1 using instructions available in both ARM
// and Thumb mode.
- asm("mov r1, #3");
- asm("mov r2, #255");
- asm("lsl r1, r1, #8");
- asm("orr r1, r1, r2");
- asm("lsl r1, r1, #20");
+ asm("mov r1, #3":::"r1");
+ asm("mov r2, #255":::"r2");
+ asm("lsl r1, r1, #8":::"r1");
+ asm("orr r1, r1, r2":::"r1");
+ asm("lsl r1, r1, #20":::"r1");
// For vmov d0, r0, r1 use ARM mode.
#ifdef __thumb__
asm volatile(
@@ -209,12 +209,12 @@ static void ArmUsingHardFloatHelper() {
" adr r3, 2f+1 \n\t"
" bx r3 \n\t"
" .THUMB \n"
- "2: \n\t");
+ "2: \n\t":::"r3");
#else
asm("vmov d0, r0, r1");
#endif // __thumb__
#endif // defined(__VFP_FP__) && !defined(__SOFTFP__)
- asm("mov r1, #0");
+ asm("mov r1, #0":::"r1");
}
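This hunk is purely about telling GCC which registers each statement overwrites: in extended asm syntax the fourth colon-separated field is the clobber list. A minimal standalone illustration (ZeroR0 is a hypothetical helper; the asm body only compiles when targeting ARM):

// Extended asm syntax: __asm__(template : outputs : inputs : clobbers).
// Naming "r0" in the clobber list tells the compiler the register is
// overwritten, so it will not keep a live value cached there across the
// statement -- which the bare asm("mov r0, #0") above did not guarantee.
static inline void ZeroR0() {
#if defined(__arm__)
  __asm__ __volatile__("mov r0, #0" : /* no outputs */ : /* no inputs */ : "r0");
#endif
}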
diff --git a/deps/v8/src/platform-nullos.cc b/deps/v8/src/platform-nullos.cc
index 918327a983a..e05345c80a3 100644
--- a/deps/v8/src/platform-nullos.cc
+++ b/deps/v8/src/platform-nullos.cc
@@ -55,6 +55,30 @@ double modulo(double x, double y) {
}
+double fast_sin(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_cos(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_tan(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_log(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
// Initialize OS class early in the V8 startup.
void OS::SetUp() {
// Seed the random number generator.
diff --git a/deps/v8/src/platform-posix.cc b/deps/v8/src/platform-posix.cc
index 34fd5c44985..4543a66e8c5 100644
--- a/deps/v8/src/platform-posix.cc
+++ b/deps/v8/src/platform-posix.cc
@@ -53,6 +53,7 @@
#include "v8.h"
+#include "codegen.h"
#include "platform.h"
namespace v8 {
@@ -126,6 +127,29 @@ double modulo(double x, double y) {
}
+static Mutex* transcendental_function_mutex = OS::CreateMutex();
+
+#define TRANSCENDENTAL_FUNCTION(name, type) \
+static TranscendentalFunction fast_##name##_function = NULL; \
+double fast_##name(double x) { \
+ if (fast_##name##_function == NULL) { \
+ ScopedLock lock(transcendental_function_mutex); \
+ TranscendentalFunction temp = \
+ CreateTranscendentalFunction(type); \
+ MemoryBarrier(); \
+ fast_##name##_function = temp; \
+ } \
+ return (*fast_##name##_function)(x); \
+}
+
+TRANSCENDENTAL_FUNCTION(sin, TranscendentalCache::SIN)
+TRANSCENDENTAL_FUNCTION(cos, TranscendentalCache::COS)
+TRANSCENDENTAL_FUNCTION(tan, TranscendentalCache::TAN)
+TRANSCENDENTAL_FUNCTION(log, TranscendentalCache::LOG)
+
+#undef TRANSCENDENTAL_FUNCTION
+
+
double OS::nan_value() {
// NAN from math.h is defined in C99 and not in POSIX.
return NAN;
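The TRANSCENDENTAL_FUNCTION macro above expands to a lazily created, cached function pointer guarded by a mutex and a memory barrier. The same shape in standalone modern C++, as a sketch only: std::atomic/std::mutex stand in for V8's Mutex and MemoryBarrier, and FastSin/CreateFastSin/PlainSin are hypothetical names, not V8 APIs.

#include <atomic>
#include <cmath>
#include <mutex>

using UnaryFn = double (*)(double);

// Placeholder for CreateTranscendentalFunction(); here it just returns
// the library implementation.
static double PlainSin(double x) { return std::sin(x); }
static UnaryFn CreateFastSin() { return &PlainSin; }

static std::atomic<UnaryFn> fast_sin_fn{nullptr};
static std::mutex fast_sin_mutex;

double FastSin(double x) {
  UnaryFn fn = fast_sin_fn.load(std::memory_order_acquire);
  if (fn == nullptr) {
    std::lock_guard<std::mutex> lock(fast_sin_mutex);
    fn = fast_sin_fn.load(std::memory_order_relaxed);
    if (fn == nullptr) {
      fn = CreateFastSin();
      // The release store plays the role of MemoryBarrier() in the macro:
      // the generated code is fully visible before the pointer is published.
      fast_sin_fn.store(fn, std::memory_order_release);
    }
  }
  return fn(x);
}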
diff --git a/deps/v8/src/platform-win32.cc b/deps/v8/src/platform-win32.cc
index e9e99246cb7..53915c6c0e4 100644
--- a/deps/v8/src/platform-win32.cc
+++ b/deps/v8/src/platform-win32.cc
@@ -32,6 +32,7 @@
#include "v8.h"
+#include "codegen.h"
#include "platform.h"
#include "vm-state-inl.h"
@@ -112,11 +113,16 @@ int strncpy_s(char* dest, size_t dest_size, const char* source, size_t count) {
}
+#ifndef __MINGW64_VERSION_MAJOR
+
inline void MemoryBarrier() {
int barrier = 0;
__asm__ __volatile__("xchgl %%eax,%0 ":"=r" (barrier));
}
+#endif // __MINGW64_VERSION_MAJOR
+
+
#endif // __MINGW32__
// Generate a pseudo-random number in the range 0-2^31-1. Usually
@@ -201,6 +207,30 @@ double modulo(double x, double y) {
#endif // _WIN64
+
+static Mutex* transcendental_function_mutex = OS::CreateMutex();
+
+#define TRANSCENDENTAL_FUNCTION(name, type) \
+static TranscendentalFunction fast_##name##_function = NULL; \
+double fast_##name(double x) { \
+ if (fast_##name##_function == NULL) { \
+ ScopedLock lock(transcendental_function_mutex); \
+ TranscendentalFunction temp = \
+ CreateTranscendentalFunction(type); \
+ MemoryBarrier(); \
+ fast_##name##_function = temp; \
+ } \
+ return (*fast_##name##_function)(x); \
+}
+
+TRANSCENDENTAL_FUNCTION(sin, TranscendentalCache::SIN)
+TRANSCENDENTAL_FUNCTION(cos, TranscendentalCache::COS)
+TRANSCENDENTAL_FUNCTION(tan, TranscendentalCache::TAN)
+TRANSCENDENTAL_FUNCTION(log, TranscendentalCache::LOG)
+
+#undef TRANSCENDENTAL_FUNCTION
+
+
// ----------------------------------------------------------------------------
// The Time class represents time on win32. A timestamp is represented as
 // a 64-bit integer in 100-nanosecond intervals since January 1, 1601 (UTC). JavaScript
@@ -885,7 +915,6 @@ void* OS::Allocate(const size_t requested,
bool is_executable) {
// VirtualAlloc rounds allocated size to page size automatically.
size_t msize = RoundUp(requested, static_cast<int>(GetPageSize()));
- void* address = 0;
 // Windows XP SP2 allows Data Execution Prevention (DEP).
int prot = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
@@ -938,12 +967,8 @@ void OS::Sleep(int milliseconds) {
void OS::Abort() {
if (!IsDebuggerPresent()) {
-#ifdef _MSC_VER
// Make the MSVCRT do a silent abort.
- _set_abort_behavior(0, _WRITE_ABORT_MSG);
- _set_abort_behavior(0, _CALL_REPORTFAULT);
-#endif // _MSC_VER
- abort();
+ raise(SIGABRT);
} else {
DebugBreak();
}
diff --git a/deps/v8/src/platform.h b/deps/v8/src/platform.h
index 38e633a3808..ccb4109b22b 100644
--- a/deps/v8/src/platform.h
+++ b/deps/v8/src/platform.h
@@ -96,6 +96,12 @@ class Mutex;
double ceiling(double x);
double modulo(double x, double y);
+// Custom implementation of sin, cos, tan and log.
+double fast_sin(double input);
+double fast_cos(double input);
+double fast_tan(double input);
+double fast_log(double input);
+
// Forward declarations.
class Socket;
diff --git a/deps/v8/src/prettyprinter.cc b/deps/v8/src/prettyprinter.cc
index f3ec75adcff..0d8dadce1a0 100644
--- a/deps/v8/src/prettyprinter.cc
+++ b/deps/v8/src/prettyprinter.cc
@@ -61,10 +61,15 @@ void PrettyPrinter::VisitBlock(Block* node) {
void PrettyPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
Print("var ");
PrintLiteral(node->proxy()->name(), false);
- if (node->fun() != NULL) {
- Print(" = ");
- PrintFunctionLiteral(node->fun());
- }
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitFunctionDeclaration(FunctionDeclaration* node) {
+ Print("function ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" = ");
+ PrintFunctionLiteral(node->fun());
Print(";");
}
@@ -78,6 +83,22 @@ void PrettyPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
}
+void PrettyPrinter::VisitImportDeclaration(ImportDeclaration* node) {
+ Print("import ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" from ");
+ Visit(node->module());
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitExportDeclaration(ExportDeclaration* node) {
+ Print("export ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(";");
+}
+
+
void PrettyPrinter::VisitModuleLiteral(ModuleLiteral* node) {
VisitBlock(node->body());
}
@@ -744,19 +765,18 @@ void AstPrinter::VisitBlock(Block* node) {
void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
- if (node->fun() == NULL) {
- // var or const declarations
- PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
- node->proxy()->var(),
- node->proxy()->name());
- } else {
- // function declarations
- PrintIndented("FUNCTION ");
- PrintLiteral(node->proxy()->name(), true);
- Print(" = function ");
- PrintLiteral(node->fun()->name(), false);
- Print("\n");
- }
+ PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
+ node->proxy()->var(),
+ node->proxy()->name());
+}
+
+
+void AstPrinter::VisitFunctionDeclaration(FunctionDeclaration* node) {
+ PrintIndented("FUNCTION ");
+ PrintLiteral(node->proxy()->name(), true);
+ Print(" = function ");
+ PrintLiteral(node->fun()->name(), false);
+ Print("\n");
}
@@ -767,6 +787,19 @@ void AstPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
}
+void AstPrinter::VisitImportDeclaration(ImportDeclaration* node) {
+ IndentedScope indent(this, "IMPORT");
+ PrintLiteralIndented("NAME", node->proxy()->name(), true);
+ Visit(node->module());
+}
+
+
+void AstPrinter::VisitExportDeclaration(ExportDeclaration* node) {
+ IndentedScope indent(this, "EXPORT ");
+ PrintLiteral(node->proxy()->name(), true);
+}
+
+
void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
VisitBlock(node->body());
}
diff --git a/deps/v8/src/profile-generator-inl.h b/deps/v8/src/profile-generator-inl.h
index 7a70b013bb2..d967ed3897a 100644
--- a/deps/v8/src/profile-generator-inl.h
+++ b/deps/v8/src/profile-generator-inl.h
@@ -95,7 +95,7 @@ CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
}
-uint64_t HeapObjectsMap::GetNthGcSubrootId(int delta) {
+SnapshotObjectId HeapObjectsMap::GetNthGcSubrootId(int delta) {
return kGcRootsFirstSubrootId + delta * kObjectIdStep;
}
@@ -115,10 +115,10 @@ int V8HeapExplorer::GetGcSubrootOrder(HeapObject* subroot) {
}
-uint64_t HeapEntry::id() {
+SnapshotObjectId HeapEntry::id() {
union {
Id stored_id;
- uint64_t returned_id;
+ SnapshotObjectId returned_id;
} id_adaptor = {id_};
return id_adaptor.returned_id;
}
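The id() accessor above reads a packed bitfield back out as a single integer by overlaying it with a union. A generic sketch of that adaptor pattern follows; the PackedId layout is hypothetical, since HeapEntry's real Id fields are not shown in this hunk, and reading the inactive union member is a GCC/Clang extension in C++, which is what the code relies on.

#include <cstdint>

struct PackedId {            // hypothetical layout, not HeapEntry::Id
  uint32_t low  : 16;
  uint32_t high : 16;
};

static uint32_t AsSnapshotObjectId(PackedId packed) {
  union {
    PackedId stored_id;
    uint32_t returned_id;    // same size as the bitfield struct
  } adaptor = {packed};
  return adaptor.returned_id;  // one load instead of shifting and or-ing fields
}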
diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc
index 156fbc7b63c..ca975652afe 100644
--- a/deps/v8/src/profile-generator.cc
+++ b/deps/v8/src/profile-generator.cc
@@ -965,13 +965,13 @@ HeapEntry* HeapGraphEdge::From() {
void HeapEntry::Init(HeapSnapshot* snapshot,
Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int self_size,
int children_count,
int retainers_count) {
snapshot_ = snapshot;
type_ = type;
- painted_ = kUnpainted;
+ painted_ = false;
name_ = name;
self_size_ = self_size;
retained_size_ = 0;
@@ -980,7 +980,7 @@ void HeapEntry::Init(HeapSnapshot* snapshot,
dominator_ = NULL;
union {
- uint64_t set_id;
+ SnapshotObjectId set_id;
Id stored_id;
} id_adaptor = {id};
id_ = id_adaptor.stored_id;
@@ -992,8 +992,8 @@ void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
const char* name,
HeapEntry* entry,
int retainer_index) {
- children_arr()[child_index].Init(child_index, type, name, entry);
- entry->retainers_arr()[retainer_index] = children_arr() + child_index;
+ children()[child_index].Init(child_index, type, name, entry);
+ entry->retainers()[retainer_index] = children_arr() + child_index;
}
@@ -1002,22 +1002,14 @@ void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
int index,
HeapEntry* entry,
int retainer_index) {
- children_arr()[child_index].Init(child_index, type, index, entry);
- entry->retainers_arr()[retainer_index] = children_arr() + child_index;
+ children()[child_index].Init(child_index, type, index, entry);
+ entry->retainers()[retainer_index] = children_arr() + child_index;
}
void HeapEntry::SetUnidirElementReference(
int child_index, int index, HeapEntry* entry) {
- children_arr()[child_index].Init(child_index, index, entry);
-}
-
-
-int HeapEntry::RetainedSize(bool exact) {
- if (exact && (retained_size_ & kExactRetainedSizeTag) == 0) {
- CalculateExactRetainedSize();
- }
- return retained_size_ & (~kExactRetainedSizeTag);
+ children()[child_index].Init(child_index, index, entry);
}
@@ -1026,43 +1018,10 @@ Handle<HeapObject> HeapEntry::GetHeapObject() {
}
-template<class Visitor>
-void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) {
- List<HeapEntry*> list(10);
- list.Add(this);
- this->paint_reachable();
- visitor->Apply(this);
- while (!list.is_empty()) {
- HeapEntry* entry = list.RemoveLast();
- Vector<HeapGraphEdge> children = entry->children();
- for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* child = children[i].to();
- if (!child->painted_reachable()) {
- list.Add(child);
- child->paint_reachable();
- visitor->Apply(child);
- }
- }
- }
-}
-
-
-class NullClass {
- public:
- void Apply(HeapEntry* entry) { }
-};
-
-void HeapEntry::PaintAllReachable() {
- NullClass null;
- ApplyAndPaintAllReachable(&null);
-}
-
-
void HeapEntry::Print(
const char* prefix, const char* edge_name, int max_depth, int indent) {
OS::Print("%6d %7d @%6llu %*c %s%s: ",
- self_size(), RetainedSize(false), id(),
+ self_size(), retained_size(), id(),
indent, ' ', prefix, edge_name);
if (type() != kString) {
OS::Print("%s %.40s\n", TypeAsString(), name_);
@@ -1137,69 +1096,15 @@ const char* HeapEntry::TypeAsString() {
}
-int HeapEntry::EntriesSize(int entries_count,
- int children_count,
- int retainers_count) {
+size_t HeapEntry::EntriesSize(int entries_count,
+ int children_count,
+ int retainers_count) {
return sizeof(HeapEntry) * entries_count // NOLINT
+ sizeof(HeapGraphEdge) * children_count // NOLINT
+ sizeof(HeapGraphEdge*) * retainers_count; // NOLINT
}
-class RetainedSizeCalculator {
- public:
- RetainedSizeCalculator()
- : retained_size_(0) {
- }
-
- int retained_size() const { return retained_size_; }
-
- void Apply(HeapEntry** entry_ptr) {
- if ((*entry_ptr)->painted_reachable()) {
- retained_size_ += (*entry_ptr)->self_size();
- }
- }
-
- private:
- int retained_size_;
-};
-
-
-void HeapEntry::CalculateExactRetainedSize() {
- // To calculate retained size, first we paint all reachable nodes in
- // one color, then we paint (or re-paint) all nodes reachable from
- // other nodes with a different color. Then we sum up self sizes of
- // nodes painted with the first color.
- snapshot()->ClearPaint();
- PaintAllReachable();
-
- List<HeapEntry*> list(10);
- HeapEntry* root = snapshot()->root();
- if (this != root) {
- list.Add(root);
- root->paint_reachable_from_others();
- }
- while (!list.is_empty()) {
- HeapEntry* curr = list.RemoveLast();
- Vector<HeapGraphEdge> children = curr->children();
- for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* child = children[i].to();
- if (child != this && child->not_painted_reachable_from_others()) {
- list.Add(child);
- child->paint_reachable_from_others();
- }
- }
- }
-
- RetainedSizeCalculator ret_size_calc;
- snapshot()->IterateEntries(&ret_size_calc);
- retained_size_ = ret_size_calc.retained_size();
- ASSERT((retained_size_ & kExactRetainedSizeTag) == 0);
- retained_size_ |= kExactRetainedSizeTag;
-}
-
-
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace { // Avoid littering the global namespace.
@@ -1209,13 +1114,14 @@ template <size_t ptr_size> struct SnapshotSizeConstants;
template <> struct SnapshotSizeConstants<4> {
static const int kExpectedHeapGraphEdgeSize = 12;
static const int kExpectedHeapEntrySize = 36;
- static const int kMaxSerializableSnapshotRawSize = 256 * MB;
+ static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};
template <> struct SnapshotSizeConstants<8> {
static const int kExpectedHeapGraphEdgeSize = 24;
static const int kExpectedHeapEntrySize = 48;
- static const int kMaxSerializableSnapshotRawSize = 768 * MB;
+ static const uint64_t kMaxSerializableSnapshotRawSize =
+ static_cast<uint64_t>(6000) * MB;
};
} // namespace
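The bump from 768 MB to 6000 MB on 64-bit targets forces the wider type: with MB = 1024 * 1024, 6000 * MB = 6,291,456,000, which exceeds both INT32_MAX (2,147,483,647) and UINT32_MAX (4,294,967,295), while the old 768 * MB = 805,306,368 still fit in a signed 32-bit int. A standalone compile-time check of that arithmetic (kMB is a local constant for the sketch):

#include <cstdint>

constexpr uint64_t kMB = 1024 * 1024;
static_assert(768 * kMB <= INT32_MAX,
              "the old limit fits in a signed 32-bit int");
static_assert(6000 * kMB > UINT32_MAX,
              "the new limit needs a 64-bit type");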
@@ -1316,7 +1222,7 @@ HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag,
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int size,
int children_count,
int retainers_count) {
@@ -1348,7 +1254,7 @@ HeapEntry* HeapSnapshot::GetNextEntryToInit() {
}
-HeapEntry* HeapSnapshot::GetEntryById(uint64_t id) {
+HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
// Perform a binary search by id.
@@ -1357,7 +1263,7 @@ HeapEntry* HeapSnapshot::GetEntryById(uint64_t id) {
while (low <= high) {
int mid =
(static_cast<unsigned int>(low) + static_cast<unsigned int>(high)) >> 1;
- uint64_t mid_id = entries_by_id->at(mid)->id();
+ SnapshotObjectId mid_id = entries_by_id->at(mid)->id();
if (mid_id > id)
high = mid - 1;
else if (mid_id < id)
@@ -1393,12 +1299,12 @@ void HeapSnapshot::Print(int max_depth) {
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
-const uint64_t HeapObjectsMap::kInternalRootObjectId = 1;
-const uint64_t HeapObjectsMap::kGcRootsObjectId =
+const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
+const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
-const uint64_t HeapObjectsMap::kGcRootsFirstSubrootId =
+const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
-const uint64_t HeapObjectsMap::kFirstAvailableObjectId =
+const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
HeapObjectsMap::kGcRootsFirstSubrootId +
VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
@@ -1420,12 +1326,12 @@ void HeapObjectsMap::SnapshotGenerationFinished() {
}
-uint64_t HeapObjectsMap::FindObject(Address addr) {
+SnapshotObjectId HeapObjectsMap::FindObject(Address addr) {
if (!initial_fill_mode_) {
- uint64_t existing = FindEntry(addr);
+ SnapshotObjectId existing = FindEntry(addr);
if (existing != 0) return existing;
}
- uint64_t id = next_id_;
+ SnapshotObjectId id = next_id_;
next_id_ += kObjectIdStep;
AddEntry(addr, id);
return id;
@@ -1448,7 +1354,7 @@ void HeapObjectsMap::MoveObject(Address from, Address to) {
}
-void HeapObjectsMap::AddEntry(Address addr, uint64_t id) {
+void HeapObjectsMap::AddEntry(Address addr, SnapshotObjectId id) {
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
ASSERT(entry->value == NULL);
entry->value = reinterpret_cast<void*>(entries_->length());
@@ -1456,7 +1362,7 @@ void HeapObjectsMap::AddEntry(Address addr, uint64_t id) {
}
-uint64_t HeapObjectsMap::FindEntry(Address addr) {
+SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
if (entry != NULL) {
int entry_index =
@@ -1496,8 +1402,8 @@ void HeapObjectsMap::RemoveDeadEntries() {
}
-uint64_t HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
- uint64_t id = static_cast<uint64_t>(info->GetHash());
+SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
+ SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
const char* label = info->GetLabel();
id ^= HashSequentialString(label,
static_cast<int>(strlen(label)),
@@ -1567,7 +1473,8 @@ void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
}
-Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(uint64_t id) {
+Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
+ SnapshotObjectId id) {
// First perform a full GC in order to avoid dead objects.
HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
"HeapSnapshotsCollection::FindHeapObjectById");
@@ -1766,18 +1673,6 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
GetGcSubrootOrder(object),
children_count,
retainers_count);
- } else if (object->IsJSGlobalObject()) {
- const char* tag = objects_tags_.GetTag(object);
- const char* name = collection_->names()->GetName(
- GetConstructorName(JSObject::cast(object)));
- if (tag != NULL) {
- name = collection_->names()->GetFormatted("%s / %s", name, tag);
- }
- return AddEntry(object,
- HeapEntry::kObject,
- name,
- children_count,
- retainers_count);
} else if (object->IsJSFunction()) {
JSFunction* func = JSFunction::cast(object);
SharedFunctionInfo* shared = func->shared();
@@ -1798,8 +1693,7 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
} else if (object->IsJSObject()) {
return AddEntry(object,
HeapEntry::kObject,
- collection_->names()->GetName(
- GetConstructorName(JSObject::cast(object))),
+ "",
children_count,
retainers_count);
} else if (object->IsString()) {
@@ -2444,6 +2338,32 @@ bool V8HeapExplorer::IterateAndExtractReferences(
}
+bool V8HeapExplorer::IterateAndSetObjectNames(SnapshotFillerInterface* filler) {
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+ filler_ = filler;
+ for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
+ SetObjectName(obj);
+ }
+ return true;
+}
+
+
+void V8HeapExplorer::SetObjectName(HeapObject* object) {
+ if (!object->IsJSObject() || object->IsJSRegExp() || object->IsJSFunction()) {
+ return;
+ }
+ const char* name = collection_->names()->GetName(
+ GetConstructorName(JSObject::cast(object)));
+ if (object->IsJSGlobalObject()) {
+ const char* tag = objects_tags_.GetTag(object);
+ if (tag != NULL) {
+ name = collection_->names()->GetFormatted("%s / %s", name, tag);
+ }
+ }
+ GetEntry(object)->set_name(name);
+}
+
+
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
HeapEntry* parent_entry,
String* reference_name,
@@ -3002,15 +2922,6 @@ void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
}
-HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
- v8::ActivityControl* control)
- : snapshot_(snapshot),
- control_(control),
- v8_heap_explorer_(snapshot_, this),
- dom_explorer_(snapshot_, this) {
-}
-
-
class SnapshotCounter : public SnapshotFillerInterface {
public:
explicit SnapshotCounter(HeapEntriesMap* entries) : entries_(entries) { }
@@ -3135,6 +3046,15 @@ class SnapshotFiller : public SnapshotFillerInterface {
};
+HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
+ v8::ActivityControl* control)
+ : snapshot_(snapshot),
+ control_(control),
+ v8_heap_explorer_(snapshot_, this),
+ dom_explorer_(snapshot_, this) {
+}
+
+
bool HeapSnapshotGenerator::GenerateSnapshot() {
v8_heap_explorer_.TagGlobalObjects();
@@ -3179,17 +3099,19 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
debug_heap->Verify();
#endif
- // Allocate and fill entries in the snapshot, allocate references.
+ // Allocate memory for entries and references.
snapshot_->AllocateEntries(entries_.entries_count(),
entries_.total_children_count(),
entries_.total_retainers_count());
+
+  // Allocate the heap-objects-to-entries hash map.
entries_.AllocateEntries();
// Pass 2. Fill references.
if (!FillReferences()) return false;
if (!SetEntriesDominators()) return false;
- if (!ApproximateRetainedSizes()) return false;
+ if (!CalculateRetainedSizes()) return false;
progress_counter_ = progress_total_;
if (!ProgressReport(true)) return false;
@@ -3227,17 +3149,22 @@ void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
bool HeapSnapshotGenerator::CountEntriesAndReferences() {
SnapshotCounter counter(&entries_);
v8_heap_explorer_.AddRootEntries(&counter);
- return
- v8_heap_explorer_.IterateAndExtractReferences(&counter) &&
- dom_explorer_.IterateAndExtractReferences(&counter);
+ return v8_heap_explorer_.IterateAndExtractReferences(&counter)
+ && dom_explorer_.IterateAndExtractReferences(&counter);
}
bool HeapSnapshotGenerator::FillReferences() {
SnapshotFiller filler(snapshot_, &entries_);
- return
- v8_heap_explorer_.IterateAndExtractReferences(&filler) &&
- dom_explorer_.IterateAndExtractReferences(&filler);
+ // IterateAndExtractReferences cannot set object names because
+  // it makes a call to JSObject::LocalLookupRealNamedProperty, which
+  // in turn may relocate objects in property maps, thus changing the heap
+  // layout and affecting retainer counts. This is not acceptable because the
+  // number of retainers must not change between the count and fill passes.
+  // To avoid this, there is a separate postpass that sets object names.
+ return v8_heap_explorer_.IterateAndExtractReferences(&filler)
+ && dom_explorer_.IterateAndExtractReferences(&filler)
+ && v8_heap_explorer_.IterateAndSetObjectNames(&filler);
}
@@ -3247,7 +3174,7 @@ void HeapSnapshotGenerator::FillReversePostorderIndexes(
int current_entry = 0;
List<HeapEntry*> nodes_to_visit;
nodes_to_visit.Add(snapshot_->root());
- snapshot_->root()->paint_reachable();
+ snapshot_->root()->paint();
while (!nodes_to_visit.is_empty()) {
HeapEntry* entry = nodes_to_visit.last();
Vector<HeapGraphEdge> children = entry->children();
@@ -3255,9 +3182,9 @@ void HeapSnapshotGenerator::FillReversePostorderIndexes(
for (int i = 0; i < children.length(); ++i) {
if (children[i].type() == HeapGraphEdge::kShortcut) continue;
HeapEntry* child = children[i].to();
- if (!child->painted_reachable()) {
+ if (!child->painted()) {
nodes_to_visit.Add(child);
- child->paint_reachable();
+ child->paint();
has_new_edges = true;
}
}
@@ -3293,25 +3220,26 @@ bool HeapSnapshotGenerator::BuildDominatorTree(
for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
(*dominators)[root_index] = root_index;
- // The affected array is used to mark those entries that may
- // be affected because of dominators change among their retainers.
+  // The affected array is used to mark entries whose dominators
+  // have to be recalculated because of changes in their retainers.
ScopedVector<bool> affected(entries_length);
- for (int i = 0; i < entries_length; ++i) affected[i] = false;
+ for (int i = 0; i < affected.length(); ++i) affected[i] = false;
+ // Mark the root direct children as affected.
Vector<HeapGraphEdge> children = entries[root_index]->children();
for (int i = 0; i < children.length(); ++i) {
- // Mark the root direct children as affected.
affected[children[i].to()->ordered_index()] = true;
}
bool changed = true;
while (changed) {
changed = false;
+ if (!ProgressReport(true)) return false;
for (int i = root_index - 1; i >= 0; --i) {
+ if (!affected[i]) continue;
+ affected[i] = false;
// If dominator of the entry has already been set to root,
// then it can't propagate any further.
if ((*dominators)[i] == root_index) continue;
- if (!affected[i]) continue;
- affected[i] = false;
int new_idom_index = kNoDominator;
Vector<HeapGraphEdge*> rets = entries[i]->retainers();
for (int j = 0; j < rets.length(); ++j) {
@@ -3355,7 +3283,7 @@ bool HeapSnapshotGenerator::SetEntriesDominators() {
}
-bool HeapSnapshotGenerator::ApproximateRetainedSizes() {
+bool HeapSnapshotGenerator::CalculateRetainedSizes() {
 // In the dominators tree we only know parent nodes, not children,
 // so to sum up total sizes we "bubble" a node's self size,
 // adding it to all of its parents.
@@ -3510,8 +3438,8 @@ HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
HeapEntry* root = result->AddRootEntry(1);
const char* text = snapshot_->collection()->names()->GetFormatted(
"The snapshot is too big. "
- "Maximum snapshot size is %d MB. "
- "Actual snapshot size is %d MB.",
+ "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
+ "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
(snapshot_->raw_entries_size() + MB - 1) / MB);
HeapEntry* message = result->AddEntry(
@@ -3588,6 +3516,9 @@ void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
|| edge->type() == HeapGraphEdge::kHidden
|| edge->type() == HeapGraphEdge::kWeak
? edge->index() : GetStringId(edge->name());
+ STATIC_CHECK(sizeof(int) == sizeof(edge->type())); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(edge_name_or_index)); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(edge->to()))); // NOLINT
int result = OS::SNPrintF(buffer, ",%d,%d,%d",
edge->type(), edge_name_or_index, GetNodeId(edge->to()));
USE(result);
@@ -3597,17 +3528,26 @@ void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
- // The buffer needs space for 7 ints, 7 commas, \n and \0
+ // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
static const int kBufferSize =
- MaxDecimalDigitsIn<sizeof(int)>::kSigned * 7 + 7 + 1 + 1; // NOLINT
+ 6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned // NOLINT
+ + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned // NOLINT
+ + 7 + 1 + 1;
EmbeddedVector<char, kBufferSize> buffer;
Vector<HeapGraphEdge> children = entry->children();
- int result = OS::SNPrintF(buffer, "\n,%d,%d,%d,%d,%d,%d,%d",
+ STATIC_CHECK(sizeof(int) == sizeof(entry->type())); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(GetStringId(entry->name()))); // NOLINT
+ STATIC_CHECK(sizeof(unsigned) == sizeof(entry->id())); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(entry->self_size())); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(entry->retained_size())); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(entry->dominator()))); // NOLINT
+ STATIC_CHECK(sizeof(int) == sizeof(children.length())); // NOLINT
+ int result = OS::SNPrintF(buffer, "\n,%d,%d,%u,%d,%d,%d,%d",
entry->type(),
GetStringId(entry->name()),
entry->id(),
entry->self_size(),
- entry->RetainedSize(false),
+ entry->retained_size(),
GetNodeId(entry->dominator()),
children.length());
USE(result);
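
The new buffer comment accounts for six signed ints plus one uint32_t id, and the STATIC_CHECKs pin the argument widths that the %d/%u conversions assume. A hedged sketch of the same sizing arithmetic; MaxSignedDigits/MaxUnsignedDigits are stand-ins for illustration, not V8's MaxDecimalDigitsIn:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Worst-case decimal width for a signed/unsigned integer of the given
    // byte size (the signed count includes the minus sign).
    constexpr int MaxSignedDigits(size_t bytes) {
      return bytes == 4 ? 11 : 20;   // e.g. -2147483648 -> 11 characters
    }
    constexpr int MaxUnsignedDigits(size_t bytes) {
      return bytes == 4 ? 10 : 20;   // e.g. 4294967295 -> 10 characters
    }

    // 6 ints + 1 uint32_t + 7 commas + '\n' + '\0', matching the comment.
    constexpr int kNodeBufferSize =
        6 * MaxSignedDigits(sizeof(int)) +
        MaxUnsignedDigits(sizeof(uint32_t)) + 7 + 1 + 1;

    int WriteNode(char (&buffer)[kNodeBufferSize],
                  int type, int name_id, uint32_t id, int self_size,
                  int retained_size, int dominator_id, int children) {
      return std::snprintf(buffer, sizeof(buffer), "\n,%d,%d,%u,%d,%d,%d,%d",
                           type, name_id, id, self_size, retained_size,
                           dominator_id, children);
    }
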
diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h
index 13c6b2db0b6..fadae7e28d7 100644
--- a/deps/v8/src/profile-generator.h
+++ b/deps/v8/src/profile-generator.h
@@ -35,6 +35,8 @@
namespace v8 {
namespace internal {
+typedef uint32_t SnapshotObjectId;
+
class TokenEnumerator {
public:
TokenEnumerator();
@@ -533,7 +535,7 @@ class HeapEntry BASE_EMBEDDED {
void Init(HeapSnapshot* snapshot,
Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int self_size,
int children_count,
int retainers_count);
@@ -541,7 +543,8 @@ class HeapEntry BASE_EMBEDDED {
HeapSnapshot* snapshot() { return snapshot_; }
Type type() { return static_cast<Type>(type_); }
const char* name() { return name_; }
- inline uint64_t id();
+ void set_name(const char* name) { name_ = name; }
+ inline SnapshotObjectId id();
int self_size() { return self_size_; }
int retained_size() { return retained_size_; }
void add_retained_size(int size) { retained_size_ += size; }
@@ -558,22 +561,9 @@ class HeapEntry BASE_EMBEDDED {
ASSERT(entry != NULL);
dominator_ = entry;
}
-
- void clear_paint() { painted_ = kUnpainted; }
- bool painted_reachable() { return painted_ == kPainted; }
- void paint_reachable() {
- ASSERT(painted_ == kUnpainted);
- painted_ = kPainted;
- }
- bool not_painted_reachable_from_others() {
- return painted_ != kPaintedReachableFromOthers;
- }
- void paint_reachable_from_others() {
- painted_ = kPaintedReachableFromOthers;
- }
- template<class Visitor>
- void ApplyAndPaintAllReachable(Visitor* visitor);
- void PaintAllReachable();
+ void clear_paint() { painted_ = false; }
+ bool painted() { return painted_; }
+ void paint() { painted_ = true; }
void SetIndexedReference(HeapGraphEdge::Type type,
int child_index,
@@ -587,17 +577,18 @@ class HeapEntry BASE_EMBEDDED {
int retainer_index);
void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);
- int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
- int RetainedSize(bool exact);
+ size_t EntrySize() {
+ return EntriesSize(1, children_count_, retainers_count_);
+ }
void Print(
const char* prefix, const char* edge_name, int max_depth, int indent);
Handle<HeapObject> GetHeapObject();
- static int EntriesSize(int entries_count,
- int children_count,
- int retainers_count);
+ static size_t EntriesSize(int entries_count,
+ int children_count,
+ int retainers_count);
private:
HeapGraphEdge* children_arr() {
@@ -606,12 +597,11 @@ class HeapEntry BASE_EMBEDDED {
HeapGraphEdge** retainers_arr() {
return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
}
- void CalculateExactRetainedSize();
const char* TypeAsString();
- unsigned painted_: 2;
+ unsigned painted_: 1;
unsigned type_: 4;
- int children_count_: 26;
+ int children_count_: 27;
int retainers_count_;
int self_size_;
union {
@@ -626,13 +616,6 @@ class HeapEntry BASE_EMBEDDED {
} id_; // This is to avoid extra padding of 64-bit value.
const char* name_;
- // Paints used for exact retained sizes calculation.
- static const unsigned kUnpainted = 0;
- static const unsigned kPainted = 1;
- static const unsigned kPaintedReachableFromOthers = 2;
-
- static const int kExactRetainedSizeTag = 1;
-
DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};
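
The header hunks above shrink painted_ to a single bit and hand the freed bit to children_count_, so the flag, the 4-bit type and the 27-bit child count still share one 32-bit word. A standalone sketch of an equivalent packed layout; the field names are illustrative, the static_assert documents the intent, and bit-field packing is ultimately implementation-defined even though it holds on the ABIs V8 targets:

    #include <cstdint>

    // Packs a one-bit paint mark, a 4-bit type tag and a 27-bit child count
    // into one 32-bit word, mirroring the bit-field change above.
    struct PackedEntryHeader {
      unsigned painted : 1;        // was 2 bits when three paint states existed
      unsigned type : 4;
      int children_count : 27;     // gained the bit freed by the paint mark
    };

    static_assert(sizeof(PackedEntryHeader) == sizeof(uint32_t),
                  "the three fields are expected to share one 32-bit word");
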
@@ -666,13 +649,13 @@ class HeapSnapshot {
HeapEntry* natives_root() { return natives_root_entry_; }
HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; }
List<HeapEntry*>* entries() { return &entries_; }
- int raw_entries_size() { return raw_entries_size_; }
+ size_t raw_entries_size() { return raw_entries_size_; }
void AllocateEntries(
int entries_count, int children_count, int retainers_count);
HeapEntry* AddEntry(HeapEntry::Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int size,
int children_count,
int retainers_count);
@@ -683,7 +666,7 @@ class HeapSnapshot {
int retainers_count);
HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
void ClearPaint();
- HeapEntry* GetEntryById(uint64_t id);
+ HeapEntry* GetEntryById(SnapshotObjectId id);
List<HeapEntry*>* GetSortedEntriesList();
template<class Visitor>
void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
@@ -706,7 +689,7 @@ class HeapSnapshot {
char* raw_entries_;
List<HeapEntry*> entries_;
bool entries_sorted_;
- int raw_entries_size_;
+ size_t raw_entries_size_;
friend class HeapSnapshotTester;
@@ -720,29 +703,31 @@ class HeapObjectsMap {
~HeapObjectsMap();
void SnapshotGenerationFinished();
- uint64_t FindObject(Address addr);
+ SnapshotObjectId FindObject(Address addr);
void MoveObject(Address from, Address to);
- static uint64_t GenerateId(v8::RetainedObjectInfo* info);
- static inline uint64_t GetNthGcSubrootId(int delta);
+ static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
+ static inline SnapshotObjectId GetNthGcSubrootId(int delta);
static const int kObjectIdStep = 2;
- static const uint64_t kInternalRootObjectId;
- static const uint64_t kGcRootsObjectId;
- static const uint64_t kNativesRootObjectId;
- static const uint64_t kGcRootsFirstSubrootId;
- static const uint64_t kFirstAvailableObjectId;
+ static const SnapshotObjectId kInternalRootObjectId;
+ static const SnapshotObjectId kGcRootsObjectId;
+ static const SnapshotObjectId kNativesRootObjectId;
+ static const SnapshotObjectId kGcRootsFirstSubrootId;
+ static const SnapshotObjectId kFirstAvailableObjectId;
private:
struct EntryInfo {
- explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
- EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
- uint64_t id;
+ explicit EntryInfo(SnapshotObjectId id) : id(id), accessed(true) { }
+ EntryInfo(SnapshotObjectId id, bool accessed)
+ : id(id),
+ accessed(accessed) { }
+ SnapshotObjectId id;
bool accessed;
};
- void AddEntry(Address addr, uint64_t id);
- uint64_t FindEntry(Address addr);
+ void AddEntry(Address addr, SnapshotObjectId id);
+ SnapshotObjectId FindEntry(Address addr);
void RemoveDeadEntries();
static bool AddressesMatch(void* key1, void* key2) {
@@ -756,7 +741,7 @@ class HeapObjectsMap {
}
bool initial_fill_mode_;
- uint64_t next_id_;
+ SnapshotObjectId next_id_;
HashMap entries_map_;
List<EntryInfo>* entries_;
@@ -781,8 +766,8 @@ class HeapSnapshotsCollection {
StringsStorage* names() { return &names_; }
TokenEnumerator* token_enumerator() { return token_enumerator_; }
- uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
- Handle<HeapObject> FindHeapObjectById(uint64_t id);
+ SnapshotObjectId GetObjectId(Address addr) { return ids_.FindObject(addr); }
+ Handle<HeapObject> FindHeapObjectById(SnapshotObjectId id);
void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
private:
@@ -940,6 +925,7 @@ class V8HeapExplorer : public HeapEntriesAllocator {
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount(HeapIterator* iterator);
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
+ bool IterateAndSetObjectNames(SnapshotFillerInterface* filler);
void TagGlobalObjects();
static String* GetConstructorName(JSObject* object);
@@ -1006,6 +992,7 @@ class V8HeapExplorer : public HeapEntriesAllocator {
void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
void SetGcSubrootReference(
VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
+ void SetObjectName(HeapObject* object);
void TagObject(Object* obj, const char* tag);
HeapEntry* GetEntry(Object* obj);
@@ -1099,9 +1086,9 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
bool GenerateSnapshot();
private:
- bool ApproximateRetainedSizes();
bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
Vector<int>* dominators);
+ bool CalculateRetainedSizes();
bool CountEntriesAndReferences();
bool FillReferences();
void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
diff --git a/deps/v8/src/property-details.h b/deps/v8/src/property-details.h
index 81f521a627a..c79aa969d34 100644
--- a/deps/v8/src/property-details.h
+++ b/deps/v8/src/property-details.h
@@ -73,26 +73,6 @@ enum PropertyType {
};
-inline bool IsRealProperty(PropertyType type) {
- switch (type) {
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case CALLBACKS:
- case HANDLER:
- case INTERCEPTOR:
- return true;
- case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
- case CONSTANT_TRANSITION:
- case NULL_DESCRIPTOR:
- return false;
- }
- UNREACHABLE(); // keep the compiler happy
- return false;
-}
-
-
// PropertyDetails captures type and attributes for a property.
// They are used both in property dictionaries and instance descriptors.
class PropertyDetails BASE_EMBEDDED {
diff --git a/deps/v8/src/property.h b/deps/v8/src/property.h
index d5efb7f3519..04f78b22d44 100644
--- a/deps/v8/src/property.h
+++ b/deps/v8/src/property.h
@@ -49,11 +49,8 @@ class Descriptor BASE_EMBEDDED {
MUST_USE_RESULT MaybeObject* KeyToSymbol() {
if (!StringShape(key_).IsSymbol()) {
- Object* result;
- { MaybeObject* maybe_result = HEAP->LookupSymbol(key_);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- key_ = String::cast(result);
+ MaybeObject* maybe_result = HEAP->LookupSymbol(key_);
+ if (!maybe_result->To(&key_)) return maybe_result;
}
return key_;
}
@@ -164,6 +161,35 @@ class CallbacksDescriptor: public Descriptor {
};
+template <class T>
+bool IsPropertyDescriptor(T* desc) {
+ switch (desc->type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ return true;
+ case CALLBACKS: {
+ Object* callback_object = desc->GetCallbackObject();
+ // Non-JavaScript (i.e. native) accessors are always a property, otherwise
+ // either the getter or the setter must be an accessor. Put another way:
+ // If we only see map transitions and holes in a pair, this is not a
+ // property.
+ return (!callback_object->IsAccessorPair() ||
+ AccessorPair::cast(callback_object)->ContainsAccessor());
+ }
+ case MAP_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // keep the compiler happy
+ return false;
+}
+
+
class LookupResult BASE_EMBEDDED {
public:
explicit LookupResult(Isolate* isolate)
@@ -261,10 +287,9 @@ class LookupResult BASE_EMBEDDED {
bool IsFound() { return lookup_type_ != NOT_FOUND; }
bool IsHandler() { return lookup_type_ == HANDLER_TYPE; }
- // Is the result is a property excluding transitions and the null
- // descriptor?
+ // Is the result a property, excluding transitions and the null descriptor?
bool IsProperty() {
- return IsFound() && IsRealProperty(GetPropertyDetails().type());
+ return IsFound() && IsPropertyDescriptor(this);
}
bool IsCacheable() { return cacheable_; }
diff --git a/deps/v8/src/regexp.js b/deps/v8/src/regexp.js
index b724f681835..ace0be15645 100644
--- a/deps/v8/src/regexp.js
+++ b/deps/v8/src/regexp.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -421,18 +421,12 @@ function SetUpRegExp() {
LAST_INPUT(lastMatchInfo) = ToString(string);
};
- %DefineOrRedefineAccessorProperty($RegExp, 'input', GETTER, RegExpGetInput,
- DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'input', SETTER, RegExpSetInput,
- DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$_', GETTER, RegExpGetInput,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$_', SETTER, RegExpSetInput,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$input', GETTER, RegExpGetInput,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$input', SETTER, RegExpSetInput,
- DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'input', RegExpGetInput,
+ RegExpSetInput, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$_', RegExpGetInput,
+ RegExpSetInput, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$input', RegExpGetInput,
+ RegExpSetInput, DONT_ENUM | DONT_DELETE);
// The properties multiline and $* are aliases for each other. When this
// value is set in SpiderMonkey, the value it is set to is coerced to a
@@ -446,13 +440,10 @@ function SetUpRegExp() {
var RegExpGetMultiline = function() { return multiline; };
var RegExpSetMultiline = function(flag) { multiline = flag ? true : false; };
- %DefineOrRedefineAccessorProperty($RegExp, 'multiline', GETTER,
- RegExpGetMultiline, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'multiline', SETTER,
+ %DefineOrRedefineAccessorProperty($RegExp, 'multiline', RegExpGetMultiline,
RegExpSetMultiline, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$*', GETTER, RegExpGetMultiline,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$*', SETTER, RegExpSetMultiline,
+ %DefineOrRedefineAccessorProperty($RegExp, '$*', RegExpGetMultiline,
+ RegExpSetMultiline,
DONT_ENUM | DONT_DELETE);
@@ -460,44 +451,28 @@ function SetUpRegExp() {
// Static properties set by a successful match.
- %DefineOrRedefineAccessorProperty($RegExp, 'lastMatch', GETTER,
- RegExpGetLastMatch, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'lastMatch', SETTER, NoOpSetter,
+ %DefineOrRedefineAccessorProperty($RegExp, 'lastMatch', RegExpGetLastMatch,
+ NoOpSetter, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$&', RegExpGetLastMatch,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'lastParen', RegExpGetLastParen,
+ NoOpSetter, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$+', RegExpGetLastParen,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'leftContext',
+ RegExpGetLeftContext, NoOpSetter,
DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$&', GETTER, RegExpGetLastMatch,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$&', SETTER, NoOpSetter,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'lastParen', GETTER,
- RegExpGetLastParen, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'lastParen', SETTER, NoOpSetter,
+ %DefineOrRedefineAccessorProperty($RegExp, '$`', RegExpGetLeftContext,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'rightContext',
+ RegExpGetRightContext, NoOpSetter,
DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$+', GETTER, RegExpGetLastParen,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$+', SETTER, NoOpSetter,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'leftContext', GETTER,
- RegExpGetLeftContext, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'leftContext', SETTER, NoOpSetter,
- DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$`', GETTER, RegExpGetLeftContext,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$`', SETTER, NoOpSetter,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'rightContext', GETTER,
- RegExpGetRightContext, DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, 'rightContext', SETTER, NoOpSetter,
- DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, "$'", GETTER,
- RegExpGetRightContext,
- DONT_ENUM | DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, "$'", SETTER, NoOpSetter,
- DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, "$'", RegExpGetRightContext,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
for (var i = 1; i < 10; ++i) {
- %DefineOrRedefineAccessorProperty($RegExp, '$' + i, GETTER,
- RegExpMakeCaptureGetter(i), DONT_DELETE);
- %DefineOrRedefineAccessorProperty($RegExp, '$' + i, SETTER, NoOpSetter,
+ %DefineOrRedefineAccessorProperty($RegExp, '$' + i,
+ RegExpMakeCaptureGetter(i), NoOpSetter,
DONT_DELETE);
}
}
diff --git a/deps/v8/src/rewriter.cc b/deps/v8/src/rewriter.cc
index 8308792baa8..55f93ee0d71 100644
--- a/deps/v8/src/rewriter.cc
+++ b/deps/v8/src/rewriter.cc
@@ -210,7 +210,10 @@ void Processor::VisitWithStatement(WithStatement* node) {
// Do nothing:
void Processor::VisitVariableDeclaration(VariableDeclaration* node) {}
+void Processor::VisitFunctionDeclaration(FunctionDeclaration* node) {}
void Processor::VisitModuleDeclaration(ModuleDeclaration* node) {}
+void Processor::VisitImportDeclaration(ImportDeclaration* node) {}
+void Processor::VisitExportDeclaration(ExportDeclaration* node) {}
void Processor::VisitModuleLiteral(ModuleLiteral* node) {}
void Processor::VisitModuleVariable(ModuleVariable* node) {}
void Processor::VisitModulePath(ModulePath* node) {}
diff --git a/deps/v8/src/runtime-profiler.cc b/deps/v8/src/runtime-profiler.cc
index 8bd59d1de26..70586aa06b2 100644
--- a/deps/v8/src/runtime-profiler.cc
+++ b/deps/v8/src/runtime-profiler.cc
@@ -257,7 +257,8 @@ void RuntimeProfiler::OptimizeNow() {
}
}
- if (function->IsMarkedForLazyRecompilation()) {
+ if (function->IsMarkedForLazyRecompilation() &&
+ function->shared()->code()->kind() == Code::FUNCTION) {
Code* unoptimized = function->shared()->code();
int nesting = unoptimized->allow_osr_at_loop_nesting_level();
if (nesting == 0) AttemptOnStackReplacement(function);
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index b377e6e54ba..cc5aeab78f5 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -40,6 +40,7 @@
#include "dateparser-inl.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "date.h"
#include "execution.h"
#include "global-handles.h"
#include "isolate-inl.h"
@@ -882,14 +883,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsInPrototypeChain) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_IsConstructCall) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 0);
- JavaScriptFrameIterator it(isolate);
- return isolate->heap()->ToBoolean(it.frame()->IsConstructor());
-}
-
-
// Recursively traverses hidden prototypes if property is not found
static void GetOwnPropertyImplementation(JSObject* obj,
String* name,
@@ -1003,23 +996,14 @@ enum PropertyDescriptorIndices {
DESCRIPTOR_SIZE
};
-// Returns an array with the property description:
-// if args[1] is not a property on args[0]
-// returns undefined
-// if args[1] is a data property on args[0]
-// [false, value, Writeable, Enumerable, Configurable]
-// if args[1] is an accessor on args[0]
-// [true, GetFunction, SetFunction, Enumerable, Configurable]
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
- ASSERT(args.length() == 2);
+
+static MaybeObject* GetOwnProperty(Isolate* isolate,
+ Handle<JSObject> obj,
+ Handle<String> name) {
Heap* heap = isolate->heap();
- HandleScope scope(isolate);
Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE);
Handle<JSArray> desc = isolate->factory()->NewJSArrayWithElements(elms);
LookupResult result(isolate);
- CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
- CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
-
// This could be an element.
uint32_t index;
if (name->AsArrayIndex(&index)) {
@@ -1081,10 +1065,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
AccessorPair::cast(dictionary->ValueAt(entry));
elms->set(IS_ACCESSOR_INDEX, heap->true_value());
if (CheckElementAccess(*obj, index, v8::ACCESS_GET)) {
- elms->set(GETTER_INDEX, accessors->getter());
+ elms->set(GETTER_INDEX, accessors->SafeGet(ACCESSOR_GETTER));
}
if (CheckElementAccess(*obj, index, v8::ACCESS_SET)) {
- elms->set(SETTER_INDEX, accessors->setter());
+ elms->set(SETTER_INDEX, accessors->SafeGet(ACCESSOR_SETTER));
}
break;
}
@@ -1131,10 +1115,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
AccessorPair* accessors = AccessorPair::cast(result.GetCallbackObject());
if (CheckAccess(*obj, *name, &result, v8::ACCESS_GET)) {
- elms->set(GETTER_INDEX, accessors->getter());
+ elms->set(GETTER_INDEX, accessors->SafeGet(ACCESSOR_GETTER));
}
if (CheckAccess(*obj, *name, &result, v8::ACCESS_SET)) {
- elms->set(SETTER_INDEX, accessors->setter());
+ elms->set(SETTER_INDEX, accessors->SafeGet(ACCESSOR_SETTER));
}
} else {
elms->set(IS_ACCESSOR_INDEX, heap->false_value());
@@ -1153,6 +1137,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
}
+// Returns an array with the property description:
+// if args[1] is not a property on args[0]
+// returns undefined
+// if args[1] is a data property on args[0]
+// [false, value, Writable, Enumerable, Configurable]
+// if args[1] is an accessor on args[0]
+// [true, GetFunction, SetFunction, Enumerable, Configurable]
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
+ ASSERT(args.length() == 2);
+ HandleScope scope(isolate);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
+ return GetOwnProperty(isolate, obj, name);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_PreventExtensions) {
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSObject, obj, 0);
@@ -3771,7 +3771,7 @@ static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
int subject_length = subject->length();
bool first = true;
@@ -3844,7 +3844,7 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
RegExpImpl::IrregexpResult result =
@@ -3863,7 +3863,7 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
if (result == RegExpImpl::RE_SUCCESS) {
// Need to keep a copy of the previous match for creating last_match_info
// at the end, so we have two vectors that we swap between.
- OffsetsVector registers2(required_registers);
+ OffsetsVector registers2(required_registers, isolate);
Vector<int> prev_register_vector(registers2.vector(), registers2.length());
bool first = true;
do {
@@ -4315,6 +4315,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
args.at<Object>(1));
}
+
+static bool IsValidAccessor(Handle<Object> obj) {
+ return obj->IsUndefined() || obj->IsSpecFunction() || obj->IsNull();
+}
+
+
// Implements part of 8.12.9 DefineOwnProperty.
// There are 3 cases that lead here:
// Step 4b - define a new accessor property.
@@ -4325,17 +4331,37 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) {
ASSERT(args.length() == 5);
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
- CONVERT_SMI_ARG_CHECKED(flag_setter, 2);
- Object* fun = args[3];
+ RUNTIME_ASSERT(!obj->IsNull());
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, getter, 2);
+ RUNTIME_ASSERT(IsValidAccessor(getter));
+ CONVERT_ARG_HANDLE_CHECKED(Object, setter, 3);
+ RUNTIME_ASSERT(IsValidAccessor(setter));
CONVERT_SMI_ARG_CHECKED(unchecked, 4);
-
RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked);
- RUNTIME_ASSERT(!obj->IsNull());
- RUNTIME_ASSERT(fun->IsSpecFunction() || fun->IsUndefined());
- return obj->DefineAccessor(name, flag_setter == 0, fun, attr);
+ // TODO(svenpanne) Define getter/setter/attributes in a single step.
+ if (getter->IsNull() && setter->IsNull()) {
+ JSArray* array;
+ { MaybeObject* maybe_array = GetOwnProperty(isolate, obj, name);
+ if (!maybe_array->To(&array)) return maybe_array;
+ }
+ Object* current = FixedArray::cast(array->elements())->get(GETTER_INDEX);
+ getter = Handle<Object>(current, isolate);
+ }
+ if (!getter->IsNull()) {
+ MaybeObject* ok =
+ obj->DefineAccessor(*name, ACCESSOR_GETTER, *getter, attr);
+ if (ok->IsFailure()) return ok;
+ }
+ if (!setter->IsNull()) {
+ MaybeObject* ok =
+ obj->DefineAccessor(*name, ACCESSOR_SETTER, *setter, attr);
+ if (ok->IsFailure()) return ok;
+ }
+
+ return isolate->heap()->undefined_value();
}
// Implements part of 8.12.9 DefineOwnProperty.
@@ -4349,9 +4375,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSObject, js_object, 0);
CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
- Handle<Object> obj_value = args.at<Object>(2);
+ CONVERT_ARG_HANDLE_CHECKED(Object, obj_value, 2);
CONVERT_SMI_ARG_CHECKED(unchecked, 3);
-
RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked);
@@ -7518,51 +7543,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_tan) {
}
-static int MakeDay(int year, int month) {
- static const int day_from_month[] = {0, 31, 59, 90, 120, 151,
- 181, 212, 243, 273, 304, 334};
- static const int day_from_month_leap[] = {0, 31, 60, 91, 121, 152,
- 182, 213, 244, 274, 305, 335};
-
- year += month / 12;
- month %= 12;
- if (month < 0) {
- year--;
- month += 12;
- }
-
- ASSERT(month >= 0);
- ASSERT(month < 12);
-
- // year_delta is an arbitrary number such that:
- // a) year_delta = -1 (mod 400)
- // b) year + year_delta > 0 for years in the range defined by
- // ECMA 262 - 15.9.1.1, i.e. upto 100,000,000 days on either side of
- // Jan 1 1970. This is required so that we don't run into integer
- // division of negative numbers.
- // c) there shouldn't be an overflow for 32-bit integers in the following
- // operations.
- static const int year_delta = 399999;
- static const int base_day = 365 * (1970 + year_delta) +
- (1970 + year_delta) / 4 -
- (1970 + year_delta) / 100 +
- (1970 + year_delta) / 400;
-
- int year1 = year + year_delta;
- int day_from_year = 365 * year1 +
- year1 / 4 -
- year1 / 100 +
- year1 / 400 -
- base_day;
-
- if ((year % 4 != 0) || (year % 100 == 0 && year % 400 != 0)) {
- return day_from_year + day_from_month[month];
- }
-
- return day_from_year + day_from_month_leap[month];
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_DateMakeDay) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -7570,319 +7550,44 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateMakeDay) {
CONVERT_SMI_ARG_CHECKED(year, 0);
CONVERT_SMI_ARG_CHECKED(month, 1);
- return Smi::FromInt(MakeDay(year, month));
-}
-
-
-static const int kDays4Years[] = {0, 365, 2 * 365, 3 * 365 + 1};
-static const int kDaysIn4Years = 4 * 365 + 1;
-static const int kDaysIn100Years = 25 * kDaysIn4Years - 1;
-static const int kDaysIn400Years = 4 * kDaysIn100Years + 1;
-static const int kDays1970to2000 = 30 * 365 + 7;
-static const int kDaysOffset = 1000 * kDaysIn400Years + 5 * kDaysIn400Years -
- kDays1970to2000;
-static const int kYearsOffset = 400000;
-
-static const char kDayInYear[] = {
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
-
-static const char kMonthInYear[] = {
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11};
-
-
-// This function works for dates from 1970 to 2099.
-static inline void DateYMDFromTimeAfter1970(int date,
- int& year, int& month, int& day) {
-#ifdef DEBUG
- int save_date = date; // Need this for ASSERT in the end.
-#endif
-
- year = 1970 + (4 * date + 2) / kDaysIn4Years;
- date %= kDaysIn4Years;
-
- month = kMonthInYear[date];
- day = kDayInYear[date];
-
- ASSERT(MakeDay(year, month) + day - 1 == save_date);
+ return Smi::FromInt(isolate->date_cache()->DaysFromYearMonth(year, month));
}
-static inline void DateYMDFromTimeSlow(int date,
- int& year, int& month, int& day) {
-#ifdef DEBUG
- int save_date = date; // Need this for ASSERT in the end.
-#endif
-
- date += kDaysOffset;
- year = 400 * (date / kDaysIn400Years) - kYearsOffset;
- date %= kDaysIn400Years;
-
- ASSERT(MakeDay(year, 0) + date == save_date);
-
- date--;
- int yd1 = date / kDaysIn100Years;
- date %= kDaysIn100Years;
- year += 100 * yd1;
-
- date++;
- int yd2 = date / kDaysIn4Years;
- date %= kDaysIn4Years;
- year += 4 * yd2;
-
- date--;
- int yd3 = date / 365;
- date %= 365;
- year += yd3;
-
- bool is_leap = (!yd1 || yd2) && !yd3;
-
- ASSERT(date >= -1);
- ASSERT(is_leap || (date >= 0));
- ASSERT((date < 365) || (is_leap && (date < 366)));
- ASSERT(is_leap == ((year % 4 == 0) && (year % 100 || (year % 400 == 0))));
- ASSERT(is_leap || ((MakeDay(year, 0) + date) == save_date));
- ASSERT(!is_leap || ((MakeDay(year, 0) + date + 1) == save_date));
-
- if (is_leap) {
- day = kDayInYear[2*365 + 1 + date];
- month = kMonthInYear[2*365 + 1 + date];
- } else {
- day = kDayInYear[date];
- month = kMonthInYear[date];
- }
-
- ASSERT(MakeDay(year, month) + day - 1 == save_date);
-}
-
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DateSetValue) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 3);
-static inline void DateYMDFromTime(int date,
- int& year, int& month, int& day) {
- if (date >= 0 && date < 32 * kDaysIn4Years) {
- DateYMDFromTimeAfter1970(date, year, month, day);
+ CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 0);
+ CONVERT_DOUBLE_ARG_CHECKED(time, 1);
+ CONVERT_SMI_ARG_CHECKED(is_utc, 2);
+
+ DateCache* date_cache = isolate->date_cache();
+
+ Object* value = NULL;
+ bool is_value_nan = false;
+ if (isnan(time)) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
+ } else if (!is_utc &&
+ (time < -DateCache::kMaxTimeBeforeUTCInMs ||
+ time > DateCache::kMaxTimeBeforeUTCInMs)) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
} else {
- DateYMDFromTimeSlow(date, year, month, day);
+ time = is_utc ? time : date_cache->ToUTC(static_cast<int64_t>(time));
+ if (time < -DateCache::kMaxTimeInMs ||
+ time > DateCache::kMaxTimeInMs) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
+ } else {
+ MaybeObject* maybe_result =
+ isolate->heap()->AllocateHeapNumber(DoubleToInteger(time));
+ if (!maybe_result->ToObject(&value)) return maybe_result;
+ }
}
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateYMDFromTime) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 2);
-
- CONVERT_DOUBLE_ARG_CHECKED(t, 0);
- CONVERT_ARG_CHECKED(JSArray, res_array, 1);
-
- int year, month, day;
- DateYMDFromTime(static_cast<int>(floor(t / 86400000)), year, month, day);
-
- FixedArrayBase* elms_base = FixedArrayBase::cast(res_array->elements());
- RUNTIME_ASSERT(elms_base->length() == 3);
- RUNTIME_ASSERT(res_array->HasFastTypeElements());
-
- MaybeObject* maybe = res_array->EnsureWritableFastElements();
- if (maybe->IsFailure()) return maybe;
- FixedArray* elms = FixedArray::cast(res_array->elements());
- elms->set(0, Smi::FromInt(year));
- elms->set(1, Smi::FromInt(month));
- elms->set(2, Smi::FromInt(day));
-
- return isolate->heap()->undefined_value();
+ date->SetValue(value, is_value_nan);
+ return *date;
}
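
Runtime_DateMakeDay now defers to DateCache::DaysFromYearMonth; the arithmetic it replaces lives in the removed MakeDay above, where year_delta keeps every intermediate value positive so truncating division behaves like floor, and the leap-year test picks the correct month table. A compact restatement of that removed helper with a worked check; the actual DateCache implementation in date.cc may differ in detail:

    #include <cassert>

    // Days from 1970-01-01 to the first day of |month| in |year| (0-based
    // month), following the removed MakeDay().
    static int DaysFromYearMonth(int year, int month) {
      static const int kDayFromMonth[] = {0, 31, 59, 90, 120, 151,
                                          181, 212, 243, 273, 304, 334};
      static const int kDayFromMonthLeap[] = {0, 31, 60, 91, 121, 152,
                                              182, 213, 244, 274, 305, 335};
      // Normalize month into [0, 11], carrying into the year.
      year += month / 12;
      month %= 12;
      if (month < 0) {
        year--;
        month += 12;
      }

      // year_delta is -1 (mod 400), so year1 / 4 etc. count leap years
      // strictly before |year|; base_day anchors the count at 1970-01-01.
      static const int year_delta = 399999;
      static const int base_day = 365 * (1970 + year_delta) +
                                  (1970 + year_delta) / 4 -
                                  (1970 + year_delta) / 100 +
                                  (1970 + year_delta) / 400;
      const int year1 = year + year_delta;
      const int day_from_year = 365 * year1 +
                                year1 / 4 - year1 / 100 + year1 / 400 -
                                base_day;

      const bool is_leap =
          (year % 4 == 0) && (year % 100 != 0 || year % 400 == 0);
      return day_from_year +
             (is_leap ? kDayFromMonthLeap[month] : kDayFromMonth[month]);
    }

    int main() {
      // 2012 is a leap year: 1 March 2012 is day 15400 since the epoch
      // (15400 * 86400 == 1330560000, the Unix time of 2012-03-01T00:00Z).
      assert(DaysFromYearMonth(2012, 2) == 15400);
      return 0;
    }
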
@@ -9208,13 +8913,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
return isolate->StackOverflow();
}
- return Execution::HandleStackGuardInterrupt();
+ return Execution::HandleStackGuardInterrupt(isolate);
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_Interrupt) {
ASSERT(args.length() == 0);
- return Execution::HandleStackGuardInterrupt();
+ return Execution::HandleStackGuardInterrupt(isolate);
}
@@ -9358,25 +9063,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateLocalTimezone) {
ASSERT(args.length() == 1);
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
- const char* zone = OS::LocalTimezone(x);
+ int64_t time = isolate->date_cache()->EquivalentTime(static_cast<int64_t>(x));
+ const char* zone = OS::LocalTimezone(static_cast<double>(time));
return isolate->heap()->AllocateStringFromUtf8(CStrVector(zone));
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateLocalTimeOffset) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 0);
-
- return isolate->heap()->NumberFromDouble(OS::LocalTimeOffset());
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateDaylightSavingsOffset) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DateToUTC) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
- return isolate->heap()->NumberFromDouble(OS::DaylightSavingsOffset(x));
+ int64_t time = isolate->date_cache()->ToUTC(static_cast<int64_t>(x));
+
+ return isolate->heap()->NumberFromDouble(static_cast<double>(time));
}
@@ -10300,7 +10000,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LookupAccessor) {
CONVERT_ARG_CHECKED(JSObject, obj, 0);
CONVERT_ARG_CHECKED(String, name, 1);
CONVERT_SMI_ARG_CHECKED(flag, 2);
- return obj->LookupAccessor(name, flag == 0);
+ AccessorComponent component = flag == 0 ? ACCESSOR_GETTER : ACCESSOR_SETTER;
+ return obj->LookupAccessor(name, component);
}
@@ -10499,9 +10200,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
details->set(0, *value);
details->set(1, property_details);
if (hasJavaScriptAccessors) {
+ AccessorPair* accessors = AccessorPair::cast(*result_callback_obj);
details->set(2, isolate->heap()->ToBoolean(caught_exception));
- details->set(3, AccessorPair::cast(*result_callback_obj)->getter());
- details->set(4, AccessorPair::cast(*result_callback_obj)->setter());
+ details->set(3, accessors->SafeGet(ACCESSOR_GETTER));
+ details->set(4, accessors->SafeGet(ACCESSOR_SETTER));
}
return *isolate->factory()->NewJSArrayWithElements(details);
@@ -10640,6 +10342,7 @@ class FrameInspector {
frame, inlined_jsframe_index, isolate);
}
has_adapted_arguments_ = frame_->has_adapted_arguments();
+ is_bottommost_ = inlined_jsframe_index == 0;
is_optimized_ = frame_->is_optimized();
}
@@ -10677,6 +10380,11 @@ class FrameInspector {
? deoptimized_frame_->GetSourcePosition()
: frame_->LookupCode()->SourcePosition(frame_->pc());
}
+ bool IsConstructor() {
+ return is_optimized_ && !is_bottommost_
+ ? deoptimized_frame_->HasConstructStub()
+ : frame_->IsConstructor();
+ }
// To inspect all the provided arguments the frame might need to be
// replaced with the arguments frame.
@@ -10692,6 +10400,7 @@ class FrameInspector {
DeoptimizedFrameInfo* deoptimized_frame_;
Isolate* isolate_;
bool is_optimized_;
+ bool is_bottommost_;
bool has_adapted_arguments_;
DISALLOW_COPY_AND_ASSIGN(FrameInspector);
@@ -10785,9 +10494,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Find source position in unoptimized code.
int position = frame_inspector.GetSourcePosition();
- // Check for constructor frame. Inlined frames cannot be construct calls.
- bool inlined_frame = is_optimized && inlined_jsframe_index != 0;
- bool constructor = !inlined_frame && it.frame()->IsConstructor();
+ // Check for constructor frame.
+ bool constructor = frame_inspector.IsConstructor();
// Get scope info and read from it for local variable information.
Handle<JSFunction> function(JSFunction::cast(frame_inspector.GetFunction()));
@@ -12893,7 +12601,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFlags) {
// Performs a GC.
// Presently, it only does a full GC.
RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectGarbage) {
- isolate->heap()->CollectAllGarbage(true, "%CollectGarbage");
+ isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
return isolate->heap()->undefined_value();
}
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index bd6568f45e4..c5ce3c38804 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -70,8 +70,6 @@ namespace internal {
F(GetPrototype, 1, 1) \
F(IsInPrototypeChain, 2, 1) \
\
- F(IsConstructCall, 0, 1) \
- \
F(GetOwnProperty, 2, 1) \
\
F(IsExtensible, 1, 1) \
@@ -246,10 +244,9 @@ namespace internal {
F(DateCurrentTime, 0, 1) \
F(DateParseString, 2, 1) \
F(DateLocalTimezone, 1, 1) \
- F(DateLocalTimeOffset, 0, 1) \
- F(DateDaylightSavingsOffset, 1, 1) \
+ F(DateToUTC, 1, 1) \
F(DateMakeDay, 2, 1) \
- F(DateYMDFromTime, 2, 1) \
+ F(DateSetValue, 3, 1) \
\
/* Numbers */ \
\
@@ -493,6 +490,7 @@ namespace internal {
F(Arguments, 1, 1) \
F(ValueOf, 1, 1) \
F(SetValueOf, 2, 1) \
+ F(DateField, 2 /* date object, field index */, 1) \
F(StringCharFromCode, 1, 1) \
F(StringCharAt, 2, 1) \
F(ObjectEquals, 2, 1) \
diff --git a/deps/v8/src/scopes.cc b/deps/v8/src/scopes.cc
index 8d71f8ae858..859cbd1ae61 100644
--- a/deps/v8/src/scopes.cc
+++ b/deps/v8/src/scopes.cc
@@ -67,7 +67,8 @@ Variable* VariableMap::Declare(
VariableMode mode,
bool is_valid_lhs,
Variable::Kind kind,
- InitializationFlag initialization_flag) {
+ InitializationFlag initialization_flag,
+ Interface* interface) {
Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true);
if (p->value == NULL) {
// The variable has not been declared yet -> insert it.
@@ -77,7 +78,8 @@ Variable* VariableMap::Declare(
mode,
is_valid_lhs,
kind,
- initialization_flag);
+ initialization_flag,
+ interface);
}
return reinterpret_cast<Variable*>(p->value);
}
@@ -105,6 +107,9 @@ Scope::Scope(Scope* outer_scope, ScopeType type)
params_(4),
unresolved_(16),
decls_(4),
+ interface_(FLAG_harmony_modules &&
+ (type == MODULE_SCOPE || type == GLOBAL_SCOPE)
+ ? Interface::NewModule() : NULL),
already_resolved_(false) {
SetDefaults(type, outer_scope, Handle<ScopeInfo>::null());
// At some point we might want to provide outer scopes to
@@ -125,6 +130,7 @@ Scope::Scope(Scope* inner_scope,
params_(4),
unresolved_(16),
decls_(4),
+ interface_(NULL),
already_resolved_(true) {
SetDefaults(type, NULL, scope_info);
if (!scope_info.is_null()) {
@@ -145,6 +151,7 @@ Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name)
params_(0),
unresolved_(0),
decls_(0),
+ interface_(NULL),
already_resolved_(true) {
SetDefaults(CATCH_SCOPE, NULL, Handle<ScopeInfo>::null());
AddInnerScope(inner_scope);
@@ -255,7 +262,7 @@ bool Scope::Analyze(CompilationInfo* info) {
// Allocate the variables.
{
AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
- top->AllocateVariables(info->global_scope(), &ast_node_factory);
+ if (!top->AllocateVariables(info, &ast_node_factory)) return false;
}
#ifdef DEBUG
@@ -264,6 +271,11 @@ bool Scope::Analyze(CompilationInfo* info) {
: FLAG_print_scopes) {
scope->Print();
}
+
+ if (FLAG_harmony_modules && FLAG_print_interfaces && top->is_global_scope()) {
+ PrintF("global : ");
+ top->interface()->Print();
+ }
#endif
if (FLAG_harmony_scoping) {
@@ -438,7 +450,8 @@ void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
Variable* Scope::DeclareLocal(Handle<String> name,
VariableMode mode,
- InitializationFlag init_flag) {
+ InitializationFlag init_flag,
+ Interface* interface) {
ASSERT(!already_resolved());
// This function handles VAR and CONST modes. DYNAMIC variables are
// introduced during variable allocation, INTERNAL variables are allocated
@@ -448,8 +461,8 @@ Variable* Scope::DeclareLocal(Handle<String> name,
mode == CONST_HARMONY ||
mode == LET);
++num_var_or_const_;
- return
- variables_.Declare(this, name, mode, true, Variable::NORMAL, init_flag);
+ return variables_.Declare(
+ this, name, mode, true, Variable::NORMAL, init_flag, interface);
}
@@ -586,7 +599,7 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
}
-void Scope::AllocateVariables(Scope* global_scope,
+bool Scope::AllocateVariables(CompilationInfo* info,
AstNodeFactory<AstNullVisitor>* factory) {
// 1) Propagate scope information.
bool outer_scope_calls_non_strict_eval = false;
@@ -598,10 +611,12 @@ void Scope::AllocateVariables(Scope* global_scope,
PropagateScopeInfo(outer_scope_calls_non_strict_eval);
// 2) Resolve variables.
- ResolveVariablesRecursively(global_scope, factory);
+ if (!ResolveVariablesRecursively(info, factory)) return false;
// 3) Allocate variables.
AllocateVariablesRecursively();
+
+ return true;
}
@@ -916,14 +931,14 @@ Variable* Scope::LookupRecursive(Handle<String> name,
}
-void Scope::ResolveVariable(Scope* global_scope,
+bool Scope::ResolveVariable(CompilationInfo* info,
VariableProxy* proxy,
AstNodeFactory<AstNullVisitor>* factory) {
- ASSERT(global_scope == NULL || global_scope->is_global_scope());
+ ASSERT(info->global_scope()->is_global_scope());
// If the proxy is already resolved there's nothing to do
// (functions and consts may be resolved by the parser).
- if (proxy->var() != NULL) return;
+ if (proxy->var() != NULL) return true;
// Otherwise, try to resolve the variable.
BindingKind binding_kind;
@@ -947,8 +962,7 @@ void Scope::ResolveVariable(Scope* global_scope,
case UNBOUND:
// No binding has been found. Declare a variable in global scope.
- ASSERT(global_scope != NULL);
- var = global_scope->DeclareGlobal(proxy->name());
+ var = info->global_scope()->DeclareGlobal(proxy->name());
break;
case UNBOUND_EVAL_SHADOWED:
@@ -965,23 +979,62 @@ void Scope::ResolveVariable(Scope* global_scope,
ASSERT(var != NULL);
proxy->BindTo(var);
+
+ if (FLAG_harmony_modules) {
+ bool ok;
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Resolve %s:\n", var->name()->ToAsciiArray());
+#endif
+ proxy->interface()->Unify(var->interface(), &ok);
+ if (!ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("SCOPES TYPE ERROR\n");
+ PrintF("proxy: ");
+ proxy->interface()->Print();
+ PrintF("var: ");
+ var->interface()->Print();
+ }
+#endif
+
+ // Inconsistent use of module. Throw a syntax error.
+ // TODO(rossberg): generate more helpful error message.
+ MessageLocation location(info->script(),
+ proxy->position(),
+ proxy->position());
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Handle<JSArray> array = factory->NewJSArray(1);
+ USE(JSObject::SetElement(array, 0, var->name(), NONE, kStrictMode));
+ Handle<Object> result =
+ factory->NewSyntaxError("module_type_error", array);
+ isolate->Throw(*result, &location);
+ return false;
+ }
+ }
+
+ return true;
}
-void Scope::ResolveVariablesRecursively(
- Scope* global_scope,
+bool Scope::ResolveVariablesRecursively(
+ CompilationInfo* info,
AstNodeFactory<AstNullVisitor>* factory) {
- ASSERT(global_scope == NULL || global_scope->is_global_scope());
+ ASSERT(info->global_scope()->is_global_scope());
// Resolve unresolved variables for this scope.
for (int i = 0; i < unresolved_.length(); i++) {
- ResolveVariable(global_scope, unresolved_[i], factory);
+ if (!ResolveVariable(info, unresolved_[i], factory)) return false;
}
// Resolve unresolved variables for inner scopes.
for (int i = 0; i < inner_scopes_.length(); i++) {
- inner_scopes_[i]->ResolveVariablesRecursively(global_scope, factory);
+ if (!inner_scopes_[i]->ResolveVariablesRecursively(info, factory))
+ return false;
}
+
+ return true;
}
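
AllocateVariables, ResolveVariable and ResolveVariablesRecursively now return bool so that a module type error found during interface unification aborts scope analysis cleanly; every caller has to propagate the failure, which the MUST_USE_RESULT annotations in scopes.h below enforce. A minimal sketch of that propagation pattern over a scope tree, with stand-in types rather than V8's Scope and VariableProxy:

    #include <vector>

    struct Scope {
      std::vector<int> unresolved;       // stand-in for unresolved proxies
      std::vector<Scope*> inner_scopes;
    };

    // Stand-in for Scope::ResolveVariable(): returns false on a
    // (hypothetical) module type error, which must stop the whole analysis.
    static bool ResolveVariable(int /*proxy*/) { return true; }

    // Mirrors the new ResolveVariablesRecursively(): any failure is
    // propagated upward immediately instead of being ignored.
    static bool ResolveVariablesRecursively(Scope* scope) {
      for (int proxy : scope->unresolved) {
        if (!ResolveVariable(proxy)) return false;
      }
      for (Scope* inner : scope->inner_scopes) {
        if (!ResolveVariablesRecursively(inner)) return false;
      }
      return true;
    }
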
diff --git a/deps/v8/src/scopes.h b/deps/v8/src/scopes.h
index 30c95ee4e5a..d315b7e5dcf 100644
--- a/deps/v8/src/scopes.h
+++ b/deps/v8/src/scopes.h
@@ -49,7 +49,8 @@ class VariableMap: public ZoneHashMap {
VariableMode mode,
bool is_valid_lhs,
Variable::Kind kind,
- InitializationFlag initialization_flag);
+ InitializationFlag initialization_flag,
+ Interface* interface = Interface::NewValue());
Variable* Lookup(Handle<String> name);
};
@@ -145,7 +146,8 @@ class Scope: public ZoneObject {
// declared before, the previously declared variable is returned.
Variable* DeclareLocal(Handle<String> name,
VariableMode mode,
- InitializationFlag init_flag);
+ InitializationFlag init_flag,
+ Interface* interface = Interface::NewValue());
// Declare an implicit global variable in this scope which must be a
// global scope. The variable was introduced (possibly from an inner
@@ -157,12 +159,14 @@ class Scope: public ZoneObject {
template<class Visitor>
VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
Handle<String> name,
- int position = RelocInfo::kNoPosition) {
+ int position = RelocInfo::kNoPosition,
+ Interface* interface = Interface::NewValue()) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
ASSERT(!already_resolved());
- VariableProxy* proxy = factory->NewVariableProxy(name, false, position);
+ VariableProxy* proxy =
+ factory->NewVariableProxy(name, false, position, interface);
unresolved_.Add(proxy);
return proxy;
}
@@ -295,9 +299,6 @@ class Scope: public ZoneObject {
// Does this scope contain a with statement.
bool contains_with() const { return scope_contains_with_; }
- // The scope immediately surrounding this scope, or NULL.
- Scope* outer_scope() const { return outer_scope_; }
-
// ---------------------------------------------------------------------------
// Accessors.
@@ -336,6 +337,12 @@ class Scope: public ZoneObject {
// Inner scope list.
ZoneList<Scope*>* inner_scopes() { return &inner_scopes_; }
+ // The scope immediately surrounding this scope, or NULL.
+ Scope* outer_scope() const { return outer_scope_; }
+
+ // The interface as inferred so far; only for module scopes.
+ Interface* interface() const { return interface_; }
+
// ---------------------------------------------------------------------------
// Variable allocation.
@@ -345,17 +352,6 @@ class Scope: public ZoneObject {
void CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
ZoneList<Variable*>* context_locals);
- // Resolve and fill in the allocation information for all variables
- // in this scopes. Must be called *after* all scopes have been
- // processed (parsed) to ensure that unresolved variables can be
- // resolved properly.
- //
- // In the case of code compiled and run using 'eval', the context
- // parameter is the context in which eval was called. In all other
- // cases the context parameter is an empty handle.
- void AllocateVariables(Scope* global_scope,
- AstNodeFactory<AstNullVisitor>* factory);
-
// Current number of var or const locals.
int num_var_or_const() { return num_var_or_const_; }
@@ -372,6 +368,11 @@ class Scope: public ZoneObject {
// Determine if we can use lazy compilation for this scope.
bool AllowsLazyCompilation() const;
+ // True if we can lazily recompile functions with this scope.
+ bool allows_lazy_recompilation() const {
+ return !force_eager_compilation_;
+ }
+
// True if the outer context of this scope is always the global context.
bool HasTrivialOuterContext() const;
@@ -448,6 +449,8 @@ class Scope: public ZoneObject {
VariableProxy* function_;
// Convenience variable; function scopes only.
Variable* arguments_;
+ // Interface; module scopes only.
+ Interface* interface_;
// Illegal redeclaration.
Expression* illegal_redecl_;
@@ -543,10 +546,12 @@ class Scope: public ZoneObject {
Variable* LookupRecursive(Handle<String> name,
BindingKind* binding_kind,
AstNodeFactory<AstNullVisitor>* factory);
- void ResolveVariable(Scope* global_scope,
+ MUST_USE_RESULT
+ bool ResolveVariable(CompilationInfo* info,
VariableProxy* proxy,
AstNodeFactory<AstNullVisitor>* factory);
- void ResolveVariablesRecursively(Scope* global_scope,
+ MUST_USE_RESULT
+ bool ResolveVariablesRecursively(CompilationInfo* info,
AstNodeFactory<AstNullVisitor>* factory);
// Scope analysis.
@@ -566,6 +571,18 @@ class Scope: public ZoneObject {
void AllocateNonParameterLocals();
void AllocateVariablesRecursively();
+ // Resolve and fill in the allocation information for all variables
+ // in this scope. Must be called *after* all scopes have been
+ // processed (parsed) to ensure that unresolved variables can be
+ // resolved properly.
+ //
+ // In the case of code compiled and run using 'eval', the context
+ // parameter is the context in which eval was called. In all other
+ // cases the context parameter is an empty handle.
+ MUST_USE_RESULT
+ bool AllocateVariables(CompilationInfo* info,
+ AstNodeFactory<AstNullVisitor>* factory);
+
private:
// Construct a scope based on the scope info.
Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info);
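
Aside on the scopes.h hunks above: the resolution entry points now return a MUST_USE_RESULT bool, so every caller has to propagate a failed resolution (for example, a module interface that cannot be unified) instead of assuming it always succeeds. A minimal self-contained sketch of that pattern, using toy types rather than V8's Scope/CompilationInfo -- only the shape of the control flow is meant to match:

// Toy model only -- ToyScope stands in for Scope, and the macro mirrors
// the idea of MUST_USE_RESULT; none of this is V8 code.
#include <vector>

#if defined(__GNUC__) || defined(__clang__)
#define MUST_USE_RESULT __attribute__((warn_unused_result))
#else
#define MUST_USE_RESULT
#endif

struct ToyScope {
  std::vector<ToyScope*> inner_scopes;
  bool resolution_error = false;  // e.g. an interface that failed to unify

  MUST_USE_RESULT bool ResolveRecursively() {
    if (resolution_error) return false;
    for (ToyScope* inner : inner_scopes) {
      // Propagate failure upward instead of dropping it on the floor.
      if (!inner->ResolveRecursively()) return false;
    }
    return true;
  }
};

int main() {
  ToyScope global, module_scope;
  module_scope.resolution_error = true;
  global.inner_scopes.push_back(&module_scope);
  return global.ResolveRecursively() ? 0 : 1;  // exits 1: resolution failed
}

Each level checks the boolean from the level below and forwards false upward, which is exactly what the warn-unused-result attribute is there to enforce.
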
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index 81a94ddc8a0..4249d369d54 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -273,14 +273,22 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
STUB_CACHE_TABLE,
2,
"StubCache::primary_->value");
- Add(stub_cache->key_reference(StubCache::kSecondary).address(),
+ Add(stub_cache->map_reference(StubCache::kPrimary).address(),
STUB_CACHE_TABLE,
3,
+ "StubCache::primary_->map");
+ Add(stub_cache->key_reference(StubCache::kSecondary).address(),
+ STUB_CACHE_TABLE,
+ 4,
"StubCache::secondary_->key");
Add(stub_cache->value_reference(StubCache::kSecondary).address(),
STUB_CACHE_TABLE,
- 4,
+ 5,
"StubCache::secondary_->value");
+ Add(stub_cache->map_reference(StubCache::kSecondary).address(),
+ STUB_CACHE_TABLE,
+ 6,
+ "StubCache::secondary_->map");
// Runtime entries
Add(ExternalReference::perform_gc_function(isolate).address(),
@@ -494,6 +502,14 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
UNCLASSIFIED,
45,
"the_hole_nan");
+ Add(ExternalReference::get_date_field_function(isolate).address(),
+ UNCLASSIFIED,
+ 46,
+ "JSDate::GetField");
+ Add(ExternalReference::date_cache_stamp(isolate).address(),
+ UNCLASSIFIED,
+ 47,
+ "date_cache_stamp");
}
diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc
index 1fbad551aff..d7061a1a792 100644
--- a/deps/v8/src/spaces.cc
+++ b/deps/v8/src/spaces.cc
@@ -2217,7 +2217,9 @@ bool PagedSpace::ReserveSpace(int size_in_bytes) {
// You have to call this last, since the implementation from PagedSpace
// doesn't know that memory was 'promised' to large object space.
bool LargeObjectSpace::ReserveSpace(int bytes) {
- return heap()->OldGenerationSpaceAvailable() >= bytes;
+ return heap()->OldGenerationCapacityAvailable() >= bytes &&
+ (!heap()->incremental_marking()->IsStopped() ||
+ heap()->OldGenerationSpaceAvailable() >= bytes);
}
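
The reworked ReserveSpace condition above combines two separate questions and is easier to read pulled apart into a standalone predicate. A sketch with invented parameter names; the presumed rationale, that a running incremental-marking cycle will free old-generation space before the promise is cashed in, is an assumption rather than something this hunk states:

// Restatement of LargeObjectSpace::ReserveSpace above as a free function;
// parameter names are invented for the illustration.
#include <cstdint>

bool CanReserveLargeObject(std::intptr_t old_gen_capacity_available,
                           std::intptr_t old_gen_space_available,
                           bool incremental_marking_stopped,
                           std::intptr_t bytes) {
  // Capacity must always cover the request; beyond that, either marking is
  // still running or the free space itself must already be large enough.
  return old_gen_capacity_available >= bytes &&
         (!incremental_marking_stopped || old_gen_space_available >= bytes);
}

int main() {
  // Marking stopped and only 1 KB free: the 32 KB reservation is refused.
  return CanReserveLargeObject(1 << 20, 1 << 10, true, 1 << 15) ? 1 : 0;
}
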
diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h
index 599e9dd6fab..75ca53444ab 100644
--- a/deps/v8/src/spaces.h
+++ b/deps/v8/src/spaces.h
@@ -506,6 +506,11 @@ class MemoryChunk {
size_ = size;
}
+ void SetArea(Address area_start, Address area_end) {
+ area_start_ = area_start;
+ area_end_ = area_end;
+ }
+
Executability executable() {
return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
}
diff --git a/deps/v8/src/strtod.cc b/deps/v8/src/strtod.cc
index be79c800852..0dc618a399f 100644
--- a/deps/v8/src/strtod.cc
+++ b/deps/v8/src/strtod.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -175,13 +175,15 @@ static void ReadDiyFp(Vector<const char> buffer,
static bool DoubleStrtod(Vector<const char> trimmed,
int exponent,
double* result) {
-#if (defined(V8_TARGET_ARCH_IA32) || defined(USE_SIMULATOR)) && !defined(WIN32)
+#if (defined(V8_TARGET_ARCH_IA32) || defined(USE_SIMULATOR)) \
+ && !defined(_MSC_VER)
// On x86 the floating-point stack can be 64 or 80 bits wide. If it is
// 80 bits wide (as is the case on Linux) then double-rounding occurs and the
// result is not accurate.
- // We know that Windows32 uses 64 bits and is therefore accurate.
- // Note that the ARM simulator is compiled for 32bits. It therefore exhibits
- // the same problem.
+ // We know that Windows32 with MSVC, unlike with MinGW32, uses 64 bits and is
+ // therefore accurate.
+ // Note that the ARM and MIPS simulators are compiled for 32bits. They
+ // therefore exhibit the same problem.
return false;
#endif
if (trimmed.length() <= kMaxExactDoubleIntegerDecimalDigits) {
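
The rewritten comment above keys the early bail-out on the compiler, but the underlying question is how intermediate double expressions are evaluated. As a standalone illustration (not V8 code), the standard FLT_EVAL_METHOD macro from <cfloat> reports exactly that property; one would expect it to differ between the MSVC and MinGW builds the comment contrasts, though that is an assumption worth checking locally:

// Standalone probe, not part of V8: reports how this compiler evaluates
// intermediate floating-point expressions, which is the property the
// double-rounding comment in DoubleStrtod() is really about.
#include <cfloat>
#include <cstdio>

int main() {
  // 0 -> float/double operations keep their own precision (no hidden
  //      80-bit intermediates, so shortcuts like DoubleStrtod stay exact).
  // 2 -> everything is evaluated in long double precision (the x87 case
  //      where double rounding can leave the result off by one ulp).
  std::printf("FLT_EVAL_METHOD = %d\n", static_cast<int>(FLT_EVAL_METHOD));
  return 0;
}
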
diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc
index 4bbfe176875..3371b1bf4a2 100644
--- a/deps/v8/src/stub-cache.cc
+++ b/deps/v8/src/stub-cache.cc
@@ -45,26 +45,13 @@ namespace internal {
StubCache::StubCache(Isolate* isolate) : isolate_(isolate) {
ASSERT(isolate == Isolate::Current());
- memset(primary_, 0, sizeof(primary_[0]) * StubCache::kPrimaryTableSize);
- memset(secondary_, 0, sizeof(secondary_[0]) * StubCache::kSecondaryTableSize);
}
-void StubCache::Initialize(bool create_heap_objects) {
+void StubCache::Initialize() {
ASSERT(IsPowerOf2(kPrimaryTableSize));
ASSERT(IsPowerOf2(kSecondaryTableSize));
- if (create_heap_objects) {
- HandleScope scope;
- Code* empty = isolate_->builtins()->builtin(Builtins::kIllegal);
- for (int i = 0; i < kPrimaryTableSize; i++) {
- primary_[i].key = heap()->empty_string();
- primary_[i].value = empty;
- }
- for (int j = 0; j < kSecondaryTableSize; j++) {
- secondary_[j].key = heap()->empty_string();
- secondary_[j].value = empty;
- }
- }
+ Clear();
}
@@ -90,14 +77,15 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
// Compute the primary entry.
int primary_offset = PrimaryOffset(name, flags, map);
Entry* primary = entry(primary_, primary_offset);
- Code* hit = primary->value;
+ Code* old_code = primary->value;
// If the primary entry has useful data in it, we retire it to the
// secondary cache before overwriting it.
- if (hit != isolate_->builtins()->builtin(Builtins::kIllegal)) {
- Code::Flags primary_flags = Code::RemoveTypeFromFlags(hit->flags());
- int secondary_offset =
- SecondaryOffset(primary->key, primary_flags, primary_offset);
+ if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
+ Map* old_map = primary->map;
+ Code::Flags old_flags = Code::RemoveTypeFromFlags(old_code->flags());
+ int seed = PrimaryOffset(primary->key, old_flags, old_map);
+ int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
Entry* secondary = entry(secondary_, secondary_offset);
*secondary = *primary;
}
@@ -105,6 +93,8 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
// Update primary cache.
primary->key = name;
primary->value = code;
+ primary->map = map;
+ isolate()->counters()->megamorphic_stub_cache_updates()->Increment();
return code;
}
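
Reading StubCache::Set above: there is no probing on a collision; the occupied primary slot is simply demoted into the secondary table, whose own occupant is overwritten. A toy model of that update strategy, with invented hash mixes and plain integers standing in for String/Map/Code:

// Toy model of the two-level update in StubCache::Set; the types and the
// hash mixes are invented for illustration, only the strategy matches.
#include <array>
#include <cstdint>
#include <cstdio>

struct ToyEntry { std::uint32_t key = 0; int value = 0; };

constexpr std::size_t kPrimarySize = 8;    // powers of two, as in V8's
constexpr std::size_t kSecondarySize = 4;  // table-size constants

std::array<ToyEntry, kPrimarySize> primary;
std::array<ToyEntry, kSecondarySize> secondary;

std::size_t PrimarySlot(std::uint32_t key) {
  return (key * 2654435761u) % kPrimarySize;           // invented mix
}
std::size_t SecondarySlot(std::uint32_t key, std::size_t seed) {
  return (seed - key) % kSecondarySize;                 // different mix
}

void Set(std::uint32_t key, int value) {
  ToyEntry& slot = primary[PrimarySlot(key)];
  if (slot.value != 0) {
    // Demote the old primary entry; the secondary occupant is overwritten.
    secondary[SecondarySlot(slot.key, PrimarySlot(slot.key))] = slot;
  }
  slot.key = key;
  slot.value = value;
}

int main() {
  Set(42, 1);
  Set(50, 2);  // 42 and 50 land in the same primary slot here, so entry 1
               // is retired to the secondary table rather than probed for.
  std::printf("secondary now holds key %u\n",
              static_cast<unsigned>(
                  secondary[SecondarySlot(42, PrimarySlot(42))].key));
  return 0;
}

Because the two slot computations mix the key differently, a key that collides in the primary table is unlikely to collide again in the secondary one, which is the point the comment in stub-cache.h below makes.
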
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index fdd8a12da6b..29bdb61e32c 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -69,9 +69,10 @@ class StubCache {
struct Entry {
String* key;
Code* value;
+ Map* map;
};
- void Initialize(bool create_heap_objects);
+ void Initialize();
// Computes the right stub matching. Inserts the result in the
@@ -252,7 +253,7 @@ class StubCache {
Handle<Context> global_context);
// Generate code for probing the stub cache table.
- // Arguments extra and extra2 may be used to pass additional scratch
+ // Arguments extra, extra2 and extra3 may be used to pass additional scratch
// registers. Set to no_reg if not needed.
void GenerateProbe(MacroAssembler* masm,
Code::Flags flags,
@@ -260,7 +261,8 @@ class StubCache {
Register name,
Register scratch,
Register extra,
- Register extra2 = no_reg);
+ Register extra2 = no_reg,
+ Register extra3 = no_reg);
enum Table {
kPrimary,
@@ -274,6 +276,12 @@ class StubCache {
}
+ SCTableReference map_reference(StubCache::Table table) {
+ return SCTableReference(
+ reinterpret_cast<Address>(&first_entry(table)->map));
+ }
+
+
SCTableReference value_reference(StubCache::Table table) {
return SCTableReference(
reinterpret_cast<Address>(&first_entry(table)->value));
@@ -300,7 +308,16 @@ class StubCache {
RelocInfo::Mode mode,
Code::Kind kind);
- // Computes the hashed offsets for primary and secondary caches.
+ // The stub cache has a primary and secondary level. The two levels have
+ // different hashing algorithms in order to avoid simultaneous collisions
+ // in both caches. Unlike a probing strategy (quadratic or otherwise), the
+ // update strategy is fairly clear and simple: Any existing entry
+ // in the primary cache is moved to the secondary cache, and secondary cache
+ // entries are overwritten.
+
+ // Hash algorithm for the primary table. This algorithm is replicated in
+ // assembler for every architecture. Returns an index into the table that
+ // is scaled by 1 << kHeapObjectTagSize.
static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
// This works well because the heap object tag size and the hash
// shift are equal. Shifting down the length field to get the
@@ -324,23 +341,30 @@ class StubCache {
return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}
+ // Hash algorithm for the secondary table. This algorithm is replicated in
+ // assembler for every architecture. Returns an index into the table that
+ // is scaled by 1 << kHeapObjectTagSize.
static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
// Use the seed from the primary cache in the secondary cache.
uint32_t string_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
- uint32_t key = seed - string_low32bits + flags;
+ // We always set the in_loop bit to zero when generating the lookup code
+ // so do it here too so the hash codes match.
+ uint32_t iflags =
+ (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
+ uint32_t key = (seed - string_low32bits) + iflags;
return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}
// Compute the entry for a given offset in exactly the same way as
// we do in generated code. We generate an hash code that already
- // ends in String::kHashShift 0s. Then we shift it so it is a multiple
+ // ends in String::kHashShift 0s. Then we multiply it so it is a multiple
// of sizeof(Entry). This makes it easier to avoid making mistakes
// in the hashed offset computations.
static Entry* entry(Entry* table, int offset) {
- const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
+ const int multiplier = sizeof(*table) >> String::kHashShift;
return reinterpret_cast<Entry*>(
- reinterpret_cast<Address>(table) + (offset << shift_amount));
+ reinterpret_cast<Address>(table) + offset * multiplier);
}
static const int kPrimaryTableBits = 11;
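
The shift-to-multiply change in entry() above follows from the new map field: an Entry now holds three pointers, so sizeof(Entry) is no longer a power of two and the old shift_amount trick can no longer scale the hashed offset. A small worked check of the identity the comment relies on; the kHashShift value of 2 is an assumption taken from the "tag size and hash shift are equal" remark, and the Entry layout simply mirrors the struct in this header:

// Worked check of the offset scaling in StubCache::entry(); the concrete
// kHashShift value is an assumption for the illustration, not read out of
// V8's headers.
#include <cassert>
#include <cstddef>

struct Entry { void* key; void* value; void* map; };  // three words now

constexpr int kHashShift = 2;  // "tag size and hash shift are equal"

int main() {
  // PrimaryOffset/SecondaryOffset return index << kHashShift, so turning
  // that into a byte offset means multiplying by sizeof(Entry) >> kHashShift.
  const std::size_t multiplier = sizeof(Entry) >> kHashShift;
  for (std::size_t index = 0; index < 16; ++index) {
    std::size_t offset = index << kHashShift;
    assert(offset * multiplier == index * sizeof(Entry));
  }
  // With two pointer fields sizeof(Entry) was a power of two and a plain
  // shift sufficed; with three it is not, hence the multiply.
  static_assert(sizeof(Entry) % (1 << kHashShift) == 0,
                "exact scaling needs a tag-aligned Entry size");
  return 0;
}
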
diff --git a/deps/v8/src/token.h b/deps/v8/src/token.h
index 1eeb60d8766..3036e5512a0 100644
--- a/deps/v8/src/token.h
+++ b/deps/v8/src/token.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -215,7 +215,7 @@ class Token {
return EQ <= op && op <= IN;
}
- static bool IsOrderedCompareOp(Value op) {
+ static bool IsOrderedRelationalCompareOp(Value op) {
return op == LT || op == LTE || op == GT || op == GTE;
}
diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc
index 2fb4e2aad3d..fa479b2e515 100644
--- a/deps/v8/src/type-info.cc
+++ b/deps/v8/src/type-info.cc
@@ -154,6 +154,13 @@ bool TypeFeedbackOracle::CallNewIsMonomorphic(CallNew* expr) {
}
+bool TypeFeedbackOracle::IsForInFastCase(ForInStatement* stmt) {
+ Handle<Object> value = GetInfo(stmt->PrepareId());
+ return value->IsSmi() &&
+ Smi::cast(*value)->value() == TypeFeedbackCells::kForInFastCaseMarker;
+}
+
+
Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
ASSERT(LoadIsMonomorphicNormal(expr));
Handle<Object> map_or_code = GetInfo(expr->id());
@@ -256,6 +263,11 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(Call* expr) {
}
+Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(CallNew* expr) {
+ return Handle<JSFunction>::cast(GetInfo(expr->id()));
+}
+
+
bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
return *GetInfo(expr->id()) ==
isolate_->builtins()->builtin(id);
@@ -654,9 +666,10 @@ void TypeFeedbackOracle::ProcessTypeFeedbackCells(Handle<Code> code) {
for (int i = 0; i < cache->CellCount(); i++) {
unsigned ast_id = cache->AstId(i)->value();
Object* value = cache->Cell(i)->value();
- if (value->IsJSFunction() &&
- !CanRetainOtherContext(JSFunction::cast(value),
- *global_context_)) {
+ if (value->IsSmi() ||
+ (value->IsJSFunction() &&
+ !CanRetainOtherContext(JSFunction::cast(value),
+ *global_context_))) {
SetInfo(ast_id, value);
}
}
diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h
index 9b8b431cde5..84ec51d9756 100644
--- a/deps/v8/src/type-info.h
+++ b/deps/v8/src/type-info.h
@@ -228,6 +228,7 @@ class Expression;
class Property;
class SmallMapList;
class UnaryOperation;
+class ForInStatement;
class TypeFeedbackOracle BASE_EMBEDDED {
@@ -243,6 +244,8 @@ class TypeFeedbackOracle BASE_EMBEDDED {
bool CallIsMonomorphic(Call* expr);
bool CallNewIsMonomorphic(CallNew* expr);
+ bool IsForInFastCase(ForInStatement* expr);
+
Handle<Map> LoadMonomorphicReceiverType(Property* expr);
Handle<Map> StoreMonomorphicReceiverType(Expression* expr);
@@ -267,6 +270,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
Handle<JSObject> GetPrototypeForPrimitiveCheck(CheckType check);
Handle<JSFunction> GetCallTarget(Call* expr);
+ Handle<JSFunction> GetCallNewTarget(CallNew* expr);
bool LoadIsBuiltin(Property* expr, Builtins::Name id);
diff --git a/deps/v8/src/unicode.cc b/deps/v8/src/unicode.cc
index 147f716c469..61c649f5e4b 100644
--- a/deps/v8/src/unicode.cc
+++ b/deps/v8/src/unicode.cc
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
-// This file was generated at 2011-01-03 10:57:02.088925
+// This file was generated at 2012-03-06 09:55:58.934483
#include "unicode-inl.h"
#include <stdlib.h>
@@ -347,8 +347,8 @@ void CharacterStream::Seek(unsigned position) {
// Uppercase: point.category == 'Lu'
-static const uint16_t kUppercaseTable0Size = 430;
-static const int32_t kUppercaseTable0[430] = {
+static const uint16_t kUppercaseTable0Size = 450;
+static const int32_t kUppercaseTable0[450] = {
1073741889, 90, 1073742016, 214, 1073742040, 222, 256, 258, // NOLINT
260, 262, 264, 266, 268, 270, 272, 274, // NOLINT
276, 278, 280, 282, 284, 286, 288, 290, // NOLINT
@@ -369,22 +369,24 @@ static const int32_t kUppercaseTable0[430] = {
530, 532, 534, 536, 538, 540, 542, 544, // NOLINT
546, 548, 550, 552, 554, 556, 558, 560, // NOLINT
562, 1073742394, 571, 1073742397, 574, 577, 1073742403, 582, // NOLINT
- 584, 586, 588, 590, 902, 1073742728, 906, 908, // NOLINT
- 1073742734, 911, 1073742737, 929, 1073742755, 939, 1073742802, 980, // NOLINT
- 984, 986, 988, 990, 992, 994, 996, 998, // NOLINT
- 1000, 1002, 1004, 1006, 1012, 1015, 1073742841, 1018, // NOLINT
- 1073742845, 1071, 1120, 1122, 1124, 1126, 1128, 1130, // NOLINT
- 1132, 1134, 1136, 1138, 1140, 1142, 1144, 1146, // NOLINT
- 1148, 1150, 1152, 1162, 1164, 1166, 1168, 1170, // NOLINT
- 1172, 1174, 1176, 1178, 1180, 1182, 1184, 1186, // NOLINT
- 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, // NOLINT
- 1204, 1206, 1208, 1210, 1212, 1214, 1073743040, 1217, // NOLINT
- 1219, 1221, 1223, 1225, 1227, 1229, 1232, 1234, // NOLINT
- 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, // NOLINT
- 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, // NOLINT
- 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, // NOLINT
- 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, // NOLINT
- 1073743153, 1366, 1073746080, 4293, 7680, 7682, 7684, 7686, // NOLINT
+ 584, 586, 588, 590, 880, 882, 886, 902, // NOLINT
+ 1073742728, 906, 908, 1073742734, 911, 1073742737, 929, 1073742755, // NOLINT
+ 939, 975, 1073742802, 980, 984, 986, 988, 990, // NOLINT
+ 992, 994, 996, 998, 1000, 1002, 1004, 1006, // NOLINT
+ 1012, 1015, 1073742841, 1018, 1073742845, 1071, 1120, 1122, // NOLINT
+ 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, // NOLINT
+ 1140, 1142, 1144, 1146, 1148, 1150, 1152, 1162, // NOLINT
+ 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178, // NOLINT
+ 1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, // NOLINT
+ 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, // NOLINT
+ 1212, 1214, 1073743040, 1217, 1219, 1221, 1223, 1225, // NOLINT
+ 1227, 1229, 1232, 1234, 1236, 1238, 1240, 1242, // NOLINT
+ 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, // NOLINT
+ 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, // NOLINT
+ 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, // NOLINT
+ 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, // NOLINT
+ 1308, 1310, 1312, 1314, 1316, 1318, 1073743153, 1366, // NOLINT
+ 1073746080, 4293, 4295, 4301, 7680, 7682, 7684, 7686, // NOLINT
7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, // NOLINT
7704, 7706, 7708, 7710, 7712, 7714, 7716, 7718, // NOLINT
7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, // NOLINT
@@ -393,28 +395,44 @@ static const int32_t kUppercaseTable0[430] = {
7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, // NOLINT
7784, 7786, 7788, 7790, 7792, 7794, 7796, 7798, // NOLINT
7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, // NOLINT
- 7816, 7818, 7820, 7822, 7824, 7826, 7828, 7840, // NOLINT
- 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, // NOLINT
- 7858, 7860, 7862, 7864, 7866, 7868, 7870, 7872, // NOLINT
- 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, // NOLINT
- 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904, // NOLINT
- 7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, // NOLINT
- 7922, 7924, 7926, 7928, 1073749768, 7951, 1073749784, 7965, // NOLINT
- 1073749800, 7983, 1073749816, 7999, 1073749832, 8013, 8025, 8027, // NOLINT
- 8029, 8031, 1073749864, 8047, 1073749944, 8123, 1073749960, 8139, // NOLINT
- 1073749976, 8155, 1073749992, 8172, 1073750008, 8187 }; // NOLINT
-static const uint16_t kUppercaseTable1Size = 79;
-static const int32_t kUppercaseTable1[79] = {
+ 7816, 7818, 7820, 7822, 7824, 7826, 7828, 7838, // NOLINT
+ 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, // NOLINT
+ 7856, 7858, 7860, 7862, 7864, 7866, 7868, 7870, // NOLINT
+ 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, // NOLINT
+ 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, // NOLINT
+ 7904, 7906, 7908, 7910, 7912, 7914, 7916, 7918, // NOLINT
+ 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, // NOLINT
+ 1073749768, 7951, 1073749784, 7965, 1073749800, 7983, 1073749816, 7999, // NOLINT
+ 1073749832, 8013, 8025, 8027, 8029, 8031, 1073749864, 8047, // NOLINT
+ 1073749944, 8123, 1073749960, 8139, 1073749976, 8155, 1073749992, 8172, // NOLINT
+ 1073750008, 8187 }; // NOLINT
+static const uint16_t kUppercaseTable1Size = 86;
+static const int32_t kUppercaseTable1[86] = {
258, 263, 1073742091, 269, 1073742096, 274, 277, 1073742105, // NOLINT
285, 292, 294, 296, 1073742122, 301, 1073742128, 307, // NOLINT
1073742142, 319, 325, 387, 1073744896, 3118, 3168, 1073744994, // NOLINT
- 3172, 3175, 3177, 3179, 3189, 3200, 3202, 3204, // NOLINT
- 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, // NOLINT
- 3222, 3224, 3226, 3228, 3230, 3232, 3234, 3236, // NOLINT
- 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, // NOLINT
- 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268, // NOLINT
- 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, // NOLINT
- 3286, 3288, 3290, 3292, 3294, 3296, 3298 }; // NOLINT
+ 3172, 3175, 3177, 3179, 1073745005, 3184, 3186, 3189, // NOLINT
+ 1073745022, 3200, 3202, 3204, 3206, 3208, 3210, 3212, // NOLINT
+ 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, // NOLINT
+ 3230, 3232, 3234, 3236, 3238, 3240, 3242, 3244, // NOLINT
+ 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, // NOLINT
+ 3262, 3264, 3266, 3268, 3270, 3272, 3274, 3276, // NOLINT
+ 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, // NOLINT
+ 3294, 3296, 3298, 3307, 3309, 3314 }; // NOLINT
+static const uint16_t kUppercaseTable5Size = 91;
+static const int32_t kUppercaseTable5[91] = {
+ 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, // NOLINT
+ 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, // NOLINT
+ 1632, 1634, 1636, 1638, 1640, 1642, 1644, 1664, // NOLINT
+ 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, // NOLINT
+ 1682, 1684, 1686, 1826, 1828, 1830, 1832, 1834, // NOLINT
+ 1836, 1838, 1842, 1844, 1846, 1848, 1850, 1852, // NOLINT
+ 1854, 1856, 1858, 1860, 1862, 1864, 1866, 1868, // NOLINT
+ 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, // NOLINT
+ 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900, // NOLINT
+ 1902, 1913, 1915, 1073743741, 1918, 1920, 1922, 1924, // NOLINT
+ 1926, 1931, 1933, 1936, 1938, 1952, 1954, 1956, // NOLINT
+ 1958, 1960, 1962 }; // NOLINT
static const uint16_t kUppercaseTable7Size = 2;
static const int32_t kUppercaseTable7[2] = {
1073749793, 7994 }; // NOLINT
@@ -427,6 +445,9 @@ bool Uppercase::Is(uchar c) {
case 1: return LookupPredicate(kUppercaseTable1,
kUppercaseTable1Size,
c);
+ case 5: return LookupPredicate(kUppercaseTable5,
+ kUppercaseTable5Size,
+ c);
case 7: return LookupPredicate(kUppercaseTable7,
kUppercaseTable7Size,
c);
@@ -436,77 +457,93 @@ bool Uppercase::Is(uchar c) {
// Lowercase: point.category == 'Ll'
-static const uint16_t kLowercaseTable0Size = 449;
-static const int32_t kLowercaseTable0[449] = {
- 1073741921, 122, 170, 181, 186, 1073742047, 246, 1073742072, // NOLINT
- 255, 257, 259, 261, 263, 265, 267, 269, // NOLINT
- 271, 273, 275, 277, 279, 281, 283, 285, // NOLINT
- 287, 289, 291, 293, 295, 297, 299, 301, // NOLINT
- 303, 305, 307, 309, 1073742135, 312, 314, 316, // NOLINT
- 318, 320, 322, 324, 326, 1073742152, 329, 331, // NOLINT
- 333, 335, 337, 339, 341, 343, 345, 347, // NOLINT
- 349, 351, 353, 355, 357, 359, 361, 363, // NOLINT
- 365, 367, 369, 371, 373, 375, 378, 380, // NOLINT
- 1073742206, 384, 387, 389, 392, 1073742220, 397, 402, // NOLINT
- 405, 1073742233, 411, 414, 417, 419, 421, 424, // NOLINT
- 1073742250, 427, 429, 432, 436, 438, 1073742265, 442, // NOLINT
- 1073742269, 447, 454, 457, 460, 462, 464, 466, // NOLINT
- 468, 470, 472, 474, 1073742300, 477, 479, 481, // NOLINT
- 483, 485, 487, 489, 491, 493, 1073742319, 496, // NOLINT
- 499, 501, 505, 507, 509, 511, 513, 515, // NOLINT
- 517, 519, 521, 523, 525, 527, 529, 531, // NOLINT
- 533, 535, 537, 539, 541, 543, 545, 547, // NOLINT
- 549, 551, 553, 555, 557, 559, 561, 1073742387, // NOLINT
- 569, 572, 1073742399, 576, 578, 583, 585, 587, // NOLINT
- 589, 1073742415, 659, 1073742485, 687, 1073742715, 893, 912, // NOLINT
- 1073742764, 974, 1073742800, 977, 1073742805, 983, 985, 987, // NOLINT
- 989, 991, 993, 995, 997, 999, 1001, 1003, // NOLINT
- 1005, 1073742831, 1011, 1013, 1016, 1073742843, 1020, 1073742896, // NOLINT
- 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, // NOLINT
- 1135, 1137, 1139, 1141, 1143, 1145, 1147, 1149, // NOLINT
- 1151, 1153, 1163, 1165, 1167, 1169, 1171, 1173, // NOLINT
- 1175, 1177, 1179, 1181, 1183, 1185, 1187, 1189, // NOLINT
- 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, // NOLINT
- 1207, 1209, 1211, 1213, 1215, 1218, 1220, 1222, // NOLINT
- 1224, 1226, 1228, 1073743054, 1231, 1233, 1235, 1237, // NOLINT
- 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, // NOLINT
- 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, // NOLINT
- 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, // NOLINT
- 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1073743201, // NOLINT
- 1415, 1073749248, 7467, 1073749346, 7543, 1073749369, 7578, 7681, // NOLINT
- 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7697, // NOLINT
- 7699, 7701, 7703, 7705, 7707, 7709, 7711, 7713, // NOLINT
- 7715, 7717, 7719, 7721, 7723, 7725, 7727, 7729, // NOLINT
- 7731, 7733, 7735, 7737, 7739, 7741, 7743, 7745, // NOLINT
- 7747, 7749, 7751, 7753, 7755, 7757, 7759, 7761, // NOLINT
- 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, // NOLINT
- 7779, 7781, 7783, 7785, 7787, 7789, 7791, 7793, // NOLINT
- 7795, 7797, 7799, 7801, 7803, 7805, 7807, 7809, // NOLINT
- 7811, 7813, 7815, 7817, 7819, 7821, 7823, 7825, // NOLINT
- 7827, 1073749653, 7835, 7841, 7843, 7845, 7847, 7849, // NOLINT
- 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, // NOLINT
- 7867, 7869, 7871, 7873, 7875, 7877, 7879, 7881, // NOLINT
- 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7897, // NOLINT
- 7899, 7901, 7903, 7905, 7907, 7909, 7911, 7913, // NOLINT
- 7915, 7917, 7919, 7921, 7923, 7925, 7927, 7929, // NOLINT
- 1073749760, 7943, 1073749776, 7957, 1073749792, 7975, 1073749808, 7991, // NOLINT
- 1073749824, 8005, 1073749840, 8023, 1073749856, 8039, 1073749872, 8061, // NOLINT
- 1073749888, 8071, 1073749904, 8087, 1073749920, 8103, 1073749936, 8116, // NOLINT
- 1073749942, 8119, 8126, 1073749954, 8132, 1073749958, 8135, 1073749968, // NOLINT
- 8147, 1073749974, 8151, 1073749984, 8167, 1073750002, 8180, 1073750006, // NOLINT
- 8183 }; // NOLINT
-static const uint16_t kLowercaseTable1Size = 79;
-static const int32_t kLowercaseTable1[79] = {
- 113, 127, 266, 1073742094, 271, 275, 303, 308, // NOLINT
- 313, 1073742140, 317, 1073742150, 329, 334, 388, 1073744944, // NOLINT
- 3166, 3169, 1073744997, 3174, 3176, 3178, 3180, 3188, // NOLINT
- 1073745014, 3191, 3201, 3203, 3205, 3207, 3209, 3211, // NOLINT
+static const uint16_t kLowercaseTable0Size = 463;
+static const int32_t kLowercaseTable0[463] = {
+ 1073741921, 122, 181, 1073742047, 246, 1073742072, 255, 257, // NOLINT
+ 259, 261, 263, 265, 267, 269, 271, 273, // NOLINT
+ 275, 277, 279, 281, 283, 285, 287, 289, // NOLINT
+ 291, 293, 295, 297, 299, 301, 303, 305, // NOLINT
+ 307, 309, 1073742135, 312, 314, 316, 318, 320, // NOLINT
+ 322, 324, 326, 1073742152, 329, 331, 333, 335, // NOLINT
+ 337, 339, 341, 343, 345, 347, 349, 351, // NOLINT
+ 353, 355, 357, 359, 361, 363, 365, 367, // NOLINT
+ 369, 371, 373, 375, 378, 380, 1073742206, 384, // NOLINT
+ 387, 389, 392, 1073742220, 397, 402, 405, 1073742233, // NOLINT
+ 411, 414, 417, 419, 421, 424, 1073742250, 427, // NOLINT
+ 429, 432, 436, 438, 1073742265, 442, 1073742269, 447, // NOLINT
+ 454, 457, 460, 462, 464, 466, 468, 470, // NOLINT
+ 472, 474, 1073742300, 477, 479, 481, 483, 485, // NOLINT
+ 487, 489, 491, 493, 1073742319, 496, 499, 501, // NOLINT
+ 505, 507, 509, 511, 513, 515, 517, 519, // NOLINT
+ 521, 523, 525, 527, 529, 531, 533, 535, // NOLINT
+ 537, 539, 541, 543, 545, 547, 549, 551, // NOLINT
+ 553, 555, 557, 559, 561, 1073742387, 569, 572, // NOLINT
+ 1073742399, 576, 578, 583, 585, 587, 589, 1073742415, // NOLINT
+ 659, 1073742485, 687, 881, 883, 887, 1073742715, 893, // NOLINT
+ 912, 1073742764, 974, 1073742800, 977, 1073742805, 983, 985, // NOLINT
+ 987, 989, 991, 993, 995, 997, 999, 1001, // NOLINT
+ 1003, 1005, 1073742831, 1011, 1013, 1016, 1073742843, 1020, // NOLINT
+ 1073742896, 1119, 1121, 1123, 1125, 1127, 1129, 1131, // NOLINT
+ 1133, 1135, 1137, 1139, 1141, 1143, 1145, 1147, // NOLINT
+ 1149, 1151, 1153, 1163, 1165, 1167, 1169, 1171, // NOLINT
+ 1173, 1175, 1177, 1179, 1181, 1183, 1185, 1187, // NOLINT
+ 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, // NOLINT
+ 1205, 1207, 1209, 1211, 1213, 1215, 1218, 1220, // NOLINT
+ 1222, 1224, 1226, 1228, 1073743054, 1231, 1233, 1235, // NOLINT
+ 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, // NOLINT
+ 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, // NOLINT
+ 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, // NOLINT
+ 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, // NOLINT
+ 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, // NOLINT
+ 1317, 1319, 1073743201, 1415, 1073749248, 7467, 1073749355, 7543, // NOLINT
+ 1073749369, 7578, 7681, 7683, 7685, 7687, 7689, 7691, // NOLINT
+ 7693, 7695, 7697, 7699, 7701, 7703, 7705, 7707, // NOLINT
+ 7709, 7711, 7713, 7715, 7717, 7719, 7721, 7723, // NOLINT
+ 7725, 7727, 7729, 7731, 7733, 7735, 7737, 7739, // NOLINT
+ 7741, 7743, 7745, 7747, 7749, 7751, 7753, 7755, // NOLINT
+ 7757, 7759, 7761, 7763, 7765, 7767, 7769, 7771, // NOLINT
+ 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, // NOLINT
+ 7789, 7791, 7793, 7795, 7797, 7799, 7801, 7803, // NOLINT
+ 7805, 7807, 7809, 7811, 7813, 7815, 7817, 7819, // NOLINT
+ 7821, 7823, 7825, 7827, 1073749653, 7837, 7839, 7841, // NOLINT
+ 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, // NOLINT
+ 7859, 7861, 7863, 7865, 7867, 7869, 7871, 7873, // NOLINT
+ 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, // NOLINT
+ 7891, 7893, 7895, 7897, 7899, 7901, 7903, 7905, // NOLINT
+ 7907, 7909, 7911, 7913, 7915, 7917, 7919, 7921, // NOLINT
+ 7923, 7925, 7927, 7929, 7931, 7933, 1073749759, 7943, // NOLINT
+ 1073749776, 7957, 1073749792, 7975, 1073749808, 7991, 1073749824, 8005, // NOLINT
+ 1073749840, 8023, 1073749856, 8039, 1073749872, 8061, 1073749888, 8071, // NOLINT
+ 1073749904, 8087, 1073749920, 8103, 1073749936, 8116, 1073749942, 8119, // NOLINT
+ 8126, 1073749954, 8132, 1073749958, 8135, 1073749968, 8147, 1073749974, // NOLINT
+ 8151, 1073749984, 8167, 1073750002, 8180, 1073750006, 8183 }; // NOLINT
+static const uint16_t kLowercaseTable1Size = 84;
+static const int32_t kLowercaseTable1[84] = {
+ 266, 1073742094, 271, 275, 303, 308, 313, 1073742140, // NOLINT
+ 317, 1073742150, 329, 334, 388, 1073744944, 3166, 3169, // NOLINT
+ 1073744997, 3174, 3176, 3178, 3180, 3185, 1073745011, 3188, // NOLINT
+ 1073745014, 3195, 3201, 3203, 3205, 3207, 3209, 3211, // NOLINT
3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, // NOLINT
3229, 3231, 3233, 3235, 3237, 3239, 3241, 3243, // NOLINT
3245, 3247, 3249, 3251, 3253, 3255, 3257, 3259, // NOLINT
3261, 3263, 3265, 3267, 3269, 3271, 3273, 3275, // NOLINT
3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, // NOLINT
- 3293, 3295, 3297, 1073745123, 3300, 1073745152, 3365 }; // NOLINT
+ 3293, 3295, 3297, 1073745123, 3300, 3308, 3310, 3315, // NOLINT
+ 1073745152, 3365, 3367, 3373 }; // NOLINT
+static const uint16_t kLowercaseTable5Size = 93;
+static const int32_t kLowercaseTable5[93] = {
+ 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, // NOLINT
+ 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, // NOLINT
+ 1633, 1635, 1637, 1639, 1641, 1643, 1645, 1665, // NOLINT
+ 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, // NOLINT
+ 1683, 1685, 1687, 1827, 1829, 1831, 1833, 1835, // NOLINT
+ 1837, 1073743663, 1841, 1843, 1845, 1847, 1849, 1851, // NOLINT
+ 1853, 1855, 1857, 1859, 1861, 1863, 1865, 1867, // NOLINT
+ 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, // NOLINT
+ 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, // NOLINT
+ 1901, 1903, 1073743729, 1912, 1914, 1916, 1919, 1921, // NOLINT
+ 1923, 1925, 1927, 1932, 1934, 1937, 1939, 1953, // NOLINT
+ 1955, 1957, 1959, 1961, 2042 }; // NOLINT
static const uint16_t kLowercaseTable7Size = 6;
static const int32_t kLowercaseTable7[6] = {
1073748736, 6918, 1073748755, 6935, 1073749825, 8026 }; // NOLINT
@@ -519,6 +556,9 @@ bool Lowercase::Is(uchar c) {
case 1: return LookupPredicate(kLowercaseTable1,
kLowercaseTable1Size,
c);
+ case 5: return LookupPredicate(kLowercaseTable5,
+ kLowercaseTable5Size,
+ c);
case 7: return LookupPredicate(kLowercaseTable7,
kLowercaseTable7Size,
c);
@@ -528,71 +568,76 @@ bool Lowercase::Is(uchar c) {
// Letter: point.category in ['Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl' ]
-static const uint16_t kLetterTable0Size = 394;
-static const int32_t kLetterTable0[394] = {
+static const uint16_t kLetterTable0Size = 435;
+static const int32_t kLetterTable0[435] = {
1073741889, 90, 1073741921, 122, 170, 181, 186, 1073742016, // NOLINT
214, 1073742040, 246, 1073742072, 705, 1073742534, 721, 1073742560, // NOLINT
- 740, 750, 1073742714, 893, 902, 1073742728, 906, 908, // NOLINT
- 1073742734, 929, 1073742755, 974, 1073742800, 1013, 1073742839, 1153, // NOLINT
- 1073742986, 1299, 1073743153, 1366, 1369, 1073743201, 1415, 1073743312, // NOLINT
- 1514, 1073743344, 1522, 1073743393, 1594, 1073743424, 1610, 1073743470, // NOLINT
- 1647, 1073743473, 1747, 1749, 1073743589, 1766, 1073743598, 1775, // NOLINT
- 1073743610, 1788, 1791, 1808, 1073743634, 1839, 1073743693, 1901, // NOLINT
- 1073743744, 1957, 1969, 1073743818, 2026, 1073743860, 2037, 2042, // NOLINT
- 1073744132, 2361, 2365, 2384, 1073744216, 2401, 1073744251, 2431, // NOLINT
- 1073744261, 2444, 1073744271, 2448, 1073744275, 2472, 1073744298, 2480, // NOLINT
- 2482, 1073744310, 2489, 2493, 2510, 1073744348, 2525, 1073744351, // NOLINT
- 2529, 1073744368, 2545, 1073744389, 2570, 1073744399, 2576, 1073744403, // NOLINT
- 2600, 1073744426, 2608, 1073744434, 2611, 1073744437, 2614, 1073744440, // NOLINT
- 2617, 1073744473, 2652, 2654, 1073744498, 2676, 1073744517, 2701, // NOLINT
- 1073744527, 2705, 1073744531, 2728, 1073744554, 2736, 1073744562, 2739, // NOLINT
- 1073744565, 2745, 2749, 2768, 1073744608, 2785, 1073744645, 2828, // NOLINT
- 1073744655, 2832, 1073744659, 2856, 1073744682, 2864, 1073744690, 2867, // NOLINT
- 1073744693, 2873, 2877, 1073744732, 2909, 1073744735, 2913, 2929, // NOLINT
- 2947, 1073744773, 2954, 1073744782, 2960, 1073744786, 2965, 1073744793, // NOLINT
- 2970, 2972, 1073744798, 2975, 1073744803, 2980, 1073744808, 2986, // NOLINT
- 1073744814, 3001, 1073744901, 3084, 1073744910, 3088, 1073744914, 3112, // NOLINT
- 1073744938, 3123, 1073744949, 3129, 1073744992, 3169, 1073745029, 3212, // NOLINT
- 1073745038, 3216, 1073745042, 3240, 1073745066, 3251, 1073745077, 3257, // NOLINT
- 3261, 3294, 1073745120, 3297, 1073745157, 3340, 1073745166, 3344, // NOLINT
- 1073745170, 3368, 1073745194, 3385, 1073745248, 3425, 1073745285, 3478, // NOLINT
- 1073745306, 3505, 1073745331, 3515, 3517, 1073745344, 3526, 1073745409, // NOLINT
- 3632, 1073745458, 3635, 1073745472, 3654, 1073745537, 3714, 3716, // NOLINT
- 1073745543, 3720, 3722, 3725, 1073745556, 3735, 1073745561, 3743, // NOLINT
- 1073745569, 3747, 3749, 3751, 1073745578, 3755, 1073745581, 3760, // NOLINT
- 1073745586, 3763, 3773, 1073745600, 3780, 3782, 1073745628, 3805, // NOLINT
- 3840, 1073745728, 3911, 1073745737, 3946, 1073745800, 3979, 1073745920, // NOLINT
- 4129, 1073745955, 4135, 1073745961, 4138, 1073746000, 4181, 1073746080, // NOLINT
- 4293, 1073746128, 4346, 4348, 1073746176, 4441, 1073746271, 4514, // NOLINT
- 1073746344, 4601, 1073746432, 4680, 1073746506, 4685, 1073746512, 4694, // NOLINT
- 4696, 1073746522, 4701, 1073746528, 4744, 1073746570, 4749, 1073746576, // NOLINT
- 4784, 1073746610, 4789, 1073746616, 4798, 4800, 1073746626, 4805, // NOLINT
- 1073746632, 4822, 1073746648, 4880, 1073746706, 4885, 1073746712, 4954, // NOLINT
- 1073746816, 5007, 1073746848, 5108, 1073746945, 5740, 1073747567, 5750, // NOLINT
- 1073747585, 5786, 1073747616, 5866, 1073747694, 5872, 1073747712, 5900, // NOLINT
- 1073747726, 5905, 1073747744, 5937, 1073747776, 5969, 1073747808, 5996, // NOLINT
- 1073747822, 6000, 1073747840, 6067, 6103, 6108, 1073748000, 6263, // NOLINT
- 1073748096, 6312, 1073748224, 6428, 1073748304, 6509, 1073748336, 6516, // NOLINT
- 1073748352, 6569, 1073748417, 6599, 1073748480, 6678, 1073748741, 6963, // NOLINT
- 1073748805, 6987, 1073749248, 7615, 1073749504, 7835, 1073749664, 7929, // NOLINT
- 1073749760, 7957, 1073749784, 7965, 1073749792, 8005, 1073749832, 8013, // NOLINT
- 1073749840, 8023, 8025, 8027, 8029, 1073749855, 8061, 1073749888, // NOLINT
- 8116, 1073749942, 8124, 8126, 1073749954, 8132, 1073749958, 8140, // NOLINT
- 1073749968, 8147, 1073749974, 8155, 1073749984, 8172, 1073750002, 8180, // NOLINT
- 1073750006, 8188 }; // NOLINT
-static const uint16_t kLetterTable1Size = 84;
-static const int32_t kLetterTable1[84] = {
- 113, 127, 1073741968, 148, 258, 263, 1073742090, 275, // NOLINT
+ 740, 748, 750, 1073742704, 884, 1073742710, 887, 1073742714, // NOLINT
+ 893, 902, 1073742728, 906, 908, 1073742734, 929, 1073742755, // NOLINT
+ 1013, 1073742839, 1153, 1073742986, 1319, 1073743153, 1366, 1369, // NOLINT
+ 1073743201, 1415, 1073743312, 1514, 1073743344, 1522, 1073743392, 1610, // NOLINT
+ 1073743470, 1647, 1073743473, 1747, 1749, 1073743589, 1766, 1073743598, // NOLINT
+ 1775, 1073743610, 1788, 1791, 1808, 1073743634, 1839, 1073743693, // NOLINT
+ 1957, 1969, 1073743818, 2026, 1073743860, 2037, 2042, 1073743872, // NOLINT
+ 2069, 2074, 2084, 2088, 1073743936, 2136, 2208, 1073744034, // NOLINT
+ 2220, 1073744132, 2361, 2365, 2384, 1073744216, 2401, 1073744241, // NOLINT
+ 2423, 1073744249, 2431, 1073744261, 2444, 1073744271, 2448, 1073744275, // NOLINT
+ 2472, 1073744298, 2480, 2482, 1073744310, 2489, 2493, 2510, // NOLINT
+ 1073744348, 2525, 1073744351, 2529, 1073744368, 2545, 1073744389, 2570, // NOLINT
+ 1073744399, 2576, 1073744403, 2600, 1073744426, 2608, 1073744434, 2611, // NOLINT
+ 1073744437, 2614, 1073744440, 2617, 1073744473, 2652, 2654, 1073744498, // NOLINT
+ 2676, 1073744517, 2701, 1073744527, 2705, 1073744531, 2728, 1073744554, // NOLINT
+ 2736, 1073744562, 2739, 1073744565, 2745, 2749, 2768, 1073744608, // NOLINT
+ 2785, 1073744645, 2828, 1073744655, 2832, 1073744659, 2856, 1073744682, // NOLINT
+ 2864, 1073744690, 2867, 1073744693, 2873, 2877, 1073744732, 2909, // NOLINT
+ 1073744735, 2913, 2929, 2947, 1073744773, 2954, 1073744782, 2960, // NOLINT
+ 1073744786, 2965, 1073744793, 2970, 2972, 1073744798, 2975, 1073744803, // NOLINT
+ 2980, 1073744808, 2986, 1073744814, 3001, 3024, 1073744901, 3084, // NOLINT
+ 1073744910, 3088, 1073744914, 3112, 1073744938, 3123, 1073744949, 3129, // NOLINT
+ 3133, 1073744984, 3161, 1073744992, 3169, 1073745029, 3212, 1073745038, // NOLINT
+ 3216, 1073745042, 3240, 1073745066, 3251, 1073745077, 3257, 3261, // NOLINT
+ 3294, 1073745120, 3297, 1073745137, 3314, 1073745157, 3340, 1073745166, // NOLINT
+ 3344, 1073745170, 3386, 3389, 3406, 1073745248, 3425, 1073745274, // NOLINT
+ 3455, 1073745285, 3478, 1073745306, 3505, 1073745331, 3515, 3517, // NOLINT
+ 1073745344, 3526, 1073745409, 3632, 1073745458, 3635, 1073745472, 3654, // NOLINT
+ 1073745537, 3714, 3716, 1073745543, 3720, 3722, 3725, 1073745556, // NOLINT
+ 3735, 1073745561, 3743, 1073745569, 3747, 3749, 3751, 1073745578, // NOLINT
+ 3755, 1073745581, 3760, 1073745586, 3763, 3773, 1073745600, 3780, // NOLINT
+ 3782, 1073745628, 3807, 3840, 1073745728, 3911, 1073745737, 3948, // NOLINT
+ 1073745800, 3980, 1073745920, 4138, 4159, 1073746000, 4181, 1073746010, // NOLINT
+ 4189, 4193, 1073746021, 4198, 1073746030, 4208, 1073746037, 4225, // NOLINT
+ 4238, 1073746080, 4293, 4295, 4301, 1073746128, 4346, 1073746172, // NOLINT
+ 4680, 1073746506, 4685, 1073746512, 4694, 4696, 1073746522, 4701, // NOLINT
+ 1073746528, 4744, 1073746570, 4749, 1073746576, 4784, 1073746610, 4789, // NOLINT
+ 1073746616, 4798, 4800, 1073746626, 4805, 1073746632, 4822, 1073746648, // NOLINT
+ 4880, 1073746706, 4885, 1073746712, 4954, 1073746816, 5007, 1073746848, // NOLINT
+ 5108, 1073746945, 5740, 1073747567, 5759, 1073747585, 5786, 1073747616, // NOLINT
+ 5866, 1073747694, 5872, 1073747712, 5900, 1073747726, 5905, 1073747744, // NOLINT
+ 5937, 1073747776, 5969, 1073747808, 5996, 1073747822, 6000, 1073747840, // NOLINT
+ 6067, 6103, 6108, 1073748000, 6263, 1073748096, 6312, 6314, // NOLINT
+ 1073748144, 6389, 1073748224, 6428, 1073748304, 6509, 1073748336, 6516, // NOLINT
+ 1073748352, 6571, 1073748417, 6599, 1073748480, 6678, 1073748512, 6740, // NOLINT
+ 6823, 1073748741, 6963, 1073748805, 6987, 1073748867, 7072, 1073748910, // NOLINT
+ 7087, 1073748922, 7141, 1073748992, 7203, 1073749069, 7247, 1073749082, // NOLINT
+ 7293, 1073749225, 7404, 1073749230, 7409, 1073749237, 7414, 1073749248, // NOLINT
+ 7615, 1073749504, 7957, 1073749784, 7965, 1073749792, 8005, 1073749832, // NOLINT
+ 8013, 1073749840, 8023, 8025, 8027, 8029, 1073749855, 8061, // NOLINT
+ 1073749888, 8116, 1073749942, 8124, 8126, 1073749954, 8132, 1073749958, // NOLINT
+ 8140, 1073749968, 8147, 1073749974, 8155, 1073749984, 8172, 1073750002, // NOLINT
+ 8180, 1073750006, 8188 }; // NOLINT
+static const uint16_t kLetterTable1Size = 87;
+static const int32_t kLetterTable1[87] = {
+ 113, 127, 1073741968, 156, 258, 263, 1073742090, 275, // NOLINT
277, 1073742105, 285, 292, 294, 296, 1073742122, 301, // NOLINT
1073742127, 313, 1073742140, 319, 1073742149, 329, 334, 1073742176, // NOLINT
- 388, 1073744896, 3118, 1073744944, 3166, 1073744992, 3180, 1073745012, // NOLINT
- 3191, 1073745024, 3300, 1073745152, 3365, 1073745200, 3429, 3439, // NOLINT
- 1073745280, 3478, 1073745312, 3494, 1073745320, 3502, 1073745328, 3510, // NOLINT
- 1073745336, 3518, 1073745344, 3526, 1073745352, 3534, 1073745360, 3542, // NOLINT
- 1073745368, 3550, 1073745925, 4103, 1073745953, 4137, 1073745969, 4149, // NOLINT
- 1073745976, 4156, 1073745985, 4246, 1073746077, 4255, 1073746081, 4346, // NOLINT
- 1073746172, 4351, 1073746181, 4396, 1073746225, 4494, 1073746336, 4535, // NOLINT
- 1073746416, 4607, 1073746944, 8191 }; // NOLINT
+ 392, 1073744896, 3118, 1073744944, 3166, 1073744992, 3300, 1073745131, // NOLINT
+ 3310, 1073745138, 3315, 1073745152, 3365, 3367, 3373, 1073745200, // NOLINT
+ 3431, 3439, 1073745280, 3478, 1073745312, 3494, 1073745320, 3502, // NOLINT
+ 1073745328, 3510, 1073745336, 3518, 1073745344, 3526, 1073745352, 3534, // NOLINT
+ 1073745360, 3542, 1073745368, 3550, 3631, 1073745925, 4103, 1073745953, // NOLINT
+ 4137, 1073745969, 4149, 1073745976, 4156, 1073745985, 4246, 1073746077, // NOLINT
+ 4255, 1073746081, 4346, 1073746172, 4351, 1073746181, 4397, 1073746225, // NOLINT
+ 4494, 1073746336, 4538, 1073746416, 4607, 1073746944, 8191 }; // NOLINT
static const uint16_t kLetterTable2Size = 4;
static const int32_t kLetterTable2[4] = {
1073741824, 3509, 1073745408, 8191 }; // NOLINT
@@ -601,23 +646,31 @@ static const int32_t kLetterTable3[2] = {
1073741824, 8191 }; // NOLINT
static const uint16_t kLetterTable4Size = 2;
static const int32_t kLetterTable4[2] = {
- 1073741824, 8123 }; // NOLINT
-static const uint16_t kLetterTable5Size = 16;
-static const int32_t kLetterTable5[16] = {
- 1073741824, 1164, 1073743639, 1818, 1073743872, 2049, 1073743875, 2053, // NOLINT
- 1073743879, 2058, 1073743884, 2082, 1073743936, 2163, 1073744896, 8191 }; // NOLINT
-static const uint16_t kLetterTable6Size = 2;
-static const int32_t kLetterTable6[2] = {
- 1073741824, 6051 }; // NOLINT
-static const uint16_t kLetterTable7Size = 50;
-static const int32_t kLetterTable7[50] = {
- 1073748224, 6701, 1073748528, 6762, 1073748592, 6873, 1073748736, 6918, // NOLINT
- 1073748755, 6935, 6941, 1073748767, 6952, 1073748778, 6966, 1073748792, // NOLINT
- 6972, 6974, 1073748800, 6977, 1073748803, 6980, 1073748806, 7089, // NOLINT
- 1073748947, 7485, 1073749328, 7567, 1073749394, 7623, 1073749488, 7675, // NOLINT
- 1073749616, 7796, 1073749622, 7932, 1073749793, 7994, 1073749825, 8026, // NOLINT
- 1073749862, 8126, 1073749954, 8135, 1073749962, 8143, 1073749970, 8151, // NOLINT
- 1073749978, 8156 }; // NOLINT
+ 1073741824, 8140 }; // NOLINT
+static const uint16_t kLetterTable5Size = 88;
+static const int32_t kLetterTable5[88] = {
+ 1073741824, 1164, 1073743056, 1277, 1073743104, 1548, 1073743376, 1567, // NOLINT
+ 1073743402, 1579, 1073743424, 1646, 1073743487, 1687, 1073743520, 1775, // NOLINT
+ 1073743639, 1823, 1073743650, 1928, 1073743755, 1934, 1073743760, 1939, // NOLINT
+ 1073743776, 1962, 1073743864, 2049, 1073743875, 2053, 1073743879, 2058, // NOLINT
+ 1073743884, 2082, 1073743936, 2163, 1073744002, 2227, 1073744114, 2295, // NOLINT
+ 2299, 1073744138, 2341, 1073744176, 2374, 1073744224, 2428, 1073744260, // NOLINT
+ 2482, 2511, 1073744384, 2600, 1073744448, 2626, 1073744452, 2635, // NOLINT
+ 1073744480, 2678, 2682, 1073744512, 2735, 2737, 1073744565, 2742, // NOLINT
+ 1073744569, 2749, 2752, 2754, 1073744603, 2781, 1073744608, 2794, // NOLINT
+ 1073744626, 2804, 1073744641, 2822, 1073744649, 2830, 1073744657, 2838, // NOLINT
+ 1073744672, 2854, 1073744680, 2862, 1073744832, 3042, 1073744896, 8191 }; // NOLINT
+static const uint16_t kLetterTable6Size = 6;
+static const int32_t kLetterTable6[6] = {
+ 1073741824, 6051, 1073747888, 6086, 1073747915, 6139 }; // NOLINT
+static const uint16_t kLetterTable7Size = 48;
+static const int32_t kLetterTable7[48] = {
+ 1073748224, 6765, 1073748592, 6873, 1073748736, 6918, 1073748755, 6935, // NOLINT
+ 6941, 1073748767, 6952, 1073748778, 6966, 1073748792, 6972, 6974, // NOLINT
+ 1073748800, 6977, 1073748803, 6980, 1073748806, 7089, 1073748947, 7485, // NOLINT
+ 1073749328, 7567, 1073749394, 7623, 1073749488, 7675, 1073749616, 7796, // NOLINT
+ 1073749622, 7932, 1073749793, 7994, 1073749825, 8026, 1073749862, 8126, // NOLINT
+ 1073749954, 8135, 1073749962, 8143, 1073749970, 8151, 1073749978, 8156 }; // NOLINT
bool Letter::Is(uchar c) {
int chunk_index = c >> 13;
switch (chunk_index) {
@@ -672,14 +725,19 @@ bool Space::Is(uchar c) {
// Number: point.category == 'Nd'
-static const uint16_t kNumberTable0Size = 44;
-static const int32_t kNumberTable0[44] = {
+static const uint16_t kNumberTable0Size = 56;
+static const int32_t kNumberTable0[56] = {
1073741872, 57, 1073743456, 1641, 1073743600, 1785, 1073743808, 1993, // NOLINT
1073744230, 2415, 1073744358, 2543, 1073744486, 2671, 1073744614, 2799, // NOLINT
1073744742, 2927, 1073744870, 3055, 1073744998, 3183, 1073745126, 3311, // NOLINT
1073745254, 3439, 1073745488, 3673, 1073745616, 3801, 1073745696, 3881, // NOLINT
- 1073745984, 4169, 1073747936, 6121, 1073747984, 6169, 1073748294, 6479, // NOLINT
- 1073748432, 6617, 1073748816, 7001 }; // NOLINT
+ 1073745984, 4169, 1073746064, 4249, 1073747936, 6121, 1073747984, 6169, // NOLINT
+ 1073748294, 6479, 1073748432, 6617, 1073748608, 6793, 1073748624, 6809, // NOLINT
+ 1073748816, 7001, 1073748912, 7097, 1073749056, 7241, 1073749072, 7257 }; // NOLINT
+static const uint16_t kNumberTable5Size = 12;
+static const int32_t kNumberTable5[12] = {
+ 1073743392, 1577, 1073744080, 2265, 1073744128, 2313, 1073744336, 2521, // NOLINT
+ 1073744464, 2649, 1073744880, 3065 }; // NOLINT
static const uint16_t kNumberTable7Size = 2;
static const int32_t kNumberTable7[2] = {
1073749776, 7961 }; // NOLINT
@@ -689,6 +747,9 @@ bool Number::Is(uchar c) {
case 0: return LookupPredicate(kNumberTable0,
kNumberTable0Size,
c);
+ case 5: return LookupPredicate(kNumberTable5,
+ kNumberTable5Size,
+ c);
case 7: return LookupPredicate(kNumberTable7,
kNumberTable7Size,
c);
@@ -740,44 +801,56 @@ bool LineTerminator::Is(uchar c) {
// CombiningMark: point.category in ['Mn', 'Mc']
-static const uint16_t kCombiningMarkTable0Size = 205;
-static const int32_t kCombiningMarkTable0[205] = {
- 1073742592, 879, 1073742979, 1158, 1073743249, 1469, 1471, 1073743297, // NOLINT
- 1474, 1073743300, 1477, 1479, 1073743376, 1557, 1073743435, 1630, // NOLINT
+static const uint16_t kCombiningMarkTable0Size = 258;
+static const int32_t kCombiningMarkTable0[258] = {
+ 1073742592, 879, 1073742979, 1159, 1073743249, 1469, 1471, 1073743297, // NOLINT
+ 1474, 1073743300, 1477, 1479, 1073743376, 1562, 1073743435, 1631, // NOLINT
1648, 1073743574, 1756, 1073743583, 1764, 1073743591, 1768, 1073743594, // NOLINT
1773, 1809, 1073743664, 1866, 1073743782, 1968, 1073743851, 2035, // NOLINT
- 1073744129, 2307, 2364, 1073744190, 2381, 1073744209, 2388, 1073744226, // NOLINT
- 2403, 1073744257, 2435, 2492, 1073744318, 2500, 1073744327, 2504, // NOLINT
- 1073744331, 2509, 2519, 1073744354, 2531, 1073744385, 2563, 2620, // NOLINT
- 1073744446, 2626, 1073744455, 2632, 1073744459, 2637, 1073744496, 2673, // NOLINT
- 1073744513, 2691, 2748, 1073744574, 2757, 1073744583, 2761, 1073744587, // NOLINT
- 2765, 1073744610, 2787, 1073744641, 2819, 2876, 1073744702, 2883, // NOLINT
- 1073744711, 2888, 1073744715, 2893, 1073744726, 2903, 2946, 1073744830, // NOLINT
- 3010, 1073744838, 3016, 1073744842, 3021, 3031, 1073744897, 3075, // NOLINT
- 1073744958, 3140, 1073744966, 3144, 1073744970, 3149, 1073744981, 3158, // NOLINT
- 1073745026, 3203, 3260, 1073745086, 3268, 1073745094, 3272, 1073745098, // NOLINT
- 3277, 1073745109, 3286, 1073745122, 3299, 1073745154, 3331, 1073745214, // NOLINT
- 3395, 1073745222, 3400, 1073745226, 3405, 3415, 1073745282, 3459, // NOLINT
- 3530, 1073745359, 3540, 3542, 1073745368, 3551, 1073745394, 3571, // NOLINT
- 3633, 1073745460, 3642, 1073745479, 3662, 3761, 1073745588, 3769, // NOLINT
- 1073745595, 3772, 1073745608, 3789, 1073745688, 3865, 3893, 3895, // NOLINT
- 3897, 1073745726, 3903, 1073745777, 3972, 1073745798, 3975, 1073745808, // NOLINT
- 3991, 1073745817, 4028, 4038, 1073745964, 4146, 1073745974, 4153, // NOLINT
- 1073746006, 4185, 4959, 1073747730, 5908, 1073747762, 5940, 1073747794, // NOLINT
- 5971, 1073747826, 6003, 1073747894, 6099, 6109, 1073747979, 6157, // NOLINT
- 6313, 1073748256, 6443, 1073748272, 6459, 1073748400, 6592, 1073748424, // NOLINT
- 6601, 1073748503, 6683, 1073748736, 6916, 1073748788, 6980, 1073748843, // NOLINT
- 7027, 1073749440, 7626, 1073749502, 7679 }; // NOLINT
-static const uint16_t kCombiningMarkTable1Size = 9;
-static const int32_t kCombiningMarkTable1[9] = {
- 1073742032, 220, 225, 1073742053, 239, 1073745962, 4143, 1073746073, // NOLINT
- 4250 }; // NOLINT
-static const uint16_t kCombiningMarkTable5Size = 5;
-static const int32_t kCombiningMarkTable5[5] = {
- 2050, 2054, 2059, 1073743907, 2087 }; // NOLINT
+ 1073743894, 2073, 1073743899, 2083, 1073743909, 2087, 1073743913, 2093, // NOLINT
+ 1073743961, 2139, 1073744100, 2302, 1073744128, 2307, 1073744186, 2364, // NOLINT
+ 1073744190, 2383, 1073744209, 2391, 1073744226, 2403, 1073744257, 2435, // NOLINT
+ 2492, 1073744318, 2500, 1073744327, 2504, 1073744331, 2509, 2519, // NOLINT
+ 1073744354, 2531, 1073744385, 2563, 2620, 1073744446, 2626, 1073744455, // NOLINT
+ 2632, 1073744459, 2637, 2641, 1073744496, 2673, 2677, 1073744513, // NOLINT
+ 2691, 2748, 1073744574, 2757, 1073744583, 2761, 1073744587, 2765, // NOLINT
+ 1073744610, 2787, 1073744641, 2819, 2876, 1073744702, 2884, 1073744711, // NOLINT
+ 2888, 1073744715, 2893, 1073744726, 2903, 1073744738, 2915, 2946, // NOLINT
+ 1073744830, 3010, 1073744838, 3016, 1073744842, 3021, 3031, 1073744897, // NOLINT
+ 3075, 1073744958, 3140, 1073744966, 3144, 1073744970, 3149, 1073744981, // NOLINT
+ 3158, 1073744994, 3171, 1073745026, 3203, 3260, 1073745086, 3268, // NOLINT
+ 1073745094, 3272, 1073745098, 3277, 1073745109, 3286, 1073745122, 3299, // NOLINT
+ 1073745154, 3331, 1073745214, 3396, 1073745222, 3400, 1073745226, 3405, // NOLINT
+ 3415, 1073745250, 3427, 1073745282, 3459, 3530, 1073745359, 3540, // NOLINT
+ 3542, 1073745368, 3551, 1073745394, 3571, 3633, 1073745460, 3642, // NOLINT
+ 1073745479, 3662, 3761, 1073745588, 3769, 1073745595, 3772, 1073745608, // NOLINT
+ 3789, 1073745688, 3865, 3893, 3895, 3897, 1073745726, 3903, // NOLINT
+ 1073745777, 3972, 1073745798, 3975, 1073745805, 3991, 1073745817, 4028, // NOLINT
+ 4038, 1073745963, 4158, 1073746006, 4185, 1073746014, 4192, 1073746018, // NOLINT
+ 4196, 1073746023, 4205, 1073746033, 4212, 1073746050, 4237, 4239, // NOLINT
+ 1073746074, 4253, 1073746781, 4959, 1073747730, 5908, 1073747762, 5940, // NOLINT
+ 1073747794, 5971, 1073747826, 6003, 1073747892, 6099, 6109, 1073747979, // NOLINT
+ 6157, 6313, 1073748256, 6443, 1073748272, 6459, 1073748400, 6592, // NOLINT
+ 1073748424, 6601, 1073748503, 6683, 1073748565, 6750, 1073748576, 6780, // NOLINT
+ 6783, 1073748736, 6916, 1073748788, 6980, 1073748843, 7027, 1073748864, // NOLINT
+ 7042, 1073748897, 7085, 1073748966, 7155, 1073749028, 7223, 1073749200, // NOLINT
+ 7378, 1073749204, 7400, 7405, 1073749234, 7412, 1073749440, 7654, // NOLINT
+ 1073749500, 7679 }; // NOLINT
+static const uint16_t kCombiningMarkTable1Size = 14;
+static const int32_t kCombiningMarkTable1[14] = {
+ 1073742032, 220, 225, 1073742053, 240, 1073745135, 3313, 3455, // NOLINT
+ 1073745376, 3583, 1073745962, 4143, 1073746073, 4250 }; // NOLINT
+static const uint16_t kCombiningMarkTable5Size = 47;
+static const int32_t kCombiningMarkTable5[47] = {
+ 1647, 1073743476, 1661, 1695, 1073743600, 1777, 2050, 2054, // NOLINT
+ 2059, 1073743907, 2087, 1073744000, 2177, 1073744052, 2244, 1073744096, // NOLINT
+ 2289, 1073744166, 2349, 1073744199, 2387, 1073744256, 2435, 1073744307, // NOLINT
+ 2496, 1073744425, 2614, 2627, 1073744460, 2637, 2683, 2736, // NOLINT
+ 1073744562, 2740, 1073744567, 2744, 1073744574, 2751, 2753, 1073744619, // NOLINT
+ 2799, 1073744629, 2806, 1073744867, 3050, 1073744876, 3053 }; // NOLINT
static const uint16_t kCombiningMarkTable7Size = 5;
static const int32_t kCombiningMarkTable7[5] = {
- 6942, 1073749504, 7695, 1073749536, 7715 }; // NOLINT
+ 6942, 1073749504, 7695, 1073749536, 7718 }; // NOLINT
bool CombiningMark::Is(uchar c) {
int chunk_index = c >> 13;
switch (chunk_index) {
@@ -826,8 +899,8 @@ bool ConnectorPunctuation::Is(uchar c) {
static const MultiCharacterSpecialCase<2> kToLowercaseMultiStrings0[2] = { // NOLINT
{{105, 775}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kToLowercaseTable0Size = 463; // NOLINT
-static const int32_t kToLowercaseTable0[926] = {
+static const uint16_t kToLowercaseTable0Size = 483; // NOLINT
+static const int32_t kToLowercaseTable0[966] = {
1073741889, 128, 90, 128, 1073742016, 128, 214, 128, 1073742040, 128, 222, 128, 256, 4, 258, 4, // NOLINT
260, 4, 262, 4, 264, 4, 266, 4, 268, 4, 270, 4, 272, 4, 274, 4, // NOLINT
276, 4, 278, 4, 280, 4, 282, 4, 284, 4, 286, 4, 288, 4, 290, 4, // NOLINT
@@ -850,22 +923,24 @@ static const int32_t kToLowercaseTable0[926] = {
542, 4, 544, -520, 546, 4, 548, 4, 550, 4, 552, 4, 554, 4, 556, 4, // NOLINT
558, 4, 560, 4, 562, 4, 570, 43180, 571, 4, 573, -652, 574, 43168, 577, 4, // NOLINT
579, -780, 580, 276, 581, 284, 582, 4, 584, 4, 586, 4, 588, 4, 590, 4, // NOLINT
- 902, 152, 1073742728, 148, 906, 148, 908, 256, 1073742734, 252, 911, 252, 1073742737, 128, 929, 128, // NOLINT
- 931, 6, 1073742756, 128, 939, 128, 984, 4, 986, 4, 988, 4, 990, 4, 992, 4, // NOLINT
- 994, 4, 996, 4, 998, 4, 1000, 4, 1002, 4, 1004, 4, 1006, 4, 1012, -240, // NOLINT
- 1015, 4, 1017, -28, 1018, 4, 1073742845, -520, 1023, -520, 1073742848, 320, 1039, 320, 1073742864, 128, // NOLINT
- 1071, 128, 1120, 4, 1122, 4, 1124, 4, 1126, 4, 1128, 4, 1130, 4, 1132, 4, // NOLINT
- 1134, 4, 1136, 4, 1138, 4, 1140, 4, 1142, 4, 1144, 4, 1146, 4, 1148, 4, // NOLINT
- 1150, 4, 1152, 4, 1162, 4, 1164, 4, 1166, 4, 1168, 4, 1170, 4, 1172, 4, // NOLINT
- 1174, 4, 1176, 4, 1178, 4, 1180, 4, 1182, 4, 1184, 4, 1186, 4, 1188, 4, // NOLINT
- 1190, 4, 1192, 4, 1194, 4, 1196, 4, 1198, 4, 1200, 4, 1202, 4, 1204, 4, // NOLINT
- 1206, 4, 1208, 4, 1210, 4, 1212, 4, 1214, 4, 1216, 60, 1217, 4, 1219, 4, // NOLINT
- 1221, 4, 1223, 4, 1225, 4, 1227, 4, 1229, 4, 1232, 4, 1234, 4, 1236, 4, // NOLINT
- 1238, 4, 1240, 4, 1242, 4, 1244, 4, 1246, 4, 1248, 4, 1250, 4, 1252, 4, // NOLINT
- 1254, 4, 1256, 4, 1258, 4, 1260, 4, 1262, 4, 1264, 4, 1266, 4, 1268, 4, // NOLINT
- 1270, 4, 1272, 4, 1274, 4, 1276, 4, 1278, 4, 1280, 4, 1282, 4, 1284, 4, // NOLINT
- 1286, 4, 1288, 4, 1290, 4, 1292, 4, 1294, 4, 1296, 4, 1298, 4, 1073743153, 192, // NOLINT
- 1366, 192, 1073746080, 29056, 4293, 29056, 7680, 4, 7682, 4, 7684, 4, 7686, 4, 7688, 4, // NOLINT
+ 880, 4, 882, 4, 886, 4, 902, 152, 1073742728, 148, 906, 148, 908, 256, 1073742734, 252, // NOLINT
+ 911, 252, 1073742737, 128, 929, 128, 931, 6, 1073742756, 128, 939, 128, 975, 32, 984, 4, // NOLINT
+ 986, 4, 988, 4, 990, 4, 992, 4, 994, 4, 996, 4, 998, 4, 1000, 4, // NOLINT
+ 1002, 4, 1004, 4, 1006, 4, 1012, -240, 1015, 4, 1017, -28, 1018, 4, 1073742845, -520, // NOLINT
+ 1023, -520, 1073742848, 320, 1039, 320, 1073742864, 128, 1071, 128, 1120, 4, 1122, 4, 1124, 4, // NOLINT
+ 1126, 4, 1128, 4, 1130, 4, 1132, 4, 1134, 4, 1136, 4, 1138, 4, 1140, 4, // NOLINT
+ 1142, 4, 1144, 4, 1146, 4, 1148, 4, 1150, 4, 1152, 4, 1162, 4, 1164, 4, // NOLINT
+ 1166, 4, 1168, 4, 1170, 4, 1172, 4, 1174, 4, 1176, 4, 1178, 4, 1180, 4, // NOLINT
+ 1182, 4, 1184, 4, 1186, 4, 1188, 4, 1190, 4, 1192, 4, 1194, 4, 1196, 4, // NOLINT
+ 1198, 4, 1200, 4, 1202, 4, 1204, 4, 1206, 4, 1208, 4, 1210, 4, 1212, 4, // NOLINT
+ 1214, 4, 1216, 60, 1217, 4, 1219, 4, 1221, 4, 1223, 4, 1225, 4, 1227, 4, // NOLINT
+ 1229, 4, 1232, 4, 1234, 4, 1236, 4, 1238, 4, 1240, 4, 1242, 4, 1244, 4, // NOLINT
+ 1246, 4, 1248, 4, 1250, 4, 1252, 4, 1254, 4, 1256, 4, 1258, 4, 1260, 4, // NOLINT
+ 1262, 4, 1264, 4, 1266, 4, 1268, 4, 1270, 4, 1272, 4, 1274, 4, 1276, 4, // NOLINT
+ 1278, 4, 1280, 4, 1282, 4, 1284, 4, 1286, 4, 1288, 4, 1290, 4, 1292, 4, // NOLINT
+ 1294, 4, 1296, 4, 1298, 4, 1300, 4, 1302, 4, 1304, 4, 1306, 4, 1308, 4, // NOLINT
+ 1310, 4, 1312, 4, 1314, 4, 1316, 4, 1318, 4, 1073743153, 192, 1366, 192, 1073746080, 29056, // NOLINT
+ 4293, 29056, 4295, 29056, 4301, 29056, 7680, 4, 7682, 4, 7684, 4, 7686, 4, 7688, 4, // NOLINT
7690, 4, 7692, 4, 7694, 4, 7696, 4, 7698, 4, 7700, 4, 7702, 4, 7704, 4, // NOLINT
7706, 4, 7708, 4, 7710, 4, 7712, 4, 7714, 4, 7716, 4, 7718, 4, 7720, 4, // NOLINT
7722, 4, 7724, 4, 7726, 4, 7728, 4, 7730, 4, 7732, 4, 7734, 4, 7736, 4, // NOLINT
@@ -874,33 +949,52 @@ static const int32_t kToLowercaseTable0[926] = {
7770, 4, 7772, 4, 7774, 4, 7776, 4, 7778, 4, 7780, 4, 7782, 4, 7784, 4, // NOLINT
7786, 4, 7788, 4, 7790, 4, 7792, 4, 7794, 4, 7796, 4, 7798, 4, 7800, 4, // NOLINT
7802, 4, 7804, 4, 7806, 4, 7808, 4, 7810, 4, 7812, 4, 7814, 4, 7816, 4, // NOLINT
- 7818, 4, 7820, 4, 7822, 4, 7824, 4, 7826, 4, 7828, 4, 7840, 4, 7842, 4, // NOLINT
- 7844, 4, 7846, 4, 7848, 4, 7850, 4, 7852, 4, 7854, 4, 7856, 4, 7858, 4, // NOLINT
- 7860, 4, 7862, 4, 7864, 4, 7866, 4, 7868, 4, 7870, 4, 7872, 4, 7874, 4, // NOLINT
- 7876, 4, 7878, 4, 7880, 4, 7882, 4, 7884, 4, 7886, 4, 7888, 4, 7890, 4, // NOLINT
- 7892, 4, 7894, 4, 7896, 4, 7898, 4, 7900, 4, 7902, 4, 7904, 4, 7906, 4, // NOLINT
- 7908, 4, 7910, 4, 7912, 4, 7914, 4, 7916, 4, 7918, 4, 7920, 4, 7922, 4, // NOLINT
- 7924, 4, 7926, 4, 7928, 4, 1073749768, -32, 7951, -32, 1073749784, -32, 7965, -32, 1073749800, -32, // NOLINT
- 7983, -32, 1073749816, -32, 7999, -32, 1073749832, -32, 8013, -32, 8025, -32, 8027, -32, 8029, -32, // NOLINT
- 8031, -32, 1073749864, -32, 8047, -32, 1073749896, -32, 8079, -32, 1073749912, -32, 8095, -32, 1073749928, -32, // NOLINT
- 8111, -32, 1073749944, -32, 8121, -32, 1073749946, -296, 8123, -296, 8124, -36, 1073749960, -344, 8139, -344, // NOLINT
- 8140, -36, 1073749976, -32, 8153, -32, 1073749978, -400, 8155, -400, 1073749992, -32, 8169, -32, 1073749994, -448, // NOLINT
- 8171, -448, 8172, -28, 1073750008, -512, 8185, -512, 1073750010, -504, 8187, -504, 8188, -36 }; // NOLINT
+ 7818, 4, 7820, 4, 7822, 4, 7824, 4, 7826, 4, 7828, 4, 7838, -30460, 7840, 4, // NOLINT
+ 7842, 4, 7844, 4, 7846, 4, 7848, 4, 7850, 4, 7852, 4, 7854, 4, 7856, 4, // NOLINT
+ 7858, 4, 7860, 4, 7862, 4, 7864, 4, 7866, 4, 7868, 4, 7870, 4, 7872, 4, // NOLINT
+ 7874, 4, 7876, 4, 7878, 4, 7880, 4, 7882, 4, 7884, 4, 7886, 4, 7888, 4, // NOLINT
+ 7890, 4, 7892, 4, 7894, 4, 7896, 4, 7898, 4, 7900, 4, 7902, 4, 7904, 4, // NOLINT
+ 7906, 4, 7908, 4, 7910, 4, 7912, 4, 7914, 4, 7916, 4, 7918, 4, 7920, 4, // NOLINT
+ 7922, 4, 7924, 4, 7926, 4, 7928, 4, 7930, 4, 7932, 4, 7934, 4, 1073749768, -32, // NOLINT
+ 7951, -32, 1073749784, -32, 7965, -32, 1073749800, -32, 7983, -32, 1073749816, -32, 7999, -32, 1073749832, -32, // NOLINT
+ 8013, -32, 8025, -32, 8027, -32, 8029, -32, 8031, -32, 1073749864, -32, 8047, -32, 1073749896, -32, // NOLINT
+ 8079, -32, 1073749912, -32, 8095, -32, 1073749928, -32, 8111, -32, 1073749944, -32, 8121, -32, 1073749946, -296, // NOLINT
+ 8123, -296, 8124, -36, 1073749960, -344, 8139, -344, 8140, -36, 1073749976, -32, 8153, -32, 1073749978, -400, // NOLINT
+ 8155, -400, 1073749992, -32, 8169, -32, 1073749994, -448, 8171, -448, 8172, -28, 1073750008, -512, 8185, -512, // NOLINT
+ 1073750010, -504, 8187, -504, 8188, -36 }; // NOLINT
static const uint16_t kToLowercaseMultiStrings0Size = 2; // NOLINT
static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kToLowercaseTable1Size = 69; // NOLINT
-static const int32_t kToLowercaseTable1[138] = {
+static const uint16_t kToLowercaseTable1Size = 79; // NOLINT
+static const int32_t kToLowercaseTable1[158] = {
294, -30068, 298, -33532, 299, -33048, 306, 112, 1073742176, 64, 367, 64, 387, 4, 1073743030, 104, // NOLINT
1231, 104, 1073744896, 192, 3118, 192, 3168, 4, 3170, -42972, 3171, -15256, 3172, -42908, 3175, 4, // NOLINT
- 3177, 4, 3179, 4, 3189, 4, 3200, 4, 3202, 4, 3204, 4, 3206, 4, 3208, 4, // NOLINT
- 3210, 4, 3212, 4, 3214, 4, 3216, 4, 3218, 4, 3220, 4, 3222, 4, 3224, 4, // NOLINT
- 3226, 4, 3228, 4, 3230, 4, 3232, 4, 3234, 4, 3236, 4, 3238, 4, 3240, 4, // NOLINT
- 3242, 4, 3244, 4, 3246, 4, 3248, 4, 3250, 4, 3252, 4, 3254, 4, 3256, 4, // NOLINT
- 3258, 4, 3260, 4, 3262, 4, 3264, 4, 3266, 4, 3268, 4, 3270, 4, 3272, 4, // NOLINT
- 3274, 4, 3276, 4, 3278, 4, 3280, 4, 3282, 4, 3284, 4, 3286, 4, 3288, 4, // NOLINT
- 3290, 4, 3292, 4, 3294, 4, 3296, 4, 3298, 4 }; // NOLINT
+ 3177, 4, 3179, 4, 3181, -43120, 3182, -42996, 3183, -43132, 3184, -43128, 3186, 4, 3189, 4, // NOLINT
+ 1073745022, -43260, 3199, -43260, 3200, 4, 3202, 4, 3204, 4, 3206, 4, 3208, 4, 3210, 4, // NOLINT
+ 3212, 4, 3214, 4, 3216, 4, 3218, 4, 3220, 4, 3222, 4, 3224, 4, 3226, 4, // NOLINT
+ 3228, 4, 3230, 4, 3232, 4, 3234, 4, 3236, 4, 3238, 4, 3240, 4, 3242, 4, // NOLINT
+ 3244, 4, 3246, 4, 3248, 4, 3250, 4, 3252, 4, 3254, 4, 3256, 4, 3258, 4, // NOLINT
+ 3260, 4, 3262, 4, 3264, 4, 3266, 4, 3268, 4, 3270, 4, 3272, 4, 3274, 4, // NOLINT
+ 3276, 4, 3278, 4, 3280, 4, 3282, 4, 3284, 4, 3286, 4, 3288, 4, 3290, 4, // NOLINT
+ 3292, 4, 3294, 4, 3296, 4, 3298, 4, 3307, 4, 3309, 4, 3314, 4 }; // NOLINT
static const uint16_t kToLowercaseMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kToLowercaseTable5Size = 91; // NOLINT
+static const int32_t kToLowercaseTable5[182] = {
+ 1600, 4, 1602, 4, 1604, 4, 1606, 4, 1608, 4, 1610, 4, 1612, 4, 1614, 4, // NOLINT
+ 1616, 4, 1618, 4, 1620, 4, 1622, 4, 1624, 4, 1626, 4, 1628, 4, 1630, 4, // NOLINT
+ 1632, 4, 1634, 4, 1636, 4, 1638, 4, 1640, 4, 1642, 4, 1644, 4, 1664, 4, // NOLINT
+ 1666, 4, 1668, 4, 1670, 4, 1672, 4, 1674, 4, 1676, 4, 1678, 4, 1680, 4, // NOLINT
+ 1682, 4, 1684, 4, 1686, 4, 1826, 4, 1828, 4, 1830, 4, 1832, 4, 1834, 4, // NOLINT
+ 1836, 4, 1838, 4, 1842, 4, 1844, 4, 1846, 4, 1848, 4, 1850, 4, 1852, 4, // NOLINT
+ 1854, 4, 1856, 4, 1858, 4, 1860, 4, 1862, 4, 1864, 4, 1866, 4, 1868, 4, // NOLINT
+ 1870, 4, 1872, 4, 1874, 4, 1876, 4, 1878, 4, 1880, 4, 1882, 4, 1884, 4, // NOLINT
+ 1886, 4, 1888, 4, 1890, 4, 1892, 4, 1894, 4, 1896, 4, 1898, 4, 1900, 4, // NOLINT
+ 1902, 4, 1913, 4, 1915, 4, 1917, -141328, 1918, 4, 1920, 4, 1922, 4, 1924, 4, // NOLINT
+ 1926, 4, 1931, 4, 1933, -169120, 1936, 4, 1938, 4, 1952, 4, 1954, 4, 1956, 4, // NOLINT
+ 1958, 4, 1960, 4, 1962, -169232 }; // NOLINT
+static const uint16_t kToLowercaseMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings7[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
static const uint16_t kToLowercaseTable7Size = 2; // NOLINT
@@ -927,6 +1021,13 @@ int ToLowercase::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kToLowercaseTable5,
+ kToLowercaseTable5Size,
+ kToLowercaseMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kToLowercaseTable7,
kToLowercaseTable7Size,
kToLowercaseMultiStrings7,
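The case-mapping tables dispatched on in Convert above (kToLowercaseTable0/1/5/7 and their size constants, which count pairs) appear to use a pair encoding: (key, value) pairs where a key at or above 1 << 30 opens a range closed by the next pair's key, and an even value looks like the mapping delta times 4 (the a..z range stores -128, i.e. a delta of -32; 181 stores 2972, i.e. +743, mapping U+00B5 to U+039C). Odd values seem to select an entry in the matching MultiStrings array for one-to-many mappings. The following hedged sketch handles only the simple single-character case under those assumptions; MapSimple and kDemo are illustrative names, not V8 API.

#include <stdint.h>
#include <stdio.h>

// Assumption inferred from the data: keys >= (1 << 30) open a range that is
// closed by the next pair's key; even values are delta * 4; odd values flag
// a multi-character special case (not handled in this sketch).
static const int32_t kStartBit = 1 << 30;

// Returns the mapped code point, or `c` unchanged if no entry applies or the
// entry is a multi-character special case.
static int32_t MapSimple(const int32_t* table, uint16_t pairs, int32_t c) {
  for (uint16_t i = 0; i < pairs; ++i) {
    int32_t key = table[2 * i];
    int32_t value = table[2 * i + 1];
    bool hit;
    if (key >= kStartBit) {
      int32_t start = key - kStartBit;
      int32_t end = table[2 * (i + 1)];  // next pair's key closes the range
      hit = (c >= start && c <= end);
    } else {
      hit = (key == c);
    }
    if (hit) return (value & 1) ? c : c + value / 4;
  }
  return c;
}

int main() {
  // Hand-made table in the assumed encoding: the a..z range with a stored
  // value of -128 (delta -32), plus the singleton 181 -> 924.
  static const int32_t kDemo[] = { kStartBit + 'a', -128, 'z', -128, 181, 2972 };
  printf("%c %c U+%04X\n", (char)MapSimple(kDemo, 3, 'q'),
         (char)MapSimple(kDemo, 3, '!'), (unsigned)MapSimple(kDemo, 3, 181));
  // Expected output: Q ! U+039C
  return 0;
}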
@@ -955,8 +1056,8 @@ static const MultiCharacterSpecialCase<3> kToUppercaseMultiStrings0[62] = { //
{{933, 776, 768}}, {{929, 787, kSentinel}}, {{933, 834, kSentinel}}, {{933, 776, 834}}, // NOLINT
{{8186, 921, kSentinel}}, {{937, 921, kSentinel}}, {{911, 921, kSentinel}}, {{937, 834, kSentinel}}, // NOLINT
{{937, 834, 921}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kToUppercaseTable0Size = 554; // NOLINT
-static const int32_t kToUppercaseTable0[1108] = {
+static const uint16_t kToUppercaseTable0Size = 580; // NOLINT
+static const int32_t kToUppercaseTable0[1160] = {
1073741921, -128, 122, -128, 181, 2972, 223, 1, 1073742048, -128, 246, -128, 1073742072, -128, 254, -128, // NOLINT
255, 484, 257, -4, 259, -4, 261, -4, 263, -4, 265, -4, 267, -4, 269, -4, // NOLINT
271, -4, 273, -4, 275, -4, 277, -4, 279, -4, 281, -4, 283, -4, 285, -4, // NOLINT
@@ -976,72 +1077,92 @@ static const int32_t kToUppercaseTable0[1108] = {
517, -4, 519, -4, 521, -4, 523, -4, 525, -4, 527, -4, 529, -4, 531, -4, // NOLINT
533, -4, 535, -4, 537, -4, 539, -4, 541, -4, 543, -4, 547, -4, 549, -4, // NOLINT
551, -4, 553, -4, 555, -4, 557, -4, 559, -4, 561, -4, 563, -4, 572, -4, // NOLINT
- 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, 591, -4, 595, -840, 596, -824, // NOLINT
- 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, 611, -828, 616, -836, 617, -844, // NOLINT
- 619, 42972, 623, -844, 626, -852, 629, -856, 637, 42908, 640, -872, 643, -872, 648, -872, // NOLINT
- 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, 837, 336, 1073742715, 520, 893, 520, // NOLINT
- 912, 13, 940, -152, 1073742765, -148, 943, -148, 944, 17, 1073742769, -128, 961, -128, 962, -124, // NOLINT
- 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, 976, -248, 977, -228, 981, -188, // NOLINT
- 982, -216, 985, -4, 987, -4, 989, -4, 991, -4, 993, -4, 995, -4, 997, -4, // NOLINT
- 999, -4, 1001, -4, 1003, -4, 1005, -4, 1007, -4, 1008, -344, 1009, -320, 1010, 28, // NOLINT
- 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, // NOLINT
- 1123, -4, 1125, -4, 1127, -4, 1129, -4, 1131, -4, 1133, -4, 1135, -4, 1137, -4, // NOLINT
- 1139, -4, 1141, -4, 1143, -4, 1145, -4, 1147, -4, 1149, -4, 1151, -4, 1153, -4, // NOLINT
- 1163, -4, 1165, -4, 1167, -4, 1169, -4, 1171, -4, 1173, -4, 1175, -4, 1177, -4, // NOLINT
- 1179, -4, 1181, -4, 1183, -4, 1185, -4, 1187, -4, 1189, -4, 1191, -4, 1193, -4, // NOLINT
- 1195, -4, 1197, -4, 1199, -4, 1201, -4, 1203, -4, 1205, -4, 1207, -4, 1209, -4, // NOLINT
- 1211, -4, 1213, -4, 1215, -4, 1218, -4, 1220, -4, 1222, -4, 1224, -4, 1226, -4, // NOLINT
- 1228, -4, 1230, -4, 1231, -60, 1233, -4, 1235, -4, 1237, -4, 1239, -4, 1241, -4, // NOLINT
- 1243, -4, 1245, -4, 1247, -4, 1249, -4, 1251, -4, 1253, -4, 1255, -4, 1257, -4, // NOLINT
- 1259, -4, 1261, -4, 1263, -4, 1265, -4, 1267, -4, 1269, -4, 1271, -4, 1273, -4, // NOLINT
- 1275, -4, 1277, -4, 1279, -4, 1281, -4, 1283, -4, 1285, -4, 1287, -4, 1289, -4, // NOLINT
- 1291, -4, 1293, -4, 1295, -4, 1297, -4, 1299, -4, 1073743201, -192, 1414, -192, 1415, 21, // NOLINT
- 7549, 15256, 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, // NOLINT
- 7695, -4, 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, // NOLINT
- 7711, -4, 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, // NOLINT
- 7727, -4, 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, // NOLINT
- 7743, -4, 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, // NOLINT
- 7759, -4, 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, // NOLINT
- 7775, -4, 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, // NOLINT
- 7791, -4, 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, // NOLINT
- 7807, -4, 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, // NOLINT
- 7823, -4, 7825, -4, 7827, -4, 7829, -4, 7830, 25, 7831, 29, 7832, 33, 7833, 37, // NOLINT
- 7834, 41, 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, 7849, -4, 7851, -4, // NOLINT
- 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, 7865, -4, 7867, -4, // NOLINT
- 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, 7881, -4, 7883, -4, // NOLINT
- 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, 7897, -4, 7899, -4, // NOLINT
- 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, 7913, -4, 7915, -4, // NOLINT
- 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, 7929, -4, 1073749760, 32, // NOLINT
- 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, 1073749808, 32, 7991, 32, 1073749824, 32, // NOLINT
- 8005, 32, 8016, 45, 8017, 32, 8018, 49, 8019, 32, 8020, 53, 8021, 32, 8022, 57, // NOLINT
- 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, 1073749878, 400, // NOLINT
- 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, 8064, 61, // NOLINT
- 8065, 65, 8066, 69, 8067, 73, 8068, 77, 8069, 81, 8070, 85, 8071, 89, 8072, 61, // NOLINT
- 8073, 65, 8074, 69, 8075, 73, 8076, 77, 8077, 81, 8078, 85, 8079, 89, 8080, 93, // NOLINT
- 8081, 97, 8082, 101, 8083, 105, 8084, 109, 8085, 113, 8086, 117, 8087, 121, 8088, 93, // NOLINT
- 8089, 97, 8090, 101, 8091, 105, 8092, 109, 8093, 113, 8094, 117, 8095, 121, 8096, 125, // NOLINT
- 8097, 129, 8098, 133, 8099, 137, 8100, 141, 8101, 145, 8102, 149, 8103, 153, 8104, 125, // NOLINT
- 8105, 129, 8106, 133, 8107, 137, 8108, 141, 8109, 145, 8110, 149, 8111, 153, 1073749936, 32, // NOLINT
- 8113, 32, 8114, 157, 8115, 161, 8116, 165, 8118, 169, 8119, 173, 8124, 161, 8126, -28820, // NOLINT
- 8130, 177, 8131, 181, 8132, 185, 8134, 189, 8135, 193, 8140, 181, 1073749968, 32, 8145, 32, // NOLINT
- 8146, 197, 8147, 13, 8150, 201, 8151, 205, 1073749984, 32, 8161, 32, 8162, 209, 8163, 17, // NOLINT
- 8164, 213, 8165, 28, 8166, 217, 8167, 221, 8178, 225, 8179, 229, 8180, 233, 8182, 237, // NOLINT
- 8183, 241, 8188, 229 }; // NOLINT
+ 1073742399, 43260, 576, 43260, 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, 591, -4, // NOLINT
+ 592, 43132, 593, 43120, 594, 43128, 595, -840, 596, -824, 1073742422, -820, 599, -820, 601, -808, // NOLINT
+ 603, -812, 608, -820, 611, -828, 613, 169120, 614, 169232, 616, -836, 617, -844, 619, 42972, // NOLINT
+ 623, -844, 625, 42996, 626, -852, 629, -856, 637, 42908, 640, -872, 643, -872, 648, -872, // NOLINT
+ 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, 837, 336, 881, -4, 883, -4, // NOLINT
+ 887, -4, 1073742715, 520, 893, 520, 912, 13, 940, -152, 1073742765, -148, 943, -148, 944, 17, // NOLINT
+ 1073742769, -128, 961, -128, 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, // NOLINT
+ 976, -248, 977, -228, 981, -188, 982, -216, 983, -32, 985, -4, 987, -4, 989, -4, // NOLINT
+ 991, -4, 993, -4, 995, -4, 997, -4, 999, -4, 1001, -4, 1003, -4, 1005, -4, // NOLINT
+ 1007, -4, 1008, -344, 1009, -320, 1010, 28, 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, // NOLINT
+ 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, 1123, -4, 1125, -4, 1127, -4, 1129, -4, // NOLINT
+ 1131, -4, 1133, -4, 1135, -4, 1137, -4, 1139, -4, 1141, -4, 1143, -4, 1145, -4, // NOLINT
+ 1147, -4, 1149, -4, 1151, -4, 1153, -4, 1163, -4, 1165, -4, 1167, -4, 1169, -4, // NOLINT
+ 1171, -4, 1173, -4, 1175, -4, 1177, -4, 1179, -4, 1181, -4, 1183, -4, 1185, -4, // NOLINT
+ 1187, -4, 1189, -4, 1191, -4, 1193, -4, 1195, -4, 1197, -4, 1199, -4, 1201, -4, // NOLINT
+ 1203, -4, 1205, -4, 1207, -4, 1209, -4, 1211, -4, 1213, -4, 1215, -4, 1218, -4, // NOLINT
+ 1220, -4, 1222, -4, 1224, -4, 1226, -4, 1228, -4, 1230, -4, 1231, -60, 1233, -4, // NOLINT
+ 1235, -4, 1237, -4, 1239, -4, 1241, -4, 1243, -4, 1245, -4, 1247, -4, 1249, -4, // NOLINT
+ 1251, -4, 1253, -4, 1255, -4, 1257, -4, 1259, -4, 1261, -4, 1263, -4, 1265, -4, // NOLINT
+ 1267, -4, 1269, -4, 1271, -4, 1273, -4, 1275, -4, 1277, -4, 1279, -4, 1281, -4, // NOLINT
+ 1283, -4, 1285, -4, 1287, -4, 1289, -4, 1291, -4, 1293, -4, 1295, -4, 1297, -4, // NOLINT
+ 1299, -4, 1301, -4, 1303, -4, 1305, -4, 1307, -4, 1309, -4, 1311, -4, 1313, -4, // NOLINT
+ 1315, -4, 1317, -4, 1319, -4, 1073743201, -192, 1414, -192, 1415, 21, 7545, 141328, 7549, 15256, // NOLINT
+ 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, 7695, -4, // NOLINT
+ 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, 7711, -4, // NOLINT
+ 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, 7727, -4, // NOLINT
+ 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, 7743, -4, // NOLINT
+ 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, 7759, -4, // NOLINT
+ 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, 7775, -4, // NOLINT
+ 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, 7791, -4, // NOLINT
+ 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, 7807, -4, // NOLINT
+ 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, 7823, -4, // NOLINT
+ 7825, -4, 7827, -4, 7829, -4, 7830, 25, 7831, 29, 7832, 33, 7833, 37, 7834, 41, // NOLINT
+ 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, 7849, -4, 7851, -4, 7853, -4, // NOLINT
+ 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, 7865, -4, 7867, -4, 7869, -4, // NOLINT
+ 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, 7881, -4, 7883, -4, 7885, -4, // NOLINT
+ 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, 7897, -4, 7899, -4, 7901, -4, // NOLINT
+ 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, 7913, -4, 7915, -4, 7917, -4, // NOLINT
+ 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, 7929, -4, 7931, -4, 7933, -4, // NOLINT
+ 7935, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, 1073749808, 32, // NOLINT
+ 7991, 32, 1073749824, 32, 8005, 32, 8016, 45, 8017, 32, 8018, 49, 8019, 32, 8020, 53, // NOLINT
+ 8021, 32, 8022, 57, 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, // NOLINT
+ 8053, 344, 1073749878, 400, 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, // NOLINT
+ 8061, 504, 8064, 61, 8065, 65, 8066, 69, 8067, 73, 8068, 77, 8069, 81, 8070, 85, // NOLINT
+ 8071, 89, 8072, 61, 8073, 65, 8074, 69, 8075, 73, 8076, 77, 8077, 81, 8078, 85, // NOLINT
+ 8079, 89, 8080, 93, 8081, 97, 8082, 101, 8083, 105, 8084, 109, 8085, 113, 8086, 117, // NOLINT
+ 8087, 121, 8088, 93, 8089, 97, 8090, 101, 8091, 105, 8092, 109, 8093, 113, 8094, 117, // NOLINT
+ 8095, 121, 8096, 125, 8097, 129, 8098, 133, 8099, 137, 8100, 141, 8101, 145, 8102, 149, // NOLINT
+ 8103, 153, 8104, 125, 8105, 129, 8106, 133, 8107, 137, 8108, 141, 8109, 145, 8110, 149, // NOLINT
+ 8111, 153, 1073749936, 32, 8113, 32, 8114, 157, 8115, 161, 8116, 165, 8118, 169, 8119, 173, // NOLINT
+ 8124, 161, 8126, -28820, 8130, 177, 8131, 181, 8132, 185, 8134, 189, 8135, 193, 8140, 181, // NOLINT
+ 1073749968, 32, 8145, 32, 8146, 197, 8147, 13, 8150, 201, 8151, 205, 1073749984, 32, 8161, 32, // NOLINT
+ 8162, 209, 8163, 17, 8164, 213, 8165, 28, 8166, 217, 8167, 221, 8178, 225, 8179, 229, // NOLINT
+ 8180, 233, 8182, 237, 8183, 241, 8188, 229 }; // NOLINT
static const uint16_t kToUppercaseMultiStrings0Size = 62; // NOLINT
static const MultiCharacterSpecialCase<1> kToUppercaseMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kToUppercaseTable1Size = 67; // NOLINT
-static const int32_t kToUppercaseTable1[134] = {
+static const uint16_t kToUppercaseTable1Size = 73; // NOLINT
+static const int32_t kToUppercaseTable1[146] = {
334, -112, 1073742192, -64, 383, -64, 388, -4, 1073743056, -104, 1257, -104, 1073744944, -192, 3166, -192, // NOLINT
- 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3190, -4, 3201, -4, // NOLINT
- 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, 3217, -4, // NOLINT
- 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, 3233, -4, // NOLINT
- 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, 3249, -4, // NOLINT
- 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, 3265, -4, // NOLINT
- 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, 3281, -4, // NOLINT
- 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, 3297, -4, // NOLINT
- 3299, -4, 1073745152, -29056, 3365, -29056 }; // NOLINT
+ 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3187, -4, 3190, -4, // NOLINT
+ 3201, -4, 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, // NOLINT
+ 3217, -4, 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, // NOLINT
+ 3233, -4, 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, // NOLINT
+ 3249, -4, 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, // NOLINT
+ 3265, -4, 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, // NOLINT
+ 3281, -4, 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, // NOLINT
+ 3297, -4, 3299, -4, 3308, -4, 3310, -4, 3315, -4, 1073745152, -29056, 3365, -29056, 3367, -29056, // NOLINT
+ 3373, -29056 }; // NOLINT
static const uint16_t kToUppercaseMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kToUppercaseMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kToUppercaseTable5Size = 88; // NOLINT
+static const int32_t kToUppercaseTable5[176] = {
+ 1601, -4, 1603, -4, 1605, -4, 1607, -4, 1609, -4, 1611, -4, 1613, -4, 1615, -4, // NOLINT
+ 1617, -4, 1619, -4, 1621, -4, 1623, -4, 1625, -4, 1627, -4, 1629, -4, 1631, -4, // NOLINT
+ 1633, -4, 1635, -4, 1637, -4, 1639, -4, 1641, -4, 1643, -4, 1645, -4, 1665, -4, // NOLINT
+ 1667, -4, 1669, -4, 1671, -4, 1673, -4, 1675, -4, 1677, -4, 1679, -4, 1681, -4, // NOLINT
+ 1683, -4, 1685, -4, 1687, -4, 1827, -4, 1829, -4, 1831, -4, 1833, -4, 1835, -4, // NOLINT
+ 1837, -4, 1839, -4, 1843, -4, 1845, -4, 1847, -4, 1849, -4, 1851, -4, 1853, -4, // NOLINT
+ 1855, -4, 1857, -4, 1859, -4, 1861, -4, 1863, -4, 1865, -4, 1867, -4, 1869, -4, // NOLINT
+ 1871, -4, 1873, -4, 1875, -4, 1877, -4, 1879, -4, 1881, -4, 1883, -4, 1885, -4, // NOLINT
+ 1887, -4, 1889, -4, 1891, -4, 1893, -4, 1895, -4, 1897, -4, 1899, -4, 1901, -4, // NOLINT
+ 1903, -4, 1914, -4, 1916, -4, 1919, -4, 1921, -4, 1923, -4, 1925, -4, 1927, -4, // NOLINT
+ 1932, -4, 1937, -4, 1939, -4, 1953, -4, 1955, -4, 1957, -4, 1959, -4, 1961, -4 }; // NOLINT
+static const uint16_t kToUppercaseMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<3> kToUppercaseMultiStrings7[12] = { // NOLINT
{{70, 70, kSentinel}}, {{70, 73, kSentinel}}, {{70, 76, kSentinel}}, {{70, 70, 73}}, // NOLINT
{{70, 70, 76}}, {{83, 84, kSentinel}}, {{1348, 1350, kSentinel}}, {{1348, 1333, kSentinel}}, // NOLINT
@@ -1071,6 +1192,13 @@ int ToUppercase::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kToUppercaseTable5,
+ kToUppercaseTable5Size,
+ kToUppercaseMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kToUppercaseTable7,
kToUppercaseTable7Size,
kToUppercaseMultiStrings7,
@@ -1084,8 +1212,8 @@ int ToUppercase::Convert(uchar c,
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings0[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262CanonicalizeTable0Size = 462; // NOLINT
-static const int32_t kEcma262CanonicalizeTable0[924] = {
+static const uint16_t kEcma262CanonicalizeTable0Size = 488; // NOLINT
+static const int32_t kEcma262CanonicalizeTable0[976] = {
1073741921, -128, 122, -128, 181, 2972, 1073742048, -128, 246, -128, 1073742072, -128, 254, -128, 255, 484, // NOLINT
257, -4, 259, -4, 261, -4, 263, -4, 265, -4, 267, -4, 269, -4, 271, -4, // NOLINT
273, -4, 275, -4, 277, -4, 279, -4, 281, -4, 283, -4, 285, -4, 287, -4, // NOLINT
@@ -1104,61 +1232,81 @@ static const int32_t kEcma262CanonicalizeTable0[924] = {
511, -4, 513, -4, 515, -4, 517, -4, 519, -4, 521, -4, 523, -4, 525, -4, // NOLINT
527, -4, 529, -4, 531, -4, 533, -4, 535, -4, 537, -4, 539, -4, 541, -4, // NOLINT
543, -4, 547, -4, 549, -4, 551, -4, 553, -4, 555, -4, 557, -4, 559, -4, // NOLINT
- 561, -4, 563, -4, 572, -4, 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, // NOLINT
- 591, -4, 595, -840, 596, -824, 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, // NOLINT
- 611, -828, 616, -836, 617, -844, 619, 42972, 623, -844, 626, -852, 629, -856, 637, 42908, // NOLINT
+ 561, -4, 563, -4, 572, -4, 1073742399, 43260, 576, 43260, 578, -4, 583, -4, 585, -4, // NOLINT
+ 587, -4, 589, -4, 591, -4, 592, 43132, 593, 43120, 594, 43128, 595, -840, 596, -824, // NOLINT
+ 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, 611, -828, 613, 169120, 614, 169232, // NOLINT
+ 616, -836, 617, -844, 619, 42972, 623, -844, 625, 42996, 626, -852, 629, -856, 637, 42908, // NOLINT
640, -872, 643, -872, 648, -872, 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, // NOLINT
- 837, 336, 1073742715, 520, 893, 520, 940, -152, 1073742765, -148, 943, -148, 1073742769, -128, 961, -128, // NOLINT
- 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, 976, -248, 977, -228, // NOLINT
- 981, -188, 982, -216, 985, -4, 987, -4, 989, -4, 991, -4, 993, -4, 995, -4, // NOLINT
- 997, -4, 999, -4, 1001, -4, 1003, -4, 1005, -4, 1007, -4, 1008, -344, 1009, -320, // NOLINT
- 1010, 28, 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, // NOLINT
- 1121, -4, 1123, -4, 1125, -4, 1127, -4, 1129, -4, 1131, -4, 1133, -4, 1135, -4, // NOLINT
- 1137, -4, 1139, -4, 1141, -4, 1143, -4, 1145, -4, 1147, -4, 1149, -4, 1151, -4, // NOLINT
- 1153, -4, 1163, -4, 1165, -4, 1167, -4, 1169, -4, 1171, -4, 1173, -4, 1175, -4, // NOLINT
- 1177, -4, 1179, -4, 1181, -4, 1183, -4, 1185, -4, 1187, -4, 1189, -4, 1191, -4, // NOLINT
- 1193, -4, 1195, -4, 1197, -4, 1199, -4, 1201, -4, 1203, -4, 1205, -4, 1207, -4, // NOLINT
- 1209, -4, 1211, -4, 1213, -4, 1215, -4, 1218, -4, 1220, -4, 1222, -4, 1224, -4, // NOLINT
- 1226, -4, 1228, -4, 1230, -4, 1231, -60, 1233, -4, 1235, -4, 1237, -4, 1239, -4, // NOLINT
- 1241, -4, 1243, -4, 1245, -4, 1247, -4, 1249, -4, 1251, -4, 1253, -4, 1255, -4, // NOLINT
- 1257, -4, 1259, -4, 1261, -4, 1263, -4, 1265, -4, 1267, -4, 1269, -4, 1271, -4, // NOLINT
- 1273, -4, 1275, -4, 1277, -4, 1279, -4, 1281, -4, 1283, -4, 1285, -4, 1287, -4, // NOLINT
- 1289, -4, 1291, -4, 1293, -4, 1295, -4, 1297, -4, 1299, -4, 1073743201, -192, 1414, -192, // NOLINT
- 7549, 15256, 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, // NOLINT
- 7695, -4, 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, // NOLINT
- 7711, -4, 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, // NOLINT
- 7727, -4, 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, // NOLINT
- 7743, -4, 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, // NOLINT
- 7759, -4, 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, // NOLINT
- 7775, -4, 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, // NOLINT
- 7791, -4, 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, // NOLINT
- 7807, -4, 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, // NOLINT
- 7823, -4, 7825, -4, 7827, -4, 7829, -4, 7835, -236, 7841, -4, 7843, -4, 7845, -4, // NOLINT
- 7847, -4, 7849, -4, 7851, -4, 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, // NOLINT
- 7863, -4, 7865, -4, 7867, -4, 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, // NOLINT
- 7879, -4, 7881, -4, 7883, -4, 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, // NOLINT
- 7895, -4, 7897, -4, 7899, -4, 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, // NOLINT
- 7911, -4, 7913, -4, 7915, -4, 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, // NOLINT
- 7927, -4, 7929, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, // NOLINT
- 1073749808, 32, 7991, 32, 1073749824, 32, 8005, 32, 8017, 32, 8019, 32, 8021, 32, 8023, 32, // NOLINT
- 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, 1073749878, 400, 8055, 400, // NOLINT
- 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, 1073749936, 32, 8113, 32, // NOLINT
- 8126, -28820, 1073749968, 32, 8145, 32, 1073749984, 32, 8161, 32, 8165, 28 }; // NOLINT
+ 837, 336, 881, -4, 883, -4, 887, -4, 1073742715, 520, 893, 520, 940, -152, 1073742765, -148, // NOLINT
+ 943, -148, 1073742769, -128, 961, -128, 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, // NOLINT
+ 974, -252, 976, -248, 977, -228, 981, -188, 982, -216, 983, -32, 985, -4, 987, -4, // NOLINT
+ 989, -4, 991, -4, 993, -4, 995, -4, 997, -4, 999, -4, 1001, -4, 1003, -4, // NOLINT
+ 1005, -4, 1007, -4, 1008, -344, 1009, -320, 1010, 28, 1013, -384, 1016, -4, 1019, -4, // NOLINT
+ 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, 1123, -4, 1125, -4, 1127, -4, // NOLINT
+ 1129, -4, 1131, -4, 1133, -4, 1135, -4, 1137, -4, 1139, -4, 1141, -4, 1143, -4, // NOLINT
+ 1145, -4, 1147, -4, 1149, -4, 1151, -4, 1153, -4, 1163, -4, 1165, -4, 1167, -4, // NOLINT
+ 1169, -4, 1171, -4, 1173, -4, 1175, -4, 1177, -4, 1179, -4, 1181, -4, 1183, -4, // NOLINT
+ 1185, -4, 1187, -4, 1189, -4, 1191, -4, 1193, -4, 1195, -4, 1197, -4, 1199, -4, // NOLINT
+ 1201, -4, 1203, -4, 1205, -4, 1207, -4, 1209, -4, 1211, -4, 1213, -4, 1215, -4, // NOLINT
+ 1218, -4, 1220, -4, 1222, -4, 1224, -4, 1226, -4, 1228, -4, 1230, -4, 1231, -60, // NOLINT
+ 1233, -4, 1235, -4, 1237, -4, 1239, -4, 1241, -4, 1243, -4, 1245, -4, 1247, -4, // NOLINT
+ 1249, -4, 1251, -4, 1253, -4, 1255, -4, 1257, -4, 1259, -4, 1261, -4, 1263, -4, // NOLINT
+ 1265, -4, 1267, -4, 1269, -4, 1271, -4, 1273, -4, 1275, -4, 1277, -4, 1279, -4, // NOLINT
+ 1281, -4, 1283, -4, 1285, -4, 1287, -4, 1289, -4, 1291, -4, 1293, -4, 1295, -4, // NOLINT
+ 1297, -4, 1299, -4, 1301, -4, 1303, -4, 1305, -4, 1307, -4, 1309, -4, 1311, -4, // NOLINT
+ 1313, -4, 1315, -4, 1317, -4, 1319, -4, 1073743201, -192, 1414, -192, 7545, 141328, 7549, 15256, // NOLINT
+ 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, 7695, -4, // NOLINT
+ 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, 7711, -4, // NOLINT
+ 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, 7727, -4, // NOLINT
+ 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, 7743, -4, // NOLINT
+ 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, 7759, -4, // NOLINT
+ 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, 7775, -4, // NOLINT
+ 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, 7791, -4, // NOLINT
+ 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, 7807, -4, // NOLINT
+ 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, 7823, -4, // NOLINT
+ 7825, -4, 7827, -4, 7829, -4, 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, // NOLINT
+ 7849, -4, 7851, -4, 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, // NOLINT
+ 7865, -4, 7867, -4, 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, // NOLINT
+ 7881, -4, 7883, -4, 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, // NOLINT
+ 7897, -4, 7899, -4, 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, // NOLINT
+ 7913, -4, 7915, -4, 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, // NOLINT
+ 7929, -4, 7931, -4, 7933, -4, 7935, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, // NOLINT
+ 1073749792, 32, 7975, 32, 1073749808, 32, 7991, 32, 1073749824, 32, 8005, 32, 8017, 32, 8019, 32, // NOLINT
+ 8021, 32, 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, // NOLINT
+ 1073749878, 400, 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, // NOLINT
+ 1073749936, 32, 8113, 32, 8126, -28820, 1073749968, 32, 8145, 32, 1073749984, 32, 8161, 32, 8165, 28 }; // NOLINT
static const uint16_t kEcma262CanonicalizeMultiStrings0Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262CanonicalizeTable1Size = 67; // NOLINT
-static const int32_t kEcma262CanonicalizeTable1[134] = {
+static const uint16_t kEcma262CanonicalizeTable1Size = 73; // NOLINT
+static const int32_t kEcma262CanonicalizeTable1[146] = {
334, -112, 1073742192, -64, 383, -64, 388, -4, 1073743056, -104, 1257, -104, 1073744944, -192, 3166, -192, // NOLINT
- 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3190, -4, 3201, -4, // NOLINT
- 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, 3217, -4, // NOLINT
- 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, 3233, -4, // NOLINT
- 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, 3249, -4, // NOLINT
- 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, 3265, -4, // NOLINT
- 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, 3281, -4, // NOLINT
- 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, 3297, -4, // NOLINT
- 3299, -4, 1073745152, -29056, 3365, -29056 }; // NOLINT
+ 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3187, -4, 3190, -4, // NOLINT
+ 3201, -4, 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, // NOLINT
+ 3217, -4, 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, // NOLINT
+ 3233, -4, 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, // NOLINT
+ 3249, -4, 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, // NOLINT
+ 3265, -4, 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, // NOLINT
+ 3281, -4, 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, // NOLINT
+ 3297, -4, 3299, -4, 3308, -4, 3310, -4, 3315, -4, 1073745152, -29056, 3365, -29056, 3367, -29056, // NOLINT
+ 3373, -29056 }; // NOLINT
static const uint16_t kEcma262CanonicalizeMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262CanonicalizeTable5Size = 88; // NOLINT
+static const int32_t kEcma262CanonicalizeTable5[176] = {
+ 1601, -4, 1603, -4, 1605, -4, 1607, -4, 1609, -4, 1611, -4, 1613, -4, 1615, -4, // NOLINT
+ 1617, -4, 1619, -4, 1621, -4, 1623, -4, 1625, -4, 1627, -4, 1629, -4, 1631, -4, // NOLINT
+ 1633, -4, 1635, -4, 1637, -4, 1639, -4, 1641, -4, 1643, -4, 1645, -4, 1665, -4, // NOLINT
+ 1667, -4, 1669, -4, 1671, -4, 1673, -4, 1675, -4, 1677, -4, 1679, -4, 1681, -4, // NOLINT
+ 1683, -4, 1685, -4, 1687, -4, 1827, -4, 1829, -4, 1831, -4, 1833, -4, 1835, -4, // NOLINT
+ 1837, -4, 1839, -4, 1843, -4, 1845, -4, 1847, -4, 1849, -4, 1851, -4, 1853, -4, // NOLINT
+ 1855, -4, 1857, -4, 1859, -4, 1861, -4, 1863, -4, 1865, -4, 1867, -4, 1869, -4, // NOLINT
+ 1871, -4, 1873, -4, 1875, -4, 1877, -4, 1879, -4, 1881, -4, 1883, -4, 1885, -4, // NOLINT
+ 1887, -4, 1889, -4, 1891, -4, 1893, -4, 1895, -4, 1897, -4, 1899, -4, 1901, -4, // NOLINT
+ 1903, -4, 1914, -4, 1916, -4, 1919, -4, 1921, -4, 1923, -4, 1925, -4, 1927, -4, // NOLINT
+ 1932, -4, 1937, -4, 1939, -4, 1953, -4, 1955, -4, 1957, -4, 1959, -4, 1961, -4 }; // NOLINT
+static const uint16_t kEcma262CanonicalizeMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings7[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
static const uint16_t kEcma262CanonicalizeTable7Size = 2; // NOLINT
@@ -1185,6 +1333,13 @@ int Ecma262Canonicalize::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kEcma262CanonicalizeTable5,
+ kEcma262CanonicalizeTable5Size,
+ kEcma262CanonicalizeMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kEcma262CanonicalizeTable7,
kEcma262CanonicalizeTable7Size,
kEcma262CanonicalizeMultiStrings7,
@@ -1196,7 +1351,7 @@ int Ecma262Canonicalize::Convert(uchar c,
}
}
-static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[469] = { // NOLINT
+static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[497] = { // NOLINT
{{65, 97, kSentinel}}, {{90, 122, kSentinel}}, {{181, 924, 956, kSentinel}}, {{192, 224, kSentinel}}, // NOLINT
{{214, 246, kSentinel}}, {{216, 248, kSentinel}}, {{222, 254, kSentinel}}, {{255, 376, kSentinel}}, // NOLINT
{{256, 257, kSentinel}}, {{258, 259, kSentinel}}, {{260, 261, kSentinel}}, {{262, 263, kSentinel}}, // NOLINT
@@ -1238,16 +1393,19 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{546, 547, kSentinel}}, {{548, 549, kSentinel}}, {{550, 551, kSentinel}}, {{552, 553, kSentinel}}, // NOLINT
{{554, 555, kSentinel}}, {{556, 557, kSentinel}}, {{558, 559, kSentinel}}, {{560, 561, kSentinel}}, // NOLINT
{{562, 563, kSentinel}}, {{570, 11365, kSentinel}}, {{571, 572, kSentinel}}, {{574, 11366, kSentinel}}, // NOLINT
- {{577, 578, kSentinel}}, {{580, 649, kSentinel}}, {{581, 652, kSentinel}}, {{582, 583, kSentinel}}, // NOLINT
- {{584, 585, kSentinel}}, {{586, 587, kSentinel}}, {{588, 589, kSentinel}}, {{590, 591, kSentinel}}, // NOLINT
- {{619, 11362, kSentinel}}, {{637, 11364, kSentinel}}, {{837, 921, 953, 8126}}, {{891, 1021, kSentinel}}, // NOLINT
- {{893, 1023, kSentinel}}, {{902, 940, kSentinel}}, {{904, 941, kSentinel}}, {{906, 943, kSentinel}}, // NOLINT
- {{908, 972, kSentinel}}, {{910, 973, kSentinel}}, {{911, 974, kSentinel}}, {{913, 945, kSentinel}}, // NOLINT
- {{914, 946, 976, kSentinel}}, {{915, 947, kSentinel}}, {{916, 948, kSentinel}}, {{917, 949, 1013, kSentinel}}, // NOLINT
- {{918, 950, kSentinel}}, {{919, 951, kSentinel}}, {{920, 952, 977, kSentinel}}, {{922, 954, 1008, kSentinel}}, // NOLINT
- {{923, 955, kSentinel}}, {{925, 957, kSentinel}}, {{927, 959, kSentinel}}, {{928, 960, 982, kSentinel}}, // NOLINT
- {{929, 961, 1009, kSentinel}}, {{931, 962, 963, kSentinel}}, {{932, 964, kSentinel}}, {{933, 965, kSentinel}}, // NOLINT
- {{934, 966, 981, kSentinel}}, {{935, 967, kSentinel}}, {{939, 971, kSentinel}}, {{984, 985, kSentinel}}, // NOLINT
+ {{575, 11390, kSentinel}}, {{576, 11391, kSentinel}}, {{577, 578, kSentinel}}, {{580, 649, kSentinel}}, // NOLINT
+ {{581, 652, kSentinel}}, {{582, 583, kSentinel}}, {{584, 585, kSentinel}}, {{586, 587, kSentinel}}, // NOLINT
+ {{588, 589, kSentinel}}, {{590, 591, kSentinel}}, {{592, 11375, kSentinel}}, {{593, 11373, kSentinel}}, // NOLINT
+ {{594, 11376, kSentinel}}, {{613, 42893, kSentinel}}, {{614, 42922, kSentinel}}, {{619, 11362, kSentinel}}, // NOLINT
+ {{625, 11374, kSentinel}}, {{637, 11364, kSentinel}}, {{837, 921, 953, 8126}}, {{880, 881, kSentinel}}, // NOLINT
+ {{882, 883, kSentinel}}, {{886, 887, kSentinel}}, {{891, 1021, kSentinel}}, {{893, 1023, kSentinel}}, // NOLINT
+ {{902, 940, kSentinel}}, {{904, 941, kSentinel}}, {{906, 943, kSentinel}}, {{908, 972, kSentinel}}, // NOLINT
+ {{910, 973, kSentinel}}, {{911, 974, kSentinel}}, {{913, 945, kSentinel}}, {{914, 946, 976, kSentinel}}, // NOLINT
+ {{915, 947, kSentinel}}, {{916, 948, kSentinel}}, {{917, 949, 1013, kSentinel}}, {{918, 950, kSentinel}}, // NOLINT
+ {{919, 951, kSentinel}}, {{920, 952, 977, kSentinel}}, {{922, 954, 1008, kSentinel}}, {{923, 955, kSentinel}}, // NOLINT
+ {{925, 957, kSentinel}}, {{927, 959, kSentinel}}, {{928, 960, 982, kSentinel}}, {{929, 961, 1009, kSentinel}}, // NOLINT
+ {{931, 962, 963, kSentinel}}, {{932, 964, kSentinel}}, {{933, 965, kSentinel}}, {{934, 966, 981, kSentinel}}, // NOLINT
+ {{935, 967, kSentinel}}, {{939, 971, kSentinel}}, {{975, 983, kSentinel}}, {{984, 985, kSentinel}}, // NOLINT
{{986, 987, kSentinel}}, {{988, 989, kSentinel}}, {{990, 991, kSentinel}}, {{992, 993, kSentinel}}, // NOLINT
{{994, 995, kSentinel}}, {{996, 997, kSentinel}}, {{998, 999, kSentinel}}, {{1000, 1001, kSentinel}}, // NOLINT
{{1002, 1003, kSentinel}}, {{1004, 1005, kSentinel}}, {{1006, 1007, kSentinel}}, {{1010, 1017, kSentinel}}, // NOLINT
@@ -1274,38 +1432,42 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{1276, 1277, kSentinel}}, {{1278, 1279, kSentinel}}, {{1280, 1281, kSentinel}}, {{1282, 1283, kSentinel}}, // NOLINT
{{1284, 1285, kSentinel}}, {{1286, 1287, kSentinel}}, {{1288, 1289, kSentinel}}, {{1290, 1291, kSentinel}}, // NOLINT
{{1292, 1293, kSentinel}}, {{1294, 1295, kSentinel}}, {{1296, 1297, kSentinel}}, {{1298, 1299, kSentinel}}, // NOLINT
- {{1329, 1377, kSentinel}}, {{1366, 1414, kSentinel}}, {{4256, 11520, kSentinel}}, {{4293, 11557, kSentinel}}, // NOLINT
- {{7549, 11363, kSentinel}}, {{7680, 7681, kSentinel}}, {{7682, 7683, kSentinel}}, {{7684, 7685, kSentinel}}, // NOLINT
- {{7686, 7687, kSentinel}}, {{7688, 7689, kSentinel}}, {{7690, 7691, kSentinel}}, {{7692, 7693, kSentinel}}, // NOLINT
- {{7694, 7695, kSentinel}}, {{7696, 7697, kSentinel}}, {{7698, 7699, kSentinel}}, {{7700, 7701, kSentinel}}, // NOLINT
- {{7702, 7703, kSentinel}}, {{7704, 7705, kSentinel}}, {{7706, 7707, kSentinel}}, {{7708, 7709, kSentinel}}, // NOLINT
- {{7710, 7711, kSentinel}}, {{7712, 7713, kSentinel}}, {{7714, 7715, kSentinel}}, {{7716, 7717, kSentinel}}, // NOLINT
- {{7718, 7719, kSentinel}}, {{7720, 7721, kSentinel}}, {{7722, 7723, kSentinel}}, {{7724, 7725, kSentinel}}, // NOLINT
- {{7726, 7727, kSentinel}}, {{7728, 7729, kSentinel}}, {{7730, 7731, kSentinel}}, {{7732, 7733, kSentinel}}, // NOLINT
- {{7734, 7735, kSentinel}}, {{7736, 7737, kSentinel}}, {{7738, 7739, kSentinel}}, {{7740, 7741, kSentinel}}, // NOLINT
- {{7742, 7743, kSentinel}}, {{7744, 7745, kSentinel}}, {{7746, 7747, kSentinel}}, {{7748, 7749, kSentinel}}, // NOLINT
- {{7750, 7751, kSentinel}}, {{7752, 7753, kSentinel}}, {{7754, 7755, kSentinel}}, {{7756, 7757, kSentinel}}, // NOLINT
- {{7758, 7759, kSentinel}}, {{7760, 7761, kSentinel}}, {{7762, 7763, kSentinel}}, {{7764, 7765, kSentinel}}, // NOLINT
- {{7766, 7767, kSentinel}}, {{7768, 7769, kSentinel}}, {{7770, 7771, kSentinel}}, {{7772, 7773, kSentinel}}, // NOLINT
- {{7774, 7775, kSentinel}}, {{7776, 7777, 7835, kSentinel}}, {{7778, 7779, kSentinel}}, {{7780, 7781, kSentinel}}, // NOLINT
- {{7782, 7783, kSentinel}}, {{7784, 7785, kSentinel}}, {{7786, 7787, kSentinel}}, {{7788, 7789, kSentinel}}, // NOLINT
- {{7790, 7791, kSentinel}}, {{7792, 7793, kSentinel}}, {{7794, 7795, kSentinel}}, {{7796, 7797, kSentinel}}, // NOLINT
- {{7798, 7799, kSentinel}}, {{7800, 7801, kSentinel}}, {{7802, 7803, kSentinel}}, {{7804, 7805, kSentinel}}, // NOLINT
- {{7806, 7807, kSentinel}}, {{7808, 7809, kSentinel}}, {{7810, 7811, kSentinel}}, {{7812, 7813, kSentinel}}, // NOLINT
- {{7814, 7815, kSentinel}}, {{7816, 7817, kSentinel}}, {{7818, 7819, kSentinel}}, {{7820, 7821, kSentinel}}, // NOLINT
- {{7822, 7823, kSentinel}}, {{7824, 7825, kSentinel}}, {{7826, 7827, kSentinel}}, {{7828, 7829, kSentinel}}, // NOLINT
- {{7840, 7841, kSentinel}}, {{7842, 7843, kSentinel}}, {{7844, 7845, kSentinel}}, {{7846, 7847, kSentinel}}, // NOLINT
- {{7848, 7849, kSentinel}}, {{7850, 7851, kSentinel}}, {{7852, 7853, kSentinel}}, {{7854, 7855, kSentinel}}, // NOLINT
- {{7856, 7857, kSentinel}}, {{7858, 7859, kSentinel}}, {{7860, 7861, kSentinel}}, {{7862, 7863, kSentinel}}, // NOLINT
- {{7864, 7865, kSentinel}}, {{7866, 7867, kSentinel}}, {{7868, 7869, kSentinel}}, {{7870, 7871, kSentinel}}, // NOLINT
- {{7872, 7873, kSentinel}}, {{7874, 7875, kSentinel}}, {{7876, 7877, kSentinel}}, {{7878, 7879, kSentinel}}, // NOLINT
- {{7880, 7881, kSentinel}}, {{7882, 7883, kSentinel}}, {{7884, 7885, kSentinel}}, {{7886, 7887, kSentinel}}, // NOLINT
- {{7888, 7889, kSentinel}}, {{7890, 7891, kSentinel}}, {{7892, 7893, kSentinel}}, {{7894, 7895, kSentinel}}, // NOLINT
- {{7896, 7897, kSentinel}}, {{7898, 7899, kSentinel}}, {{7900, 7901, kSentinel}}, {{7902, 7903, kSentinel}}, // NOLINT
- {{7904, 7905, kSentinel}}, {{7906, 7907, kSentinel}}, {{7908, 7909, kSentinel}}, {{7910, 7911, kSentinel}}, // NOLINT
- {{7912, 7913, kSentinel}}, {{7914, 7915, kSentinel}}, {{7916, 7917, kSentinel}}, {{7918, 7919, kSentinel}}, // NOLINT
- {{7920, 7921, kSentinel}}, {{7922, 7923, kSentinel}}, {{7924, 7925, kSentinel}}, {{7926, 7927, kSentinel}}, // NOLINT
- {{7928, 7929, kSentinel}}, {{7936, 7944, kSentinel}}, {{7943, 7951, kSentinel}}, {{7952, 7960, kSentinel}}, // NOLINT
+ {{1300, 1301, kSentinel}}, {{1302, 1303, kSentinel}}, {{1304, 1305, kSentinel}}, {{1306, 1307, kSentinel}}, // NOLINT
+ {{1308, 1309, kSentinel}}, {{1310, 1311, kSentinel}}, {{1312, 1313, kSentinel}}, {{1314, 1315, kSentinel}}, // NOLINT
+ {{1316, 1317, kSentinel}}, {{1318, 1319, kSentinel}}, {{1329, 1377, kSentinel}}, {{1366, 1414, kSentinel}}, // NOLINT
+ {{4256, 11520, kSentinel}}, {{4293, 11557, kSentinel}}, {{4295, 11559, kSentinel}}, {{4301, 11565, kSentinel}}, // NOLINT
+ {{7545, 42877, kSentinel}}, {{7549, 11363, kSentinel}}, {{7680, 7681, kSentinel}}, {{7682, 7683, kSentinel}}, // NOLINT
+ {{7684, 7685, kSentinel}}, {{7686, 7687, kSentinel}}, {{7688, 7689, kSentinel}}, {{7690, 7691, kSentinel}}, // NOLINT
+ {{7692, 7693, kSentinel}}, {{7694, 7695, kSentinel}}, {{7696, 7697, kSentinel}}, {{7698, 7699, kSentinel}}, // NOLINT
+ {{7700, 7701, kSentinel}}, {{7702, 7703, kSentinel}}, {{7704, 7705, kSentinel}}, {{7706, 7707, kSentinel}}, // NOLINT
+ {{7708, 7709, kSentinel}}, {{7710, 7711, kSentinel}}, {{7712, 7713, kSentinel}}, {{7714, 7715, kSentinel}}, // NOLINT
+ {{7716, 7717, kSentinel}}, {{7718, 7719, kSentinel}}, {{7720, 7721, kSentinel}}, {{7722, 7723, kSentinel}}, // NOLINT
+ {{7724, 7725, kSentinel}}, {{7726, 7727, kSentinel}}, {{7728, 7729, kSentinel}}, {{7730, 7731, kSentinel}}, // NOLINT
+ {{7732, 7733, kSentinel}}, {{7734, 7735, kSentinel}}, {{7736, 7737, kSentinel}}, {{7738, 7739, kSentinel}}, // NOLINT
+ {{7740, 7741, kSentinel}}, {{7742, 7743, kSentinel}}, {{7744, 7745, kSentinel}}, {{7746, 7747, kSentinel}}, // NOLINT
+ {{7748, 7749, kSentinel}}, {{7750, 7751, kSentinel}}, {{7752, 7753, kSentinel}}, {{7754, 7755, kSentinel}}, // NOLINT
+ {{7756, 7757, kSentinel}}, {{7758, 7759, kSentinel}}, {{7760, 7761, kSentinel}}, {{7762, 7763, kSentinel}}, // NOLINT
+ {{7764, 7765, kSentinel}}, {{7766, 7767, kSentinel}}, {{7768, 7769, kSentinel}}, {{7770, 7771, kSentinel}}, // NOLINT
+ {{7772, 7773, kSentinel}}, {{7774, 7775, kSentinel}}, {{7776, 7777, 7835, kSentinel}}, {{7778, 7779, kSentinel}}, // NOLINT
+ {{7780, 7781, kSentinel}}, {{7782, 7783, kSentinel}}, {{7784, 7785, kSentinel}}, {{7786, 7787, kSentinel}}, // NOLINT
+ {{7788, 7789, kSentinel}}, {{7790, 7791, kSentinel}}, {{7792, 7793, kSentinel}}, {{7794, 7795, kSentinel}}, // NOLINT
+ {{7796, 7797, kSentinel}}, {{7798, 7799, kSentinel}}, {{7800, 7801, kSentinel}}, {{7802, 7803, kSentinel}}, // NOLINT
+ {{7804, 7805, kSentinel}}, {{7806, 7807, kSentinel}}, {{7808, 7809, kSentinel}}, {{7810, 7811, kSentinel}}, // NOLINT
+ {{7812, 7813, kSentinel}}, {{7814, 7815, kSentinel}}, {{7816, 7817, kSentinel}}, {{7818, 7819, kSentinel}}, // NOLINT
+ {{7820, 7821, kSentinel}}, {{7822, 7823, kSentinel}}, {{7824, 7825, kSentinel}}, {{7826, 7827, kSentinel}}, // NOLINT
+ {{7828, 7829, kSentinel}}, {{7840, 7841, kSentinel}}, {{7842, 7843, kSentinel}}, {{7844, 7845, kSentinel}}, // NOLINT
+ {{7846, 7847, kSentinel}}, {{7848, 7849, kSentinel}}, {{7850, 7851, kSentinel}}, {{7852, 7853, kSentinel}}, // NOLINT
+ {{7854, 7855, kSentinel}}, {{7856, 7857, kSentinel}}, {{7858, 7859, kSentinel}}, {{7860, 7861, kSentinel}}, // NOLINT
+ {{7862, 7863, kSentinel}}, {{7864, 7865, kSentinel}}, {{7866, 7867, kSentinel}}, {{7868, 7869, kSentinel}}, // NOLINT
+ {{7870, 7871, kSentinel}}, {{7872, 7873, kSentinel}}, {{7874, 7875, kSentinel}}, {{7876, 7877, kSentinel}}, // NOLINT
+ {{7878, 7879, kSentinel}}, {{7880, 7881, kSentinel}}, {{7882, 7883, kSentinel}}, {{7884, 7885, kSentinel}}, // NOLINT
+ {{7886, 7887, kSentinel}}, {{7888, 7889, kSentinel}}, {{7890, 7891, kSentinel}}, {{7892, 7893, kSentinel}}, // NOLINT
+ {{7894, 7895, kSentinel}}, {{7896, 7897, kSentinel}}, {{7898, 7899, kSentinel}}, {{7900, 7901, kSentinel}}, // NOLINT
+ {{7902, 7903, kSentinel}}, {{7904, 7905, kSentinel}}, {{7906, 7907, kSentinel}}, {{7908, 7909, kSentinel}}, // NOLINT
+ {{7910, 7911, kSentinel}}, {{7912, 7913, kSentinel}}, {{7914, 7915, kSentinel}}, {{7916, 7917, kSentinel}}, // NOLINT
+ {{7918, 7919, kSentinel}}, {{7920, 7921, kSentinel}}, {{7922, 7923, kSentinel}}, {{7924, 7925, kSentinel}}, // NOLINT
+ {{7926, 7927, kSentinel}}, {{7928, 7929, kSentinel}}, {{7930, 7931, kSentinel}}, {{7932, 7933, kSentinel}}, // NOLINT
+ {{7934, 7935, kSentinel}}, {{7936, 7944, kSentinel}}, {{7943, 7951, kSentinel}}, {{7952, 7960, kSentinel}}, // NOLINT
{{7957, 7965, kSentinel}}, {{7968, 7976, kSentinel}}, {{7975, 7983, kSentinel}}, {{7984, 7992, kSentinel}}, // NOLINT
{{7991, 7999, kSentinel}}, {{8000, 8008, kSentinel}}, {{8005, 8013, kSentinel}}, {{8017, 8025, kSentinel}}, // NOLINT
{{8019, 8027, kSentinel}}, {{8021, 8029, kSentinel}}, {{8023, 8031, kSentinel}}, {{8032, 8040, kSentinel}}, // NOLINT
@@ -1315,8 +1477,8 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{8061, 8187, kSentinel}}, {{8112, 8120, kSentinel}}, {{8113, 8121, kSentinel}}, {{8144, 8152, kSentinel}}, // NOLINT
{{8145, 8153, kSentinel}}, {{8160, 8168, kSentinel}}, {{8161, 8169, kSentinel}}, {{8165, 8172, kSentinel}}, // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeTable0Size = 945; // NOLINT
-static const int32_t kEcma262UnCanonicalizeTable0[1890] = {
+static const uint16_t kEcma262UnCanonicalizeTable0Size = 990; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable0[1980] = {
1073741889, 1, 90, 5, 1073741921, 1, 122, 5, 181, 9, 1073742016, 13, 214, 17, 1073742040, 21, // NOLINT
222, 25, 1073742048, 13, 246, 17, 1073742072, 21, 254, 25, 255, 29, 256, 33, 257, 33, // NOLINT
258, 37, 259, 37, 260, 41, 261, 41, 262, 45, 263, 45, 264, 49, 265, 49, // NOLINT
@@ -1355,127 +1517,187 @@ static const int32_t kEcma262UnCanonicalizeTable0[1890] = {
539, 597, 540, 601, 541, 601, 542, 605, 543, 605, 544, 365, 546, 609, 547, 609, // NOLINT
548, 613, 549, 613, 550, 617, 551, 617, 552, 621, 553, 621, 554, 625, 555, 625, // NOLINT
556, 629, 557, 629, 558, 633, 559, 633, 560, 637, 561, 637, 562, 641, 563, 641, // NOLINT
- 570, 645, 571, 649, 572, 649, 573, 353, 574, 653, 577, 657, 578, 657, 579, 277, // NOLINT
- 580, 661, 581, 665, 582, 669, 583, 669, 584, 673, 585, 673, 586, 677, 587, 677, // NOLINT
- 588, 681, 589, 681, 590, 685, 591, 685, 595, 281, 596, 293, 1073742422, 301, 599, 305, // NOLINT
- 601, 317, 603, 321, 608, 329, 611, 333, 616, 345, 617, 341, 619, 689, 623, 357, // NOLINT
- 626, 361, 629, 369, 637, 693, 640, 385, 643, 393, 648, 401, 649, 661, 1073742474, 409, // NOLINT
- 651, 413, 652, 665, 658, 425, 837, 697, 1073742715, 701, 893, 705, 902, 709, 1073742728, 713, // NOLINT
- 906, 717, 908, 721, 1073742734, 725, 911, 729, 913, 733, 914, 737, 1073742739, 741, 916, 745, // NOLINT
- 917, 749, 1073742742, 753, 919, 757, 920, 761, 921, 697, 922, 765, 923, 769, 924, 9, // NOLINT
- 1073742749, 773, 927, 777, 928, 781, 929, 785, 931, 789, 1073742756, 793, 933, 797, 934, 801, // NOLINT
- 1073742759, 805, 939, 809, 940, 709, 1073742765, 713, 943, 717, 945, 733, 946, 737, 1073742771, 741, // NOLINT
- 948, 745, 949, 749, 1073742774, 753, 951, 757, 952, 761, 953, 697, 954, 765, 955, 769, // NOLINT
- 956, 9, 1073742781, 773, 959, 777, 960, 781, 961, 785, 962, 789, 963, 789, 1073742788, 793, // NOLINT
- 965, 797, 966, 801, 1073742791, 805, 971, 809, 972, 721, 1073742797, 725, 974, 729, 976, 737, // NOLINT
- 977, 761, 981, 801, 982, 781, 984, 813, 985, 813, 986, 817, 987, 817, 988, 821, // NOLINT
- 989, 821, 990, 825, 991, 825, 992, 829, 993, 829, 994, 833, 995, 833, 996, 837, // NOLINT
- 997, 837, 998, 841, 999, 841, 1000, 845, 1001, 845, 1002, 849, 1003, 849, 1004, 853, // NOLINT
- 1005, 853, 1006, 857, 1007, 857, 1008, 765, 1009, 785, 1010, 861, 1013, 749, 1015, 865, // NOLINT
- 1016, 865, 1017, 861, 1018, 869, 1019, 869, 1073742845, 701, 1023, 705, 1073742848, 873, 1039, 877, // NOLINT
- 1073742864, 881, 1071, 885, 1073742896, 881, 1103, 885, 1073742928, 873, 1119, 877, 1120, 889, 1121, 889, // NOLINT
- 1122, 893, 1123, 893, 1124, 897, 1125, 897, 1126, 901, 1127, 901, 1128, 905, 1129, 905, // NOLINT
- 1130, 909, 1131, 909, 1132, 913, 1133, 913, 1134, 917, 1135, 917, 1136, 921, 1137, 921, // NOLINT
- 1138, 925, 1139, 925, 1140, 929, 1141, 929, 1142, 933, 1143, 933, 1144, 937, 1145, 937, // NOLINT
- 1146, 941, 1147, 941, 1148, 945, 1149, 945, 1150, 949, 1151, 949, 1152, 953, 1153, 953, // NOLINT
- 1162, 957, 1163, 957, 1164, 961, 1165, 961, 1166, 965, 1167, 965, 1168, 969, 1169, 969, // NOLINT
- 1170, 973, 1171, 973, 1172, 977, 1173, 977, 1174, 981, 1175, 981, 1176, 985, 1177, 985, // NOLINT
- 1178, 989, 1179, 989, 1180, 993, 1181, 993, 1182, 997, 1183, 997, 1184, 1001, 1185, 1001, // NOLINT
- 1186, 1005, 1187, 1005, 1188, 1009, 1189, 1009, 1190, 1013, 1191, 1013, 1192, 1017, 1193, 1017, // NOLINT
- 1194, 1021, 1195, 1021, 1196, 1025, 1197, 1025, 1198, 1029, 1199, 1029, 1200, 1033, 1201, 1033, // NOLINT
- 1202, 1037, 1203, 1037, 1204, 1041, 1205, 1041, 1206, 1045, 1207, 1045, 1208, 1049, 1209, 1049, // NOLINT
- 1210, 1053, 1211, 1053, 1212, 1057, 1213, 1057, 1214, 1061, 1215, 1061, 1216, 1065, 1217, 1069, // NOLINT
- 1218, 1069, 1219, 1073, 1220, 1073, 1221, 1077, 1222, 1077, 1223, 1081, 1224, 1081, 1225, 1085, // NOLINT
- 1226, 1085, 1227, 1089, 1228, 1089, 1229, 1093, 1230, 1093, 1231, 1065, 1232, 1097, 1233, 1097, // NOLINT
- 1234, 1101, 1235, 1101, 1236, 1105, 1237, 1105, 1238, 1109, 1239, 1109, 1240, 1113, 1241, 1113, // NOLINT
- 1242, 1117, 1243, 1117, 1244, 1121, 1245, 1121, 1246, 1125, 1247, 1125, 1248, 1129, 1249, 1129, // NOLINT
- 1250, 1133, 1251, 1133, 1252, 1137, 1253, 1137, 1254, 1141, 1255, 1141, 1256, 1145, 1257, 1145, // NOLINT
- 1258, 1149, 1259, 1149, 1260, 1153, 1261, 1153, 1262, 1157, 1263, 1157, 1264, 1161, 1265, 1161, // NOLINT
- 1266, 1165, 1267, 1165, 1268, 1169, 1269, 1169, 1270, 1173, 1271, 1173, 1272, 1177, 1273, 1177, // NOLINT
- 1274, 1181, 1275, 1181, 1276, 1185, 1277, 1185, 1278, 1189, 1279, 1189, 1280, 1193, 1281, 1193, // NOLINT
- 1282, 1197, 1283, 1197, 1284, 1201, 1285, 1201, 1286, 1205, 1287, 1205, 1288, 1209, 1289, 1209, // NOLINT
- 1290, 1213, 1291, 1213, 1292, 1217, 1293, 1217, 1294, 1221, 1295, 1221, 1296, 1225, 1297, 1225, // NOLINT
- 1298, 1229, 1299, 1229, 1073743153, 1233, 1366, 1237, 1073743201, 1233, 1414, 1237, 1073746080, 1241, 4293, 1245, // NOLINT
- 7549, 1249, 7680, 1253, 7681, 1253, 7682, 1257, 7683, 1257, 7684, 1261, 7685, 1261, 7686, 1265, // NOLINT
- 7687, 1265, 7688, 1269, 7689, 1269, 7690, 1273, 7691, 1273, 7692, 1277, 7693, 1277, 7694, 1281, // NOLINT
- 7695, 1281, 7696, 1285, 7697, 1285, 7698, 1289, 7699, 1289, 7700, 1293, 7701, 1293, 7702, 1297, // NOLINT
- 7703, 1297, 7704, 1301, 7705, 1301, 7706, 1305, 7707, 1305, 7708, 1309, 7709, 1309, 7710, 1313, // NOLINT
- 7711, 1313, 7712, 1317, 7713, 1317, 7714, 1321, 7715, 1321, 7716, 1325, 7717, 1325, 7718, 1329, // NOLINT
- 7719, 1329, 7720, 1333, 7721, 1333, 7722, 1337, 7723, 1337, 7724, 1341, 7725, 1341, 7726, 1345, // NOLINT
- 7727, 1345, 7728, 1349, 7729, 1349, 7730, 1353, 7731, 1353, 7732, 1357, 7733, 1357, 7734, 1361, // NOLINT
- 7735, 1361, 7736, 1365, 7737, 1365, 7738, 1369, 7739, 1369, 7740, 1373, 7741, 1373, 7742, 1377, // NOLINT
- 7743, 1377, 7744, 1381, 7745, 1381, 7746, 1385, 7747, 1385, 7748, 1389, 7749, 1389, 7750, 1393, // NOLINT
- 7751, 1393, 7752, 1397, 7753, 1397, 7754, 1401, 7755, 1401, 7756, 1405, 7757, 1405, 7758, 1409, // NOLINT
- 7759, 1409, 7760, 1413, 7761, 1413, 7762, 1417, 7763, 1417, 7764, 1421, 7765, 1421, 7766, 1425, // NOLINT
- 7767, 1425, 7768, 1429, 7769, 1429, 7770, 1433, 7771, 1433, 7772, 1437, 7773, 1437, 7774, 1441, // NOLINT
- 7775, 1441, 7776, 1445, 7777, 1445, 7778, 1449, 7779, 1449, 7780, 1453, 7781, 1453, 7782, 1457, // NOLINT
- 7783, 1457, 7784, 1461, 7785, 1461, 7786, 1465, 7787, 1465, 7788, 1469, 7789, 1469, 7790, 1473, // NOLINT
- 7791, 1473, 7792, 1477, 7793, 1477, 7794, 1481, 7795, 1481, 7796, 1485, 7797, 1485, 7798, 1489, // NOLINT
- 7799, 1489, 7800, 1493, 7801, 1493, 7802, 1497, 7803, 1497, 7804, 1501, 7805, 1501, 7806, 1505, // NOLINT
- 7807, 1505, 7808, 1509, 7809, 1509, 7810, 1513, 7811, 1513, 7812, 1517, 7813, 1517, 7814, 1521, // NOLINT
- 7815, 1521, 7816, 1525, 7817, 1525, 7818, 1529, 7819, 1529, 7820, 1533, 7821, 1533, 7822, 1537, // NOLINT
- 7823, 1537, 7824, 1541, 7825, 1541, 7826, 1545, 7827, 1545, 7828, 1549, 7829, 1549, 7835, 1445, // NOLINT
- 7840, 1553, 7841, 1553, 7842, 1557, 7843, 1557, 7844, 1561, 7845, 1561, 7846, 1565, 7847, 1565, // NOLINT
- 7848, 1569, 7849, 1569, 7850, 1573, 7851, 1573, 7852, 1577, 7853, 1577, 7854, 1581, 7855, 1581, // NOLINT
- 7856, 1585, 7857, 1585, 7858, 1589, 7859, 1589, 7860, 1593, 7861, 1593, 7862, 1597, 7863, 1597, // NOLINT
- 7864, 1601, 7865, 1601, 7866, 1605, 7867, 1605, 7868, 1609, 7869, 1609, 7870, 1613, 7871, 1613, // NOLINT
- 7872, 1617, 7873, 1617, 7874, 1621, 7875, 1621, 7876, 1625, 7877, 1625, 7878, 1629, 7879, 1629, // NOLINT
- 7880, 1633, 7881, 1633, 7882, 1637, 7883, 1637, 7884, 1641, 7885, 1641, 7886, 1645, 7887, 1645, // NOLINT
- 7888, 1649, 7889, 1649, 7890, 1653, 7891, 1653, 7892, 1657, 7893, 1657, 7894, 1661, 7895, 1661, // NOLINT
- 7896, 1665, 7897, 1665, 7898, 1669, 7899, 1669, 7900, 1673, 7901, 1673, 7902, 1677, 7903, 1677, // NOLINT
- 7904, 1681, 7905, 1681, 7906, 1685, 7907, 1685, 7908, 1689, 7909, 1689, 7910, 1693, 7911, 1693, // NOLINT
- 7912, 1697, 7913, 1697, 7914, 1701, 7915, 1701, 7916, 1705, 7917, 1705, 7918, 1709, 7919, 1709, // NOLINT
- 7920, 1713, 7921, 1713, 7922, 1717, 7923, 1717, 7924, 1721, 7925, 1721, 7926, 1725, 7927, 1725, // NOLINT
- 7928, 1729, 7929, 1729, 1073749760, 1733, 7943, 1737, 1073749768, 1733, 7951, 1737, 1073749776, 1741, 7957, 1745, // NOLINT
- 1073749784, 1741, 7965, 1745, 1073749792, 1749, 7975, 1753, 1073749800, 1749, 7983, 1753, 1073749808, 1757, 7991, 1761, // NOLINT
- 1073749816, 1757, 7999, 1761, 1073749824, 1765, 8005, 1769, 1073749832, 1765, 8013, 1769, 8017, 1773, 8019, 1777, // NOLINT
- 8021, 1781, 8023, 1785, 8025, 1773, 8027, 1777, 8029, 1781, 8031, 1785, 1073749856, 1789, 8039, 1793, // NOLINT
- 1073749864, 1789, 8047, 1793, 1073749872, 1797, 8049, 1801, 1073749874, 1805, 8053, 1809, 1073749878, 1813, 8055, 1817, // NOLINT
- 1073749880, 1821, 8057, 1825, 1073749882, 1829, 8059, 1833, 1073749884, 1837, 8061, 1841, 1073749936, 1845, 8113, 1849, // NOLINT
- 1073749944, 1845, 8121, 1849, 1073749946, 1797, 8123, 1801, 8126, 697, 1073749960, 1805, 8139, 1809, 1073749968, 1853, // NOLINT
- 8145, 1857, 1073749976, 1853, 8153, 1857, 1073749978, 1813, 8155, 1817, 1073749984, 1861, 8161, 1865, 8165, 1869, // NOLINT
- 1073749992, 1861, 8169, 1865, 1073749994, 1829, 8171, 1833, 8172, 1869, 1073750008, 1821, 8185, 1825, 1073750010, 1837, // NOLINT
- 8187, 1841 }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeMultiStrings0Size = 469; // NOLINT
-static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings1[71] = { // NOLINT
+ 570, 645, 571, 649, 572, 649, 573, 353, 574, 653, 1073742399, 657, 576, 661, 577, 665, // NOLINT
+ 578, 665, 579, 277, 580, 669, 581, 673, 582, 677, 583, 677, 584, 681, 585, 681, // NOLINT
+ 586, 685, 587, 685, 588, 689, 589, 689, 590, 693, 591, 693, 592, 697, 593, 701, // NOLINT
+ 594, 705, 595, 281, 596, 293, 1073742422, 301, 599, 305, 601, 317, 603, 321, 608, 329, // NOLINT
+ 611, 333, 613, 709, 614, 713, 616, 345, 617, 341, 619, 717, 623, 357, 625, 721, // NOLINT
+ 626, 361, 629, 369, 637, 725, 640, 385, 643, 393, 648, 401, 649, 669, 1073742474, 409, // NOLINT
+ 651, 413, 652, 673, 658, 425, 837, 729, 880, 733, 881, 733, 882, 737, 883, 737, // NOLINT
+ 886, 741, 887, 741, 1073742715, 745, 893, 749, 902, 753, 1073742728, 757, 906, 761, 908, 765, // NOLINT
+ 1073742734, 769, 911, 773, 913, 777, 914, 781, 1073742739, 785, 916, 789, 917, 793, 1073742742, 797, // NOLINT
+ 919, 801, 920, 805, 921, 729, 922, 809, 923, 813, 924, 9, 1073742749, 817, 927, 821, // NOLINT
+ 928, 825, 929, 829, 931, 833, 1073742756, 837, 933, 841, 934, 845, 1073742759, 849, 939, 853, // NOLINT
+ 940, 753, 1073742765, 757, 943, 761, 945, 777, 946, 781, 1073742771, 785, 948, 789, 949, 793, // NOLINT
+ 1073742774, 797, 951, 801, 952, 805, 953, 729, 954, 809, 955, 813, 956, 9, 1073742781, 817, // NOLINT
+ 959, 821, 960, 825, 961, 829, 962, 833, 963, 833, 1073742788, 837, 965, 841, 966, 845, // NOLINT
+ 1073742791, 849, 971, 853, 972, 765, 1073742797, 769, 974, 773, 975, 857, 976, 781, 977, 805, // NOLINT
+ 981, 845, 982, 825, 983, 857, 984, 861, 985, 861, 986, 865, 987, 865, 988, 869, // NOLINT
+ 989, 869, 990, 873, 991, 873, 992, 877, 993, 877, 994, 881, 995, 881, 996, 885, // NOLINT
+ 997, 885, 998, 889, 999, 889, 1000, 893, 1001, 893, 1002, 897, 1003, 897, 1004, 901, // NOLINT
+ 1005, 901, 1006, 905, 1007, 905, 1008, 809, 1009, 829, 1010, 909, 1013, 793, 1015, 913, // NOLINT
+ 1016, 913, 1017, 909, 1018, 917, 1019, 917, 1073742845, 745, 1023, 749, 1073742848, 921, 1039, 925, // NOLINT
+ 1073742864, 929, 1071, 933, 1073742896, 929, 1103, 933, 1073742928, 921, 1119, 925, 1120, 937, 1121, 937, // NOLINT
+ 1122, 941, 1123, 941, 1124, 945, 1125, 945, 1126, 949, 1127, 949, 1128, 953, 1129, 953, // NOLINT
+ 1130, 957, 1131, 957, 1132, 961, 1133, 961, 1134, 965, 1135, 965, 1136, 969, 1137, 969, // NOLINT
+ 1138, 973, 1139, 973, 1140, 977, 1141, 977, 1142, 981, 1143, 981, 1144, 985, 1145, 985, // NOLINT
+ 1146, 989, 1147, 989, 1148, 993, 1149, 993, 1150, 997, 1151, 997, 1152, 1001, 1153, 1001, // NOLINT
+ 1162, 1005, 1163, 1005, 1164, 1009, 1165, 1009, 1166, 1013, 1167, 1013, 1168, 1017, 1169, 1017, // NOLINT
+ 1170, 1021, 1171, 1021, 1172, 1025, 1173, 1025, 1174, 1029, 1175, 1029, 1176, 1033, 1177, 1033, // NOLINT
+ 1178, 1037, 1179, 1037, 1180, 1041, 1181, 1041, 1182, 1045, 1183, 1045, 1184, 1049, 1185, 1049, // NOLINT
+ 1186, 1053, 1187, 1053, 1188, 1057, 1189, 1057, 1190, 1061, 1191, 1061, 1192, 1065, 1193, 1065, // NOLINT
+ 1194, 1069, 1195, 1069, 1196, 1073, 1197, 1073, 1198, 1077, 1199, 1077, 1200, 1081, 1201, 1081, // NOLINT
+ 1202, 1085, 1203, 1085, 1204, 1089, 1205, 1089, 1206, 1093, 1207, 1093, 1208, 1097, 1209, 1097, // NOLINT
+ 1210, 1101, 1211, 1101, 1212, 1105, 1213, 1105, 1214, 1109, 1215, 1109, 1216, 1113, 1217, 1117, // NOLINT
+ 1218, 1117, 1219, 1121, 1220, 1121, 1221, 1125, 1222, 1125, 1223, 1129, 1224, 1129, 1225, 1133, // NOLINT
+ 1226, 1133, 1227, 1137, 1228, 1137, 1229, 1141, 1230, 1141, 1231, 1113, 1232, 1145, 1233, 1145, // NOLINT
+ 1234, 1149, 1235, 1149, 1236, 1153, 1237, 1153, 1238, 1157, 1239, 1157, 1240, 1161, 1241, 1161, // NOLINT
+ 1242, 1165, 1243, 1165, 1244, 1169, 1245, 1169, 1246, 1173, 1247, 1173, 1248, 1177, 1249, 1177, // NOLINT
+ 1250, 1181, 1251, 1181, 1252, 1185, 1253, 1185, 1254, 1189, 1255, 1189, 1256, 1193, 1257, 1193, // NOLINT
+ 1258, 1197, 1259, 1197, 1260, 1201, 1261, 1201, 1262, 1205, 1263, 1205, 1264, 1209, 1265, 1209, // NOLINT
+ 1266, 1213, 1267, 1213, 1268, 1217, 1269, 1217, 1270, 1221, 1271, 1221, 1272, 1225, 1273, 1225, // NOLINT
+ 1274, 1229, 1275, 1229, 1276, 1233, 1277, 1233, 1278, 1237, 1279, 1237, 1280, 1241, 1281, 1241, // NOLINT
+ 1282, 1245, 1283, 1245, 1284, 1249, 1285, 1249, 1286, 1253, 1287, 1253, 1288, 1257, 1289, 1257, // NOLINT
+ 1290, 1261, 1291, 1261, 1292, 1265, 1293, 1265, 1294, 1269, 1295, 1269, 1296, 1273, 1297, 1273, // NOLINT
+ 1298, 1277, 1299, 1277, 1300, 1281, 1301, 1281, 1302, 1285, 1303, 1285, 1304, 1289, 1305, 1289, // NOLINT
+ 1306, 1293, 1307, 1293, 1308, 1297, 1309, 1297, 1310, 1301, 1311, 1301, 1312, 1305, 1313, 1305, // NOLINT
+ 1314, 1309, 1315, 1309, 1316, 1313, 1317, 1313, 1318, 1317, 1319, 1317, 1073743153, 1321, 1366, 1325, // NOLINT
+ 1073743201, 1321, 1414, 1325, 1073746080, 1329, 4293, 1333, 4295, 1337, 4301, 1341, 7545, 1345, 7549, 1349, // NOLINT
+ 7680, 1353, 7681, 1353, 7682, 1357, 7683, 1357, 7684, 1361, 7685, 1361, 7686, 1365, 7687, 1365, // NOLINT
+ 7688, 1369, 7689, 1369, 7690, 1373, 7691, 1373, 7692, 1377, 7693, 1377, 7694, 1381, 7695, 1381, // NOLINT
+ 7696, 1385, 7697, 1385, 7698, 1389, 7699, 1389, 7700, 1393, 7701, 1393, 7702, 1397, 7703, 1397, // NOLINT
+ 7704, 1401, 7705, 1401, 7706, 1405, 7707, 1405, 7708, 1409, 7709, 1409, 7710, 1413, 7711, 1413, // NOLINT
+ 7712, 1417, 7713, 1417, 7714, 1421, 7715, 1421, 7716, 1425, 7717, 1425, 7718, 1429, 7719, 1429, // NOLINT
+ 7720, 1433, 7721, 1433, 7722, 1437, 7723, 1437, 7724, 1441, 7725, 1441, 7726, 1445, 7727, 1445, // NOLINT
+ 7728, 1449, 7729, 1449, 7730, 1453, 7731, 1453, 7732, 1457, 7733, 1457, 7734, 1461, 7735, 1461, // NOLINT
+ 7736, 1465, 7737, 1465, 7738, 1469, 7739, 1469, 7740, 1473, 7741, 1473, 7742, 1477, 7743, 1477, // NOLINT
+ 7744, 1481, 7745, 1481, 7746, 1485, 7747, 1485, 7748, 1489, 7749, 1489, 7750, 1493, 7751, 1493, // NOLINT
+ 7752, 1497, 7753, 1497, 7754, 1501, 7755, 1501, 7756, 1505, 7757, 1505, 7758, 1509, 7759, 1509, // NOLINT
+ 7760, 1513, 7761, 1513, 7762, 1517, 7763, 1517, 7764, 1521, 7765, 1521, 7766, 1525, 7767, 1525, // NOLINT
+ 7768, 1529, 7769, 1529, 7770, 1533, 7771, 1533, 7772, 1537, 7773, 1537, 7774, 1541, 7775, 1541, // NOLINT
+ 7776, 1545, 7777, 1545, 7778, 1549, 7779, 1549, 7780, 1553, 7781, 1553, 7782, 1557, 7783, 1557, // NOLINT
+ 7784, 1561, 7785, 1561, 7786, 1565, 7787, 1565, 7788, 1569, 7789, 1569, 7790, 1573, 7791, 1573, // NOLINT
+ 7792, 1577, 7793, 1577, 7794, 1581, 7795, 1581, 7796, 1585, 7797, 1585, 7798, 1589, 7799, 1589, // NOLINT
+ 7800, 1593, 7801, 1593, 7802, 1597, 7803, 1597, 7804, 1601, 7805, 1601, 7806, 1605, 7807, 1605, // NOLINT
+ 7808, 1609, 7809, 1609, 7810, 1613, 7811, 1613, 7812, 1617, 7813, 1617, 7814, 1621, 7815, 1621, // NOLINT
+ 7816, 1625, 7817, 1625, 7818, 1629, 7819, 1629, 7820, 1633, 7821, 1633, 7822, 1637, 7823, 1637, // NOLINT
+ 7824, 1641, 7825, 1641, 7826, 1645, 7827, 1645, 7828, 1649, 7829, 1649, 7835, 1545, 7840, 1653, // NOLINT
+ 7841, 1653, 7842, 1657, 7843, 1657, 7844, 1661, 7845, 1661, 7846, 1665, 7847, 1665, 7848, 1669, // NOLINT
+ 7849, 1669, 7850, 1673, 7851, 1673, 7852, 1677, 7853, 1677, 7854, 1681, 7855, 1681, 7856, 1685, // NOLINT
+ 7857, 1685, 7858, 1689, 7859, 1689, 7860, 1693, 7861, 1693, 7862, 1697, 7863, 1697, 7864, 1701, // NOLINT
+ 7865, 1701, 7866, 1705, 7867, 1705, 7868, 1709, 7869, 1709, 7870, 1713, 7871, 1713, 7872, 1717, // NOLINT
+ 7873, 1717, 7874, 1721, 7875, 1721, 7876, 1725, 7877, 1725, 7878, 1729, 7879, 1729, 7880, 1733, // NOLINT
+ 7881, 1733, 7882, 1737, 7883, 1737, 7884, 1741, 7885, 1741, 7886, 1745, 7887, 1745, 7888, 1749, // NOLINT
+ 7889, 1749, 7890, 1753, 7891, 1753, 7892, 1757, 7893, 1757, 7894, 1761, 7895, 1761, 7896, 1765, // NOLINT
+ 7897, 1765, 7898, 1769, 7899, 1769, 7900, 1773, 7901, 1773, 7902, 1777, 7903, 1777, 7904, 1781, // NOLINT
+ 7905, 1781, 7906, 1785, 7907, 1785, 7908, 1789, 7909, 1789, 7910, 1793, 7911, 1793, 7912, 1797, // NOLINT
+ 7913, 1797, 7914, 1801, 7915, 1801, 7916, 1805, 7917, 1805, 7918, 1809, 7919, 1809, 7920, 1813, // NOLINT
+ 7921, 1813, 7922, 1817, 7923, 1817, 7924, 1821, 7925, 1821, 7926, 1825, 7927, 1825, 7928, 1829, // NOLINT
+ 7929, 1829, 7930, 1833, 7931, 1833, 7932, 1837, 7933, 1837, 7934, 1841, 7935, 1841, 1073749760, 1845, // NOLINT
+ 7943, 1849, 1073749768, 1845, 7951, 1849, 1073749776, 1853, 7957, 1857, 1073749784, 1853, 7965, 1857, 1073749792, 1861, // NOLINT
+ 7975, 1865, 1073749800, 1861, 7983, 1865, 1073749808, 1869, 7991, 1873, 1073749816, 1869, 7999, 1873, 1073749824, 1877, // NOLINT
+ 8005, 1881, 1073749832, 1877, 8013, 1881, 8017, 1885, 8019, 1889, 8021, 1893, 8023, 1897, 8025, 1885, // NOLINT
+ 8027, 1889, 8029, 1893, 8031, 1897, 1073749856, 1901, 8039, 1905, 1073749864, 1901, 8047, 1905, 1073749872, 1909, // NOLINT
+ 8049, 1913, 1073749874, 1917, 8053, 1921, 1073749878, 1925, 8055, 1929, 1073749880, 1933, 8057, 1937, 1073749882, 1941, // NOLINT
+ 8059, 1945, 1073749884, 1949, 8061, 1953, 1073749936, 1957, 8113, 1961, 1073749944, 1957, 8121, 1961, 1073749946, 1909, // NOLINT
+ 8123, 1913, 8126, 729, 1073749960, 1917, 8139, 1921, 1073749968, 1965, 8145, 1969, 1073749976, 1965, 8153, 1969, // NOLINT
+ 1073749978, 1925, 8155, 1929, 1073749984, 1973, 8161, 1977, 8165, 1981, 1073749992, 1973, 8169, 1977, 1073749994, 1941, // NOLINT
+ 8171, 1945, 8172, 1981, 1073750008, 1933, 8185, 1937, 1073750010, 1949, 8187, 1953 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings0Size = 497; // NOLINT
+static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings1[83] = { // NOLINT
{{8498, 8526}}, {{8544, 8560}}, {{8559, 8575}}, {{8579, 8580}}, // NOLINT
{{9398, 9424}}, {{9423, 9449}}, {{11264, 11312}}, {{11310, 11358}}, // NOLINT
{{11360, 11361}}, {{619, 11362}}, {{7549, 11363}}, {{637, 11364}}, // NOLINT
{{570, 11365}}, {{574, 11366}}, {{11367, 11368}}, {{11369, 11370}}, // NOLINT
- {{11371, 11372}}, {{11381, 11382}}, {{11392, 11393}}, {{11394, 11395}}, // NOLINT
- {{11396, 11397}}, {{11398, 11399}}, {{11400, 11401}}, {{11402, 11403}}, // NOLINT
- {{11404, 11405}}, {{11406, 11407}}, {{11408, 11409}}, {{11410, 11411}}, // NOLINT
- {{11412, 11413}}, {{11414, 11415}}, {{11416, 11417}}, {{11418, 11419}}, // NOLINT
- {{11420, 11421}}, {{11422, 11423}}, {{11424, 11425}}, {{11426, 11427}}, // NOLINT
- {{11428, 11429}}, {{11430, 11431}}, {{11432, 11433}}, {{11434, 11435}}, // NOLINT
- {{11436, 11437}}, {{11438, 11439}}, {{11440, 11441}}, {{11442, 11443}}, // NOLINT
- {{11444, 11445}}, {{11446, 11447}}, {{11448, 11449}}, {{11450, 11451}}, // NOLINT
- {{11452, 11453}}, {{11454, 11455}}, {{11456, 11457}}, {{11458, 11459}}, // NOLINT
- {{11460, 11461}}, {{11462, 11463}}, {{11464, 11465}}, {{11466, 11467}}, // NOLINT
- {{11468, 11469}}, {{11470, 11471}}, {{11472, 11473}}, {{11474, 11475}}, // NOLINT
- {{11476, 11477}}, {{11478, 11479}}, {{11480, 11481}}, {{11482, 11483}}, // NOLINT
- {{11484, 11485}}, {{11486, 11487}}, {{11488, 11489}}, {{11490, 11491}}, // NOLINT
- {{4256, 11520}}, {{4293, 11557}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeTable1Size = 133; // NOLINT
-static const int32_t kEcma262UnCanonicalizeTable1[266] = {
+ {{11371, 11372}}, {{593, 11373}}, {{625, 11374}}, {{592, 11375}}, // NOLINT
+ {{594, 11376}}, {{11378, 11379}}, {{11381, 11382}}, {{575, 11390}}, // NOLINT
+ {{576, 11391}}, {{11392, 11393}}, {{11394, 11395}}, {{11396, 11397}}, // NOLINT
+ {{11398, 11399}}, {{11400, 11401}}, {{11402, 11403}}, {{11404, 11405}}, // NOLINT
+ {{11406, 11407}}, {{11408, 11409}}, {{11410, 11411}}, {{11412, 11413}}, // NOLINT
+ {{11414, 11415}}, {{11416, 11417}}, {{11418, 11419}}, {{11420, 11421}}, // NOLINT
+ {{11422, 11423}}, {{11424, 11425}}, {{11426, 11427}}, {{11428, 11429}}, // NOLINT
+ {{11430, 11431}}, {{11432, 11433}}, {{11434, 11435}}, {{11436, 11437}}, // NOLINT
+ {{11438, 11439}}, {{11440, 11441}}, {{11442, 11443}}, {{11444, 11445}}, // NOLINT
+ {{11446, 11447}}, {{11448, 11449}}, {{11450, 11451}}, {{11452, 11453}}, // NOLINT
+ {{11454, 11455}}, {{11456, 11457}}, {{11458, 11459}}, {{11460, 11461}}, // NOLINT
+ {{11462, 11463}}, {{11464, 11465}}, {{11466, 11467}}, {{11468, 11469}}, // NOLINT
+ {{11470, 11471}}, {{11472, 11473}}, {{11474, 11475}}, {{11476, 11477}}, // NOLINT
+ {{11478, 11479}}, {{11480, 11481}}, {{11482, 11483}}, {{11484, 11485}}, // NOLINT
+ {{11486, 11487}}, {{11488, 11489}}, {{11490, 11491}}, {{11499, 11500}}, // NOLINT
+ {{11501, 11502}}, {{11506, 11507}}, {{4256, 11520}}, {{4293, 11557}}, // NOLINT
+ {{4295, 11559}}, {{4301, 11565}}, {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeTable1Size = 149; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable1[298] = {
306, 1, 334, 1, 1073742176, 5, 367, 9, 1073742192, 5, 383, 9, 387, 13, 388, 13, // NOLINT
1073743030, 17, 1231, 21, 1073743056, 17, 1257, 21, 1073744896, 25, 3118, 29, 1073744944, 25, 3166, 29, // NOLINT
3168, 33, 3169, 33, 3170, 37, 3171, 41, 3172, 45, 3173, 49, 3174, 53, 3175, 57, // NOLINT
- 3176, 57, 3177, 61, 3178, 61, 3179, 65, 3180, 65, 3189, 69, 3190, 69, 3200, 73, // NOLINT
- 3201, 73, 3202, 77, 3203, 77, 3204, 81, 3205, 81, 3206, 85, 3207, 85, 3208, 89, // NOLINT
- 3209, 89, 3210, 93, 3211, 93, 3212, 97, 3213, 97, 3214, 101, 3215, 101, 3216, 105, // NOLINT
- 3217, 105, 3218, 109, 3219, 109, 3220, 113, 3221, 113, 3222, 117, 3223, 117, 3224, 121, // NOLINT
- 3225, 121, 3226, 125, 3227, 125, 3228, 129, 3229, 129, 3230, 133, 3231, 133, 3232, 137, // NOLINT
- 3233, 137, 3234, 141, 3235, 141, 3236, 145, 3237, 145, 3238, 149, 3239, 149, 3240, 153, // NOLINT
- 3241, 153, 3242, 157, 3243, 157, 3244, 161, 3245, 161, 3246, 165, 3247, 165, 3248, 169, // NOLINT
- 3249, 169, 3250, 173, 3251, 173, 3252, 177, 3253, 177, 3254, 181, 3255, 181, 3256, 185, // NOLINT
- 3257, 185, 3258, 189, 3259, 189, 3260, 193, 3261, 193, 3262, 197, 3263, 197, 3264, 201, // NOLINT
- 3265, 201, 3266, 205, 3267, 205, 3268, 209, 3269, 209, 3270, 213, 3271, 213, 3272, 217, // NOLINT
- 3273, 217, 3274, 221, 3275, 221, 3276, 225, 3277, 225, 3278, 229, 3279, 229, 3280, 233, // NOLINT
- 3281, 233, 3282, 237, 3283, 237, 3284, 241, 3285, 241, 3286, 245, 3287, 245, 3288, 249, // NOLINT
- 3289, 249, 3290, 253, 3291, 253, 3292, 257, 3293, 257, 3294, 261, 3295, 261, 3296, 265, // NOLINT
- 3297, 265, 3298, 269, 3299, 269, 1073745152, 273, 3365, 277 }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeMultiStrings1Size = 71; // NOLINT
+ 3176, 57, 3177, 61, 3178, 61, 3179, 65, 3180, 65, 3181, 69, 3182, 73, 3183, 77, // NOLINT
+ 3184, 81, 3186, 85, 3187, 85, 3189, 89, 3190, 89, 1073745022, 93, 3199, 97, 3200, 101, // NOLINT
+ 3201, 101, 3202, 105, 3203, 105, 3204, 109, 3205, 109, 3206, 113, 3207, 113, 3208, 117, // NOLINT
+ 3209, 117, 3210, 121, 3211, 121, 3212, 125, 3213, 125, 3214, 129, 3215, 129, 3216, 133, // NOLINT
+ 3217, 133, 3218, 137, 3219, 137, 3220, 141, 3221, 141, 3222, 145, 3223, 145, 3224, 149, // NOLINT
+ 3225, 149, 3226, 153, 3227, 153, 3228, 157, 3229, 157, 3230, 161, 3231, 161, 3232, 165, // NOLINT
+ 3233, 165, 3234, 169, 3235, 169, 3236, 173, 3237, 173, 3238, 177, 3239, 177, 3240, 181, // NOLINT
+ 3241, 181, 3242, 185, 3243, 185, 3244, 189, 3245, 189, 3246, 193, 3247, 193, 3248, 197, // NOLINT
+ 3249, 197, 3250, 201, 3251, 201, 3252, 205, 3253, 205, 3254, 209, 3255, 209, 3256, 213, // NOLINT
+ 3257, 213, 3258, 217, 3259, 217, 3260, 221, 3261, 221, 3262, 225, 3263, 225, 3264, 229, // NOLINT
+ 3265, 229, 3266, 233, 3267, 233, 3268, 237, 3269, 237, 3270, 241, 3271, 241, 3272, 245, // NOLINT
+ 3273, 245, 3274, 249, 3275, 249, 3276, 253, 3277, 253, 3278, 257, 3279, 257, 3280, 261, // NOLINT
+ 3281, 261, 3282, 265, 3283, 265, 3284, 269, 3285, 269, 3286, 273, 3287, 273, 3288, 277, // NOLINT
+ 3289, 277, 3290, 281, 3291, 281, 3292, 285, 3293, 285, 3294, 289, 3295, 289, 3296, 293, // NOLINT
+ 3297, 293, 3298, 297, 3299, 297, 3307, 301, 3308, 301, 3309, 305, 3310, 305, 3314, 309, // NOLINT
+ 3315, 309, 1073745152, 313, 3365, 317, 3367, 321, 3373, 325 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings1Size = 83; // NOLINT
+static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings5[92] = { // NOLINT
+ {{42560, 42561}}, {{42562, 42563}}, {{42564, 42565}}, {{42566, 42567}}, // NOLINT
+ {{42568, 42569}}, {{42570, 42571}}, {{42572, 42573}}, {{42574, 42575}}, // NOLINT
+ {{42576, 42577}}, {{42578, 42579}}, {{42580, 42581}}, {{42582, 42583}}, // NOLINT
+ {{42584, 42585}}, {{42586, 42587}}, {{42588, 42589}}, {{42590, 42591}}, // NOLINT
+ {{42592, 42593}}, {{42594, 42595}}, {{42596, 42597}}, {{42598, 42599}}, // NOLINT
+ {{42600, 42601}}, {{42602, 42603}}, {{42604, 42605}}, {{42624, 42625}}, // NOLINT
+ {{42626, 42627}}, {{42628, 42629}}, {{42630, 42631}}, {{42632, 42633}}, // NOLINT
+ {{42634, 42635}}, {{42636, 42637}}, {{42638, 42639}}, {{42640, 42641}}, // NOLINT
+ {{42642, 42643}}, {{42644, 42645}}, {{42646, 42647}}, {{42786, 42787}}, // NOLINT
+ {{42788, 42789}}, {{42790, 42791}}, {{42792, 42793}}, {{42794, 42795}}, // NOLINT
+ {{42796, 42797}}, {{42798, 42799}}, {{42802, 42803}}, {{42804, 42805}}, // NOLINT
+ {{42806, 42807}}, {{42808, 42809}}, {{42810, 42811}}, {{42812, 42813}}, // NOLINT
+ {{42814, 42815}}, {{42816, 42817}}, {{42818, 42819}}, {{42820, 42821}}, // NOLINT
+ {{42822, 42823}}, {{42824, 42825}}, {{42826, 42827}}, {{42828, 42829}}, // NOLINT
+ {{42830, 42831}}, {{42832, 42833}}, {{42834, 42835}}, {{42836, 42837}}, // NOLINT
+ {{42838, 42839}}, {{42840, 42841}}, {{42842, 42843}}, {{42844, 42845}}, // NOLINT
+ {{42846, 42847}}, {{42848, 42849}}, {{42850, 42851}}, {{42852, 42853}}, // NOLINT
+ {{42854, 42855}}, {{42856, 42857}}, {{42858, 42859}}, {{42860, 42861}}, // NOLINT
+ {{42862, 42863}}, {{42873, 42874}}, {{42875, 42876}}, {{7545, 42877}}, // NOLINT
+ {{42878, 42879}}, {{42880, 42881}}, {{42882, 42883}}, {{42884, 42885}}, // NOLINT
+ {{42886, 42887}}, {{42891, 42892}}, {{613, 42893}}, {{42896, 42897}}, // NOLINT
+ {{42898, 42899}}, {{42912, 42913}}, {{42914, 42915}}, {{42916, 42917}}, // NOLINT
+ {{42918, 42919}}, {{42920, 42921}}, {{614, 42922}}, {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeTable5Size = 179; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable5[358] = {
+ 1600, 1, 1601, 1, 1602, 5, 1603, 5, 1604, 9, 1605, 9, 1606, 13, 1607, 13, // NOLINT
+ 1608, 17, 1609, 17, 1610, 21, 1611, 21, 1612, 25, 1613, 25, 1614, 29, 1615, 29, // NOLINT
+ 1616, 33, 1617, 33, 1618, 37, 1619, 37, 1620, 41, 1621, 41, 1622, 45, 1623, 45, // NOLINT
+ 1624, 49, 1625, 49, 1626, 53, 1627, 53, 1628, 57, 1629, 57, 1630, 61, 1631, 61, // NOLINT
+ 1632, 65, 1633, 65, 1634, 69, 1635, 69, 1636, 73, 1637, 73, 1638, 77, 1639, 77, // NOLINT
+ 1640, 81, 1641, 81, 1642, 85, 1643, 85, 1644, 89, 1645, 89, 1664, 93, 1665, 93, // NOLINT
+ 1666, 97, 1667, 97, 1668, 101, 1669, 101, 1670, 105, 1671, 105, 1672, 109, 1673, 109, // NOLINT
+ 1674, 113, 1675, 113, 1676, 117, 1677, 117, 1678, 121, 1679, 121, 1680, 125, 1681, 125, // NOLINT
+ 1682, 129, 1683, 129, 1684, 133, 1685, 133, 1686, 137, 1687, 137, 1826, 141, 1827, 141, // NOLINT
+ 1828, 145, 1829, 145, 1830, 149, 1831, 149, 1832, 153, 1833, 153, 1834, 157, 1835, 157, // NOLINT
+ 1836, 161, 1837, 161, 1838, 165, 1839, 165, 1842, 169, 1843, 169, 1844, 173, 1845, 173, // NOLINT
+ 1846, 177, 1847, 177, 1848, 181, 1849, 181, 1850, 185, 1851, 185, 1852, 189, 1853, 189, // NOLINT
+ 1854, 193, 1855, 193, 1856, 197, 1857, 197, 1858, 201, 1859, 201, 1860, 205, 1861, 205, // NOLINT
+ 1862, 209, 1863, 209, 1864, 213, 1865, 213, 1866, 217, 1867, 217, 1868, 221, 1869, 221, // NOLINT
+ 1870, 225, 1871, 225, 1872, 229, 1873, 229, 1874, 233, 1875, 233, 1876, 237, 1877, 237, // NOLINT
+ 1878, 241, 1879, 241, 1880, 245, 1881, 245, 1882, 249, 1883, 249, 1884, 253, 1885, 253, // NOLINT
+ 1886, 257, 1887, 257, 1888, 261, 1889, 261, 1890, 265, 1891, 265, 1892, 269, 1893, 269, // NOLINT
+ 1894, 273, 1895, 273, 1896, 277, 1897, 277, 1898, 281, 1899, 281, 1900, 285, 1901, 285, // NOLINT
+ 1902, 289, 1903, 289, 1913, 293, 1914, 293, 1915, 297, 1916, 297, 1917, 301, 1918, 305, // NOLINT
+ 1919, 305, 1920, 309, 1921, 309, 1922, 313, 1923, 313, 1924, 317, 1925, 317, 1926, 321, // NOLINT
+ 1927, 321, 1931, 325, 1932, 325, 1933, 329, 1936, 333, 1937, 333, 1938, 337, 1939, 337, // NOLINT
+ 1952, 341, 1953, 341, 1954, 345, 1955, 345, 1956, 349, 1957, 349, 1958, 353, 1959, 353, // NOLINT
+ 1960, 357, 1961, 357, 1962, 361 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings5Size = 92; // NOLINT
static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings7[3] = { // NOLINT
{{65313, 65345}}, {{65338, 65370}}, {{kSentinel}} }; // NOLINT
static const uint16_t kEcma262UnCanonicalizeTable7Size = 4; // NOLINT
@@ -1502,6 +1724,13 @@ int Ecma262UnCanonicalize::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kEcma262UnCanonicalizeTable5,
+ kEcma262UnCanonicalizeTable5Size,
+ kEcma262UnCanonicalizeMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kEcma262UnCanonicalizeTable7,
kEcma262UnCanonicalizeTable7Size,
kEcma262UnCanonicalizeMultiStrings7,
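The new case 5 arm wires the freshly generated Table5/MultiStrings5 data above into the lookup. From the tables themselves one can infer the layout: each case handles one 8192-code-point chunk of the BMP (the case number appears to be code_point >> 13), keys are stored relative to the chunk base, and a key with bit 30 set seems to open a range whose end is given by the next pair. Below is a minimal, standalone decoding sketch under that reading; the names are illustrative and are not V8's actual LookupMapping.

  #include <stdint.h>
  #include <stdio.h>

  static const int kChunkShift = 13;              // 8192 code points per chunk/table
  static const int32_t kRangeStartBit = 1 << 30;  // 1073741824, visible in the keys above

  struct DecodedEntry {
    int32_t code_point;  // absolute code point
    bool range_start;    // true if this pair opens a range
    int value;           // index-like value into the MultiStrings table
  };

  static DecodedEntry Decode(int chunk_index, int32_t key, int32_t value) {
    DecodedEntry e;
    e.range_start = (key & kRangeStartBit) != 0;
    e.code_point = (chunk_index << kChunkShift) + (key & (kRangeStartBit - 1));
    e.value = value;
    return e;
  }

  int main() {
    // First pair of kEcma262UnCanonicalizeTable5: 1600 -> 1.
    DecodedEntry e = Decode(5, 1600, 1);
    // Prints "42560 0 1"; U+A640 is the first pair in kEcma262UnCanonicalizeMultiStrings5.
    printf("%d %d %d\n", e.code_point, e.range_start ? 1 : 0, e.value);
    return 0;
  }

The sizes stay consistent with this reading: kEcma262UnCanonicalizeTable5 holds 2 * 179 int32_t entries for kEcma262UnCanonicalizeTable5Size = 179, and the 92-element MultiStrings5 array is terminated by the kSentinel entry.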
@@ -1577,9 +1806,11 @@ const uchar UnicodeData::kMaxCodePoint = 65533;
int UnicodeData::GetByteCount() {
return kUppercaseTable0Size * sizeof(int32_t) // NOLINT
+ kUppercaseTable1Size * sizeof(int32_t) // NOLINT
+ + kUppercaseTable5Size * sizeof(int32_t) // NOLINT
+ kUppercaseTable7Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable0Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable1Size * sizeof(int32_t) // NOLINT
+ + kLowercaseTable5Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable7Size * sizeof(int32_t) // NOLINT
+ kLetterTable0Size * sizeof(int32_t) // NOLINT
+ kLetterTable1Size * sizeof(int32_t) // NOLINT
@@ -1592,6 +1823,7 @@ int UnicodeData::GetByteCount() {
+ kSpaceTable0Size * sizeof(int32_t) // NOLINT
+ kSpaceTable1Size * sizeof(int32_t) // NOLINT
+ kNumberTable0Size * sizeof(int32_t) // NOLINT
+ + kNumberTable5Size * sizeof(int32_t) // NOLINT
+ kNumberTable7Size * sizeof(int32_t) // NOLINT
+ kWhiteSpaceTable0Size * sizeof(int32_t) // NOLINT
+ kWhiteSpaceTable1Size * sizeof(int32_t) // NOLINT
@@ -1606,15 +1838,19 @@ int UnicodeData::GetByteCount() {
+ kConnectorPunctuationTable7Size * sizeof(int32_t) // NOLINT
+ kToLowercaseMultiStrings0Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kToLowercaseMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kToLowercaseMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToLowercaseMultiStrings7Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToUppercaseMultiStrings0Size * sizeof(MultiCharacterSpecialCase<3>) // NOLINT
+ kToUppercaseMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kToUppercaseMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToUppercaseMultiStrings7Size * sizeof(MultiCharacterSpecialCase<3>) // NOLINT
+ kEcma262CanonicalizeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262CanonicalizeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kEcma262CanonicalizeMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262CanonicalizeMultiStrings7Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<4>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ + kEcma262UnCanonicalizeMultiStrings5Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings7Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kCanonicalizationRangeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kCanonicalizationRangeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
diff --git a/deps/v8/src/v8-counters.h b/deps/v8/src/v8-counters.h
index 47341e72c55..6db9c77edcf 100644
--- a/deps/v8/src/v8-counters.h
+++ b/deps/v8/src/v8-counters.h
@@ -198,6 +198,9 @@ namespace internal {
SC(constructed_objects_stub, V8.ConstructedObjectsStub) \
SC(negative_lookups, V8.NegativeLookups) \
SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
+ SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
+ SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
+ SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
SC(array_function_runtime, V8.ArrayFunctionRuntime) \
SC(array_function_native, V8.ArrayFunctionNative) \
SC(for_in, V8.ForIn) \
diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc
index e4b37b180e1..98b30385937 100644
--- a/deps/v8/src/v8.cc
+++ b/deps/v8/src/v8.cc
@@ -146,6 +146,12 @@ void V8::SetEntropySource(EntropySource source) {
}
+void V8::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver) {
+ StackFrame::SetReturnAddressLocationResolver(resolver);
+}
+
+
// Used by JavaScript APIs
uint32_t V8::Random(Context* context) {
ASSERT(context->IsGlobalContext());
@@ -217,19 +223,17 @@ typedef union {
Object* V8::FillHeapNumberWithRandom(Object* heap_number,
Context* context) {
+ double_int_union r;
uint64_t random_bits = Random(context);
- // Make a double* from address (heap_number + sizeof(double)).
- double_int_union* r = reinterpret_cast<double_int_union*>(
- reinterpret_cast<char*>(heap_number) +
- HeapNumber::kValueOffset - kHeapObjectTag);
// Convert 32 random bits to 0.(32 random bits) in a double
// by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
- const double binary_million = 1048576.0;
- r->double_value = binary_million;
- r->uint64_t_value |= random_bits;
- r->double_value -= binary_million;
+ static const double binary_million = 1048576.0;
+ r.double_value = binary_million;
+ r.uint64_t_value |= random_bits;
+ r.double_value -= binary_million;
+ HeapNumber::cast(heap_number)->set_value(r.double_value);
return heap_number;
}
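The rewritten FillHeapNumberWithRandom keeps the same mantissa trick but builds the value in a local union and stores it through HeapNumber::cast(...)->set_value() instead of poking raw memory behind the heap number. A self-contained sketch of the trick itself, assuming IEEE-754 doubles and using memcpy in place of the double_int_union above:

  #include <stdint.h>
  #include <stdio.h>
  #include <string.h>

  static double BitsToUnitInterval(uint32_t random_bits) {
    const double binary_million = 1048576.0;  // 1.0 x 2^20; its low 32 mantissa bits are zero
    double d = binary_million;
    uint64_t bits;
    memcpy(&bits, &d, sizeof(bits));          // stand-in for the union's uint64_t view
    bits |= random_bits;                      // fill the low 32 bits of the mantissa
    memcpy(&d, &bits, sizeof(d));
    return d - binary_million;                // random_bits / 2^32, i.e. a value in [0, 1)
  }

  int main() {
    printf("%f %f %f\n",
           BitsToUnitInterval(0u),            // 0.000000
           BitsToUnitInterval(0x80000000u),   // 0.500000
           BitsToUnitInterval(0xffffffffu));  // just under 1.0 (1.000000 at 6 digits)
    return 0;
  }

Since the double has 52 mantissa bits and the exponent is 2^20, the 32 OR-ed bits sit exactly 32 places below the binary point, so the final subtraction leaves random_bits / 2^32.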
diff --git a/deps/v8/src/v8.h b/deps/v8/src/v8.h
index adfdb3ea885..699c5a09b93 100644
--- a/deps/v8/src/v8.h
+++ b/deps/v8/src/v8.h
@@ -95,6 +95,9 @@ class V8 : public AllStatic {
// Allows an entropy source to be provided for use in random number
// generation.
static void SetEntropySource(EntropySource source);
+ // Support for return-address rewriting profilers.
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver);
// Random number generation support. Not cryptographically safe.
static uint32_t Random(Context* context);
// We use random numbers internally in memory allocation and in the
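A hedged sketch of how a return-address-rewriting profiler might use the new hook. The ReturnAddressLocationResolver signature is assumed here to be uintptr_t (*)(uintptr_t return_addr_location), matching the v8.h of this series; the trampoline checks below are placeholders for the profiler's own bookkeeping, not a real API.

  #include <stdint.h>

  // Illustrative stand-ins for the profiler's own bookkeeping.
  static bool IsProfilerTrampoline(uintptr_t return_address) {
    (void) return_address;
    return false;
  }
  static uintptr_t SavedReturnAddressLocation(uintptr_t return_addr_location) {
    return return_addr_location;
  }

  // Given the location of a return address on the stack, return the location
  // where the original return address can be found if the profiler rewrote it.
  uintptr_t ResolveReturnAddressLocation(uintptr_t return_addr_location) {
    uintptr_t return_address = *reinterpret_cast<uintptr_t*>(return_addr_location);
    if (IsProfilerTrampoline(return_address)) {
      return SavedReturnAddressLocation(return_addr_location);
    }
    return return_addr_location;
  }

  // Installed once, before V8 has to walk stack frames:
  //   v8::V8::SetReturnAddressLocationResolver(ResolveReturnAddressLocation);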
diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js
index 381d34139e7..f1e8084a530 100644
--- a/deps/v8/src/v8natives.js
+++ b/deps/v8/src/v8natives.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -332,7 +332,7 @@ function ObjectLookupSetter(name) {
function ObjectKeys(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["keys"]);
+ throw MakeTypeError("called_on_non_object", ["Object.keys"]);
}
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
@@ -834,10 +834,6 @@ function DefineObjectProperty(obj, p, desc, should_throw) {
}
%DefineOrRedefineDataProperty(obj, p, value, flag);
- } else if (IsGenericDescriptor(desc)) {
- // Step 12 - updating an existing accessor property with generic
- // descriptor. Changing flags only.
- %DefineOrRedefineAccessorProperty(obj, p, GETTER, current.getGet(), flag);
} else {
// There are 3 cases that lead here:
// Step 4b - defining a new accessor property.
@@ -845,12 +841,9 @@ function DefineObjectProperty(obj, p, desc, should_throw) {
// property.
// Step 12 - updating an existing accessor property with an accessor
// descriptor.
- if (desc.hasGetter()) {
- %DefineOrRedefineAccessorProperty(obj, p, GETTER, desc.getGet(), flag);
- }
- if (desc.hasSetter()) {
- %DefineOrRedefineAccessorProperty(obj, p, SETTER, desc.getSet(), flag);
- }
+ var getter = desc.hasGetter() ? desc.getGet() : null;
+ var setter = desc.hasSetter() ? desc.getSet() : null;
+ %DefineOrRedefineAccessorProperty(obj, p, getter, setter, flag);
}
return true;
}
@@ -943,7 +936,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
// ES5 section 15.2.3.2.
function ObjectGetPrototypeOf(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["getPrototypeOf"]);
+ throw MakeTypeError("called_on_non_object", ["Object.getPrototypeOf"]);
}
return %GetPrototype(obj);
}
@@ -952,8 +945,8 @@ function ObjectGetPrototypeOf(obj) {
// ES5 section 15.2.3.3
function ObjectGetOwnPropertyDescriptor(obj, p) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object",
- ["getOwnPropertyDescriptor"]);
+ throw MakeTypeError("called_on_non_object",
+ ["Object.getOwnPropertyDescriptor"]);
}
var desc = GetOwnProperty(obj, p);
return FromPropertyDescriptor(desc);
@@ -983,8 +976,7 @@ function ToStringArray(obj, trap) {
// ES5 section 15.2.3.4.
function ObjectGetOwnPropertyNames(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object",
- ["getOwnPropertyNames"]);
+ throw MakeTypeError("called_on_non_object", ["Object.getOwnPropertyNames"]);
}
// Special handling for proxies.
if (%IsJSProxy(obj)) {
@@ -1057,7 +1049,7 @@ function ObjectCreate(proto, properties) {
// ES5 section 15.2.3.6.
function ObjectDefineProperty(obj, p, attributes) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["defineProperty"]);
+ throw MakeTypeError("called_on_non_object", ["Object.defineProperty"]);
}
var name = ToString(p);
if (%IsJSProxy(obj)) {
@@ -1109,7 +1101,7 @@ function GetOwnEnumerablePropertyNames(properties) {
// ES5 section 15.2.3.7.
function ObjectDefineProperties(obj, properties) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["defineProperties"]);
+ throw MakeTypeError("called_on_non_object", ["Object.defineProperties"]);
}
var props = ToObject(properties);
var names = GetOwnEnumerablePropertyNames(props);
@@ -1156,7 +1148,7 @@ function ProxyFix(obj) {
// ES5 section 15.2.3.8.
function ObjectSeal(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["seal"]);
+ throw MakeTypeError("called_on_non_object", ["Object.seal"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1178,7 +1170,7 @@ function ObjectSeal(obj) {
// ES5 section 15.2.3.9.
function ObjectFreeze(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["freeze"]);
+ throw MakeTypeError("called_on_non_object", ["Object.freeze"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1201,7 +1193,7 @@ function ObjectFreeze(obj) {
// ES5 section 15.2.3.10
function ObjectPreventExtension(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["preventExtension"]);
+ throw MakeTypeError("called_on_non_object", ["Object.preventExtension"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1214,7 +1206,7 @@ function ObjectPreventExtension(obj) {
// ES5 section 15.2.3.11
function ObjectIsSealed(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isSealed"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isSealed"]);
}
if (%IsJSProxy(obj)) {
return false;
@@ -1235,7 +1227,7 @@ function ObjectIsSealed(obj) {
// ES5 section 15.2.3.12
function ObjectIsFrozen(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isFrozen"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isFrozen"]);
}
if (%IsJSProxy(obj)) {
return false;
@@ -1257,7 +1249,7 @@ function ObjectIsFrozen(obj) {
// ES5 section 15.2.3.13
function ObjectIsExtensible(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isExtensible"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isExtensible"]);
}
if (%IsJSProxy(obj)) {
return true;
@@ -1266,6 +1258,16 @@ function ObjectIsExtensible(obj) {
}
+// Harmony egal.
+function ObjectIs(obj1, obj2) {
+ if (obj1 === obj2) {
+ return (obj1 !== 0) || (1 / obj1 === 1 / obj2);
+ } else {
+ return (obj1 !== obj1) && (obj2 !== obj2);
+ }
+}
+
+
%SetCode($Object, function(x) {
if (%_IsConstructCall()) {
if (x == null) return this;
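ObjectIs above implements the Harmony "egal" comparison (later Object.is): it agrees with === except that NaN is equal to itself and +0 is distinguished from -0. The 1 / obj1 === 1 / obj2 test works because dividing by the two zeros yields infinities of opposite sign. A standalone C++ analog of the same comparison for doubles, shown only to make those two special cases explicit:

  #include <cmath>
  #include <cstdio>

  // Same-value comparison for doubles, mirroring the two special cases above.
  static bool SameValueNumber(double x, double y) {
    if (std::isnan(x) && std::isnan(y)) return true;        // NaN "is" NaN
    if (x == y) return std::signbit(x) == std::signbit(y);  // reject +0 vs -0
    return false;
  }

  int main() {
    std::printf("%d %d %d\n",
                SameValueNumber(NAN, NAN) ? 1 : 0,   // 1
                SameValueNumber(0.0, -0.0) ? 1 : 0,  // 0
                SameValueNumber(1.5, 1.5) ? 1 : 0);  // 1
    return 0;
  }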
@@ -1305,6 +1307,7 @@ function SetUpObject() {
"getPrototypeOf", ObjectGetPrototypeOf,
"getOwnPropertyDescriptor", ObjectGetOwnPropertyDescriptor,
"getOwnPropertyNames", ObjectGetOwnPropertyNames,
+ "is", ObjectIs,
"isExtensible", ObjectIsExtensible,
"isFrozen", ObjectIsFrozen,
"isSealed", ObjectIsSealed,
@@ -1469,6 +1472,18 @@ function NumberToPrecision(precision) {
}
+// Harmony isFinite.
+function NumberIsFinite(number) {
+ return IS_NUMBER(number) && NUMBER_IS_FINITE(number);
+}
+
+
+// Harmony isNaN.
+function NumberIsNaN(number) {
+ return IS_NUMBER(number) && NUMBER_IS_NAN(number);
+}
+
+
// ----------------------------------------------------------------------------
function SetUpNumber() {
@@ -1513,6 +1528,10 @@ function SetUpNumber() {
"toExponential", NumberToExponential,
"toPrecision", NumberToPrecision
));
+ InstallFunctions($Number, DONT_ENUM, $Array(
+ "isFinite", NumberIsFinite,
+ "isNaN", NumberIsNaN
+ ));
}
SetUpNumber();
diff --git a/deps/v8/src/variables.cc b/deps/v8/src/variables.cc
index aa6a010facd..32ad5bc5dd4 100644
--- a/deps/v8/src/variables.cc
+++ b/deps/v8/src/variables.cc
@@ -59,7 +59,8 @@ Variable::Variable(Scope* scope,
VariableMode mode,
bool is_valid_LHS,
Kind kind,
- InitializationFlag initialization_flag)
+ InitializationFlag initialization_flag,
+ Interface* interface)
: scope_(scope),
name_(name),
mode_(mode),
@@ -71,7 +72,8 @@ Variable::Variable(Scope* scope,
is_valid_LHS_(is_valid_LHS),
force_context_allocation_(false),
is_used_(false),
- initialization_flag_(initialization_flag) {
+ initialization_flag_(initialization_flag),
+ interface_(interface) {
// Names must be canonicalized for fast equality checks.
ASSERT(name->IsSymbol());
// Var declared variables never need initialization.
diff --git a/deps/v8/src/variables.h b/deps/v8/src/variables.h
index f20bd399c54..f49b6e12763 100644
--- a/deps/v8/src/variables.h
+++ b/deps/v8/src/variables.h
@@ -29,6 +29,7 @@
#define V8_VARIABLES_H_
#include "zone.h"
+#include "interface.h"
namespace v8 {
namespace internal {
@@ -78,7 +79,8 @@ class Variable: public ZoneObject {
VariableMode mode,
bool is_valid_lhs,
Kind kind,
- InitializationFlag initialization_flag);
+ InitializationFlag initialization_flag,
+ Interface* interface = Interface::NewValue());
// Printing support
static const char* Mode2String(VariableMode mode);
@@ -153,6 +155,7 @@ class Variable: public ZoneObject {
InitializationFlag initialization_flag() const {
return initialization_flag_;
}
+ Interface* interface() const { return interface_; }
void AllocateTo(Location location, int index) {
location_ = location;
@@ -183,6 +186,9 @@ class Variable: public ZoneObject {
bool force_context_allocation_; // set by variable resolver
bool is_used_;
InitializationFlag initialization_flag_;
+
+ // Module type info.
+ Interface* interface_;
};
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index f0af68de445..57e65943409 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 9
-#define BUILD_NUMBER 11
+#define BUILD_NUMBER 17
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/deps/v8/src/win32-headers.h b/deps/v8/src/win32-headers.h
index 87f078dc0ca..5d9c89e3121 100644
--- a/deps/v8/src/win32-headers.h
+++ b/deps/v8/src/win32-headers.h
@@ -56,6 +56,7 @@
#include <windows.h>
#ifdef V8_WIN32_HEADERS_FULL
+#include <signal.h> // For raise().
#include <time.h> // For LocalOffset() implementation.
#include <mmsystem.h> // For timeGetTime().
#ifdef __MINGW32__
@@ -78,7 +79,7 @@
#ifndef __MINGW32__
#include <wspiapi.h>
#endif // __MINGW32__
-#include <process.h> // for _beginthreadex()
+#include <process.h> // For _beginthreadex().
#include <stdlib.h>
#endif // V8_WIN32_HEADERS_FULL
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index 2ea68b33b0d..4e037ff465f 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -329,6 +329,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -1427,9 +1432,130 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
- // TODO(849): implement custom construct stub.
- // Generate a copy of the generic stub for now.
- Generate_JSConstructStubGeneric(masm);
+ // ----------- S t a t e -------------
+ // -- rax : number of arguments
+ // -- rdi : constructor function
+ // -- rsp[0] : return address
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
+ // -- rsp[(argc + 1) * 8] : receiver
+ // -----------------------------------
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_ctor_calls(), 1);
+
+ if (FLAG_debug_code) {
+ __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
+ __ cmpq(rdi, rcx);
+ __ Assert(equal, "Unexpected String function");
+ }
+
+ // Load the first argument into rax and get rid of the rest
+ // (including the receiver).
+ Label no_arguments;
+ __ testq(rax, rax);
+ __ j(zero, &no_arguments);
+ __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
+ __ pop(rcx);
+ __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
+ __ push(rcx);
+ __ movq(rax, rbx);
+
+ // Lookup the argument in the number to string cache.
+ Label not_cached, argument_is_string;
+ NumberToStringStub::GenerateLookupNumberStringCache(
+ masm,
+ rax, // Input.
+ rbx, // Result.
+ rcx, // Scratch 1.
+ rdx, // Scratch 2.
+ false, // Input is known to be smi?
+ &not_cached);
+ __ IncrementCounter(counters->string_ctor_cached_number(), 1);
+ __ bind(&argument_is_string);
+
+ // ----------- S t a t e -------------
+ // -- rbx : argument converted to string
+ // -- rdi : constructor function
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ // Allocate a JSValue and put the tagged pointer into rax.
+ Label gc_required;
+ __ AllocateInNewSpace(JSValue::kSize,
+ rax, // Result.
+ rcx, // New allocation top (we ignore it).
+ no_reg,
+ &gc_required,
+ TAG_OBJECT);
+
+ // Set the map.
+ __ LoadGlobalFunctionInitialMap(rdi, rcx);
+ if (FLAG_debug_code) {
+ __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
+ Immediate(JSValue::kSize >> kPointerSizeLog2));
+ __ Assert(equal, "Unexpected string wrapper instance size");
+ __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
+ __ Assert(equal, "Unexpected unused properties of string wrapper");
+ }
+ __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
+
+ // Set properties and elements.
+ __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
+ __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
+
+ // Set the value.
+ __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);
+
+ // Ensure the object is fully initialized.
+ STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
+
+ // We're done. Return.
+ __ ret(0);
+
+ // The argument was not found in the number to string cache. Check
+ // if it's a string already before calling the conversion builtin.
+ Label convert_argument;
+ __ bind(&not_cached);
+ STATIC_ASSERT(kSmiTag == 0);
+ __ JumpIfSmi(rax, &convert_argument);
+ Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
+ __ j(NegateCondition(is_string), &convert_argument);
+ __ movq(rbx, rax);
+ __ IncrementCounter(counters->string_ctor_string_value(), 1);
+ __ jmp(&argument_is_string);
+
+ // Invoke the conversion builtin and put the result into rbx.
+ __ bind(&convert_argument);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ push(rdi); // Preserve the function.
+ __ push(rax);
+ __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
+ __ pop(rdi);
+ }
+ __ movq(rbx, rax);
+ __ jmp(&argument_is_string);
+
+ // Load the empty string into rbx, remove the receiver from the
+ // stack, and jump back to the case where the argument is a string.
+ __ bind(&no_arguments);
+ __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);
+ __ pop(rcx);
+ __ lea(rsp, Operand(rsp, kPointerSize));
+ __ push(rcx);
+ __ jmp(&argument_is_string);
+
+ // At this point the argument is already a string. Call runtime to
+ // create a string wrapper.
+ __ bind(&gc_required);
+ __ IncrementCounter(counters->string_ctor_gc_required(), 1);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ push(rbx);
+ __ CallRuntime(Runtime::kNewStringWrapper, 1);
+ }
+ __ ret(0);
}
@@ -1538,7 +1664,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&invoke);
__ call(rdx);
+ // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Leave frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ ret(0);
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index 61404fa2d17..d616749e02f 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -1628,7 +1628,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
__ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
}
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ movq(Operand(rcx, 0), rbx);
__ movq(Operand(rcx, 2 * kIntSize), rax);
__ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
@@ -1643,7 +1643,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ subq(rsp, Immediate(kDoubleSize));
__ movsd(Operand(rsp, 0), xmm1);
__ fld_d(Operand(rsp, 0));
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ fstp_d(Operand(rsp, 0));
__ movsd(xmm1, Operand(rsp, 0));
__ addq(rsp, Immediate(kDoubleSize));
@@ -1695,16 +1695,17 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
}
-void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
+void TranscendentalCacheStub::GenerateOperation(
+ MacroAssembler* masm, TranscendentalCache::Type type) {
// Registers:
// rax: Newly allocated HeapNumber, which must be preserved.
// rbx: Bits of input double. Must be preserved.
// rcx: Pointer to cache entry. Must be preserved.
// st(0): Input double
Label done;
- if (type_ == TranscendentalCache::SIN ||
- type_ == TranscendentalCache::COS ||
- type_ == TranscendentalCache::TAN) {
+ if (type == TranscendentalCache::SIN ||
+ type == TranscendentalCache::COS ||
+ type == TranscendentalCache::TAN) {
// Both fsin and fcos require arguments in the range +/-2^63 and
// return NaN for infinities and NaN. They can share all code except
// the actual fsin/fcos operation.
@@ -1725,8 +1726,12 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
__ j(not_equal, &non_nan_result, Label::kNear);
// Input is +/-Infinity or NaN. Result is NaN.
__ fstp(0);
- __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
- __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
+ // NaN is represented by 0x7ff8000000000000.
+ __ subq(rsp, Immediate(kPointerSize));
+ __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
+ __ movl(Operand(rsp, 0), Immediate(0x00000000));
+ __ fld_d(Operand(rsp, 0));
+ __ addq(rsp, Immediate(kPointerSize));
__ jmp(&done);
__ bind(&non_nan_result);
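Instead of loading the canonical NaN from the heap roots, the stub now materializes the bit pattern directly on the stack: the two 32-bit stores assemble 0x7ff8000000000000, the standard quiet NaN (sign 0, exponent all ones, top mantissa bit set), with the low word at offset 0 and the high word at offset 4 on little-endian x64. A quick standalone check of that constant:

  #include <math.h>
  #include <stdint.h>
  #include <stdio.h>
  #include <string.h>

  int main() {
    uint64_t bits = 0x7ff8000000000000ULL;  // the constant named in the comment above
    double d;
    memcpy(&d, &bits, sizeof(d));
    printf("%d\n", isnan(d) ? 1 : 0);       // prints 1
    return 0;
  }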
@@ -1767,7 +1772,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// FPU Stack: input % 2*pi
__ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber.
__ bind(&in_range);
- switch (type_) {
+ switch (type) {
case TranscendentalCache::SIN:
__ fsin();
break;
@@ -1785,7 +1790,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
}
__ bind(&done);
} else {
- ASSERT(type_ == TranscendentalCache::LOG);
+ ASSERT(type == TranscendentalCache::LOG);
__ fldln2();
__ fxch();
__ fyl2x();
@@ -5224,12 +5229,12 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
__ bind(&set_slice_header);
- __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
__ Integer32ToSmi(rcx, rcx);
__ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
- __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
__ movq(FieldOperand(rax, SlicedString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+ __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
+ __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
@@ -5449,7 +5454,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
__ movb(scratch, Operand(left, index, times_1, 0));
__ cmpb(scratch, Operand(right, index, times_1, 0));
__ j(not_equal, chars_not_equal, near_jump);
- __ addq(index, Immediate(1));
+ __ incq(index);
__ j(not_zero, &loop);
}
@@ -5522,15 +5527,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
Condition either_smi = masm->CheckEitherSmi(rax, rdx);
__ j(either_smi, &generic_stub, Label::kNear);
__ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined1, Label::kNear);
__ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
// Load left and right operand
__ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
@@ -5551,11 +5556,25 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ ret(0);
__ bind(&unordered);
-
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
__ bind(&generic_stub);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ Cmp(rax, masm->isolate()->factory()->undefined_value());
+ __ j(not_equal, &miss);
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
+ __ j(equal, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -5606,9 +5625,10 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
- ASSERT(GetCondition() == equal);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = rdx;
Register right = rax;
@@ -5646,24 +5666,31 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// Check that both strings are symbols. If they are, we're done
// because we already know they are not identical.
- Label do_compare;
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp1, tmp2);
- __ testb(tmp1, Immediate(kIsSymbolMask));
- __ j(zero, &do_compare, Label::kNear);
- // Make sure rax is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(rax));
- __ ret(0);
+ if (equality) {
+ Label do_compare;
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp1, tmp2);
+ __ testb(tmp1, Immediate(kIsSymbolMask));
+ __ j(zero, &do_compare, Label::kNear);
+ // Make sure rax is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(rax));
+ __ ret(0);
+ __ bind(&do_compare);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ bind(&do_compare);
__ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
@@ -5671,7 +5698,11 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ push(left);
__ push(right);
__ push(tmp1);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -5753,7 +5784,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// r0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -5782,11 +5813,18 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ Cmp(entity_name, Handle<String>(name));
__ j(equal, miss);
+ Label the_hole;
+ // Check for the hole and skip.
+ __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
+ __ j(equal, &the_hole, Label::kNear);
+
// Check if the entry name is not a symbol.
__ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
Immediate(kIsSymbolMask));
__ j(zero, miss);
+
+ __ bind(&the_hole);
}
StringDictionaryLookupStub stub(properties,
diff --git a/deps/v8/src/x64/code-stubs-x64.h b/deps/v8/src/x64/code-stubs-x64.h
index 30ef3e8c533..6a1a18f830a 100644
--- a/deps/v8/src/x64/code-stubs-x64.h
+++ b/deps/v8/src/x64/code-stubs-x64.h
@@ -48,6 +48,8 @@ class TranscendentalCacheStub: public CodeStub {
ArgumentType argument_type)
: type_(type), argument_type_(argument_type) {}
void Generate(MacroAssembler* masm);
+ static void GenerateOperation(MacroAssembler* masm,
+ TranscendentalCache::Type type);
private:
TranscendentalCache::Type type_;
ArgumentType argument_type_;
@@ -55,7 +57,6 @@ class TranscendentalCacheStub: public CodeStub {
Major MajorKey() { return TranscendentalCache; }
int MinorKey() { return type_ | argument_type_; }
Runtime::FunctionId RuntimeFunction();
- void GenerateOperation(MacroAssembler* masm);
};
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index 8947f70055f..902f7e93a31 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -54,6 +54,52 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
#define __ masm.
+
+TranscendentalFunction CreateTranscendentalFunction(
+ TranscendentalCache::Type type) {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ if (buffer == NULL) {
+ // Fallback to library function if function cannot be created.
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ }
+
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // xmm0: raw double input.
+ // Move double input into registers.
+ __ push(rbx);
+ __ push(rdi);
+ __ movq(rbx, xmm0);
+ __ push(rbx);
+ __ fld_d(Operand(rsp, 0));
+ TranscendentalCacheStub::GenerateOperation(&masm, type);
+ // The return value is expected to be in xmm0.
+ __ fstp_d(Operand(rsp, 0));
+ __ pop(rbx);
+ __ movq(xmm0, rbx);
+ __ pop(rdi);
+ __ pop(rbx);
+ __ Ret();
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<TranscendentalFunction>(buffer);
+}
+
+
#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define custom fmod implementation.
@@ -182,7 +228,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- rsp[0] : return address
// -----------------------------------
// The fail label is not actually used since we do not allocate.
- Label allocated, cow_array, only_change_map, done;
+ Label allocated, new_backing_store, only_change_map, done;
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
@@ -190,16 +236,20 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
__ j(equal, &only_change_map);
- // Check backing store for COW-ness. If the negative case, we do not have to
- // allocate a new array, since FixedArray and FixedDoubleArray do not differ
- // in size.
+ // Check backing store for COW-ness. For COW arrays we have to
+ // allocate a new backing store.
__ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
__ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
- __ j(equal, &cow_array);
+ __ j(equal, &new_backing_store);
+ // Check if the backing store is in new-space. If not, we need to allocate
+ // a new one since the old one is in pointer-space.
+ // If in new space, we can reuse the old backing store because it is
+ // the same size.
+ __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store);
+
__ movq(r14, r8); // Destination array equals source array.
- __ bind(&allocated);
// r8 : source FixedArray
// r9 : elements array length
// r14: destination FixedDoubleArray
@@ -207,6 +257,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
__ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
+ __ bind(&allocated);
// Set transitioned map.
__ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
__ RecordWriteField(rdx,
@@ -227,10 +278,13 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// r15: the-hole NaN
__ jmp(&entry);
- // Allocate new array if the source array is a COW array.
- __ bind(&cow_array);
+ // Allocate new backing store.
+ __ bind(&new_backing_store);
__ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
__ AllocateInNewSpace(rdi, r14, r11, r15, fail, TAG_OBJECT);
+ // Set backing store's map
+ __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
+ __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
// Set receiver's backing store.
__ movq(FieldOperand(rdx, JSObject::kElementsOffset), r14);
__ movq(r11, r14);
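
Editor's note on the hunk above: CreateTranscendentalFunction emits a small native stub around the shared GenerateOperation body and, if executable memory cannot be obtained, silently degrades to the C library routine. A minimal sketch of that generate-or-fall-back shape in plain C++ follows; AllocateExecutable and EmitStub are hypothetical stand-ins (not V8 APIs) for OS::Allocate(..., executable) and the MacroAssembler emission step.

// Sketch only: the generate-or-fall-back pattern of CreateTranscendentalFunction.
#include <cmath>
#include <cstddef>

typedef double (*TranscendentalFunction)(double);
enum TranscendentalType { SIN, COS, TAN, LOG };

// Assumed helpers for illustration; they do not exist under these names in V8.
void* AllocateExecutable(size_t size, size_t* actual_size);
TranscendentalFunction EmitStub(void* buffer, size_t size, TranscendentalType type);

TranscendentalFunction CreateTranscendentalFunctionSketch(TranscendentalType type) {
  size_t actual_size = 0;
  void* buffer = AllocateExecutable(1024, &actual_size);
  if (buffer == NULL) {
    // Same fallback table as the diff: hand back the library routine directly.
    switch (type) {
      case SIN: return +[](double x) { return std::sin(x); };
      case COS: return +[](double x) { return std::cos(x); };
      case TAN: return +[](double x) { return std::tan(x); };
      case LOG: return +[](double x) { return std::log(x); };
    }
  }
  // Otherwise emit a stub that moves xmm0 onto the x87 stack, runs the shared
  // GenerateOperation body, and moves the result back into xmm0.
  return EmitStub(buffer, actual_size, type);
}
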
diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc
index efa988874ed..2adf587f8c4 100644
--- a/deps/v8/src/x64/deoptimizer-x64.cc
+++ b/deps/v8/src/x64/deoptimizer-x64.cc
@@ -347,7 +347,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
- unsigned input_frame_size = input_->GetFrameSize();
unsigned output_frame_size = height_in_bytes + fixed_frame_size;
// Allocate and store the output frame description.
@@ -369,16 +368,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Compute the incoming parameter translation.
int parameter_count = height;
unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
}
- input_offset -= (parameter_count * kPointerSize);
// Read caller's PC from the previous frame.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t callers_pc = output_[frame_index - 1]->GetPc();
output_frame->SetFrameSlot(output_offset, callers_pc);
if (FLAG_trace_deopt) {
@@ -389,7 +385,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Read caller's FP from the previous frame, and set this frame's FP.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t value = output_[frame_index - 1]->GetFp();
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
@@ -402,7 +397,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// A marker value is used in place of the context.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
intptr_t context = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
output_frame->SetFrameSlot(output_offset, context);
@@ -414,7 +408,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(function);
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -425,7 +418,6 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
// Number of incoming arguments.
output_offset -= kPointerSize;
- input_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
output_frame->SetFrameSlot(output_offset, value);
if (FLAG_trace_deopt) {
@@ -446,6 +438,116 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
}
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 6 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ intptr_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
int frame_index) {
int node_id = iterator->Next();
@@ -555,6 +657,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
+ output_frame->SetContext(value);
if (is_topmost) output_frame->SetRegister(rsi.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
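
Editor's note on DoComputeConstructStubFrame above: the synthetic construct-stub frame it materializes during deoptimization has a fixed part of six words. The slot layout implied by the sequence of SetFrameSlot calls is sketched below; the enumerator names are illustrative only, not V8 identifiers, and offsets are counted in words from the top of the frame (the translated parameters sit above kCallerPcSlot).

// Fixed portion written by DoComputeConstructStubFrame, lowest offset first.
enum ConstructStubFrameSlot {
  kReceiverSlot = 0,      // the already-allocated object, re-read from the
                          // parameter area (last SetFrameSlot in the function)
  kArgcSlot,              // Smi::FromInt(height - 1)
  kConstructMarkerSlot,   // Smi::FromInt(StackFrame::CONSTRUCT), used in place
                          // of the function
  kContextSlot,           // context copied from the caller frame
  kCallerFpSlot,
  kCallerPcSlot,
  kFixedSlotCount         // == 6, matching fixed_frame_size / kPointerSize
};
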
diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc
index 9c03053afe0..6739cc84a24 100644
--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -969,6 +969,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register rax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(rbx, cell);
+ __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
+
__ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
__ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1449,10 +1459,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
case ObjectLiteral::Property::GETTER:
__ push(Operand(rsp, 0)); // Duplicate receiver.
VisitForStackValue(key);
- __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0));
- VisitForStackValue(value);
+ if (property->kind() == ObjectLiteral::Property::GETTER) {
+ VisitForStackValue(value);
+ __ PushRoot(Heap::kNullValueRootIndex);
+ } else {
+ __ PushRoot(Heap::kNullValueRootIndex);
+ VisitForStackValue(value);
+ }
__ Push(Smi::FromInt(NONE));
__ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
break;
@@ -2280,6 +2293,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
CallConstructStub stub(flags);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(rax);
}
@@ -2820,6 +2834,54 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = rax;
+ Register result = rax;
+ Register scratch = rcx;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ movq(scratch, stamp);
+ __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2);
+#ifdef _WIN64
+ __ movq(rcx, object);
+ __ movq(rdx, index, RelocInfo::NONE);
+#else
+ __ movq(rdi, object);
+ __ movq(rsi, index, RelocInfo::NONE);
+#endif
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ bind(&done);
+ }
+ context()->Plug(rax);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
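
Editor's note on EmitDateField above (DoDateField in the Lithium backend below does the same): cached date components are read only while the object's stamp matches the isolate-wide date cache stamp, otherwise the code calls out to a C function. A minimal sketch of that stamp-validated cache, using illustrative types rather than V8's JSDate layout:

// Sketch, not V8 code: field 0 (the primitive time value) is always valid;
// cached fields are trusted only while the per-object stamp equals the global
// stamp, which the runtime bumps whenever the cache basis (e.g. time zone)
// changes.
#include <cstdint>

struct DateLike {
  double value;             // index 0: primitive time value
  int64_t cache_stamp;      // copied from the global stamp when fields filled
  double cached_fields[8];  // indexes 1..kFirstUncachedField-1 (year, month, ...)
};

double ReadDateField(const DateLike& d, int index, int64_t global_stamp,
                     double (*slow_path)(const DateLike&, int)) {
  if (index == 0) return d.value;
  if (d.cache_stamp == global_stamp) return d.cached_fields[index - 1];
  return slow_path(d, index);  // mirrors the CallCFunction fallback above
}
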
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index 2151cf49790..f707df030f9 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -67,7 +67,7 @@ class SafepointGenerator : public CallWrapper {
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
@@ -368,10 +368,18 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- if (environment->is_arguments_adaptor()) {
- translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
- } else {
- translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
}
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
@@ -511,7 +519,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
- if (!e->is_arguments_adaptor()) {
+ if (e->frame_type() == JS_FUNCTION) {
++jsframe_count;
}
}
@@ -1216,6 +1224,49 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(rax));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ movq(kScratchRegister, stamp);
+ __ cmpq(kScratchRegister, FieldOperand(object,
+ JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2);
+#ifdef _WIN64
+ __ movq(rcx, object);
+ __ movq(rdx, index, RelocInfo::NONE);
+#else
+ __ movq(rdi, object);
+ __ movq(rsi, index, RelocInfo::NONE);
+#endif
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->Equals(instr->result()));
@@ -2958,6 +3009,18 @@ void LCodeGen::DoPower(LPower* instr) {
void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
+
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
@@ -2972,12 +3035,49 @@ void LCodeGen::DoRandom(LRandom* instr) {
Register global_object = rdi;
#endif
- __ PrepareCallCFunction(1);
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == 2 * kSeedSize);
+
__ movq(global_object,
FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
- __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ movq(rbx, FieldOperand(global_object, kRandomSeedOffset));
+ // rbx: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ movl(rax, FieldOperand(rbx, ByteArray::kHeaderSize));
+ // If state[0] == 0, call runtime to initialize seeds.
+ __ testl(rax, rax);
+ __ j(zero, deferred->entry());
+ // Load state[1].
+ __ movl(rcx, FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize));
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ // Only operate on the lower 32 bit of rax.
+ __ movl(rdx, rax);
+ __ andl(rdx, Immediate(0xFFFF));
+ __ imull(rdx, rdx, Immediate(18273));
+ __ shrl(rax, Immediate(16));
+ __ addl(rax, rdx);
+ // Save state[0].
+ __ movl(FieldOperand(rbx, ByteArray::kHeaderSize), rax);
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ movl(rdx, rcx);
+ __ andl(rdx, Immediate(0xFFFF));
+ __ imull(rdx, rdx, Immediate(36969));
+ __ shrl(rcx, Immediate(16));
+ __ addl(rcx, rdx);
+ // Save state[1].
+ __ movl(FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize), rcx);
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ shll(rax, Immediate(14));
+ __ andl(rcx, Immediate(0x3FFFF));
+ __ addl(rax, rcx);
+ __ bind(deferred->exit());
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
@@ -2990,6 +3090,14 @@ void LCodeGen::DoRandom(LRandom* instr) {
}
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ // Return value is in rax.
+}
+
+
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3925,6 +4033,94 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
}
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ no_reg,
+ scratch,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(scratch, constructor);
+ __ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
+
+ if (FLAG_debug_code) {
+ __ AbortIfSmi(map);
+ __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
+ Immediate(instance_size >> kPointerSizeLog2));
+ __ Assert(equal, "Unexpected instance size");
+ __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
+ Immediate(initial_map->pre_allocated_property_fields()));
+ __ Assert(equal, "Unexpected pre-allocated property fields count");
+ __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
+ Immediate(initial_map->unused_property_fields()));
+ __ Assert(equal, "Unexpected unused property fields count");
+ __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
+ Immediate(initial_map->inobject_properties()));
+ __ Assert(equal, "Unexpected in-object property fields count");
+ }
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ movq(FieldOperand(result, JSObject::kMapOffset), map);
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(FieldOperand(result, JSObject::kElementsOffset), scratch);
+ __ movq(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ movq(FieldOperand(result, property_offset), scratch);
+ }
+ }
+
+ __ bind(deferred->exit());
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ Set(result, 0);
+
+ PushSafepointRegistersScope scope(this);
+ __ PushHeapObject(constructor);
+ CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ __ StoreToSafepointRegisterSlot(result, rax);
+}
+
+
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Heap* heap = isolate()->heap();
ElementsKind boilerplate_elements_kind =
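
Editor's note on the DoRandom hunk above: the fast path now inlines a two-seed multiply-with-carry generator and only takes the deferred runtime call when the seeds are uninitialized (state[0] == 0). The same recurrence written as plain C++ is sketched below; the state struct and seeding are illustrative, since V8 keeps the two 32-bit seeds in a ByteArray reachable from the global context.

#include <cstdint>

// Sketch of the generator emitted inline by DoRandom.
struct RandomState {
  uint32_t state0;  // must be seeded non-zero; DoRandom defers to the runtime
  uint32_t state1;  // when state0 == 0
};

inline uint32_t NextRandomBits(RandomState* s) {
  s->state0 = 18273u * (s->state0 & 0xFFFFu) + (s->state0 >> 16);
  s->state1 = 36969u * (s->state1 & 0xFFFFu) + (s->state1 >> 16);
  return (s->state0 << 14) + (s->state1 & 0x3FFFFu);
}

// The double conversion after deferred->exit(): building
// 1.(20 zeros)(32 random bits) x 2^20 and subtracting 1.0 x 2^20 is the same
// as dividing the 32 random bits by 2^32.
inline double BitsToUnitInterval(uint32_t bits) {
  return static_cast<double>(bits) / 4294967296.0;
}
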
diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h
index 2890c530b74..f5045b66a2d 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.h
+++ b/deps/v8/src/x64/lithium-codegen-x64.h
@@ -97,8 +97,10 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc
index d373e191fec..e2569c9f4da 100644
--- a/deps/v8/src/x64/lithium-x64.cc
+++ b/deps/v8/src/x64/lithium-x64.cc
@@ -382,7 +382,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -469,7 +469,7 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -544,8 +544,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
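
Editor's note: the bulk of the remaining lithium-x64.cc changes replace plain "new LFoo(...)" with "new(zone()) LFoo(...)", so every Lithium object is carved out of the compilation Zone and released in one shot rather than being heap-allocated individually. A toy sketch of that placement-new arrangement follows; the bump allocator and the LGoto/MakeGoto names are illustrative, not V8's implementation.

#include <cstddef>

// Toy arena standing in for V8's Zone: bump-pointer allocation, no per-object
// delete; all memory is reclaimed when the arena goes out of scope.
class Zone {
 public:
  void* New(size_t size) {
    size = (size + 7) & ~static_cast<size_t>(7);  // 8-byte align
    void* result = buffer_ + used_;               // no overflow check in this toy
    used_ += size;
    return result;
  }
 private:
  alignas(16) char buffer_[1 << 16];
  size_t used_ = 0;
};

// Matches the shape of the new(zone()) call sites in this file.
inline void* operator new(size_t size, Zone* zone) { return zone->New(size); }

class LGoto {
 public:
  explicit LGoto(int block_id) : block_id_(block_id) {}
  int block_id() const { return block_id_; }
 private:
  int block_id_;
};

LGoto* MakeGoto(Zone* zone, int block_id) {
  return new(zone) LGoto(block_id);  // freed implicitly when the zone dies
}
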
@@ -575,14 +575,14 @@ void LChunkBuilder::Abort(const char* format, ...) {
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- XMMRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ XMMRegister::ToAllocationIndex(reg));
}
@@ -597,29 +597,29 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
LUnallocated::USED_AT_START));
}
@@ -655,7 +655,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
@@ -681,7 +681,8 @@ LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
@@ -689,14 +690,16 @@ template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr,
int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -779,13 +782,14 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
operand->set_virtual_register(allocator_->GetVirtualRegister());
if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
@@ -807,17 +811,17 @@ LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -829,7 +833,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -863,7 +867,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineSameAsFirst(new LShiftI(op, left, right, does_deopt));
+ DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -876,7 +880,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineSameAsFirst(result);
}
@@ -894,7 +898,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, rdx);
LOperand* right_operand = UseFixed(right, rax);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -990,15 +995,17 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber || hydrogen_env->is_arguments_adaptor());
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- hydrogen_env->is_arguments_adaptor(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1008,14 +1015,14 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument(argument_index++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
- if (!hydrogen_env->is_arguments_adaptor()) {
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
*argument_index_accumulator = argument_index;
}
@@ -1024,7 +1031,7 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
@@ -1036,10 +1043,10 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
}
- LBranch* result = new LBranch(UseRegister(value));
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
// Tagged values that are not known smis or booleans require a
// deoptimization environment.
Representation rep = value->representation();
@@ -1054,24 +1061,24 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return new LCmpMapAndBranch(value);
+ return new(zone()) LCmpMapAndBranch(value);
}
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(Use(length->value())));
+ return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left = UseFixed(instr->left(), rax);
LOperand* right = UseFixed(instr->right(), rdx);
- LInstanceOf* result = new LInstanceOf(left, right);
+ LInstanceOf* result = new(zone()) LInstanceOf(left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1079,8 +1086,8 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), rax),
- FixedTemp(rdi));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), rax),
+ FixedTemp(rdi));
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1090,7 +1097,7 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* receiver = UseFixed(instr->receiver(), rax);
LOperand* length = UseFixed(instr->length(), rbx);
LOperand* elements = UseFixed(instr->elements(), rcx);
- LApplyArguments* result = new LApplyArguments(function,
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
receiver,
length,
elements);
@@ -1101,66 +1108,68 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = UseOrConstant(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
}
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
- return MarkAsCall(new LDeclareGlobals, instr);
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
- return DefineAsRegister(new LGlobalObject);
+ return DefineAsRegister(new(zone()) LGlobalObject);
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, rax), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), rdi);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), xmm1);
- LUnaryMathOperation* result = new LUnaryMathOperation(input);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
- LUnaryMathOperation* result = new LUnaryMathOperation(input);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
@@ -1184,33 +1193,33 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
LOperand* key = UseFixed(instr->key(), rcx);
argument_count_ -= instr->argument_count();
- LCallKeyed* result = new LCallKeyed(key);
+ LCallKeyed* result = new(zone()) LCallKeyed(key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, rax), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, rax), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, rax), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), rdi);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1218,14 +1227,14 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* function = UseFixed(instr->function(), rdi);
argument_count_ -= instr->argument_count();
- LCallFunction* result = new LCallFunction(function);
+ LCallFunction* result = new(zone()) LCallFunction(function);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, rax), instr);
}
@@ -1251,7 +1260,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineSameAsFirst(new LBitI(left, right));
+ return DefineSameAsFirst(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1259,7 +1268,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
}
@@ -1269,7 +1278,7 @@ LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
LOperand* input = UseRegisterAtStart(instr->value());
- LBitNotI* result = new LBitNotI(input);
+ LBitNotI* result = new(zone()) LBitNotI(input);
return DefineSameAsFirst(result);
}
@@ -1283,7 +1292,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* temp = FixedTemp(rdx);
LOperand* dividend = UseFixed(instr->left(), rax);
LOperand* divisor = UseRegister(instr->right());
- LDivI* result = new LDivI(dividend, divisor, temp);
+ LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
return AssignEnvironment(DefineFixed(result, rax));
} else {
ASSERT(instr->representation().IsTagged());
@@ -1301,7 +1310,8 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- LModI* mod = new LModI(value, UseOrConstant(instr->right()), NULL);
+ LModI* mod =
+ new(zone()) LModI(value, UseOrConstant(instr->right()), NULL);
result = DefineSameAsFirst(mod);
} else {
// The temporary operand is necessary to ensure that right is not
@@ -1309,7 +1319,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
LOperand* temp = FixedTemp(rdx);
LOperand* value = UseFixed(instr->left(), rax);
LOperand* divisor = UseRegister(instr->right());
- LModI* mod = new LModI(value, divisor, temp);
+ LModI* mod = new(zone()) LModI(value, divisor, temp);
result = DefineFixed(mod, rdx);
}
@@ -1326,7 +1336,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), xmm2);
LOperand* right = UseFixedDouble(instr->right(), xmm1);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
}
@@ -1338,7 +1348,7 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstant(instr->MostConstantOperand());
- LMulI* mul = new LMulI(left, right);
+ LMulI* mul = new(zone()) LMulI(left, right);
if (instr->CheckFlag(HValue::kCanOverflow) ||
instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
AssignEnvironment(mul);
@@ -1359,7 +1369,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineSameAsFirst(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1380,7 +1390,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineSameAsFirst(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1410,7 +1420,7 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
#else
UseFixed(instr->right(), rdi);
#endif
- LPower* result = new LPower(left, right);
+ LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1424,7 +1434,7 @@ LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
#else
LOperand* global_object = UseFixed(instr->global_object(), rdi);
#endif
- LRandom* result = new LRandom(global_object);
+ LRandom* result = new(zone()) LRandom(global_object);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
@@ -1434,7 +1444,7 @@ LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1447,7 +1457,7 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
@@ -1461,7 +1471,7 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
left = UseRegisterAtStart(instr->left());
right = UseRegisterAtStart(instr->right());
}
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1470,47 +1480,50 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LCmpConstantEqAndBranch(value);
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = instr->kind() == kStrictEquality ? NULL : TempRegister();
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LIsStringAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* temp = TempRegister();
+ return new(zone()) LIsUndetectableAndBranch(value, temp);
}
@@ -1521,7 +1534,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LStringCompareAndBranch* result = new LStringCompareAndBranch(left, right);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
return MarkAsCall(result, instr);
}
@@ -1530,7 +1544,8 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1539,55 +1554,64 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasCachedArrayIndexAndBranch(value);
}
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
- return new LClassOfTestAndBranch(UseRegister(instr->value()),
- TempRegister(),
- TempRegister());
+ LOperand* value = UseRegister(instr->value());
+ return new(zone()) LClassOfTestAndBranch(value,
+ TempRegister(),
+ TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object);
+ LValueOf* result = new(zone()) LValueOf(object);
return DefineSameAsFirst(result);
}
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* object = UseFixed(instr->value(), rax);
+ LDateField* result = new LDateField(object, instr->index());
+ return MarkAsCall(DefineFixed(result, rax), instr);
+}
+
+
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(
- UseRegisterOrConstantAtStart(instr->index()),
- Use(instr->length())));
+ LOperand* value = UseRegisterOrConstantAtStart(instr->index());
+ LOperand* length = Use(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1600,7 +1624,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), rax);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1623,7 +1647,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
@@ -1632,10 +1656,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (needs_check) {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
- LTaggedToI* res = new LTaggedToI(value, xmm_temp);
+ LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
} else {
- return DefineSameAsFirst(new LSmiUntag(value, needs_check));
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1645,26 +1669,27 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that temp and result_temp are different registers.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
return AssignPointerMap(Define(result, result_temp));
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- return AssignEnvironment(DefineAsRegister(new LDoubleToI(value)));
+ return AssignEnvironment(DefineAsRegister(new(zone()) LDoubleToI(value)));
}
} else if (from.IsInteger32()) {
if (to.IsTagged()) {
HValue* val = instr->value();
LOperand* value = UseRegister(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineSameAsFirst(new LSmiTag(value));
+ return DefineSameAsFirst(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
}
} else {
ASSERT(to.IsDouble());
- return DefineAsRegister(new LInteger32ToDouble(Use(instr->value())));
+ LOperand* value = Use(instr->value());
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
@@ -1674,39 +1699,39 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckInstanceType* result = new LCheckInstanceType(value);
+ LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp = TempRegister();
- LCheckPrototypeMaps* result = new LCheckPrototypeMaps(temp);
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckMap* result = new LCheckMap(value);
+ LCheckMap* result = new(zone()) LCheckMap(value);
return AssignEnvironment(result);
}
@@ -1716,62 +1741,36 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
- return DefineAsRegister(new LClampDToUint8(reg,
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg,
TempRegister()));
} else if (input_rep.IsInteger32()) {
- return DefineSameAsFirst(new LClampIToUint8(reg));
+ return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve xmm1 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg,
- TempRegister(),
- FixedTemp(xmm1));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
+ TempRegister(),
+ FixedTemp(xmm1));
return AssignEnvironment(DefineSameAsFirst(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- return AssignEnvironment(DefineAsRegister(new LDoubleToI(reg)));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* reg = UseRegister(value);
- // Register allocator doesn't (yet) support allocation of double
- // temps. Reserve xmm1 explicitly.
- LOperand* xmm_temp =
- CpuFeatures::IsSupported(SSE3)
- ? NULL
- : FixedTemp(xmm1);
- return AssignEnvironment(
- DefineSameAsFirst(new LTaggedToI(reg, xmm_temp)));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), rax));
+ return new(zone()) LReturn(UseFixed(instr->value(), rax));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
LOperand* temp = TempRegister();
- return DefineAsRegister(new LConstantD(temp));
+ return DefineAsRegister(new(zone()) LConstantD(temp));
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1780,7 +1779,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1789,7 +1788,7 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), rax);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1799,22 +1798,24 @@ LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
// Use a temp to avoid reloading the cell value address in the case where
// we perform a hole check.
return instr->RequiresHoleCheck()
- ? AssignEnvironment(new LStoreGlobalCell(value, TempRegister()))
- : new LStoreGlobalCell(value, NULL);
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), rdx);
LOperand* value = UseFixed(instr->value(), rax);
- LStoreGlobalGeneric* result = new LStoreGlobalGeneric(global_object, value);
+ LStoreGlobalGeneric* result = new(zone()) LStoreGlobalGeneric(global_object,
+ value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- LInstruction* result = DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1832,7 +1833,7 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
value = UseRegister(instr->value());
temp = NULL;
}
- LInstruction* result = new LStoreContextSlot(context, value, temp);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1840,7 +1841,7 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
ASSERT(instr->representation().IsTagged());
LOperand* obj = UseRegisterAtStart(instr->object());
- return DefineAsRegister(new LLoadNamedField(obj));
+ return DefineAsRegister(new(zone()) LLoadNamedField(obj));
}
@@ -1849,11 +1850,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), rax);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, rax), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1861,7 +1864,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rax);
- LLoadNamedGeneric* result = new LLoadNamedGeneric(object);
+ LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1869,20 +1872,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1892,7 +1895,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
if (instr->RequiresHoleCheck()) AssignEnvironment(result);
return DefineAsRegister(result);
}
@@ -1905,7 +1908,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseRegisterAtStart(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1924,7 +1927,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1937,7 +1940,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rdx);
LOperand* key = UseFixed(instr->key(), rax);
- LLoadKeyedGeneric* result = new LLoadKeyedGeneric(object, key);
+ LLoadKeyedGeneric* result = new(zone()) LLoadKeyedGeneric(object, key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1956,7 +1959,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastElement(obj, key, val);
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1970,7 +1973,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
@@ -1996,9 +1999,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -2011,7 +2014,8 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- LStoreKeyedGeneric* result = new LStoreKeyedGeneric(object, key, value);
+ LStoreKeyedGeneric* result =
+ new(zone()) LStoreKeyedGeneric(object, key, value);
return MarkAsCall(result, instr);
}
@@ -2024,14 +2028,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, temp_reg);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, temp_reg);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), rax);
LOperand* fixed_object_reg = FixedTemp(rdx);
LOperand* new_map_reg = FixedTemp(rbx);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, rax), instr);
}
}
@@ -2053,7 +2059,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
? TempRegister() : NULL;
- return new LStoreNamedField(obj, val, temp);
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2061,7 +2067,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rdx);
LOperand* value = UseFixed(instr->value(), rax);
- LStoreNamedGeneric* result = new LStoreNamedGeneric(object, value);
+ LStoreNamedGeneric* result = new(zone()) LStoreNamedGeneric(object, value);
return MarkAsCall(result, instr);
}
@@ -2069,60 +2075,67 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseOrConstantAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), rax),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new LAllocateObject(TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFastLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
- LDeleteProperty* result =
- new LDeleteProperty(UseAtStart(instr->object()),
- UseOrConstantAtStart(instr->key()));
+ LOperand* object = UseAtStart(instr->object());
+ LOperand* key = UseOrConstantAtStart(instr->key());
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -2130,13 +2143,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2146,13 +2159,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr);
}
@@ -2169,32 +2182,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = Use(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), rax);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseAtStart(instr->value()));
+ LTypeof* result = new(zone()) LTypeof(UseAtStart(instr->value()));
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2217,7 +2231,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LLazyBailout* lazy_bailout = new LLazyBailout;
+ LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2231,10 +2245,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2246,7 +2260,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2264,14 +2279,14 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseOrConstantAtStart(instr->key());
LOperand* object = UseOrConstantAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LOperand* object = UseFixed(instr->enumerable(), rax);
- LForInPrepareMap* result = new LForInPrepareMap(object);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -2279,21 +2294,21 @@ LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
return AssignEnvironment(DefineAsRegister(
- new LForInCacheArray(map)));
+ new(zone()) LForInCacheArray(map)));
}
LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* map = UseRegisterAtStart(instr->map());
- return AssignEnvironment(new LCheckMapValue(value, map));
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
}
LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* index = UseTempRegister(instr->index());
- return DefineSameAsFirst(new LLoadFieldByIndex(object, index));
+ return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
}
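
A note on the pattern running through the lithium-x64.cc hunks above: every lithium instruction is now allocated with the placement form new(zone()) LFoo(...), so the object is carved out of the chunk builder's zone and released wholesale with it, presumably to avoid the per-allocation lookup of the current isolate's zone that the plain operator new form implies. The following is a minimal stand-alone sketch of that placement-new-into-an-arena idea, with invented names (Arena, ToyInstruction), not V8's Zone or LInstruction:

// Illustrative only: a toy arena with a placement operator new, mirroring
// the new(zone()) pattern introduced throughout the hunks above.
#include <cstddef>
#include <cstdint>
#include <vector>

class Arena {
 public:
  ~Arena() { for (uint8_t* b : blocks_) delete[] b; }
  void* Allocate(std::size_t size) {
    // A real zone bumps a pointer inside large blocks; one block per
    // allocation keeps the sketch short. Everything dies with the arena.
    blocks_.push_back(new uint8_t[size]);
    return blocks_.back();
  }
 private:
  std::vector<uint8_t*> blocks_;
};

// The placement form used as new(&arena) T(...): route the allocation
// into the arena instead of the global heap.
inline void* operator new(std::size_t size, Arena* arena) {
  return arena->Allocate(size);
}

struct ToyInstruction {
  explicit ToyInstruction(int operand) : operand_(operand) {}
  int operand_;
};

int main() {
  Arena zone;
  // Same shape as "new(zone()) LCheckSmi(value)" in the diff: no matching
  // delete is ever written because the arena reclaims everything at once.
  ToyInstruction* instr = new (&zone) ToyInstruction(42);
  return instr->operand_ == 42 ? 0 : 1;
}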
diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h
index b91cbc457a3..3d5d8548cba 100644
--- a/deps/v8/src/x64/lithium-x64.h
+++ b/deps/v8/src/x64/lithium-x64.h
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -176,7 +177,8 @@ class LCodeGen;
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
- V(LoadFieldByIndex)
+ V(LoadFieldByIndex) \
+ V(DateField)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -986,6 +988,22 @@ class LValueOf: public LTemplateInstruction<1, 1, 0> {
};
+class LDateField: public LTemplateInstruction<1, 1, 0> {
+ public:
+ LDateField(LOperand* date, Smi* index) : index_(index) {
+ inputs_[0] = date;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(ValueOf)
+
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
@@ -1910,6 +1928,17 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 1> {
+ public:
+ explicit LAllocateObject(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
@@ -2173,6 +2202,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2202,6 +2232,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2310,6 +2341,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
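
For context on the header changes above: LChunkBuilder now caches a Zone* taken from graph->isolate()->zone() and exposes it as zone(), which is what the new(zone()) calls in the .cc hunks use, and two instruction classes are added, LAllocateObject and LDateField. The LTemplateInstruction<R, I, T> base sizes its embedded operand arrays from the template arguments, so LAllocateObject's <1, 0, 1> means one result, no inputs, one temp register. A rough sketch of that fixed-arity idea, with invented names and without V8's actual machinery:

// Hypothetical sketch: FixedArityInstruction and Operand are invented.
struct Operand { int index = -1; };

template <int kResults, int kInputs, int kTemps>
class FixedArityInstruction {
 public:
  Operand* InputAt(int i) { return inputs_[i]; }
  Operand* TempAt(int i) { return temps_[i]; }
 protected:
  // A zero count still needs storage in this sketch; V8 uses an embedded
  // container type for this, the ternary here just keeps the sketch valid.
  Operand* results_[kResults == 0 ? 1 : kResults];
  Operand* inputs_[kInputs == 0 ? 1 : kInputs];
  Operand* temps_[kTemps == 0 ? 1 : kTemps];
};

// Mirrors the LAllocateObject declaration: one result, no inputs, one temp.
class AllocateObjectSketch : public FixedArityInstruction<1, 0, 1> {
 public:
  explicit AllocateObjectSketch(Operand* temp) { temps_[0] = temp; }
};

int main() {
  Operand temp;
  AllocateObjectSketch instr(&temp);
  return instr.TempAt(0) == &temp ? 0 : 1;
}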
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
index 16730d21bf0..773fc4c16cc 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -1192,7 +1192,7 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
intptr_t delta = code_handle->address() - re_code->address();
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index 0e7f20676f7..18cb3c062a0 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -43,32 +43,61 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // The offset is scaled by 4, based on
+ // kHeapObjectTagSize, which is two bits
Register offset) {
- ASSERT_EQ(8, kPointerSize);
- ASSERT_EQ(16, sizeof(StubCache::Entry));
+ // We need to scale up the pointer by 2 because the offset is scaled by less
+ // than the pointer size.
+ ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
+ ScaleFactor scale_factor = times_2;
+
+ ASSERT_EQ(24, sizeof(StubCache::Entry));
// The offset register holds the entry offset times four (due to masking
// and shifting optimizations).
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+ ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
Label miss;
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ lea(offset, Operand(offset, offset, times_2, 0));
+
__ LoadAddress(kScratchRegister, key_offset);
+
// Check that the key in the entry matches the name.
// Multiply entry offset by 16 to get the entry address. Since the
// offset register already holds the entry offset times four, multiply
// by a further four.
- __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
+ __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
__ j(not_equal, &miss);
+
+ // Get the map entry from the cache.
+ // Use key_offset + kPointerSize * 2, rather than loading map_offset.
+ __ movq(kScratchRegister,
+ Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
+ __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ j(not_equal, &miss);
+
// Get the code entry from the cache.
- // Use key_offset + kPointerSize, rather than loading value_offset.
+ __ LoadAddress(kScratchRegister, value_offset);
__ movq(kScratchRegister,
- Operand(kScratchRegister, offset, times_4, kPointerSize));
+ Operand(kScratchRegister, offset, scale_factor, 0));
+
// Check that the flags match what we're looking for.
__ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
__ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
__ cmpl(offset, Immediate(flags));
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Jump to the first instruction in the code stub.
__ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(kScratchRegister);
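
A quick way to sanity-check the new addressing math in ProbeTable above: the incoming offset register holds entry_index * 4 (the probe hash is masked with (table_size - 1) << kHeapObjectTagSize and the tag is two bits), the lea multiplies it by 3, and the times_2 scale factor in each Operand doubles it again, giving entry_index * 24, which matches the new three-word {key, value, map} entry; the map word is then read at the key base plus 2 * kPointerSize. The following is just that arithmetic as a stand-alone check, not V8 code:

// Assumes 8-byte pointers, a 2-bit heap object tag, 24-byte entries.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kPointerSize = 8;
  const uint32_t kHeapObjectTagSize = 2;
  const uint32_t kEntrySize = 3 * kPointerSize;  // name, code, map: 24 bytes.

  for (uint32_t index = 0; index < 512; index++) {
    // The hash is masked with (table_size - 1) << kHeapObjectTagSize, so
    // the offset register arrives holding index * 4.
    uint32_t offset = index << kHeapObjectTagSize;
    // lea(offset, Operand(offset, offset, times_2, 0)): offset *= 3.
    offset = offset + offset * 2;
    // Every Operand then applies scale_factor == times_2, a further * 2.
    uint32_t key_disp = offset * 2;
    uint32_t map_disp = offset * 2 + kPointerSize * 2;
    assert(key_disp == index * kEntrySize);
    assert(map_disp == index * kEntrySize + 2 * kPointerSize);
  }
  return 0;
}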
@@ -134,14 +163,16 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
USE(extra); // The register extra is not used on the X64 platform.
USE(extra2); // The register extra2 is not used on the X64 platform.
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 16.
- ASSERT(sizeof(Entry) == 16);
+ USE(extra3); // The register extra3 is not used on the X64 platform.
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 24.
+ ASSERT(sizeof(Entry) == 24);
// Make sure the flags do not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -153,6 +184,10 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Check scratch register is valid, extra and extra2 are unused.
ASSERT(!scratch.is(no_reg));
ASSERT(extra2.is(no_reg));
+ ASSERT(extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
@@ -162,10 +197,12 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Use only the low 32 bits of the map pointer.
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(scratch, Immediate(flags));
+ // We mask out the last two bits because they are not part of the hash and
+ // they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
// Probe the primary table.
- ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
+ ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
// Primary miss: Compute hash for secondary probe.
__ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -177,11 +214,12 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
__ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
// Probe the secondary table.
- ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
+ ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
diff --git a/deps/v8/test/cctest/SConscript b/deps/v8/test/cctest/SConscript
index edb859e9cbc..bcd1e987c3b 100644
--- a/deps/v8/test/cctest/SConscript
+++ b/deps/v8/test/cctest/SConscript
@@ -1,4 +1,4 @@
-# Copyright 2008 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -62,6 +62,7 @@ SOURCES = {
'test-conversions.cc',
'test-cpu-profiler.cc',
'test-dataflow.cc',
+ 'test-date.cc',
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
@@ -86,6 +87,7 @@ SOURCES = {
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
+ 'test-random.cc',
'test-regexp.cc',
'test-reloc-info.cc',
'test-serialize.cc',
diff --git a/deps/v8/test/cctest/cctest.gyp b/deps/v8/test/cctest/cctest.gyp
index 3b8f4f653d7..a242fe3c851 100644
--- a/deps/v8/test/cctest/cctest.gyp
+++ b/deps/v8/test/cctest/cctest.gyp
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -57,6 +57,7 @@
'test-conversions.cc',
'test-cpu-profiler.cc',
'test-dataflow.cc',
+ 'test-date.cc',
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
@@ -81,6 +82,7 @@
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
+ 'test-random.cc',
'test-regexp.cc',
'test-reloc-info.cc',
'test-serialize.cc',
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index a7e45d18ef5..5137c656376 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -450,8 +450,7 @@ THREADED_TEST(ScriptMakingExternalString) {
CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
- // TODO(1608): This should use kAbortIncrementalMarking.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}
@@ -477,8 +476,7 @@ THREADED_TEST(ScriptMakingExternalAsciiString) {
CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
- // TODO(1608): This should use kAbortIncrementalMarking.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}
@@ -2253,9 +2251,8 @@ THREADED_TEST(ApiObjectGroups) {
V8::AddObjectGroup(g2_objects, 2);
V8::AddImplicitReferences(g2s2, g2_children, 1);
}
- // Do a single full GC. Use kMakeHeapIterableMask to ensure that
- // incremental garbage collection is stopped.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ // Do a single full GC, ensure incremental marking is stopped.
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@@ -2279,7 +2276,7 @@ THREADED_TEST(ApiObjectGroups) {
V8::AddImplicitReferences(g2s2, g2_children, 1);
}
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@@ -2288,7 +2285,7 @@ THREADED_TEST(ApiObjectGroups) {
g1c1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
g2c1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(7, counter.NumberOfWeakCalls());
}
@@ -2344,7 +2341,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
V8::AddImplicitReferences(g3s1, g3_children, 1);
}
// Do a single full GC
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@@ -2368,7 +2365,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
V8::AddImplicitReferences(g3s1, g3_children, 1);
}
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All objects should be gone. 7 global handles in total.
CHECK_EQ(7, counter.NumberOfWeakCalls());
@@ -12910,7 +12907,7 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
if (array_type == v8::kExternalDoubleArray ||
array_type == v8::kExternalFloatArray) {
CHECK_EQ(
- static_cast<int>(0x80000000),
+ static_cast<int>(i::OS::nan_value()),
static_cast<int>(jsobj->GetElement(7)->ToObjectChecked()->Number()));
} else {
CHECK_EQ(0, static_cast<int>(
@@ -16102,3 +16099,72 @@ TEST(CallCompletedCallbackTwoExceptions) {
v8::V8::AddCallCompletedCallback(CallCompletedCallbackException);
CompileRun("throw 'first exception';");
}
+
+
+static int probes_counter = 0;
+static int misses_counter = 0;
+static int updates_counter = 0;
+
+
+static int* LookupCounter(const char* name) {
+ if (strcmp(name, "c:V8.MegamorphicStubCacheProbes") == 0) {
+ return &probes_counter;
+ } else if (strcmp(name, "c:V8.MegamorphicStubCacheMisses") == 0) {
+ return &misses_counter;
+ } else if (strcmp(name, "c:V8.MegamorphicStubCacheUpdates") == 0) {
+ return &updates_counter;
+ }
+ return NULL;
+}
+
+
+static const char* kMegamorphicTestProgram =
+ "function ClassA() { };"
+ "function ClassB() { };"
+ "ClassA.prototype.foo = function() { };"
+ "ClassB.prototype.foo = function() { };"
+ "function fooify(obj) { obj.foo(); };"
+ "var a = new ClassA();"
+ "var b = new ClassB();"
+ "for (var i = 0; i < 10000; i++) {"
+ " fooify(a);"
+ " fooify(b);"
+ "}";
+
+
+static void StubCacheHelper(bool primary) {
+ V8::SetCounterFunction(LookupCounter);
+ USE(kMegamorphicTestProgram);
+#ifdef DEBUG
+ i::FLAG_native_code_counters = true;
+ if (primary) {
+ i::FLAG_test_primary_stub_cache = true;
+ } else {
+ i::FLAG_test_secondary_stub_cache = true;
+ }
+ i::FLAG_crankshaft = false;
+ v8::HandleScope scope;
+ LocalContext env;
+ int initial_probes = probes_counter;
+ int initial_misses = misses_counter;
+ int initial_updates = updates_counter;
+ CompileRun(kMegamorphicTestProgram);
+ int probes = probes_counter - initial_probes;
+ int misses = misses_counter - initial_misses;
+ int updates = updates_counter - initial_updates;
+ CHECK_LT(updates, 10);
+ CHECK_LT(misses, 10);
+ CHECK_GE(probes, 10000);
+#endif
+}
+
+
+TEST(SecondaryStubCache) {
+ StubCacheHelper(true);
+}
+
+
+TEST(PrimaryStubCache) {
+ StubCacheHelper(false);
+}
+
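
The StubCacheHelper test above leans on the counters added in stub-cache-x64.cc: the fooify call site sees two hidden classes, goes megamorphic, and from then on every call probes the stub cache, so 20,000 calls should register well over 10,000 probes while misses and updates stay in single digits once both entries are installed. The toy model below is not V8's stub cache, just a direct-mapped (map, name) -> code table with invented names, to make those expected counter relationships concrete:

// Toy sketch only: Map, Entry, ToyStubCache and the sizes are invented.
#include <cassert>
#include <cstddef>
#include <functional>
#include <string>

struct Map { int id; };
struct Entry { const Map* map = nullptr; std::string name; int code = 0; };

class ToyStubCache {
 public:
  int Lookup(const Map* map, const std::string& name, int slow_path_code) {
    probes++;
    Entry& e = table_[Slot(map, name)];
    if (e.map == map && e.name == name) return e.code;  // Cache hit.
    misses++;
    updates++;
    e.map = map;
    e.name = name;
    e.code = slow_path_code;  // Install so the next probe hits.
    return slow_path_code;
  }
  int probes = 0, misses = 0, updates = 0;

 private:
  size_t Slot(const Map* map, const std::string& name) const {
    return (static_cast<size_t>(map->id) * 31 +
            std::hash<std::string>()(name)) % kSize;
  }
  static const size_t kSize = 2048;
  Entry table_[kSize];
};

int main() {
  ToyStubCache cache;
  Map class_a{1}, class_b{2};  // Stand-ins for the two hidden classes.
  for (int i = 0; i < 10000; i++) {
    cache.Lookup(&class_a, "foo", 1);
    cache.Lookup(&class_b, "foo", 2);
  }
  assert(cache.probes == 20000);
  assert(cache.misses == 2);   // One cold miss per map, then hits only.
  assert(cache.updates == 2);
  return 0;
}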
diff --git a/deps/v8/test/cctest/test-date.cc b/deps/v8/test/cctest/test-date.cc
new file mode 100644
index 00000000000..903a63a5d96
--- /dev/null
+++ b/deps/v8/test/cctest/test-date.cc
@@ -0,0 +1,168 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "global-handles.h"
+#include "snapshot.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+class DateCacheMock: public DateCache {
+ public:
+ struct Rule {
+ int year, start_month, start_day, end_month, end_day, offset_sec;
+ };
+
+ DateCacheMock(int local_offset, Rule* rules, int rules_count)
+ : local_offset_(local_offset), rules_(rules), rules_count_(rules_count) {}
+
+ protected:
+ virtual int GetDaylightSavingsOffsetFromOS(int64_t time_sec) {
+ int days = DaysFromTime(time_sec * 1000);
+ int time_in_day_sec = TimeInDay(time_sec * 1000, days) / 1000;
+ int year, month, day;
+ YearMonthDayFromDays(days, &year, &month, &day);
+ Rule* rule = FindRuleFor(year, month, day, time_in_day_sec);
+ return rule == NULL ? 0 : rule->offset_sec * 1000;
+ }
+
+
+ virtual int GetLocalOffsetFromOS() {
+ return local_offset_;
+ }
+
+ private:
+ Rule* FindRuleFor(int year, int month, int day, int time_in_day_sec) {
+ Rule* result = NULL;
+ for (int i = 0; i < rules_count_; i++)
+ if (Match(&rules_[i], year, month, day, time_in_day_sec)) {
+ result = &rules_[i];
+ }
+ return result;
+ }
+
+
+ bool Match(Rule* rule, int year, int month, int day, int time_in_day_sec) {
+ if (rule->year != 0 && rule->year != year) return false;
+ if (rule->start_month > month) return false;
+ if (rule->end_month < month) return false;
+ int start_day = ComputeRuleDay(year, rule->start_month, rule->start_day);
+ if (rule->start_month == month && start_day > day) return false;
+ if (rule->start_month == month && start_day == day &&
+ 2 * 3600 > time_in_day_sec)
+ return false;
+ int end_day = ComputeRuleDay(year, rule->end_month, rule->end_day);
+ if (rule->end_month == month && end_day < day) return false;
+ if (rule->end_month == month && end_day == day &&
+ 2 * 3600 <= time_in_day_sec)
+ return false;
+ return true;
+ }
+
+
+ int ComputeRuleDay(int year, int month, int day) {
+ if (day != 0) return day;
+ int days = DaysFromYearMonth(year, month);
+ // Find the first Sunday of the month.
+ while (Weekday(days + day) != 6) day++;
+ return day + 1;
+ }
+
+ int local_offset_;
+ Rule* rules_;
+ int rules_count_;
+};
+
+static int64_t TimeFromYearMonthDay(DateCache* date_cache,
+ int year,
+ int month,
+ int day) {
+ int64_t result = date_cache->DaysFromYearMonth(year, month);
+ return (result + day - 1) * DateCache::kMsPerDay;
+}
+
+static void CheckDST(int64_t time) {
+ Isolate* isolate = Isolate::Current();
+ DateCache* date_cache = isolate->date_cache();
+ int64_t actual = date_cache->ToLocal(time);
+ int64_t expected = time + date_cache->GetLocalOffsetFromOS() +
+ date_cache->GetDaylightSavingsOffsetFromOS(time / 1000);
+ CHECK_EQ(actual, expected);
+}
+
+
+TEST(DaylightSavingsTime) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ DateCacheMock::Rule rules[] = {
+ {0, 2, 0, 10, 0, 3600}, // DST from March to November in any year.
+ {2010, 2, 0, 7, 20, 3600}, // DST from March to August 20 in 2010.
+ {2010, 7, 20, 8, 10, 0}, // No DST from August 20 to September 10 in 2010.
+ {2010, 8, 10, 10, 0, 3600}, // DST from September 10 to November in 2010.
+ };
+
+ int local_offset_ms = -36000000; // -10 hours.
+
+ DateCacheMock* date_cache =
+ new DateCacheMock(local_offset_ms, rules, ARRAY_SIZE(rules));
+
+ isolate->set_date_cache(date_cache);
+
+ int64_t start_of_2010 = TimeFromYearMonthDay(date_cache, 2010, 0, 1);
+ int64_t start_of_2011 = TimeFromYearMonthDay(date_cache, 2011, 0, 1);
+ int64_t august_20 = TimeFromYearMonthDay(date_cache, 2010, 7, 20);
+ int64_t september_10 = TimeFromYearMonthDay(date_cache, 2010, 8, 10);
+ CheckDST((august_20 + september_10) / 2);
+ CheckDST(september_10);
+ CheckDST(september_10 + 2 * 3600);
+ CheckDST(september_10 + 2 * 3600 - 1000);
+ CheckDST(august_20 + 2 * 3600);
+ CheckDST(august_20 + 2 * 3600 - 1000);
+ CheckDST(august_20);
+ // Check each day of 2010.
+ for (int64_t time = start_of_2011 + 2 * 3600;
+ time >= start_of_2010;
+ time -= DateCache::kMsPerDay) {
+ CheckDST(time);
+ CheckDST(time - 1000);
+ CheckDST(time + 1000);
+ }
+ // Check one day in each year from 2010 to 2100.
+ for (int year = 2100; year >= 2010; year--) {
+ CheckDST(TimeFromYearMonthDay(date_cache, year, 5, 5));
+ }
+ CheckDST((august_20 + september_10) / 2);
+ CheckDST(september_10);
+ CheckDST(september_10 + 2 * 3600);
+ CheckDST(september_10 + 2 * 3600 - 1000);
+ CheckDST(august_20 + 2 * 3600);
+ CheckDST(august_20 + 2 * 3600 - 1000);
+ CheckDST(august_20);
+}
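
The DateCacheMock above resolves the daylight-saving offset by scanning its rule table and keeping the last matching rule, so the 2010-specific entries override the generic March-to-November rule, and CheckDST then expects ToLocal(time) to equal time plus the local offset plus that DST offset. Below is a simplified stand-alone model of the last-match-wins resolution only, with the day fields taken as literal dates and the first-Sunday and 2 a.m. transition handling in ComputeRuleDay/Match deliberately left out:

#include <cassert>

struct Rule {
  int year, start_month, start_day, end_month, end_day, offset_sec;
};

// Same data as the test, but with the day-zero entries (meaning "first
// Sunday of the month" in the mock) replaced by literal day 1.
static const Rule kRules[] = {
  {0,    2, 1,  10, 1,  3600},  // DST from March to November in any year.
  {2010, 2, 1,  7,  20, 3600},  // DST from March to August 20 in 2010.
  {2010, 7, 20, 8,  10, 0},     // No DST from August 20 to September 10.
  {2010, 8, 10, 10, 1,  3600},  // DST from September 10 to November in 2010.
};

static bool Matches(const Rule& r, int year, int month, int day) {
  if (r.year != 0 && r.year != year) return false;
  if (month < r.start_month || month > r.end_month) return false;
  if (month == r.start_month && day < r.start_day) return false;
  if (month == r.end_month && day > r.end_day) return false;
  return true;
}

static int OffsetSecFor(int year, int month, int day) {
  int offset = 0;  // No matching rule means no DST, as in FindRuleFor.
  for (const Rule& r : kRules) {
    if (Matches(r, year, month, day)) offset = r.offset_sec;  // Last wins.
  }
  return offset;
}

int main() {
  assert(OffsetSecFor(2011, 6, 1) == 3600);   // Generic rule: July 2011.
  assert(OffsetSecFor(2010, 7, 25) == 0);     // 2010 gap in late August.
  assert(OffsetSecFor(2010, 8, 15) == 3600);  // DST back after September 10.
  assert(OffsetSecFor(2011, 0, 1) == 0);      // January: no rule applies.
  return 0;
}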
diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc
index 783c36d1de4..ffa845813f8 100644
--- a/deps/v8/test/cctest/test-debug.cc
+++ b/deps/v8/test/cctest/test-debug.cc
@@ -2745,7 +2745,7 @@ TEST(DebugStepKeyedLoadLoop) {
foo->Call(env->Global(), kArgc, args);
// With stepping all break locations are hit.
- CHECK_EQ(33, break_point_hit_count);
+ CHECK_EQ(34, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2792,7 +2792,7 @@ TEST(DebugStepKeyedStoreLoop) {
foo->Call(env->Global(), kArgc, args);
// With stepping all break locations are hit.
- CHECK_EQ(32, break_point_hit_count);
+ CHECK_EQ(33, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2836,7 +2836,7 @@ TEST(DebugStepNamedLoadLoop) {
foo->Call(env->Global(), 0, NULL);
// With stepping all break locations are hit.
- CHECK_EQ(53, break_point_hit_count);
+ CHECK_EQ(54, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2880,7 +2880,7 @@ static void DoDebugStepNamedStoreLoop(int expected) {
// Test of the stepping mechanism for named load in a loop.
TEST(DebugStepNamedStoreLoop) {
- DoDebugStepNamedStoreLoop(22);
+ DoDebugStepNamedStoreLoop(23);
}
@@ -3252,7 +3252,7 @@ TEST(DebugStepForContinue) {
v8::Handle<v8::Value> argv_10[argc] = { v8::Number::New(10) };
result = foo->Call(env->Global(), argc, argv_10);
CHECK_EQ(5, result->Int32Value());
- CHECK_EQ(50, break_point_hit_count);
+ CHECK_EQ(51, break_point_hit_count);
// Looping 100 times.
step_action = StepIn;
@@ -3260,7 +3260,7 @@ TEST(DebugStepForContinue) {
v8::Handle<v8::Value> argv_100[argc] = { v8::Number::New(100) };
result = foo->Call(env->Global(), argc, argv_100);
CHECK_EQ(50, result->Int32Value());
- CHECK_EQ(455, break_point_hit_count);
+ CHECK_EQ(456, break_point_hit_count);
// Get rid of the debug event listener.
v8::Debug::SetDebugEventListener(NULL);
@@ -3304,7 +3304,7 @@ TEST(DebugStepForBreak) {
v8::Handle<v8::Value> argv_10[argc] = { v8::Number::New(10) };
result = foo->Call(env->Global(), argc, argv_10);
CHECK_EQ(9, result->Int32Value());
- CHECK_EQ(53, break_point_hit_count);
+ CHECK_EQ(54, break_point_hit_count);
// Looping 100 times.
step_action = StepIn;
@@ -3312,7 +3312,7 @@ TEST(DebugStepForBreak) {
v8::Handle<v8::Value> argv_100[argc] = { v8::Number::New(100) };
result = foo->Call(env->Global(), argc, argv_100);
CHECK_EQ(99, result->Int32Value());
- CHECK_EQ(503, break_point_hit_count);
+ CHECK_EQ(504, break_point_hit_count);
// Get rid of the debug event listener.
v8::Debug::SetDebugEventListener(NULL);
@@ -7208,10 +7208,10 @@ static void TestDebugBreakInLoop(const char* loop_head,
// Receive 100 breaks for each test and then terminate JavaScript execution.
static const int kBreaksPerTest = 100;
- for (int i = 0; i < 1 && loop_bodies[i] != NULL; i++) {
+ for (int i = 0; loop_bodies[i] != NULL; i++) {
// Perform a lazy deoptimization after various numbers of breaks
// have been hit.
- for (int j = 0; j < 10; j++) {
+ for (int j = 0; j < 11; j++) {
break_point_hit_count_deoptimize = j;
if (j == 10) {
break_point_hit_count_deoptimize = kBreaksPerTest;
diff --git a/deps/v8/test/cctest/test-deoptimization.cc b/deps/v8/test/cctest/test-deoptimization.cc
index ee57d65b242..c52c5788dbc 100644
--- a/deps/v8/test/cctest/test-deoptimization.cc
+++ b/deps/v8/test/cctest/test-deoptimization.cc
@@ -100,8 +100,7 @@ class AllowNativesSyntaxNoInlining {
// Abort any ongoing incremental marking to make sure that all weak global
// handle callbacks are processed.
static void NonIncrementalGC() {
- // TODO(1608): This should use kAbortIncrementalMarking.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
}
diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc
index a6f04b308d0..7a227cdf9b2 100644
--- a/deps/v8/test/cctest/test-heap-profiler.cc
+++ b/deps/v8/test/cctest/test-heap-profiler.cc
@@ -18,14 +18,30 @@ class NamedEntriesDetector {
: has_A2(false), has_B2(false), has_C2(false) {
}
- void Apply(i::HeapEntry** entry_ptr) {
- if (IsReachableNodeWithName(*entry_ptr, "A2")) has_A2 = true;
- if (IsReachableNodeWithName(*entry_ptr, "B2")) has_B2 = true;
- if (IsReachableNodeWithName(*entry_ptr, "C2")) has_C2 = true;
+ void CheckEntry(i::HeapEntry* entry) {
+ if (strcmp(entry->name(), "A2") == 0) has_A2 = true;
+ if (strcmp(entry->name(), "B2") == 0) has_B2 = true;
+ if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
}
- static bool IsReachableNodeWithName(i::HeapEntry* entry, const char* name) {
- return strcmp(name, entry->name()) == 0 && entry->painted_reachable();
+ void CheckAllReachables(i::HeapEntry* root) {
+ i::List<i::HeapEntry*> list(10);
+ list.Add(root);
+ root->paint();
+ CheckEntry(root);
+ while (!list.is_empty()) {
+ i::HeapEntry* entry = list.RemoveLast();
+ i::Vector<i::HeapGraphEdge> children = entry->children();
+ for (int i = 0; i < children.length(); ++i) {
+ if (children[i].type() == i::HeapGraphEdge::kShortcut) continue;
+ i::HeapEntry* child = children[i].to();
+ if (!child->painted()) {
+ list.Add(child);
+ child->paint();
+ CheckEntry(child);
+ }
+ }
+ }
}
bool has_A2;
@@ -90,10 +106,6 @@ TEST(HeapSnapshot) {
const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2);
- // Paint all nodes reachable from global object.
- i_snapshot_env2->ClearPaint();
- const_cast<i::HeapEntry*>(
- reinterpret_cast<const i::HeapEntry*>(global_env2))->PaintAllReachable();
// Verify, that JS global object of env2 has '..2' properties.
const v8::HeapGraphNode* a2_node =
@@ -105,8 +117,11 @@ TEST(HeapSnapshot) {
NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "b2_2"));
CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "c2"));
+ // Paint all nodes reachable from global object.
NamedEntriesDetector det;
- i_snapshot_env2->IterateEntries(&det);
+ i_snapshot_env2->ClearPaint();
+ det.CheckAllReachables(const_cast<i::HeapEntry*>(
+ reinterpret_cast<const i::HeapEntry*>(global_env2)));
CHECK(det.has_A2);
CHECK(det.has_B2);
CHECK(det.has_C2);
@@ -136,14 +151,10 @@ TEST(HeapSnapshotObjectSizes) {
GetProperty(x, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, x2);
- // Test approximate sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(false));
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(false));
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(false));
- // Test exact sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(true));
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(true));
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(true));
+ // Test sizes.
+ CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize());
+ CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize());
+ CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize());
}
@@ -1268,3 +1279,37 @@ TEST(SfiAndJsFunctionWeakRefs) {
GetProperty(fun, v8::HeapGraphEdge::kInternal, "shared");
CHECK(HasWeakEdge(shared));
}
+
+
+TEST(PersistentHandleCount) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ // V8 also uses global handles internally, so we can't test for an absolute
+ // number.
+ int global_handle_count = v8::HeapProfiler::GetPersistentHandleCount();
+
+ // Create some persistent handles.
+ v8::Persistent<v8::String> p_AAA =
+ v8::Persistent<v8::String>::New(v8_str("AAA"));
+ CHECK_EQ(global_handle_count + 1,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ v8::Persistent<v8::String> p_BBB =
+ v8::Persistent<v8::String>::New(v8_str("BBB"));
+ CHECK_EQ(global_handle_count + 2,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ v8::Persistent<v8::String> p_CCC =
+ v8::Persistent<v8::String>::New(v8_str("CCC"));
+ CHECK_EQ(global_handle_count + 3,
+ v8::HeapProfiler::GetPersistentHandleCount());
+
+ // Dispose the persistent handles in a different order.
+ p_AAA.Dispose();
+ CHECK_EQ(global_handle_count + 2,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ p_CCC.Dispose();
+ CHECK_EQ(global_handle_count + 1,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ p_BBB.Dispose();
+ CHECK_EQ(global_handle_count, v8::HeapProfiler::GetPersistentHandleCount());
+}
diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc
index 10bacf50432..999e2c66518 100644
--- a/deps/v8/test/cctest/test-heap.cc
+++ b/deps/v8/test/cctest/test-heap.cc
@@ -959,17 +959,17 @@ TEST(TestCodeFlushing) {
CHECK(function->shared()->is_compiled());
// TODO(1609) Currently incremental marker does not support code flushing.
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
diff --git a/deps/v8/test/cctest/test-random.cc b/deps/v8/test/cctest/test-random.cc
new file mode 100644
index 00000000000..a1f49318f5e
--- /dev/null
+++ b/deps/v8/test/cctest/test-random.cc
@@ -0,0 +1,109 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+#include "v8.h"
+
+#include "cctest.h"
+#include "compiler.h"
+#include "execution.h"
+#include "isolate.h"
+
+
+using namespace v8::internal;
+
+static v8::Persistent<v8::Context> env;
+
+
+void SetSeeds(Handle<ByteArray> seeds, uint32_t state0, uint32_t state1) {
+ for (int i = 0; i < 4; i++) {
+ seeds->set(i, static_cast<byte>(state0 >> (i * kBitsPerByte)));
+ seeds->set(i + 4, static_cast<byte>(state1 >> (i * kBitsPerByte)));
+ }
+}
+
+
+void TestSeeds(Handle<JSFunction> fun,
+ Handle<Context> context,
+ uint32_t state0,
+ uint32_t state1) {
+ bool has_pending_exception;
+ Handle<JSObject> global(context->global());
+ Handle<ByteArray> seeds(context->random_seed());
+
+ SetSeeds(seeds, state0, state1);
+ Handle<Object> value =
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ CHECK(value->IsHeapNumber());
+ CHECK(fun->IsOptimized());
+ double crankshaft_value = HeapNumber::cast(*value)->value();
+
+ SetSeeds(seeds, state0, state1);
+ V8::FillHeapNumberWithRandom(*value, *context);
+ double runtime_value = HeapNumber::cast(*value)->value();
+ CHECK_EQ(runtime_value, crankshaft_value);
+}
+
+
+TEST(CrankshaftRandom) {
+ if (env.IsEmpty()) env = v8::Context::New();
+ // Skip test if crankshaft is disabled.
+ if (!V8::UseCrankshaft()) return;
+ v8::HandleScope scope;
+ env->Enter();
+
+ Handle<Context> context(Isolate::Current()->context());
+ Handle<JSObject> global(context->global());
+ Handle<ByteArray> seeds(context->random_seed());
+ bool has_pending_exception;
+
+ CompileRun("function f() { return Math.random(); }");
+
+ Object* symbol = FACTORY->LookupAsciiSymbol("f")->ToObjectChecked();
+ MaybeObject* fun_object =
+ context->global()->GetProperty(String::cast(symbol));
+ Handle<JSFunction> fun(JSFunction::cast(fun_object->ToObjectChecked()));
+
+ // Optimize function.
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ if (!fun->IsOptimized()) fun->MarkForLazyRecompilation();
+
+ // Test with some random values.
+ TestSeeds(fun, context, 0xC0C0AFFE, 0x31415926);
+ TestSeeds(fun, context, 0x01020304, 0xFFFFFFFF);
+ TestSeeds(fun, context, 0x00000001, 0x00000000);
+
+ // Test that we bail out to runtime when seeds are uninitialized (zeros).
+ SetSeeds(seeds, 0, 0);
+ Handle<Object> value =
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ CHECK(value->IsHeapNumber());
+ CHECK(fun->IsOptimized());
+ double crankshaft_value = HeapNumber::cast(*value)->value();
+ CHECK_NE(0.0, crankshaft_value);
+}
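
One detail worth spelling out from SetSeeds/TestSeeds above: the two 32-bit random states are written byte by byte, least significant byte first, into the context's eight-byte seed array, which is what lets the test restore an identical state before the optimized call and before the runtime FillHeapNumberWithRandom call and then require bit-identical results. A plain C++ restatement of that packing, using no V8 types:

#include <cassert>
#include <cstdint>

static const int kBitsPerByte = 8;

// Mirrors SetSeeds: state0 goes into bytes 0..3, state1 into bytes 4..7,
// least significant byte first.
static void PackSeeds(uint8_t* seeds, uint32_t state0, uint32_t state1) {
  for (int i = 0; i < 4; i++) {
    seeds[i]     = static_cast<uint8_t>(state0 >> (i * kBitsPerByte));
    seeds[i + 4] = static_cast<uint8_t>(state1 >> (i * kBitsPerByte));
  }
}

// What a reader of the seed array (optimized code or runtime) recovers.
static uint32_t ReadState(const uint8_t* seeds, int offset) {
  uint32_t state = 0;
  for (int i = 0; i < 4; i++) {
    state |= static_cast<uint32_t>(seeds[offset + i]) << (i * kBitsPerByte);
  }
  return state;
}

int main() {
  uint8_t seeds[8];
  PackSeeds(seeds, 0xC0C0AFFE, 0x31415926);  // Seeds from the test.
  assert(ReadState(seeds, 0) == 0xC0C0AFFE);
  assert(ReadState(seeds, 4) == 0x31415926);
  return 0;
}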
diff --git a/deps/v8/test/mjsunit/comparison-ops-and-undefined.js b/deps/v8/test/mjsunit/comparison-ops-and-undefined.js
new file mode 100644
index 00000000000..06db076580d
--- /dev/null
+++ b/deps/v8/test/mjsunit/comparison-ops-and-undefined.js
@@ -0,0 +1,128 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function test_helper_for_ics(func, b1, b2, b3, b4) {
+ assertEquals(b1, func(.5, .5));
+ assertEquals(b2, func(.5, undefined));
+ assertEquals(b3, func(undefined, .5));
+ assertEquals(b4, func(undefined, undefined));
+}
+
+function test_helper_for_crankshaft(func, b1, b2, b3, b4) {
+ assertEquals(b1, func(.5, .5));
+ %OptimizeFunctionOnNextCall(func);
+ assertEquals(b1, func(.5, .5));
+ assertEquals(b2, func(.5, undefined));
+ assertEquals(b3, func(undefined, .5));
+ assertEquals(b4, func(undefined, undefined));
+}
+
+function less_1(a, b) {
+ return a < b;
+}
+
+test_helper_for_ics(less_1, false, false, false, false);
+
+function less_2(a, b) {
+ return a < b;
+}
+
+test_helper_for_crankshaft(less_1, false, false, false, false);
+
+function greater_1(a, b) {
+ return a > b;
+}
+
+test_helper_for_ics(greater_1, false, false, false, false);
+
+function greater_2(a, b) {
+ return a > b;
+}
+
+test_helper_for_crankshaft(greater_1, false, false, false, false);
+
+function less_equal_1(a, b) {
+ return a <= b;
+}
+
+test_helper_for_ics(less_equal_1, true, false, false, false);
+
+function less_equal_2(a, b) {
+ return a <= b;
+}
+
+test_helper_for_crankshaft(less_equal_2, true, false, false, false);
+
+function greater_equal_1(a, b) {
+ return a >= b;
+}
+
+test_helper_for_ics(greater_equal_1, true, false, false, false);
+
+function greater_equal_2(a, b) {
+ return a >= b;
+}
+
+test_helper_for_crankshaft(greater_equal_2, true, false, false, false);
+
+function equal_1(a, b) {
+ return a == b;
+}
+
+test_helper_for_ics(equal_1, true, false, false, true);
+
+function equal_2(a, b) {
+ return a == b;
+}
+
+test_helper_for_crankshaft(equal_2, true, false, false, true);
+
+function strict_equal_1(a, b) {
+ return a === b;
+}
+
+test_helper_for_ics(strict_equal_1, true, false, false, true);
+
+function strict_equal_2(a, b) {
+ return a === b;
+}
+
+test_helper_for_crankshaft(strict_equal_2, true, false, false, true);
+
+function not_equal_1(a, b) {
+ return a != b;
+}
+
+test_helper_for_ics(not_equal_1, false, true, true, false);
+
+function not_equal_2(a, b) {
+ return a != b;
+}
+
+test_helper_for_crankshaft(not_equal_2, false, true, true, false);
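The expected booleans fed to the helpers above follow directly from the language rules: for the relational operators, undefined converts to NaN and any comparison involving NaN is false, while == and === treat undefined as equal only to undefined (and, for ==, to null). A standalone sketch of the same truth tables (an illustration runnable in d8 or any shell with a print function, not part of the patch; the table helper is hypothetical):

```js
// Truth tables the test above hard-codes, reproduced without natives syntax.
function table(op) {
  return [op(0.5, 0.5), op(0.5, undefined),
          op(undefined, 0.5), op(undefined, undefined)];
}

print(table(function(a, b) { return a <   b; }));  // false,false,false,false
print(table(function(a, b) { return a <=  b; }));  // true,false,false,false
print(table(function(a, b) { return a ==  b; }));  // true,false,false,true
print(table(function(a, b) { return a === b; }));  // true,false,false,true
print(table(function(a, b) { return a !=  b; }));  // false,true,true,false
```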
diff --git a/deps/v8/test/mjsunit/compiler/alloc-object-huge.js b/deps/v8/test/mjsunit/compiler/alloc-object-huge.js
new file mode 100644
index 00000000000..d6d9f1b7218
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/alloc-object-huge.js
@@ -0,0 +1,308 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --inline-construct --nolimit-inlining
+
+// Test that huge constructors (with more than 256 assignments to this) are
+// handled correctly.
+
+// Test huge constructor when being inlined into hydrogen.
+function test() {
+ return new huge();
+}
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+var o = test();
+assertEquals(1, o.foo1);
+assertEquals(257, o.foo257);
+
+// Test huge constructor with specialized constructor stub.
+var o = new huge();
+assertEquals(1, o.foo1);
+assertEquals(257, o.foo257);
+
+// The huge constructor, nothing interesting beyond this point.
+function huge() {
+ this.foo1 = 1;
+ this.foo2 = 2;
+ this.foo3 = 3;
+ this.foo4 = 4;
+ this.foo5 = 5;
+ this.foo6 = 6;
+ this.foo7 = 7;
+ this.foo8 = 8;
+ this.foo9 = 9;
+ this.foo10 = 10;
+ this.foo11 = 11;
+ this.foo12 = 12;
+ this.foo13 = 13;
+ this.foo14 = 14;
+ this.foo15 = 15;
+ this.foo16 = 16;
+ this.foo17 = 17;
+ this.foo18 = 18;
+ this.foo19 = 19;
+ this.foo20 = 20;
+ this.foo21 = 21;
+ this.foo22 = 22;
+ this.foo23 = 23;
+ this.foo24 = 24;
+ this.foo25 = 25;
+ this.foo26 = 26;
+ this.foo27 = 27;
+ this.foo28 = 28;
+ this.foo29 = 29;
+ this.foo30 = 30;
+ this.foo31 = 31;
+ this.foo32 = 32;
+ this.foo33 = 33;
+ this.foo34 = 34;
+ this.foo35 = 35;
+ this.foo36 = 36;
+ this.foo37 = 37;
+ this.foo38 = 38;
+ this.foo39 = 39;
+ this.foo40 = 40;
+ this.foo41 = 41;
+ this.foo42 = 42;
+ this.foo43 = 43;
+ this.foo44 = 44;
+ this.foo45 = 45;
+ this.foo46 = 46;
+ this.foo47 = 47;
+ this.foo48 = 48;
+ this.foo49 = 49;
+ this.foo50 = 50;
+ this.foo51 = 51;
+ this.foo52 = 52;
+ this.foo53 = 53;
+ this.foo54 = 54;
+ this.foo55 = 55;
+ this.foo56 = 56;
+ this.foo57 = 57;
+ this.foo58 = 58;
+ this.foo59 = 59;
+ this.foo60 = 60;
+ this.foo61 = 61;
+ this.foo62 = 62;
+ this.foo63 = 63;
+ this.foo64 = 64;
+ this.foo65 = 65;
+ this.foo66 = 66;
+ this.foo67 = 67;
+ this.foo68 = 68;
+ this.foo69 = 69;
+ this.foo70 = 70;
+ this.foo71 = 71;
+ this.foo72 = 72;
+ this.foo73 = 73;
+ this.foo74 = 74;
+ this.foo75 = 75;
+ this.foo76 = 76;
+ this.foo77 = 77;
+ this.foo78 = 78;
+ this.foo79 = 79;
+ this.foo80 = 80;
+ this.foo81 = 81;
+ this.foo82 = 82;
+ this.foo83 = 83;
+ this.foo84 = 84;
+ this.foo85 = 85;
+ this.foo86 = 86;
+ this.foo87 = 87;
+ this.foo88 = 88;
+ this.foo89 = 89;
+ this.foo90 = 90;
+ this.foo91 = 91;
+ this.foo92 = 92;
+ this.foo93 = 93;
+ this.foo94 = 94;
+ this.foo95 = 95;
+ this.foo96 = 96;
+ this.foo97 = 97;
+ this.foo98 = 98;
+ this.foo99 = 99;
+ this.foo100 = 100;
+ this.foo101 = 101;
+ this.foo102 = 102;
+ this.foo103 = 103;
+ this.foo104 = 104;
+ this.foo105 = 105;
+ this.foo106 = 106;
+ this.foo107 = 107;
+ this.foo108 = 108;
+ this.foo109 = 109;
+ this.foo110 = 110;
+ this.foo111 = 111;
+ this.foo112 = 112;
+ this.foo113 = 113;
+ this.foo114 = 114;
+ this.foo115 = 115;
+ this.foo116 = 116;
+ this.foo117 = 117;
+ this.foo118 = 118;
+ this.foo119 = 119;
+ this.foo120 = 120;
+ this.foo121 = 121;
+ this.foo122 = 122;
+ this.foo123 = 123;
+ this.foo124 = 124;
+ this.foo125 = 125;
+ this.foo126 = 126;
+ this.foo127 = 127;
+ this.foo128 = 128;
+ this.foo129 = 129;
+ this.foo130 = 130;
+ this.foo131 = 131;
+ this.foo132 = 132;
+ this.foo133 = 133;
+ this.foo134 = 134;
+ this.foo135 = 135;
+ this.foo136 = 136;
+ this.foo137 = 137;
+ this.foo138 = 138;
+ this.foo139 = 139;
+ this.foo140 = 140;
+ this.foo141 = 141;
+ this.foo142 = 142;
+ this.foo143 = 143;
+ this.foo144 = 144;
+ this.foo145 = 145;
+ this.foo146 = 146;
+ this.foo147 = 147;
+ this.foo148 = 148;
+ this.foo149 = 149;
+ this.foo150 = 150;
+ this.foo151 = 151;
+ this.foo152 = 152;
+ this.foo153 = 153;
+ this.foo154 = 154;
+ this.foo155 = 155;
+ this.foo156 = 156;
+ this.foo157 = 157;
+ this.foo158 = 158;
+ this.foo159 = 159;
+ this.foo160 = 160;
+ this.foo161 = 161;
+ this.foo162 = 162;
+ this.foo163 = 163;
+ this.foo164 = 164;
+ this.foo165 = 165;
+ this.foo166 = 166;
+ this.foo167 = 167;
+ this.foo168 = 168;
+ this.foo169 = 169;
+ this.foo170 = 170;
+ this.foo171 = 171;
+ this.foo172 = 172;
+ this.foo173 = 173;
+ this.foo174 = 174;
+ this.foo175 = 175;
+ this.foo176 = 176;
+ this.foo177 = 177;
+ this.foo178 = 178;
+ this.foo179 = 179;
+ this.foo180 = 180;
+ this.foo181 = 181;
+ this.foo182 = 182;
+ this.foo183 = 183;
+ this.foo184 = 184;
+ this.foo185 = 185;
+ this.foo186 = 186;
+ this.foo187 = 187;
+ this.foo188 = 188;
+ this.foo189 = 189;
+ this.foo190 = 190;
+ this.foo191 = 191;
+ this.foo192 = 192;
+ this.foo193 = 193;
+ this.foo194 = 194;
+ this.foo195 = 195;
+ this.foo196 = 196;
+ this.foo197 = 197;
+ this.foo198 = 198;
+ this.foo199 = 199;
+ this.foo200 = 200;
+ this.foo201 = 201;
+ this.foo202 = 202;
+ this.foo203 = 203;
+ this.foo204 = 204;
+ this.foo205 = 205;
+ this.foo206 = 206;
+ this.foo207 = 207;
+ this.foo208 = 208;
+ this.foo209 = 209;
+ this.foo210 = 210;
+ this.foo211 = 211;
+ this.foo212 = 212;
+ this.foo213 = 213;
+ this.foo214 = 214;
+ this.foo215 = 215;
+ this.foo216 = 216;
+ this.foo217 = 217;
+ this.foo218 = 218;
+ this.foo219 = 219;
+ this.foo220 = 220;
+ this.foo221 = 221;
+ this.foo222 = 222;
+ this.foo223 = 223;
+ this.foo224 = 224;
+ this.foo225 = 225;
+ this.foo226 = 226;
+ this.foo227 = 227;
+ this.foo228 = 228;
+ this.foo229 = 229;
+ this.foo230 = 230;
+ this.foo231 = 231;
+ this.foo232 = 232;
+ this.foo233 = 233;
+ this.foo234 = 234;
+ this.foo235 = 235;
+ this.foo236 = 236;
+ this.foo237 = 237;
+ this.foo238 = 238;
+ this.foo239 = 239;
+ this.foo240 = 240;
+ this.foo241 = 241;
+ this.foo242 = 242;
+ this.foo243 = 243;
+ this.foo244 = 244;
+ this.foo245 = 245;
+ this.foo246 = 246;
+ this.foo247 = 247;
+ this.foo248 = 248;
+ this.foo249 = 249;
+ this.foo250 = 250;
+ this.foo251 = 251;
+ this.foo252 = 252;
+ this.foo253 = 253;
+ this.foo254 = 254;
+ this.foo255 = 255;
+ this.foo256 = 256;
+ this.foo257 = 257;
+}
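The 257 assignments are written out literally, presumably so that an ordinary, statically parsed constructor body is what the optimizer inlines. Purely to illustrate the shape of the object being built (makeHugeConstructor is a hypothetical helper and would not exercise the same code path, since Function-generated code is compiled separately):

```js
// Hypothetical generator for a constructor with n assignments to `this`.
function makeHugeConstructor(n) {
  var body = "";
  for (var i = 1; i <= n; i++) {
    body += "this.foo" + i + " = " + i + ";\n";
  }
  return new Function(body);
}

var Huge = makeHugeConstructor(257);
var o = new Huge();
// o.foo1 === 1 and o.foo257 === 257, matching the assertions above.
```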
diff --git a/deps/v8/test/mjsunit/compiler/alloc-object.js b/deps/v8/test/mjsunit/compiler/alloc-object.js
new file mode 100644
index 00000000000..1d44efb549e
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/alloc-object.js
@@ -0,0 +1,90 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc --inline-construct
+
+// Test that inlined object allocation works for different object layouts
+// (e.g. in-object properties, slack tracking in progress, or changes to
+// function prototypes).
+
+function test_helper(construct, a, b) {
+ return new construct(a, b);
+}
+
+function test(construct) {
+ %DeoptimizeFunction(test);
+ test_helper(construct, 0, 0);
+ test_helper(construct, 0, 0);
+ %OptimizeFunctionOnNextCall(test_helper);
+ // Test adding a new property after allocation was inlined.
+ var o = test_helper(construct, 1, 2);
+ o.z = 3;
+ assertEquals(1, o.x);
+ assertEquals(2, o.y);
+ assertEquals(3, o.z);
+ // Test changing the prototype after allocation was inlined.
+ construct.prototype = { z:6 };
+ var o = test_helper(construct, 4, 5);
+ assertEquals(4, o.x);
+ assertEquals(5, o.y);
+ assertEquals(6, o.z);
+ %DeoptimizeFunction(test_helper);
+ gc(); // Makes V8 forget about type information for test_helper.
+}
+
+function finalize_slack_tracking(construct) {
+ // Value chosen based on kGenerousAllocationCount = 8.
+ for (var i = 0; i < 8; i++) {
+ new construct(0, 0);
+ }
+}
+
+
+// Both properties are pre-allocated in object properties.
+function ConstructInObjectPreAllocated(a, b) {
+ this.x = a;
+ this.y = b;
+}
+finalize_slack_tracking(ConstructInObjectPreAllocated);
+test(ConstructInObjectPreAllocated);
+
+
+// Both properties are unused in object properties.
+function ConstructInObjectUnused(a, b) {
+ this.x = a < 0 ? 0 : a;
+ this.y = b > 0 ? b : 0;
+}
+finalize_slack_tracking(ConstructInObjectUnused);
+test(ConstructInObjectUnused);
+
+
+// Test inlined allocation while slack tracking is still in progress.
+function ConstructWhileSlackTracking(a, b) {
+ this.x = a;
+ this.y = b;
+}
+test(ConstructWhileSlackTracking);
diff --git a/deps/v8/test/mjsunit/compiler/inline-construct.js b/deps/v8/test/mjsunit/compiler/inline-construct.js
new file mode 100644
index 00000000000..af9e69c9402
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/inline-construct.js
@@ -0,0 +1,152 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc --inline-construct
+
+// Test inlining of constructor calls.
+
+function TestInlinedConstructor(closure) {
+ var result;
+ var counter = { value:0 };
+ result = closure(11, 12, counter);
+ assertEquals(23, result);
+ assertEquals(1, counter.value);
+ result = closure(23, 19, counter);
+ assertEquals(42, result);
+ assertEquals(2, counter.value);
+ %OptimizeFunctionOnNextCall(closure);
+  result = closure(1, 42, counter);
+ assertEquals(43, result);
+ assertEquals(3, counter.value);
+  result = closure("foo", "bar", counter);
+  assertEquals("foobar", result);
+ assertEquals(4, counter.value);
+}
+
+function TestInAllContexts(constructor) {
+ function value_context(a, b, counter) {
+ var obj = new constructor(a, b, counter);
+ return obj.x;
+ }
+ function test_context(a, b, counter) {
+ if (!new constructor(a, b, counter)) {
+ assertUnreachable("should not happen");
+ }
+ return a + b;
+ }
+ function effect_context(a, b, counter) {
+ new constructor(a, b, counter);
+ return a + b;
+ }
+ TestInlinedConstructor(value_context);
+ TestInlinedConstructor(test_context);
+ TestInlinedConstructor(effect_context);
+ %DeoptimizeFunction(value_context);
+ %DeoptimizeFunction(test_context);
+ %DeoptimizeFunction(effect_context);
+ gc(); // Makes V8 forget about type information for *_context.
+}
+
+
+// Test constructor returning nothing in all contexts.
+function c1(a, b, counter) {
+ this.x = a + b;
+ counter.value++;
+}
+TestInAllContexts(c1);
+
+
+// Test constructor returning an object in all contexts.
+function c2(a, b, counter) {
+ var obj = new Object();
+ obj.x = a + b;
+ counter.value++;
+ return obj;
+}
+TestInAllContexts(c2);
+
+
+// Test constructor returning a primitive value in all contexts.
+function c3(a, b, counter) {
+ this.x = a + b;
+ counter.value++;
+ return "not an object";
+}
+TestInAllContexts(c3);
+
+
+// Test constructor called with too many arguments.
+function c_too_many(a, b) {
+ this.x = a + b;
+}
+function f_too_many(a, b, c) {
+ var obj = new c_too_many(a, b, c);
+ return obj.x;
+}
+assertEquals(23, f_too_many(11, 12, 1));
+assertEquals(42, f_too_many(23, 19, 1));
+%OptimizeFunctionOnNextCall(f_too_many);
+assertEquals(43, f_too_many(1, 42, 1));
+assertEquals("foobar", f_too_many("foo", "bar", "baz"));
+
+
+// Test constructor called with too few arguments.
+function c_too_few(a, b) {
+ assertSame(undefined, b);
+ this.x = a + 1;
+}
+function f_too_few(a) {
+ var obj = new c_too_few(a);
+ return obj.x;
+}
+assertEquals(12, f_too_few(11));
+assertEquals(24, f_too_few(23));
+%OptimizeFunctionOnNextCall(f_too_few);
+assertEquals(2, f_too_few(1));
+assertEquals("foo1", f_too_few("foo"));
+
+
+// Test constructor that cannot be inlined.
+function c_unsupported_syntax(a, b, counter) {
+ try {
+ this.x = a + b;
+ counter.value++;
+ } catch(e) {
+ throw new Error();
+ }
+}
+TestInAllContexts(c_unsupported_syntax);
+
+
+// Regression test: Inlined constructors called as functions do not get their
+// implicit receiver object set to undefined, even in strict mode.
+function c_strict(a, b, counter) {
+ "use strict";
+ this.x = a + b;
+ counter.value++;
+}
+TestInAllContexts(c_strict);
diff --git a/deps/v8/test/mjsunit/compiler/optimized-for-in.js b/deps/v8/test/mjsunit/compiler/optimized-for-in.js
index 8b16101ee22..cb8c66df9fe 100644
--- a/deps/v8/test/mjsunit/compiler/optimized-for-in.js
+++ b/deps/v8/test/mjsunit/compiler/optimized-for-in.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --optimize-for-in --allow-natives-syntax
// Test for-in support in Crankshaft. For simplicity this tests assumes certain
// fixed iteration order for properties and will have to be adjusted if V8
@@ -247,13 +247,15 @@ tryFunction("a1b2c3d4e5f6", function () {
function osr_inner(t, limit) {
var r = 1;
for (var x in t) {
- for (var i = 0; i < t[x].length; i++) {
- r += t[x][i];
- if (i === limit) {
- %OptimizeFunctionOnNextCall(osr_inner, "osr");
+ if (t.hasOwnProperty(x)) {
+ for (var i = 0; i < t[x].length; i++) {
+ r += t[x][i];
+ if (i === limit) {
+ %OptimizeFunctionOnNextCall(osr_inner, "osr");
+ }
}
+ r += x;
}
- r += x;
}
return r;
}
@@ -290,7 +292,7 @@ function test_osr() {
arr[i] = i + 1;
}
arr.push(":"); // Force deopt at the end of the loop.
- assertEquals("211:x", osr_inner({x: arr}, (arr.length / 2) | 0));
+ assertEquals("211:x1234567891011121314151617181920:y", osr_inner({x: arr, y: arr}, (arr.length / 2) | 0));
assertEquals("7x456y", osr_outer({x: [1,2,3], y: [4,5,6]}, "x"));
assertEquals("101234567", osr_outer_and_deopt([1,2,3,4,5,6,7,8], "5"));
}
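The hunk above wraps the inner loop body in a hasOwnProperty check. Whatever the optimizer-specific motivation for the change, the guard itself is the standard filter for restricting a for-in walk to own enumerable properties; a minimal, self-contained sketch (not part of the patch):

```js
// for-in enumerates properties from the whole prototype chain, so
// hasOwnProperty is the usual way to limit the body to own properties.
var proto = { inherited: 1 };
var obj = Object.create(proto);
obj.own = 2;

var keys = [];
for (var key in obj) {
  if (obj.hasOwnProperty(key)) keys.push(key);
}
// keys is ["own"]; without the guard it would be ["own", "inherited"].
```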
diff --git a/deps/v8/test/mjsunit/compiler/regress-toint32.js b/deps/v8/test/mjsunit/compiler/regress-toint32.js
new file mode 100644
index 00000000000..54c2f76dd75
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/regress-toint32.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --noenable-sse3
+
+var a = new Int32Array(1);
+var G = 0x80000000;
+
+function f(x) {
+ var v = x;
+ v = v + 1;
+ a[0] = v;
+ v = v - 1;
+ return v;
+}
+
+assertEquals(G, f(G));
+assertEquals(G, f(G));
+%OptimizeFunctionOnNextCall(f);
+assertEquals(G, f(G));
+
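The store into the Int32Array above goes through ToInt32, which wraps values modulo 2^32; the test appears to guard against the optimized code reusing that wrapped value in the later subtraction. A plain-JS illustration of the truncation (not part of the patch):

```js
var a = new Int32Array(1);
a[0] = 0x80000001;   // 2147483649 does not fit in a signed 32-bit slot
// a[0] is now -2147483647, the value wrapped into the int32 range.

// The assertions above require the original double, not the wrapped int32,
// to feed the subtraction: (0x80000000 + 1) - 1 must stay 2147483648.
```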
diff --git a/deps/v8/test/mjsunit/date.js b/deps/v8/test/mjsunit/date.js
index fa43cbb4315..3e153aba7f9 100644
--- a/deps/v8/test/mjsunit/date.js
+++ b/deps/v8/test/mjsunit/date.js
@@ -187,6 +187,12 @@ d = new Date(1969, 12, 1, Infinity);
assertTrue(isNaN(d.getTime()));
d = new Date(1969, 12, 1, -Infinity);
assertTrue(isNaN(d.getTime()));
+d = new Date(1969, 12, 1, 0);
+d.setTime(Math.pow(2, 64));
+assertTrue(isNaN(d.getTime()));
+d = new Date(1969, 12, 1, 0);
+d.setTime(Math.pow(-2, 64));
+assertTrue(isNaN(d.getTime()));
// Test creation with obscure date values.
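The new setTime checks exercise the spec's TimeClip limit: time values are restricted to a magnitude of 8.64e15 ms (about 273,790 years either side of the epoch), and anything beyond that becomes NaN. A simplified sketch of the rule (an approximation of ES5.1 15.9.1.14, not V8's implementation; timeClip is a hypothetical name):

```js
var MAX_TIME_MS = 8.64e15;  // 100,000,000 days of 86,400,000 ms each

function timeClip(t) {
  if (!isFinite(t) || Math.abs(t) > MAX_TIME_MS) return NaN;
  return t >= 0 ? Math.floor(t) : Math.ceil(t);  // truncate toward zero
}

timeClip(Math.pow(2, 64));   // NaN - 1.84e19 ms is far outside the range
timeClip(Math.pow(-2, 64));  // NaN - the even exponent makes this positive too
timeClip(8.64e15);           // 8640000000000000 - exactly at the limit is kept
```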
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
index 7178661f25e..cf25c0c095d 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -140,7 +140,13 @@ function listener(event, exec_state, event_data, data) {
}
// Check for construct call.
- assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+ if (i == 4) {
+ assertEquals(testingConstructCall, frame.isConstructCall());
+ } else if (i == 2) {
+ assertTrue(frame.isConstructCall());
+ } else {
+ assertFalse(frame.isConstructCall());
+ }
// When function f is optimized (1 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -204,7 +210,7 @@ function g1(i, x3, y3, z3) {
var b3 = input[i].b;
a3 = a3 + a3 / 100;
b3 = b3 + b3 / 100;
- g2(i - 1, a3, b3);
+ new g2(i - 1, a3, b3);
};
function f(i, x4, y4) {
@@ -222,8 +228,11 @@ testingConstructCall = true;
new f(input.length - 1, 11.11, 12.12);
new f(input.length - 1, 11.11, 12.12, "");
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
assertTrue(listenerComplete);
+// Throw away type information for next run.
+gc();
+
Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
index 485f752f5c5..c88a683a8ce 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -130,7 +130,13 @@ function listener(event, exec_state, event_data, data) {
}
// Check for construct call.
- assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+ if (i == 4) {
+ assertEquals(testingConstructCall, frame.isConstructCall());
+ } else if (i == 2) {
+ assertTrue(frame.isConstructCall());
+ } else {
+ assertFalse(frame.isConstructCall());
+ }
// When function f is optimized (1 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -185,7 +191,7 @@ function g2(i) {
function g1(i, x3, y3, z3) {
var a3 = expected[i].locals.a3;
var b3 = expected[i].locals.b3;
- g2(i - 1, a3, b3);
+ new g2(i - 1, a3, b3);
}
function f(i, x4, y4) {
@@ -201,8 +207,11 @@ testingConstructCall = true;
new f(expected.length - 1, 11, 12);
new f(expected.length - 1, 11, 12, 0);
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
assertTrue(listenerComplete);
+// Throw away type information for next run.
+gc();
+
Debug.setListener(null);
diff --git a/deps/v8/test/mjsunit/debug-stepin-accessor.js b/deps/v8/test/mjsunit/debug-stepin-accessor.js
index 2c9c8c324f9..70acd5ef6ba 100644
--- a/deps/v8/test/mjsunit/debug-stepin-accessor.js
+++ b/deps/v8/test/mjsunit/debug-stepin-accessor.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -112,8 +112,8 @@ function testGetter1_2() {
function testGetter1_3() {
expected_function_name = 'getter1';
expected_source_line_text = ' return this.name; // getter 1';
- debugger;
for (var i = 1; i < 2; i++) {
+ debugger;
var x = c['getter' + i];
}
}
diff --git a/deps/v8/test/mjsunit/function-call.js b/deps/v8/test/mjsunit/function-call.js
index 06479ad4e82..26890ed113c 100644
--- a/deps/v8/test/mjsunit/function-call.js
+++ b/deps/v8/test/mjsunit/function-call.js
@@ -68,8 +68,7 @@ var should_throw_on_null_and_undefined =
String.prototype.toUpperCase,
String.prototype.toLocaleUpperCase,
String.prototype.trim,
- Number.prototype.toLocaleString,
- Error.prototype.toString];
+ Number.prototype.toLocaleString];
// Non generic natives do not work on any input other than the specific
// type, but since this change will allow call to be invoked with undefined
@@ -134,7 +133,8 @@ var non_generic =
Date.prototype.toJSON,
RegExp.prototype.exec,
RegExp.prototype.test,
- RegExp.prototype.toString];
+ RegExp.prototype.toString,
+ Error.prototype.toString];
// Mapping functions.
diff --git a/deps/v8/test/mjsunit/fuzz-natives.js b/deps/v8/test/mjsunit/fuzz-natives.js
index c4d18d04110..2965e74573f 100644
--- a/deps/v8/test/mjsunit/fuzz-natives.js
+++ b/deps/v8/test/mjsunit/fuzz-natives.js
@@ -184,8 +184,9 @@ var knownProblems = {
"RegExpConstructResult": true,
"_RegExpConstructResult": true,
- // This function performs some checks compile time (it requires its first
- // argument to be a compile time smi).
+  // These functions perform some checks at compile time (they require one of
+  // their arguments to be a compile-time smi).
+ "_DateField": true,
"_GetFromCache": true,
// This function expects its first argument to be a non-smi.
diff --git a/deps/v8/test/mjsunit/harmony/module-parsing.js b/deps/v8/test/mjsunit/harmony/module-parsing.js
index 5a5e82fdb17..93e69e3ad9c 100644
--- a/deps/v8/test/mjsunit/harmony/module-parsing.js
+++ b/deps/v8/test/mjsunit/harmony/module-parsing.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,7 +27,7 @@
// Flags: --harmony-modules
-// Test basic module syntax, with and without ASI.
+// Test basic module syntax, with and without automatic semicolon insertion.
module A {}
@@ -36,19 +36,46 @@ module A2 = A;
module A3 = A2
module B {
- var x
- var x, y;
- var x = 0, y
- let x, y
- let z = 1
+ export vx
+ export vy, lz, c, f
+
+ var vx
+ var vx, vy;
+ var vx = 0, vy
+ let lx, ly
+ let lz = 1
const c = 9
function f() {}
- module C {
+
+ module C0 {}
+
+ export module C {
let x
- module D {}
+ export module D { export let x }
let y
}
+
let zz = ""
+
+ export var x0
+ export var x1, x2 = 6, x3
+ export let y0
+ export let y1 = 0, y2
+ export const z0 = 0
+ export const z1 = 2, z2 = 3
+ export function f0() {}
+ export module M1 {}
+ export module M2 = C.D
+ export module M3 at "http://where"
+
+ import i0 from I
+ import i1, i2, i3, M from I
+ import i4, i5 from "http://where"
+}
+
+module I {
+ export let i0, i1, i2, i3;
+ export module M {}
}
module C1 = B.C;
@@ -60,7 +87,6 @@ module E1 at "http://where"
module E2 at "http://where";
module E3 = E1.F
-
// Check that ASI does not interfere.
module X
@@ -76,6 +102,49 @@ module Z
at
"file://local"
+import
+x
+,
+y
+from
+"file://local"
+
+
+module Wrap {
+export
+x
+,
+y
+
+export
+var
+v1 = 1
+
+export
+let
+v2 = 2
+
+export
+const
+v3 = 3
+
+export
+function
+f
+(
+)
+{
+}
+
+export
+module V
+{
+}
+}
+
+export A, A1, A2, A3, B, I, C1, D1, D2, D3, E1, E2, E3, X, Y, Z, Wrap, x, y, UU
+
+
// Check that 'module' still works as an identifier.
diff --git a/deps/v8/test/mjsunit/harmony/module-resolution.js b/deps/v8/test/mjsunit/harmony/module-resolution.js
new file mode 100644
index 00000000000..f9f492cffce
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/module-resolution.js
@@ -0,0 +1,139 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-modules --harmony-scoping
+
+// Test basic module interface inference.
+
+"use strict";
+
+print("begin.")
+
+export let x = print("0")
+
+export module B = A.B
+
+export module A {
+ export let x = print("1")
+ export let f = function() { return B.x }
+ export module B {
+ module BB = B
+ export BB, x
+ let x = print("2")
+ let y = print("3")
+ let Ax = A.x
+ let ABx = A.B.x
+ let Ay = A.y
+ let BBx = BB.x
+ let Af = A.f
+ function f(x,y) { return x }
+ }
+ export let y = print("4")
+ let Ax = A.x
+ let Bx = B.x
+ let ABx = A.B.x
+ module C {
+ export let z = print("5")
+ export module D = B
+ // TODO(rossberg): turn these into proper negative test cases once we have
+ // suitable error messages.
+ // import C.z // multiple declarations
+ import x from B
+ }
+ module D {
+ // TODO(rossberg): Handle import *.
+ // import A.* // invalid forward import
+ }
+ module M {}
+ // TODO(rossberg): Handle import *.
+ // import M.* // invalid forward import
+ let Cz = C.z
+ let CDx = C.D.x
+}
+
+export module Imports {
+ module A1 {
+ export module A2 {}
+ }
+ module B {
+ // TODO(rossberg): Handle import *.
+ // import A1.*
+ // import A2.* // unbound variable A2
+ }
+}
+
+export module E {
+ export let xx = x
+ export y, B
+ let Bx = B.x
+ // TODO(rossberg): Handle import *.
+ // import A.*
+}
+
+export module M1 {
+ export module A2 = M2
+}
+export module M2 {
+ export module A1 = M1
+}
+
+// TODO(rossberg): turn these into proper negative test cases once we have
+// suitable error messages.
+// module W1 = W2.W
+// module W2 = { export module W = W3 }
+// module W3 = W1 // cyclic module definition
+
+// module W1 = W2.W3
+// module W2 = {
+// export module W3 = W4
+// export module W4 = W1
+// } // cyclic module definition
+
+// TODO(rossberg): Handle import *.
+//module M3B = M3.B
+//export module M3 {
+// export module B { export let x = "" }
+// module C1 = { import M3.* }
+// module C2 = { import M3.B.* }
+// module C3 = { import M3B.* }
+// module C4 = { export x import B.* }
+//// TODO(rossberg): turn these into proper negative test cases once we have
+//// suitable error messages.
+//// export module C5 = { import C5.* } // invalid forward import
+//// export module C6 = { import M3.C6.* } // invalid forward import
+//}
+
+export module External at "external.js"
+export module External1 = External
+export module ExternalA = External.A
+export module InnerExternal {
+ export module E at "external.js"
+}
+export module External2 = InnerExternal.E
+//export let xxx = InnerExternal.E.A.x
+
+print("end.")
diff --git a/deps/v8/test/mjsunit/mjsunit.js b/deps/v8/test/mjsunit/mjsunit.js
index 6f6e3230d57..033c78f4b07 100644
--- a/deps/v8/test/mjsunit/mjsunit.js
+++ b/deps/v8/test/mjsunit/mjsunit.js
@@ -221,6 +221,8 @@ var assertUnreachable;
assertSame = function assertSame(expected, found, name_opt) {
+ // TODO(mstarzinger): We should think about using Harmony's egal operator
+ // or the function equivalent Object.is() here.
if (found === expected) {
if (expected !== 0 || (1 / expected) == (1 / found)) return;
} else if ((expected !== expected) && (found !== found)) {
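The two extra checks shown in this hunk implement exactly the SameValue semantics that the TODO's Object.is (the egal function) provides; a standalone sketch of the equivalent predicate (sameValue is an illustrative name, not part of mjsunit.js):

```js
function sameValue(x, y) {
  if (x === y) {
    // +0 === -0, so distinguish them by the sign of the infinity 1/x yields.
    return x !== 0 || (1 / x) === (1 / y);
  }
  // NaN !== NaN, but NaN is the only value that is not equal to itself.
  return x !== x && y !== y;
}

// sameValue(0, -0) is false although 0 === -0 is true;
// sameValue(NaN, NaN) is true although NaN === NaN is false.
```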
diff --git a/deps/v8/test/mjsunit/number-is.js b/deps/v8/test/mjsunit/number-is.js
new file mode 100644
index 00000000000..1589fc64ad4
--- /dev/null
+++ b/deps/v8/test/mjsunit/number-is.js
@@ -0,0 +1,58 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test Harmony Number.isFinite() and Number.isNaN() functions.
+
+assertTrue(Number.isFinite(0));
+assertTrue(Number.isFinite(Number.MIN_VALUE));
+assertTrue(Number.isFinite(Number.MAX_VALUE));
+assertFalse(Number.isFinite(Number.NaN));
+assertFalse(Number.isFinite(Number.POSITIVE_INFINITY));
+assertFalse(Number.isFinite(Number.NEGATIVE_INFINITY));
+assertFalse(Number.isFinite(new Number(0)));
+assertFalse(Number.isFinite(1/0));
+assertFalse(Number.isFinite(-1/0));
+assertFalse(Number.isFinite({}));
+assertFalse(Number.isFinite([]));
+assertFalse(Number.isFinite("s"));
+assertFalse(Number.isFinite(null));
+assertFalse(Number.isFinite(undefined));
+
+assertFalse(Number.isNaN(0));
+assertFalse(Number.isNaN(Number.MIN_VALUE));
+assertFalse(Number.isNaN(Number.MAX_VALUE));
+assertTrue(Number.isNaN(Number.NaN));
+assertFalse(Number.isNaN(Number.POSITIVE_INFINITY));
+assertFalse(Number.isNaN(Number.NEGATIVE_INFINITY));
+assertFalse(Number.isNaN(new Number(0)));
+assertFalse(Number.isNaN(1/0));
+assertFalse(Number.isNaN(-1/0));
+assertFalse(Number.isNaN({}));
+assertFalse(Number.isNaN([]));
+assertFalse(Number.isNaN("s"));
+assertFalse(Number.isNaN(null));
+assertFalse(Number.isNaN(undefined));
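The point these assertions pin down is that the Harmony predicates do not coerce their argument, unlike the classic global isFinite/isNaN, which apply ToNumber first. A rough polyfill-style sketch of the intended semantics (myIsFinite/myIsNaN are illustrative names, not V8's implementation):

```js
var myIsFinite = function (v) {
  return typeof v === "number" && isFinite(v);
};
var myIsNaN = function (v) {
  return typeof v === "number" && v !== v;
};

isFinite("0");              // true  - "0" is coerced to 0
myIsFinite("0");            // false - not a number value
isFinite(new Number(0));    // true  - the wrapper is unboxed by ToNumber
myIsFinite(new Number(0));  // false - typeof is "object"
isNaN(undefined);           // true  - undefined coerces to NaN
myIsNaN(undefined);         // false
```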
diff --git a/deps/v8/test/mjsunit/object-define-property.js b/deps/v8/test/mjsunit/object-define-property.js
index 432fbdf7fc3..fdaf82d105e 100644
--- a/deps/v8/test/mjsunit/object-define-property.js
+++ b/deps/v8/test/mjsunit/object-define-property.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -503,7 +503,7 @@ try {
// Defining properties null should fail even when we have
// other allowed values
try {
- %DefineOrRedefineAccessorProperty(null, 'foo', 0, func, 0);
+ %DefineOrRedefineAccessorProperty(null, 'foo', func, null, 0);
} catch (e) {
assertTrue(/illegal access/.test(e));
}
@@ -1075,3 +1075,13 @@ assertEquals(999, o[999]);
assertEquals(2, arg0);
assertEquals(3, arguments[0]);
})(0);
+
+
+// Regression test: We should never observe the hole value.
+var objectWithGetter = {};
+objectWithGetter.__defineGetter__('foo', function() {});
+assertEquals(undefined, objectWithGetter.__lookupSetter__('foo'));
+
+var objectWithSetter = {};
+objectWithSetter.__defineSetter__('foo', function(x) {});
+assertEquals(undefined, objectWithSetter.__lookupGetter__('foo'));
diff --git a/deps/v8/test/mjsunit/object-is.js b/deps/v8/test/mjsunit/object-is.js
new file mode 100644
index 00000000000..b9fdc844206
--- /dev/null
+++ b/deps/v8/test/mjsunit/object-is.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test both the Harmony egal operator and its function equivalent.
+
+function TestEgal(expected, x, y) {
+ // TODO(mstarzinger): Once we have the egal operator, we can test it here.
+ assertSame(expected, Object.is(x, y));
+}
+
+var test_set = [ {}, [], 1/0, -1/0, "s", 0, 0/-1, null, undefined ];
+print(test_set);
+for (var i = 0; i < test_set.length; i++) {
+ for (var j = 0; j < test_set.length; j++) {
+ if (i == j) {
+ assertSame(test_set[i], test_set[j]);
+ TestEgal(true, test_set[i], test_set[j]);
+ } else {
+ TestEgal(false, test_set[i], test_set[j]);
+ TestEgal(false, test_set[j], test_set[i]);
+ }
+ }
+}
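For context on the egal semantics the loop relies on: Object.is agrees with === everywhere except for NaN and signed zero, which is why every pair of distinct test_set entries above (including 1/0 vs -1/0, 0 vs -0, and null vs undefined) is expected to compare as false. A short illustration, not part of the patch:

```js
Object.is(NaN, NaN);  // true,  but NaN === NaN is false
Object.is(0, -0);     // false, but 0 === -0   is true
Object.is("s", "s");  // true,  same as ===
```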
diff --git a/deps/v8/test/mjsunit/regress/regress-102153.js b/deps/v8/test/mjsunit/regress/regress-102153.js
new file mode 100644
index 00000000000..0f67656b61e
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-102153.js
@@ -0,0 +1,57 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test that the break point is set before initializing the loop variable
+// so that we break before any iteration has been run.
+
+Debug = debug.Debug;
+
+var break_hit = false;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ break_hit = true;
+ }
+}
+
+Debug.setListener(listener);
+
+function test() {
+ for (var i = 0; i < 3; i++) { // Break here.
+ if (i == 0) break;
+ }
+}
+
+Debug.setBreakPoint(test, 1, 0);
+
+assertTrue(Debug.showBreakPoints(test).indexOf("// Break here.") >= 0);
+
+test();
+
+assertTrue(break_hit);
diff --git a/deps/v8/test/mjsunit/regress/regress-1229.js b/deps/v8/test/mjsunit/regress/regress-1229.js
index 3d166d5b4f3..5447f3f7d22 100644
--- a/deps/v8/test/mjsunit/regress/regress-1229.js
+++ b/deps/v8/test/mjsunit/regress/regress-1229.js
@@ -139,6 +139,8 @@ function bar(x, y, z) {
assertSame(non_construct, NON_CONSTRUCT_MARKER);
var construct = new baz(0);
assertSame(construct, CONSTRUCT_MARKER);
+ var construct = new baz(0, 0);
+ assertSame(construct, CONSTRUCT_MARKER);
}
invoke(bar, [1, 2, 3]);
diff --git a/deps/v8/test/mjsunit/regress/regress-1853.js b/deps/v8/test/mjsunit/regress/regress-1853.js
new file mode 100644
index 00000000000..f80badecb6a
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1853.js
@@ -0,0 +1,116 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test whether scripts compiled after setting the break point are
+// updated correctly.
+
+Debug = debug.Debug;
+
+var break_count = 0;
+var test_break_1 = false;
+var test_break_2 = false;
+
+function sendCommand(state, cmd) {
+ // Get the debug command processor in paused state.
+ var dcp = state.debugCommandProcessor(false);
+ var request = JSON.stringify(cmd);
+ var response = dcp.processDebugJSONRequest(request);
+ return JSON.parse(response);
+}
+
+function setBreakPointByName(state) {
+ sendCommand(state, {
+ seq: 0,
+ type: "request",
+ command: "setbreakpoint",
+ arguments: {
+ type: "script",
+ target: "testScriptOne",
+ line: 2
+ }
+ });
+}
+
+function setBreakPointByRegExp(state) {
+ sendCommand(state, {
+ seq: 0,
+ type: "request",
+ command: "setbreakpoint",
+ arguments: {
+ type: "scriptRegExp",
+ target: "Scrip.Two",
+ line: 2
+ }
+ });
+}
+
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ switch (break_count) {
+ case 0:
+ // Set break points before the code has been compiled.
+ setBreakPointByName(exec_state);
+ setBreakPointByRegExp(exec_state);
+ break;
+ case 1:
+ // Set the flag to prove that we hit the first break point.
+ test_break_1 = true;
+ break;
+ case 2:
+ // Set the flag to prove that we hit the second break point.
+ test_break_2 = true;
+ break;
+ }
+ break_count++;
+ }
+ } catch (e) {
+ print(e);
+ }
+}
+
+Debug.setListener(listener);
+debugger;
+
+eval('function test1() { \n' +
+ ' assertFalse(test_break_1); \n' +
+ ' assertTrue(test_break_1); \n' +
+ '} \n' +
+ '//@ sourceURL=testScriptOne');
+
+eval('function test2() { \n' +
+ ' assertFalse(test_break_2); \n' +
+ ' assertTrue(test_break_2); \n' +
+ '} \n' +
+ '//@ sourceURL=testScriptTwo');
+
+test1();
+test2();
+assertEquals(3, break_count);
+
diff --git a/deps/v8/test/mjsunit/regress/regress-1980.js b/deps/v8/test/mjsunit/regress/regress-1980.js
new file mode 100644
index 00000000000..49dfd063bae
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-1980.js
@@ -0,0 +1,40 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1980
+
+var invalid_this = [ "invalid", 23, undefined, null ];
+for (var i = 0; i < invalid_this.length; i++) {
+ var exception = false;
+ try {
+ Error.prototype.toString.call(invalid_this[i]);
+ } catch (e) {
+ exception = true;
+ assertTrue("called_on_non_object" == e.type);
+ }
+ assertTrue(exception);
+}
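The check on e.type is V8-specific; the engine-neutral statement of the same requirement is that ES5.1 15.11.4.4 makes Error.prototype.toString throw a TypeError whenever its receiver is not an object. A minimal sketch:

```js
var threw = false;
try {
  Error.prototype.toString.call(23);  // a number is not an Object
} catch (e) {
  threw = e instanceof TypeError;
}
// threw === true; strings, null and undefined take the same path.
```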
diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status
index 4d824187936..1a8a8dc7ddd 100644
--- a/deps/v8/test/test262/test262.status
+++ b/deps/v8/test/test262/test262.status
@@ -48,12 +48,12 @@ S10.4.2.1_A1: FAIL
# octal numbers in order to not break the web.
S15.1.2.2_A5.1_T1: FAIL_OK
-# This tests precision of trignometric functions. We're slightly off
-# from the implementation in libc (~ 1e-17) but it's not clear if we
-# or they are closer to the right answer, or if it even matters.
+# These test the precision of Math.tan and Math.sin. The implementations of
+# those trigonometric functions are platform/compiler dependent. Furthermore,
+# the expected values deviate considerably from the results given by an
+# arbitrary-precision calculator, making those tests partly bogus.
S15.8.2.16_A7: PASS || FAIL_OK
S15.8.2.18_A7: PASS || FAIL_OK
-S15.8.2.13_A23: PASS || FAIL_OK
# We are more lenient in which string character escapes we allow than
# the spec (7.8.4 p. 19) wants us to be. This is for compatibility.
@@ -70,8 +70,8 @@ S7.8.4_A7.2_T6: FAIL_OK
# Linux and Mac defaults to extended 80 bit floating point format in the FPU.
# We follow the other major JS engines by keeping this default.
-S8.5_A2.2: PASS, FAIL if $system == linux, FAIL if $system == macos
-S8.5_A2.1: PASS, FAIL if $system == linux, FAIL if $system == macos
+S8.5_A2.2: PASS if ($system != linux || $arch == x64), FAIL_OK if ($system == linux && $arch != x64)
+S8.5_A2.1: PASS if ($system != linux || $arch == x64), FAIL_OK if ($system == linux && $arch != x64)
############################ SKIPPED TESTS #############################
diff --git a/deps/v8/test/test262/testcfg.py b/deps/v8/test/test262/testcfg.py
index aefda196a30..294b39c9163 100644
--- a/deps/v8/test/test262/testcfg.py
+++ b/deps/v8/test/test262/testcfg.py
@@ -29,8 +29,14 @@
import test
import os
from os.path import join, exists
+import urllib
+import hashlib
+import tarfile
+TEST_262_ARCHIVE_REVISION = '3a890174343c' # This is the r309 revision.
+TEST_262_ARCHIVE_MD5 = 'be5d4cfbe69cef70430907b8f3a92b50'
+TEST_262_URL = 'http://hg.ecmascript.org/tests/test262/archive/%s.tar.bz2'
TEST_262_HARNESS = ['sta.js']
@@ -93,6 +99,28 @@ class Test262TestConfiguration(test.TestConfiguration):
tests.append(test)
return tests
+ def DownloadData(self):
+ revision = TEST_262_ARCHIVE_REVISION
+ archive_url = TEST_262_URL % revision
+ archive_name = join(self.root, 'test262-%s.tar.bz2' % revision)
+ directory_name = join(self.root, "test262-%s" % revision)
+ if not exists(directory_name) or not exists(archive_name):
+ if not exists(archive_name):
+ print "Downloading test data from %s ..." % archive_url
+ urllib.urlretrieve(archive_url, archive_name)
+ if not exists(directory_name):
+ print "Extracting test262-%s.tar.bz2 ..." % revision
+ md5 = hashlib.md5()
+ with open(archive_name,'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ md5.update(chunk)
+ if md5.hexdigest() != TEST_262_ARCHIVE_MD5:
+ raise Exception("Hash mismatch of test data file")
+ archive = tarfile.open(archive_name, 'r:bz2')
+ archive.extractall(join(self.root))
+ if not exists(join(self.root, 'data')):
+ os.symlink(directory_name, join(self.root, 'data'))
+
def GetBuildRequirements(self):
return ['d8']
diff --git a/deps/v8/tools/common-includes.sh b/deps/v8/tools/common-includes.sh
new file mode 100644
index 00000000000..98206899f4a
--- /dev/null
+++ b/deps/v8/tools/common-includes.sh
@@ -0,0 +1,194 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This file contains common function definitions for various other shell
+# scripts in this directory. It is not meant to be executed by itself.
+
+# Important: before including this file, the following variables must be set:
+# - BRANCHNAME
+# - PERSISTFILE_BASENAME
+
+TEMP_BRANCH=$BRANCHNAME-temporary-branch-created-by-script
+VERSION_FILE="src/version.cc"
+CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
+PATCH_FILE="$PERSISTFILE_BASENAME-patch"
+COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
+TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
+TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
+START_STEP=0
+CURRENT_STEP=0
+
+die() {
+ [[ -n "$1" ]] && echo "Error: $1"
+ echo "Exiting."
+ exit 1
+}
+
+confirm() {
+ echo -n "$1 [Y/n] "
+ read ANSWER
+ if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+delete_branch() {
+ local MATCH=$(git branch | grep $1 | awk '{print $NF}' )
+ if [ "$MATCH" == "$1" ] ; then
+ confirm "Branch $1 exists, do you want to delete it?"
+ if [ $? -eq 0 ] ; then
+ git branch -D $1 || die "Deleting branch '$1' failed."
+ echo "Branch $1 deleted."
+ else
+ die "Can't continue. Please delete branch $1 and try again."
+ fi
+ fi
+}
+
+# Persist and restore variables to support canceling/resuming execution
+# of this script.
+persist() {
+ local VARNAME=$1
+ local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+ echo "${!VARNAME}" > $FILE
+}
+
+restore() {
+ local VARNAME=$1
+ local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+ local VALUE="$(cat $FILE)"
+ eval "$VARNAME=\"$VALUE\""
+}
+
+restore_if_unset() {
+ local VARNAME=$1
+ [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
+ [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
+}
+
+initial_environment_checks() {
+ # Cancel if this is not a git checkout.
+ [[ -d .git ]] \
+ || die "This is not a git checkout, this script won't work for you."
+
+ # Cancel if EDITOR is unset or not executable.
+ [[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
+ || die "Please set your EDITOR environment variable, you'll need it."
+}
+
+common_prepare() {
+ # Check for a clean workdir.
+ [[ -z "$(git status -s -uno)" ]] \
+ || die "Workspace is not clean. Please commit or undo your changes."
+
+ # Persist current branch.
+ CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
+ persist "CURRENT_BRANCH"
+
+ # Fetch unfetched revisions.
+ git svn fetch || die "'git svn fetch' failed."
+
+ # Get ahold of a safe temporary branch and check it out.
+ if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
+ delete_branch $TEMP_BRANCH
+ git checkout -b $TEMP_BRANCH
+ fi
+
+ # Delete the branch that will be created later if it exists already.
+ delete_branch $BRANCHNAME
+}
+
+common_cleanup() {
+ restore_if_unset "CURRENT_BRANCH"
+ git checkout -f $CURRENT_BRANCH
+ [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
+ [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
+ # Clean up all temporary files.
+ rm -f "$PERSISTFILE_BASENAME"*
+}
+
+# These two functions take a variable-name prefix as their first argument.
+read_and_persist_version() {
+ for v in MAJOR_VERSION MINOR_VERSION BUILD_NUMBER PATCH_LEVEL; do
+ VARNAME="$1${v%%_*}"
+ VALUE=$(grep "#define $v" "$VERSION_FILE" | awk '{print $NF}')
+ eval "$VARNAME=\"$VALUE\""
+ persist "$VARNAME"
+ done
+}
+restore_version_if_unset() {
+ for v in MAJOR MINOR BUILD PATCH; do
+ restore_if_unset "$1$v"
+ done
+}
+
+upload_step() {
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Upload for code review."
+ echo -n "Please enter the email address of a V8 reviewer for your patch: "
+ read REVIEWER
+ git cl upload -r "$REVIEWER" --send-mail \
+ || die "'git cl upload' failed, please try again."
+ fi
+}
+
+wait_for_lgtm() {
+ echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
+change. (If you need to iterate on the patch or double-check that it's \
+sane, do so in another shell, but remember not to change the headline of \
+the uploaded CL.)"
+ unset ANSWER
+ while [ "$ANSWER" != "LGTM" ] ; do
+ [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
+ echo -n "> "
+ read ANSWER
+ done
+}
+
+# Takes a file containing the patch to apply as first argument.
+apply_patch() {
+ patch -p1 < "$1" | tee >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
+ [[ $? -eq 0 ]] || die "Applying the patch failed."
+}
+
+stage_files() {
+ # Stage added and modified files.
+ TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
+ for FILE in $TOUCHED_FILES ; do
+ git add "$FILE"
+ done
+ # Stage deleted files.
+ DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
+ | awk '{print $NF}')
+ for FILE in $DELETED_FILES ; do
+ git rm "$FILE"
+ done
+ rm -f "$TOUCHED_FILES_FILE"
+}
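A minimal sketch of how a workflow script in this directory is expected to consume common-includes.sh, following the "Important" comment and the step pattern above; the branch name and temp-file prefix are illustrative:

#!/bin/bash
# Illustrative consumer of common-includes.sh (run from a V8 checkout root).
BRANCHNAME=example-workflow
PERSISTFILE_BASENAME=/tmp/v8-example-workflow-tempfile

source $(dirname $BASH_SOURCE)/common-includes.sh

initial_environment_checks

if [ $START_STEP -le $CURRENT_STEP ] ; then
  echo ">>> Step $CURRENT_STEP: Preparation"
  common_prepare
fi

let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
  echo ">>> Step $CURRENT_STEP: Record the current version."
  read_and_persist_version
fi

common_cleanup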
diff --git a/deps/v8/tools/gen-postmortem-metadata.py b/deps/v8/tools/gen-postmortem-metadata.py
index 4aa8f5d5e5a..b9b162548f6 100644
--- a/deps/v8/tools/gen-postmortem-metadata.py
+++ b/deps/v8/tools/gen-postmortem-metadata.py
@@ -115,6 +115,9 @@ extras_accessors = [
'ConsString, second, String, kSecondOffset',
'ExternalString, resource, Object, kResourceOffset',
'SeqAsciiString, chars, char, kHeaderSize',
+ 'SharedFunctionInfo, code, Code, kCodeOffset',
+ 'Code, instruction_start, uintptr_t, kHeaderSize',
+ 'Code, instruction_size, int, kInstructionSizeOffset',
];
#
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index b244bc18b2a..b8769510c85 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -282,6 +282,8 @@
'../../src/cpu-profiler.h',
'../../src/data-flow.cc',
'../../src/data-flow.h',
+ '../../src/date.cc',
+ '../../src/date.h',
'../../src/dateparser.cc',
'../../src/dateparser.h',
'../../src/dateparser-inl.h',
@@ -342,6 +344,8 @@
'../../src/incremental-marking.h',
'../../src/inspector.cc',
'../../src/inspector.h',
+ '../../src/interface.cc',
+ '../../src/interface.h',
'../../src/interpreter-irregexp.cc',
'../../src/interpreter-irregexp.h',
'../../src/json-parser.h',
diff --git a/deps/v8/tools/jsmin.py b/deps/v8/tools/jsmin.py
index 646bf143a5a..e82f3d031e8 100644
--- a/deps/v8/tools/jsmin.py
+++ b/deps/v8/tools/jsmin.py
@@ -232,7 +232,9 @@ class JavaScriptMinifier(object):
# A regexp that matches a regexp literal surrounded by /slashes/.
# Don't allow a regexp to have a ) before the first ( since that's a
# syntax error and it's probably just two unrelated slashes.
- slash_quoted_regexp = r"/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
+ # Also don't allow it to come after anything that can only be the
+ # end of a primary expression.
+ slash_quoted_regexp = r"(?<![\w$'\")\]])/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
# Replace multiple spaces with a single space.
line = re.sub("|".join([double_quoted_string,
single_quoted_string,
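The effect of the added lookbehind can be exercised on its own; the snippet below only reproduces the new pattern for illustration (jsmin.py itself applies it as part of a combined re.sub alternation):

import re

slash_quoted_regexp = r"(?<![\w$'\")\]])/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"

# A genuine regexp literal is still recognized ...
print(re.search(slash_quoted_regexp, "return /ab+c/.test(s);").group())  # /ab+c/
# ... while a chain of divisions is not, because the first '/' follows an
# identifier character, i.e. something that can only end a primary expression.
print(re.search(slash_quoted_regexp, "area=r*r/2/Math.PI;"))             # None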
diff --git a/deps/v8/tools/merge-to-branch.sh b/deps/v8/tools/merge-to-branch.sh
index abe5fc24d44..484558cfad9 100644
--- a/deps/v8/tools/merge-to-branch.sh
+++ b/deps/v8/tools/merge-to-branch.sh
@@ -29,17 +29,14 @@
########## Global variable definitions
BRANCHNAME=prepare-merge
-VERSION_FILE="src/version.cc"
PERSISTFILE_BASENAME=/tmp/v8-merge-to-branch-tempfile
ALREADY_MERGING_SENTINEL_FILE="$PERSISTFILE_BASENAME-already-merging"
-CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
-PATCH_FILE="$PERSISTFILE_BASENAME-patch"
-COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
-COMMITMSG_FILE_COPY="$PERSISTFILE_BASENAME-commitmsg-copy"
-TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
-TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
-START_STEP=0
-CURRENT_STEP=0
+COMMIT_HASHES_FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES"
+TEMPORARY_PATCH_FILE="$PERSISTFILE_BASENAME-temporary-patch"
+
+########## Function definitions
+
+source $(dirname $BASH_SOURCE)/common-includes.sh
usage() {
cat << EOF
@@ -54,66 +51,12 @@ OPTIONS:
EOF
}
-########## Function definitions
-
-die() {
- [[ -n "$1" ]] && echo "Error: $1"
- echo "Exiting."
- exit 1
-}
-
-confirm() {
- echo -n "$1 [Y/n] "
- read ANSWER
- if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
- return 0
- else
- return 1
- fi
-}
-
-delete_branch() {
- local MATCH=$(git branch | grep $1 | awk '{print $NF}' )
- if [ "$MATCH" == "$1" ] ; then
- confirm "Branch $1 exists, do you want to delete it?"
- if [ $? -eq 0 ] ; then
- git branch -D $1 || die "Deleting branch '$1' failed."
- echo "Branch $1 deleted."
- else
- die "Can't continue. Please delete branch $1 and try again."
- fi
- fi
-}
-
-# Persist and restore variables to support canceling/resuming execution
-# of this script.
-persist() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- echo "${!VARNAME}" > $FILE
-}
-
-restore() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- local VALUE="$(cat $FILE)"
- eval "$VARNAME=\"$VALUE\""
-}
-
-restore_if_unset() {
- local VARNAME=$1
- [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
- [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
-}
-
persist_patch_commit_hashes() {
- local FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES"
- echo "PATCH_COMMIT_HASHES=( ${PATCH_COMMIT_HASHES[@]} )" > $FILE
+ echo "PATCH_COMMIT_HASHES=( ${PATCH_COMMIT_HASHES[@]} )" > $COMMIT_HASHES_FILE
}
restore_patch_commit_hashes() {
- local FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES"
- source $FILE
+ source $COMMIT_HASHES_FILE
}
restore_patch_commit_hashes_if_unset() {
@@ -149,42 +92,21 @@ shift $OPTION_COUNT
&& die "A merge is already in progress"
touch "$ALREADY_MERGING_SENTINEL_FILE"
-# Cancel if this is not a git checkout.
-[[ -d .git ]] \
- || die "This is not a git checkout, this script won't work for you."
-
-# Cancel if EDITOR is unset or not executable.
-[[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
- || die "Please set your EDITOR environment variable, you'll need it."
+initial_environment_checks
if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Preparation"
MERGE_TO_BRANCH=$1
- [[ -n "$MERGE_TO_BRANCH" ]] \
- || die "Please specify a branch to merge to"
+ [[ -n "$MERGE_TO_BRANCH" ]] || die "Please specify a branch to merge to"
shift
persist "MERGE_TO_BRANCH"
-
- echo ">>> Step $CURRENT_STEP: Preparation"
- # Check for a clean workdir.
- [[ -z "$(git status -s -uno)" ]] \
- || die "Workspace is not clean. Please commit or undo your changes."
-
- # Persist current branch.
- CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
- persist "CURRENT_BRANCH"
- delete_branch $BRANCHNAME
+ common_prepare
fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
- echo ">>> Step $CURRENT_STEP: Fetch unfetched revisions."
- git svn fetch || die "'git svn fetch' failed."
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
- restore_if_unset "MERGE_TO_BRANCH"
echo ">>> Step $CURRENT_STEP: Create a fresh branch for the patch."
+ restore_if_unset "MERGE_TO_BRANCH"
git checkout -b $BRANCHNAME svn/$MERGE_TO_BRANCH \
|| die "Creating branch $BRANCHNAME failed."
fi
@@ -204,24 +126,22 @@ revisions associated with the patches."
let current+=1
done
NEW_COMMIT_MSG="Merged$NEW_COMMIT_MSG into $MERGE_TO_BRANCH branch."
-
+
echo "$NEW_COMMIT_MSG" > $COMMITMSG_FILE
- echo >> $COMMITMSG_FILE
+ echo "" >> $COMMITMSG_FILE
for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
PATCH_MERGE_DESCRIPTION=$(git log -1 --format=%s $HASH)
echo "$PATCH_MERGE_DESCRIPTION" >> $COMMITMSG_FILE
- echo >> $COMMITMSG_FILE
+ echo "" >> $COMMITMSG_FILE
done
for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
BUG=$(git log -1 $HASH | grep "BUG=" | awk -F '=' '{print $NF}')
- if [ $BUG ] ; then
- if [ "$BUG_AGGREGATE" ] ; then
- BUG_AGGREGATE="$BUG_AGGREGATE,"
- fi
+ if [ -n "$BUG" ] ; then
+ [[ -n "$BUG_AGGREGATE" ]] && BUG_AGGREGATE="$BUG_AGGREGATE,"
BUG_AGGREGATE="$BUG_AGGREGATE$BUG"
fi
done
- if [ "$BUG_AGGREGATE" ] ; then
+ if [ -n "$BUG_AGGREGATE" ] ; then
echo "BUG=$BUG_AGGREGATE" >> $COMMITMSG_FILE
fi
persist "NEW_COMMIT_MSG"
@@ -230,37 +150,23 @@ fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Apply patches for selected revisions."
restore_if_unset "MERGE_TO_BRANCH"
restore_patch_commit_hashes_if_unset "PATCH_COMMIT_HASHES"
- echo "${PATCH_COMMIT_HASHES[@]}"
- echo ">>> Step $CURRENT_STEP: Apply patches for selected revisions."
rm -f "$TOUCHED_FILES_FILE"
for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
- git log -1 -p $HASH | patch -p1 \
- | tee >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
- [[ $? -eq 0 ]] \
- || die "Cannot apply the patch for $HASH to $MERGE_TO_BRANCH."
+ echo "Applying patch for $HASH to $MERGE_TO_BRANCH..."
+ git log -1 -p $HASH > "$TEMPORARY_PATCH_FILE"
+ apply_patch "$TEMPORARY_PATCH_FILE"
done
- # Stage added and modified files.
- TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
- for FILE in $TOUCHED_FILES ; do
- git add "$FILE"
- done
- # Stage deleted files.
- DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
- | awk '{print $NF}')
- for FILE in $DELETED_FILES ; do
- git rm "$FILE"
- done
- rm -f "$TOUCHED_FILES_FILE"
+ stage_files
fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
- echo ">>> Step $CURRENT_STEP: Prepare version.cc"
-# These version numbers are used again for creating the tag
- PATCH=$(grep "#define PATCH_LEVEL" "$VERSION_FILE" | awk '{print $NF}')
- persist "PATCH"
+ echo ">>> Step $CURRENT_STEP: Prepare $VERSION_FILE."
+ # These version numbers are used again for creating the tag
+ read_and_persist_version
fi
let CURRENT_STEP+=1
@@ -277,14 +183,7 @@ you're done, save the file and exit your EDITOR.)"
else
$EDITOR "$VERSION_FILE"
fi
- NEWMAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMAJOR"
- NEWMINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMINOR"
- NEWBUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWBUILD"
- NEWPATCH=$(grep "#define PATCH_LEVEL" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWPATCH"
+ read_and_persist_version "NEW"
fi
let CURRENT_STEP+=1
@@ -294,42 +193,26 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
|| die "'git commit -a' failed."
fi
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
- echo ">>> Step $CURRENT_STEP: Upload for code review."
- echo -n "Please enter the email address of a V8 reviewer for your patch: "
- read REVIEWER
- git cl upload -r "$REVIEWER" --send-mail \
- || die "'git cl upload' failed, please try again."
-fi
+upload_step
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to the repository."
restore_if_unset "MERGE_TO_BRANCH"
git checkout $BRANCHNAME \
- || die "cannot ensure that the current branch is $BRANCHNAME"
- echo ">>> Step $CURRENT_STEP: Commit to the repository."
- echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
-change. (If you need to iterate on the patch or double check that it's \
-sane, do so in another shell, but remember to not change the headline of \
-the uploaded CL."
- unset ANSWER
- while [ "$ANSWER" != "LGTM" ] ; do
- [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
- echo -n "> "
- read ANSWER
- done
+ || die "cannot ensure that the current branch is $BRANCHNAME"
+ wait_for_lgtm
git cl dcommit || die "failed to commit to $MERGE_TO_BRANCH"
fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Determine svn commit revision"
restore_if_unset "NEW_COMMIT_MSG"
restore_if_unset "MERGE_TO_BRANCH"
- echo ">>> Step $CURRENT_STEP: Determine svn commit revision"
git svn fetch || die "'git svn fetch' failed."
COMMIT_HASH=$(git log -1 --format=%H --grep="$NEW_COMMIT_MSG" \
-svn/$MERGE_TO_BRANCH)
+ svn/$MERGE_TO_BRANCH)
[[ -z "$COMMIT_HASH" ]] && die "Unable to map git commit to svn revision"
SVN_REVISION=$(git svn find-rev $COMMIT_HASH)
echo "subversion revision number is r$SVN_REVISION"
@@ -338,24 +221,23 @@ fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
- restore_if_unset "SVN_REVISION"
- restore_if_unset "NEWMAJOR"
- restore_if_unset "NEWMINOR"
- restore_if_unset "NEWBUILD"
- restore_if_unset "NEWPATCH"
echo ">>> Step $CURRENT_STEP: Create the tag."
+ restore_if_unset "SVN_REVISION"
+ restore_version_if_unset "NEW"
echo "Creating tag svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
+ if [ "$MERGE_TO_BRANCH" == "trunk" ] ; then
+ TO_URL="$MERGE_TO_BRANCH"
+ else
+ TO_URL="branches/$MERGE_TO_BRANCH"
+ fi
svn copy -r $SVN_REVISION \
-https://v8.googlecode.com/svn/branches/$MERGE_TO_BRANCH \
-https://v8.googlecode.com/svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH \
--m "Tagging version $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
+ https://v8.googlecode.com/svn/$TO_URL \
+ https://v8.googlecode.com/svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH \
+ -m "Tagging version $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
fi
let CURRENT_STEP+=1
if [ $START_STEP -le $CURRENT_STEP ] ; then
echo ">>> Step $CURRENT_STEP: Cleanup."
- restore_if_unset "CURRENT_BRANCH"
- git checkout -f $CURRENT_BRANCH
- [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
- rm -f "$ALREADY_MERGING_SENTINEL_FILE"
+ common_cleanup
fi
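For readers following the merge script, read_and_persist_version "NEW" and restore_version_if_unset "NEW" are the new common-includes.sh helpers; given that prefix argument they expand, per the loops defined above, to roughly the per-variable lines they replace (sketch only):

# read_and_persist_version "NEW" is roughly equivalent to:
NEWMAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
persist "NEWMAJOR"
NEWMINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
persist "NEWMINOR"
NEWBUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
persist "NEWBUILD"
NEWPATCH=$(grep "#define PATCH_LEVEL" "$VERSION_FILE" | awk '{print $NF}')
persist "NEWPATCH"

# ... and restore_version_if_unset "NEW" to:
restore_if_unset "NEWMAJOR"
restore_if_unset "NEWMINOR"
restore_if_unset "NEWBUILD"
restore_if_unset "NEWPATCH"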
diff --git a/deps/v8/tools/push-to-trunk.sh b/deps/v8/tools/push-to-trunk.sh
index 302c5f299b5..c1f8e78594d 100755
--- a/deps/v8/tools/push-to-trunk.sh
+++ b/deps/v8/tools/push-to-trunk.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -31,19 +31,13 @@
BRANCHNAME=prepare-push
TRUNKBRANCH=trunk-push
-TEMP_BRANCH=v8-push-to-trunk-script-temporary-branch
-VERSION_FILE="src/version.cc"
PERSISTFILE_BASENAME=/tmp/v8-push-to-trunk-tempfile
-CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
-PATCH_FILE="$PERSISTFILE_BASENAME-patch"
-COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
-TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
-TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
-STEP=0
-
+CHROME_PATH=
########## Function definitions
+source $(dirname $BASH_SOURCE)/common-includes.sh
+
usage() {
cat << EOF
usage: $0 OPTIONS
@@ -55,71 +49,24 @@ OPTIONS:
-h Show this message
-s Specify the step where to start work. Default: 0.
-l Manually specify the git commit ID of the last push to trunk.
+ -c Specify the path to your Chromium src/ directory to automate the
+ V8 roll.
EOF
}
-die() {
- [[ -n "$1" ]] && echo "Error: $1"
- echo "Exiting."
- exit 1
-}
-
-confirm() {
- echo -n "$1 [Y/n] "
- read ANSWER
- if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
- return 0
- else
- return 1
- fi
-}
-
-delete_branch() {
- local MATCH=$(git branch | grep $1 | awk '{print $NF}' )
- if [ "$MATCH" == "$1" ] ; then
- confirm "Branch $1 exists, do you want to delete it?"
- if [ $? -eq 0 ] ; then
- git branch -D $1 || die "Deleting branch '$1' failed."
- echo "Branch $1 deleted."
- else
- die "Can't continue. Please delete branch $1 and try again."
- fi
- fi
-}
-
-# Persist and restore variables to support canceling/resuming execution
-# of this script.
-persist() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- echo "${!VARNAME}" > $FILE
-}
-
-restore() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- local VALUE="$(cat $FILE)"
- eval "$VARNAME=\"$VALUE\""
-}
-
-restore_if_unset() {
- local VARNAME=$1
- [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
- [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
-}
-
-
########## Option parsing
-while getopts ":hs:l:" OPTION ; do
+while getopts ":hs:l:c:" OPTION ; do
case $OPTION in
h) usage
exit 0
;;
- s) STEP=$OPTARG
+ s) START_STEP=$OPTARG
;;
l) LASTPUSH=$OPTARG
;;
+ c) CHROME_PATH=$OPTARG
+ ;;
?) echo "Illegal option: -$OPTARG"
usage
exit 1
@@ -130,46 +77,24 @@ done
########## Regular workflow
-# Cancel if this is not a git checkout.
-[[ -d .git ]] \
- || die "This is not a git checkout, this script won't work for you."
-
-# Cancel if EDITOR is unset or not executable.
-[[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
- || die "Please set your EDITOR environment variable, you'll need it."
-
-if [ $STEP -le 0 ] ; then
- echo ">>> Step 0: Preparation"
- # Check for a clean workdir.
- [[ -z "$(git status -s -uno)" ]] \
- || die "Workspace is not clean. Please commit or undo your changes."
-
- # Persist current branch.
- CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
- persist "CURRENT_BRANCH"
- # Get ahold of a safe temporary branch and check it out.
- if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
- delete_branch $TEMP_BRANCH
- git checkout -b $TEMP_BRANCH
- fi
- # Delete branches if they exist.
- delete_branch $BRANCHNAME
- delete_branch $TRUNKBRANCH
-fi
+initial_environment_checks
-if [ $STEP -le 1 ] ; then
- echo ">>> Step 1: Fetch unfetched revisions."
- git svn fetch || die "'git svn fetch' failed."
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Preparation"
+ common_prepare
+ delete_branch $TRUNKBRANCH
fi
-if [ $STEP -le 2 ] ; then
- echo ">>> Step 2: Create a fresh branch."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create a fresh branch."
git checkout -b $BRANCHNAME svn/bleeding_edge \
|| die "Creating branch $BRANCHNAME failed."
fi
-if [ $STEP -le 3 ] ; then
- echo ">>> Step 3: Detect commit ID of last push to trunk."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Detect commit ID of last push to trunk."
[[ -n "$LASTPUSH" ]] || LASTPUSH=$(git log -1 --format=%H ChangeLog)
LOOP=1
while [ $LOOP -eq 1 ] ; do
@@ -185,15 +110,11 @@ if [ $STEP -le 3 ] ; then
persist "LASTPUSH"
fi
-if [ $STEP -le 4 ] ; then
- echo ">>> Step 4: Prepare raw ChangeLog entry."
-# These version numbers are used again later for the trunk commit.
- MAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "MAJOR"
- MINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "MINOR"
- BUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
- persist "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Prepare raw ChangeLog entry."
+ # These version numbers are used again later for the trunk commit.
+ read_and_persist_version
DATE=$(date +%Y-%m-%d)
persist "DATE"
@@ -206,7 +127,7 @@ if [ $STEP -le 4 ] ; then
# Grep for "BUG=xxxx" lines in the commit message and convert them to
# "(issue xxxx)".
git log -1 $commit --format="%B" \
- | grep "^BUG=" | grep -v "BUG=$" \
+ | grep "^BUG=" | grep -v "BUG=$" | grep -v "BUG=none$" \
| sed -e 's/^/ /' \
| sed -e 's/BUG=v8:\(.*\)$/(issue \1)/' \
| sed -e 's/BUG=\(.*\)$/(Chromium issue \1)/' \
@@ -215,10 +136,13 @@ if [ $STEP -le 4 ] ; then
git log -1 $commit --format="%w(80,8,8)(%an)" >> "$CHANGELOG_ENTRY_FILE"
echo "" >> "$CHANGELOG_ENTRY_FILE"
done
+ echo " Performance and stability improvements on all platforms." \
+ >> "$CHANGELOG_ENTRY_FILE"
fi
-if [ $STEP -le 5 ] ; then
- echo ">>> Step 5: Edit ChangeLog entry."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Edit ChangeLog entry."
echo -n "Please press <Return> to have your EDITOR open the ChangeLog entry, \
then edit its contents to your liking. When you're done, save the file and \
exit your EDITOR. "
@@ -241,8 +165,9 @@ exit your EDITOR. "
mv "$NEWCHANGELOG" ChangeLog
fi
-if [ $STEP -le 6 ] ; then
- echo ">>> Step 6: Increment version number."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Increment version number."
restore_if_unset "BUILD"
NEWBUILD=$(($BUILD + 1))
confirm "Automatically increment BUILD_NUMBER? (Saying 'n' will fire up \
@@ -254,19 +179,13 @@ you're done, save the file and exit your EDITOR.)"
else
$EDITOR "$VERSION_FILE"
fi
- NEWMAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMAJOR"
- NEWMINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMINOR"
- NEWBUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWBUILD"
+ read_and_persist_version "NEW"
fi
-if [ $STEP -le 7 ] ; then
- echo ">>> Step 7: Commit to local branch."
- restore_if_unset "NEWMAJOR"
- restore_if_unset "NEWMINOR"
- restore_if_unset "NEWBUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to local branch."
+ restore_version_if_unset "NEW"
PREPARE_COMMIT_MSG="Prepare push to trunk. \
Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD."
persist "PREPARE_COMMIT_MSG"
@@ -274,25 +193,12 @@ Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD."
|| die "'git commit -a' failed."
fi
-if [ $STEP -le 8 ] ; then
- echo ">>> Step 8: Upload for code review."
- echo -n "Please enter the email address of a V8 reviewer for your patch: "
- read REVIEWER
- git cl upload -r $REVIEWER --send-mail \
- || die "'git cl upload' failed, please try again."
-fi
+upload_step
-if [ $STEP -le 9 ] ; then
- echo ">>> Step 9: Commit to the repository."
- echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
-change. (If you need to iterate on the patch, do so in another shell. Do not \
-modify the existing local commit's commit message.)"
- unset ANSWER
- while [ "$ANSWER" != "LGTM" ] ; do
- [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
- echo -n "> "
- read ANSWER
- done
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to the repository."
+ wait_for_lgtm
# Re-read the ChangeLog entry (to pick up possible changes).
cat ChangeLog | awk --posix '{
if ($0 ~ /^[0-9]{4}-[0-9]{2}-[0-9]{2}:/) {
@@ -307,9 +213,10 @@ modify the existing local commit's commit message.)"
git cl dcommit || die "'git cl dcommit' failed, please try again."
fi
-if [ $STEP -le 10 ] ; then
- echo ">>> Step 10: Fetch straggler commits that sneaked in between \
-steps 1 and 9."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Fetch straggler commits that sneaked in \
+since this script was started."
git svn fetch || die "'git svn fetch' failed."
git checkout svn/bleeding_edge
restore_if_unset "PREPARE_COMMIT_MSG"
@@ -317,8 +224,9 @@ steps 1 and 9."
persist "PREPARE_COMMIT_HASH"
fi
-if [ $STEP -le 11 ] ; then
- echo ">>> Step 11: Squash commits into one."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Squash commits into one."
# Instead of relying on "git rebase -i", we'll just create a diff, because
# that's easier to automate.
restore_if_unset "PREPARE_COMMIT_HASH"
@@ -344,54 +252,29 @@ if [ $STEP -le 11 ] ; then
need_space = 1;
}
}' > "$COMMITMSG_FILE" || die "Commit message editing failed."
- LOOP=1
- while [ $LOOP -eq 1 ] ; do
- echo "This is the trunk commit message:"
- echo "--------------------"
- cat "$COMMITMSG_FILE"
- echo -e "\n--------------------"
- confirm "Does this look good to you? (Saying 'n' will fire up your \
-EDITOR so you can change the commit message. When you're done, save the \
-file and exit your EDITOR.)"
- if [ $? -eq 0 ] ; then
- LOOP=0
- else
- $EDITOR "$COMMITMSG_FILE"
- fi
- done
rm -f "$CHANGELOG_ENTRY_FILE"
fi
-if [ $STEP -le 12 ] ; then
- echo ">>> Step 12: Create a new branch from trunk."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create a new branch from trunk."
git checkout -b $TRUNKBRANCH svn/trunk \
|| die "Checking out a new branch '$TRUNKBRANCH' failed."
fi
-if [ $STEP -le 13 ] ; then
- echo ">>> Step 13: Apply squashed changes."
- patch -p1 < "$PATCH_FILE" | tee >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
- [[ $? -eq 0 ]] || die "Applying the patch to trunk failed."
- # Stage added and modified files.
- TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
- for FILE in $TOUCHED_FILES ; do
- git add "$FILE"
- done
- # Stage deleted files.
- DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
- | awk '{print $NF}')
- for FILE in $DELETED_FILES ; do
- git rm "$FILE"
- done
- rm -f "$PATCH_FILE"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Apply squashed changes."
rm -f "$TOUCHED_FILES_FILE"
+ apply_patch "$PATCH_FILE"
+ stage_files
+ rm -f "$PATCH_FILE"
fi
-if [ $STEP -le 14 ] ; then
- echo ">>> Step 14: Set correct version for trunk."
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Set correct version for trunk."
+ restore_version_if_unset
sed -e "/#define MAJOR_VERSION/s/[0-9]*$/$MAJOR/" \
-e "/#define MINOR_VERSION/s/[0-9]*$/$MINOR/" \
-e "/#define BUILD_NUMBER/s/[0-9]*$/$BUILD/" \
@@ -400,57 +283,107 @@ if [ $STEP -le 14 ] ; then
-i "$VERSION_FILE" || die "Patching $VERSION_FILE failed."
fi
-if [ $STEP -le 15 ] ; then
- echo ">>> Step 15: Commit to local trunk branch."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to local trunk branch."
git add "$VERSION_FILE"
git commit -F "$COMMITMSG_FILE" || die "'git commit' failed."
rm -f "$COMMITMSG_FILE"
fi
-if [ $STEP -le 16 ] ; then
- echo ">>> Step 16: Sanity check."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Sanity check."
confirm "Please check if your local checkout is sane: Inspect $VERSION_FILE, \
compile, run tests. Do you want to commit this new trunk revision to the \
repository?"
[[ $? -eq 0 ]] || die "Execution canceled."
fi
-if [ $STEP -le 17 ] ; then
- echo ">>> Step 17. Commit to SVN."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to SVN."
git svn dcommit | tee >(grep -E "^Committed r[0-9]+" \
| sed -e 's/^Committed r\([0-9]\+\)/\1/' \
> "$TRUNK_REVISION_FILE") \
|| die "'git svn dcommit' failed."
+ TRUNK_REVISION=$(cat "$TRUNK_REVISION_FILE")
+ persist "TRUNK_REVISION"
+ rm -f "$TRUNK_REVISION_FILE"
fi
-if [ $STEP -le 18 ] ; then
- echo ">>> Step 18: Tag the new revision."
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Tag the new revision."
+ restore_version_if_unset
git svn tag $MAJOR.$MINOR.$BUILD -m "Tagging version $MAJOR.$MINOR.$BUILD" \
|| die "'git svn tag' failed."
fi
-if [ $STEP -le 19 ] ; then
- echo ">>> Step 19: Cleanup."
- restore_if_unset "CURRENT_BRANCH"
- git checkout -f $CURRENT_BRANCH
- [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
- [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
- [[ "$TRUNKBRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TRUNKBRANCH
-fi
+if [ -n "$CHROME_PATH" ] ; then
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Switch to Chromium checkout."
+ V8_PATH=$(pwd)
+ persist "V8_PATH"
+ cd "$CHROME_PATH"
+ initial_environment_checks
+ # Check for a clean workdir.
+ [[ -z "$(git status -s -uno)" ]] \
+ || die "Workspace is not clean. Please commit or undo your changes."
+ fi
-if [ $STEP -le 20 ] ; then
- echo ">>> Step 20: Done!"
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
- echo "Congratulations, you have successfully created the trunk revision \
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Update the checkout and create a new branch."
+ git checkout master || die "'git checkout master' failed."
+ git pull || die "'git pull' failed, please try again."
+ restore_if_unset "TRUNK_REVISION"
+ git checkout -b "v8-roll-$TRUNK_REVISION" \
+ || die "Failed to checkout a new branch."
+ fi
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create and upload CL."
+ # Patch DEPS file.
+ sed -e "/\"v8_revision\": /s/\"[0-9]+\"/\"$TRUNK_REVISION\"/" \
+ -i DEPS
+ restore_version_if_unset
+ echo -n "Please enter the email address of a reviewer for the roll CL: "
+ read REVIEWER
+ git commit -am "Update V8 to version $MAJOR.$MINOR.$BUILD.
+
+TBR=$REVIEWER" || die "'git commit' failed."
+ git cl upload --send-mail --use-commit-queue \
+ || die "'git cl upload' failed, please try again."
+ echo "CL uploaded and sent to commit queue."
+ fi
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Returning to V8 checkout."
+ restore_if_unset "V8_PATH"
+ cd "$V8_PATH"
+ fi
+fi # if [ -n "$CHROME_PATH" ]
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Done!"
+ restore_version_if_unset
+ restore_if_unset "TRUNK_REVISION"
+ if [ -n "$CHROME_PATH" ] ; then
+ echo "Congratulations, you have successfully created the trunk revision \
+$MAJOR.$MINOR.$BUILD and rolled it into Chromium. Please don't forget to \
+update the v8rel spreadsheet:"
+ else
+ echo "Congratulations, you have successfully created the trunk revision \
$MAJOR.$MINOR.$BUILD. Please don't forget to roll this new version into \
Chromium, and to update the v8rel spreadsheet:"
- TRUNK_REVISION=$(cat "$TRUNK_REVISION_FILE")
+ fi
echo -e "$MAJOR.$MINOR.$BUILD\ttrunk\t$TRUNK_REVISION"
- # Clean up all temporary files.
- rm -f "$PERSISTFILE_BASENAME"*
+ common_cleanup
+ [[ "$TRUNKBRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TRUNKBRANCH
fi
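Typical invocations of the updated script, run from the root of a V8 checkout, with placeholder paths and values; -c is the new Chromium-roll automation:

# Push to trunk only:
tools/push-to-trunk.sh

# Push to trunk and roll the new version into a Chromium checkout:
tools/push-to-trunk.sh -c /path/to/chromium/src

# Resume an interrupted run at a given step, with a known last-push commit:
tools/push-to-trunk.sh -s 7 -l 0123456789abcdef0123456789abcdef01234567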
diff --git a/deps/v8/tools/test-wrapper-gypbuild.py b/deps/v8/tools/test-wrapper-gypbuild.py
index e9984d76c1f..465ca88c3d9 100755
--- a/deps/v8/tools/test-wrapper-gypbuild.py
+++ b/deps/v8/tools/test-wrapper-gypbuild.py
@@ -73,6 +73,8 @@ def BuildOptions():
choices=PROGRESS_INDICATORS, default="mono")
result.add_option("--report", help="Print a summary of the tests to be run",
default=False, action="store_true")
+ result.add_option("--download-data", help="Download missing test suite data",
+ default=False, action="store_true")
result.add_option("-s", "--suite", help="A test suite",
default=[], action="append")
result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -161,6 +163,8 @@ def PassOnOptions(options):
result += ['--progress=' + options.progress]
if options.report:
result += ['--report']
+ if options.download_data:
+ result += ['--download-data']
if options.suite != []:
for suite in options.suite:
result += ['--suite=../../test/' + suite]
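The wrapper only forwards the new flag to tools/test.py (see PassOnOptions above); an illustrative invocation, assuming a suite whose configuration implements DownloadData, such as test262:

# Download missing suite data, then run the suite (suite name illustrative):
tools/test-wrapper-gypbuild.py --download-data -s test262 --report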
diff --git a/deps/v8/tools/test.py b/deps/v8/tools/test.py
index c8f9da52ecd..951afcc85da 100755
--- a/deps/v8/tools/test.py
+++ b/deps/v8/tools/test.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2008 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -631,9 +631,15 @@ class TestRepository(TestSuite):
def GetBuildRequirements(self, path, context):
return self.GetConfiguration(context).GetBuildRequirements()
+ def DownloadData(self, context):
+ config = self.GetConfiguration(context)
+ if 'DownloadData' in dir(config):
+ config.DownloadData()
+
def AddTestsToList(self, result, current_path, path, context, mode):
- for v in self.GetConfiguration(context).VariantFlags():
- tests = self.GetConfiguration(context).ListTests(current_path, path, mode, v)
+ config = self.GetConfiguration(context)
+ for v in config.VariantFlags():
+ tests = config.ListTests(current_path, path, mode, v)
for t in tests: t.variant_flags = v
result += tests
@@ -655,6 +661,12 @@ class LiteralTestSuite(TestSuite):
result += test.GetBuildRequirements(rest, context)
return result
+ def DownloadData(self, path, context):
+ (name, rest) = CarCdr(path)
+ for test in self.tests:
+ if not name or name.match(test.GetName()):
+ test.DownloadData(context)
+
def ListTests(self, current_path, path, context, mode, variant_flags):
(name, rest) = CarCdr(path)
result = [ ]
@@ -850,6 +862,9 @@ class Operation(Expression):
elif self.op == '==':
inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
return not inter.IsEmpty()
+ elif self.op == '!=':
+ inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
+ return inter.IsEmpty()
else:
assert self.op == '&&'
return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)
@@ -932,6 +947,9 @@ class Tokenizer(object):
elif self.Current(2) == '==':
self.AddToken('==')
self.Advance(2)
+ elif self.Current(2) == '!=':
+ self.AddToken('!=')
+ self.Advance(2)
else:
return None
return self.tokens
@@ -984,7 +1002,7 @@ def ParseAtomicExpression(scan):
return None
-BINARIES = ['==']
+BINARIES = ['==', '!=']
def ParseOperatorExpression(scan):
left = ParseAtomicExpression(scan)
if not left: return None
@@ -1006,7 +1024,7 @@ def ParseConditionalExpression(scan):
right = ParseOperatorExpression(scan)
if not right:
return None
- left= Operation(left, 'if', right)
+ left = Operation(left, 'if', right)
return left
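The new '!=' operator mirrors '==' on outcome sets: '==' evaluates to true when the two sides' outcome sets intersect, '!=' when they do not. A simplified sketch of that semantics, with set literals standing in for the GetOutcomes results:

def evaluate(op, left_outcomes, right_outcomes):
    # Simplified stand-in for Operation.Evaluate over outcome sets.
    inter = left_outcomes & right_outcomes
    if op == '==':
        return bool(inter)
    if op == '!=':
        return not inter
    raise ValueError("unsupported operator: " + op)

print(evaluate('!=', {'debug'}, {'release'}))  # True
print(evaluate('!=', {'debug'}, {'debug'}))    # False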
@@ -1186,6 +1204,8 @@ def BuildOptions():
default='scons')
result.add_option("--report", help="Print a summary of the tests to be run",
default=False, action="store_true")
+ result.add_option("--download-data", help="Download missing test suite data",
+ default=False, action="store_true")
result.add_option("-s", "--suite", help="A test suite",
default=[], action="append")
result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -1456,6 +1476,11 @@ def Main():
root.GetTestStatus(context, sections, defs)
config = Configuration(sections, defs)
+ # Download missing test suite data if requested.
+ if options.download_data:
+ for path in paths:
+ root.DownloadData(path, context)
+
# List the tests
all_cases = [ ]
all_unused = [ ]