diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000000000..859c4d3575daaa --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +exclude=.git,deps,lib,src,test/fixtures,tools/*_macros.py,tools/gyp,tools/inspector_protocol,tools/jinja2,tools/markupsafe,tools/pip,tools/v8_gypfiles/broken +select=E901,E999,F821,F822,F823 diff --git a/.gitignore b/.gitignore index 05e859c3066802..41b7beb222d5af 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ !.editorconfig !.eslintignore !.eslintrc.js +!.flake8 !.gitattributes !.github !.gitignore @@ -123,8 +124,6 @@ deps/uv/docs/src/guide/ # do not override V8's .gitignore !deps/v8/** # ignore VS compiler output unhandled by V8's .gitignore -deps/v8/gypfiles/Debug/ -deps/v8/gypfiles/Release/ deps/v8/third_party/eu-strip/ .DS_Store diff --git a/Makefile b/Makefile index 062d70b844542b..346f355ea7a298 100644 --- a/Makefile +++ b/Makefile @@ -123,10 +123,10 @@ with-code-cache: test-code-cache: with-code-cache $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) code-cache -out/Makefile: common.gypi deps/uv/uv.gyp deps/http_parser/http_parser.gyp \ - deps/zlib/zlib.gyp deps/v8/gypfiles/toolchain.gypi \ - deps/v8/gypfiles/features.gypi deps/v8/gypfiles/v8.gyp node.gyp \ - config.gypi +out/Makefile: config.gypi common.gypi node.gyp \ + deps/uv/uv.gyp deps/http_parser/http_parser.gyp deps/zlib/zlib.gyp \ + tools/v8_gypfiles/toolchain.gypi tools/v8_gypfiles/features.gypi \ + tools/v8_gypfiles/inspector.gypi tools/v8_gypfiles/v8.gyp $(PYTHON) tools/gyp_node.py -f make config.gypi: configure configure.py @@ -1305,9 +1305,7 @@ ifneq ("","$(wildcard tools/pip/site-packages)") # Lints the Python code with flake8. # Flag the build if there are Python syntax errors or undefined names lint-py: - PYTHONPATH=tools/pip $(PYTHON) -m flake8 . \ - --count --show-source --statistics --select=E901,E999,F821,F822,F823 \ - --exclude=.git,deps,lib,src,test/fixtures,tools/*_macros.py,tools/gyp,tools/inspector_protocol,tools/jinja2,tools/markupsafe,tools/pip + PYTHONPATH=tools/pip $(PYTHON) -m flake8 --count --show-source --statistics . else lint-py: @echo "Python linting with flake8 is not avalible" diff --git a/common.gypi b/common.gypi index a44de87bc20df8..23ac7205baa36b 100644 --- a/common.gypi +++ b/common.gypi @@ -37,12 +37,12 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.7', + 'v8_embedder_string': '-node.13', ##### V8 defaults for Node.js ##### # Old time default, now explicitly stated. - 'v8_use_snapshot': 'true', + 'v8_use_snapshot': 1, # Turn on SipHash for hash seed generation, addresses HashWick 'v8_use_siphash': 'true', @@ -58,21 +58,12 @@ # Enable disassembler for `--print-code` v8 options 'v8_enable_disassembler': 1, - # Don't bake anything extra into the snapshot. - 'v8_use_external_startup_data': 0, - # https://github.com/nodejs/node/pull/22920/files#r222779926 'v8_enable_handle_zapping': 0, # Disable V8 untrusted code mitigations. 
# See https://github.com/v8/v8/wiki/Untrusted-code-mitigations - 'v8_untrusted_code_mitigations': 'false', - - # Still WIP in V8 7.1 - 'v8_enable_pointer_compression': 'false', - - # New in V8 7.1 - 'v8_enable_embedded_builtins': 'true', + 'v8_untrusted_code_mitigations': 0, # This is more of a V8 dev setting # https://github.com/nodejs/node/pull/22920/files#r222779926 @@ -89,39 +80,24 @@ }], ['GENERATOR=="ninja"', { 'obj_dir': '<(PRODUCT_DIR)/obj', - 'conditions': [ - [ 'build_v8_with_gn=="true"', { - 'v8_base': '<(PRODUCT_DIR)/obj/deps/v8/gypfiles/v8_monolith.gen/gn/obj/libv8_monolith.a', - }, { - 'v8_base': '<(PRODUCT_DIR)/obj/deps/v8/gypfiles/libv8_base.a', - }], - ] + 'v8_base': '<(PRODUCT_DIR)/obj/tools/v8_gypfiles/libv8_base.a', }, { 'obj_dir%': '<(PRODUCT_DIR)/obj.target', - 'v8_base': '<(PRODUCT_DIR)/obj.target/deps/v8/gypfiles/libv8_base.a', + 'v8_base': '<(PRODUCT_DIR)/obj.target/tools/v8_gypfiles/libv8_base.a', }], ['OS == "win"', { 'os_posix': 0, - 'v8_postmortem_support%': 'false', + 'v8_postmortem_support%': 0, 'obj_dir': '<(PRODUCT_DIR)/obj', 'v8_base': '<(PRODUCT_DIR)/lib/v8_libbase.lib', }, { 'os_posix': 1, - 'v8_postmortem_support%': 'true', + 'v8_postmortem_support%': 1, }], ['OS == "mac"', { 'obj_dir%': '<(PRODUCT_DIR)/obj.target', 'v8_base': '<(PRODUCT_DIR)/libv8_base.a', }], - ['build_v8_with_gn == "true"', { - 'conditions': [ - ['GENERATOR == "ninja"', { - 'v8_base': '<(PRODUCT_DIR)/obj/deps/v8/gypfiles/v8_monolith.gen/gn/obj/libv8_monolith.a', - }, { - 'v8_base': '<(PRODUCT_DIR)/obj.target/v8_monolith/geni/gn/obj/libv8_monolith.a', - }], - ], - }], ['openssl_fips != ""', { 'openssl_product': '<(STATIC_LIB_PREFIX)crypto<(STATIC_LIB_SUFFIX)', }, { diff --git a/configure.py b/configure.py index 9539c1760c4909..6d184f56555e30 100755 --- a/configure.py +++ b/configure.py @@ -1124,8 +1124,8 @@ def configure_v8(o): o['variables']['v8_optimized_debug'] = 0 if options.v8_non_optimized_debug else 1 o['variables']['v8_random_seed'] = 0 # Use a random seed for hash tables. o['variables']['v8_promise_internal_field_count'] = 1 # Add internal field to promises for async hooks. - o['variables']['v8_use_snapshot'] = 'false' if options.without_snapshot else 'true' - o['variables']['v8_use_siphash'] = 'false' if options.without_siphash else 'true' + o['variables']['v8_use_siphash'] = 0 if options.without_siphash else 1 + o['variables']['v8_use_snapshot'] = 0 if options.without_snapshot else 1 o['variables']['v8_trace_maps'] = 1 if options.trace_maps else 0 o['variables']['node_use_v8_platform'] = b(not options.without_v8_platform) o['variables']['node_use_bundled_v8'] = b(not options.without_bundled_v8) diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore index 6cf6ab4e9199e8..7fc0f66b373003 100644 --- a/deps/v8/.gitignore +++ b/deps/v8/.gitignore @@ -23,6 +23,7 @@ *~ .#* .*.sw? 
+.ccls-cache .cpplint-cache .cproject .d8_history diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index ecf0e5d1fbebe9..a32b13c66993e6 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -52,6 +52,7 @@ Andreas Anyuru Andrew Paprocki Andrei Kashcha Anna Henningsen +Anton Bikineev Bangfu Tao Daniel Shelton Ben Coe @@ -84,6 +85,7 @@ Geoffrey Garside Gergely Nagy Gus Caplan Gwang Yoon Hwang +Hannu Trey Henrique Ferreiro Hirofumi Mako Honggyu Kim @@ -164,6 +166,7 @@ Tiancheng "Timothy" Gu Tobias Burnus Tobias Nießen Ujjwal Sharma +Vadim Gorbachev Victor Costan Vlad Burlik Vladimir Krivosheev diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn index 16e0b60ca78441..7a50b192f0a9eb 100644 --- a/deps/v8/BUILD.gn +++ b/deps/v8/BUILD.gn @@ -98,10 +98,6 @@ declare_args() { v8_enable_pointer_compression = false v8_enable_31bit_smis_on_64bit_arch = false - # Interpreted regexp engine exists as platform-independent alternative - # based where the regular expression is compiled to a bytecode. - v8_interpreted_regexp = false - # Sets -dOBJECT_PRINT. v8_enable_object_print = "" @@ -175,13 +171,11 @@ declare_args() { # setting the "check_v8_header_includes" gclient variable to run a # specific hook). v8_check_header_includes = false - - # We reuse the snapshot toolchain for building torque and other generators to - # avoid building v8_libbase on the host more than once. On mips with big - # endian, the snapshot toolchain is the target toolchain and, hence, can't be - # used. } +# We reuse the snapshot toolchain for building torque and other generators to +# avoid building v8_libbase on the host more than once. On mips with big endian, +# the snapshot toolchain is the target toolchain and, hence, can't be used. v8_generator_toolchain = v8_snapshot_toolchain if (host_cpu == "x64" && (v8_current_cpu == "mips" || v8_current_cpu == "mips64")) { @@ -226,28 +220,29 @@ v8_toolset_for_shell = "host" ############################################################################### # Configurations # -config("internal_config") { + +config("internal_config_base") { visibility = [ ":*" ] # Only targets in this file can depend on this. include_dirs = [ ".", "$target_gen_dir", ] - - configs = [ "//build/config/compiler:wexit_time_destructors" ] - - if (is_component_build) { - defines = [ "BUILDING_V8_SHARED" ] - } } -config("internal_config_base") { +config("internal_config") { + defines = [] visibility = [ ":*" ] # Only targets in this file can depend on this. - include_dirs = [ - ".", - "$target_gen_dir", + configs = [ + "//build/config/compiler:wexit_time_destructors", + ":internal_config_base", + ":v8_header_features", ] + + if (is_component_build) { + defines += [ "BUILDING_V8_SHARED" ] + } } # This config should be applied to code using the libplatform. @@ -278,18 +273,10 @@ config("libsampler_config") { # itself. config("external_config") { defines = [] + configs = [ ":v8_header_features" ] if (is_component_build) { defines += [ "USING_V8_SHARED" ] } - if (v8_enable_v8_checks) { - defines += [ "V8_ENABLE_CHECKS" ] # Used in "include/v8.h". - } - if (v8_deprecation_warnings) { - defines += [ "V8_DEPRECATION_WARNINGS" ] - } - if (v8_imminent_deprecation_warnings) { - defines += [ "V8_IMMINENT_DEPRECATION_WARNINGS" ] - } include_dirs = [ "include", "$target_gen_dir/include", @@ -304,11 +291,39 @@ config("external_startup_data") { } } +# Put defines that are used in public headers here; public headers are +# defined in "v8_headers" and are included by embedders of V8. 
+config("v8_header_features") { + visibility = [ ":*" ] + + defines = [] + + if (v8_enable_v8_checks) { + defines += [ "V8_ENABLE_CHECKS" ] # Used in "include/v8.h". + } + if (v8_enable_pointer_compression) { + defines += [ "V8_COMPRESS_POINTERS" ] + } + if (v8_enable_pointer_compression || v8_enable_31bit_smis_on_64bit_arch) { + defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ] + } + if (v8_deprecation_warnings) { + defines += [ "V8_DEPRECATION_WARNINGS" ] + } + if (v8_imminent_deprecation_warnings) { + defines += [ "V8_IMMINENT_DEPRECATION_WARNINGS" ] + } +} + +# Put defines here that are only used in our internal files and NEVER in +# external headers that embedders (such as chromium and node) might include. config("features") { visibility = [ ":*" ] # Only targets in this file can depend on this. defines = [] + configs = [ ":v8_header_features" ] + if (v8_embedder_string != "") { defines += [ "V8_EMBEDDER_STRING=\"$v8_embedder_string\"" ] } @@ -340,12 +355,6 @@ config("features") { if (v8_enable_minor_mc) { defines += [ "ENABLE_MINOR_MC" ] } - if (v8_enable_pointer_compression) { - defines += [ "V8_COMPRESS_POINTERS" ] - } - if (v8_enable_31bit_smis_on_64bit_arch) { - defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ] - } if (v8_enable_object_print) { defines += [ "OBJECT_PRINT" ] } @@ -368,18 +377,6 @@ config("features") { defines += [ "V8_ENABLE_ALLOCATION_TIMEOUT" ] defines += [ "V8_ENABLE_FORCE_SLOW_PATH" ] } - if (v8_enable_v8_checks) { - defines += [ "V8_ENABLE_CHECKS" ] - } - if (v8_interpreted_regexp || v8_enable_lite_mode) { - defines += [ "V8_INTERPRETED_REGEXP" ] - } - if (v8_deprecation_warnings) { - defines += [ "V8_DEPRECATION_WARNINGS" ] - } - if (v8_imminent_deprecation_warnings) { - defines += [ "V8_IMMINENT_DEPRECATION_WARNINGS" ] - } if (v8_enable_i18n_support) { defines += [ "V8_INTL_SUPPORT" ] } @@ -600,16 +597,23 @@ config("toolchain") { } if (v8_no_inline) { - cflags += [ - "-fno-inline-functions", - "-fno-inline", - ] + if (is_win) { + cflags += [ "/Ob0" ] + } else { + cflags += [ + "-fno-inline-functions", + "-fno-inline", + ] + } } if (is_clang) { cflags += [ "-Wmissing-field-initializers", + # TODO(thakis): Remove once enabled globally, https://crbug.com/926235 + "-Wextra-semi", + # TODO(hans): Remove once http://crbug.com/428099 is resolved. "-Winconsistent-missing-override", ] @@ -659,6 +663,37 @@ config("toolchain") { "-Wno-return-type", ] } + + # Chromium uses a hand-picked subset of UBSan coverage. We want everything. + if (is_ubsan) { + cflags += [ "-fsanitize=undefined" ] + } +} + +config("default_optimization") { + if (is_debug && !v8_optimized_debug) { + configs = [ "//build/config/compiler:no_optimize" ] + } else { + # TODO(crbug.com/621335) Rework this so that we don't have the confusion + # between "optimize_speed" and "optimize_max". + if (((is_posix && !is_android) || is_fuchsia) && !using_sanitizer) { + configs = [ "//build/config/compiler:optimize_speed" ] + } else { + configs = [ "//build/config/compiler:optimize_max" ] + } + } +} + +# For code that is only run a few times during the build, C++ optimizations +# are a waste of time. +config("unoptimized_initializer") { + configs = [ ":internal_config" ] + if (using_sanitizer) { + # Some sanitizers rely on optimizations. + configs += [ ":default_optimization" ] + } else { + configs += [ "//build/config/compiler:no_optimize" ] + } } # Configs for code coverage with gcov. 
Separate configs for cflags and ldflags @@ -830,7 +865,9 @@ action("postmortem-metadata") { "src/objects/js-regexp-string-iterator-inl.h", "src/objects/js-regexp-string-iterator.h", "src/objects/map.h", + "src/objects/map.cc", "src/objects/map-inl.h", + "src/objects/js-objects.cc", "src/objects/name.h", "src/objects/name-inl.h", "src/objects/oddball-inl.h", @@ -840,7 +877,10 @@ action("postmortem-metadata") { "src/objects/script-inl.h", "src/objects/shared-function-info.h", "src/objects/shared-function-info-inl.h", + "src/objects/string.cc", "src/objects/string.h", + "src/objects/string-comparator.cc", + "src/objects/string-comparator.h", "src/objects/string-inl.h", "src/objects/struct.h", "src/objects/struct-inl.h", @@ -856,42 +896,79 @@ action("postmortem-metadata") { torque_files = [ "src/builtins/base.tq", + "src/builtins/growable-fixed-array.tq", "src/builtins/frames.tq", "src/builtins/arguments.tq", "src/builtins/array.tq", "src/builtins/array-copywithin.tq", + "src/builtins/array-every.tq", "src/builtins/array-filter.tq", + "src/builtins/array-find.tq", + "src/builtins/array-findindex.tq", "src/builtins/array-foreach.tq", "src/builtins/array-join.tq", "src/builtins/array-lastindexof.tq", "src/builtins/array-of.tq", + "src/builtins/array-map.tq", + "src/builtins/array-reduce.tq", + "src/builtins/array-reduce-right.tq", "src/builtins/array-reverse.tq", "src/builtins/array-slice.tq", + "src/builtins/array-some.tq", "src/builtins/array-splice.tq", "src/builtins/array-unshift.tq", "src/builtins/collections.tq", "src/builtins/data-view.tq", "src/builtins/extras-utils.tq", - "src/builtins/object.tq", "src/builtins/object-fromentries.tq", "src/builtins/iterator.tq", + "src/builtins/string-endswith.tq", + "src/builtins/string-startswith.tq", "src/builtins/typed-array.tq", "src/builtins/typed-array-createtypedarray.tq", + "src/builtins/typed-array-filter.tq", + "src/builtins/typed-array-foreach.tq", + "src/builtins/typed-array-reduce.tq", + "src/builtins/typed-array-reduceright.tq", + "src/builtins/typed-array-slice.tq", + "src/builtins/typed-array-subarray.tq", "test/torque/test-torque.tq", "third_party/v8/builtins/array-sort.tq", ] torque_namespaces = [ - "base", "arguments", "array", + "array-copywithin", + "array-filter", + "array-find", + "array-findindex", + "array-foreach", + "array-join", + "array-map", + "array-of", + "array-reverse", + "array-slice", + "array-splice", + "array-unshift", + "array-lastindexof", + "base", "collections", - "iterator", - "object", - "typed-array", "data-view", "extras-utils", + "growable-fixed-array", + "iterator", + "object", + "string", "test", + "typed-array", + "typed-array-createtypedarray", + "typed-array-filter", + "typed-array-foreach", + "typed-array-reduce", + "typed-array-reduceright", + "typed-array-slice", + "typed-array-subarray", ] action("run_torque") { @@ -911,6 +988,7 @@ action("run_torque") { outputs = [ "$target_gen_dir/torque-generated/builtin-definitions-from-dsl.h", + "$target_gen_dir/torque-generated/class-definitions-from-dsl.h", ] foreach(namespace, torque_namespaces) { outputs += [ @@ -954,7 +1032,8 @@ v8_source_set("torque_generated_initializers") { ] } - configs = [ ":internal_config" ] + remove_configs = [ v8_path_prefix + ":default_optimization" ] + configs = [ ":unoptimized_initializer" ] } action("generate_bytecode_builtins_list") { @@ -1123,6 +1202,7 @@ action("v8_dump_build_config") { "is_android=$is_android", "is_asan=$is_asan", "is_cfi=$is_cfi", + "is_clang=$is_clang", "is_component_build=$is_component_build", 
"is_debug=$v8_enable_debugging_features", "is_gcov_coverage=$is_gcov_coverage", @@ -1263,23 +1343,17 @@ if (v8_use_snapshot && v8_use_external_startup_data) { if (v8_use_multi_snapshots) { deps += [ ":asm_to_inline_asm_trusted" ] sources += [ "$target_gen_dir/embedded_trusted.cc" ] - - if (use_jumbo_build == true) { - jumbo_excluded_sources = [ "$target_gen_dir/embedded_trusted.cc" ] - } + jumbo_excluded_sources = [ "$target_gen_dir/embedded_trusted.cc" ] } } else if (v8_enable_embedded_builtins) { sources += [ "$target_gen_dir/embedded.S" ] if (v8_use_multi_snapshots) { sources += [ "$target_gen_dir/embedded_trusted.S" ] - - if (use_jumbo_build == true) { - jumbo_excluded_sources = [ - # Duplicated symbols with embedded.S - "$target_gen_dir/embedded_trusted.S", - ] - } + jumbo_excluded_sources = [ + # Duplicated symbols with embedded.S + "$target_gen_dir/embedded_trusted.S", + ] } } else { sources += [ "src/snapshot/embedded-empty.cc" ] @@ -1297,6 +1371,9 @@ v8_source_set("v8_initializers") { deps = [ ":torque_generated_initializers", + ] + + public_deps = [ ":v8_base", ] @@ -1376,18 +1453,16 @@ v8_source_set("v8_initializers") { "src/interpreter/interpreter-intrinsics-generator.h", ] - if (use_jumbo_build == true) { - jumbo_excluded_sources = [ - # TODO(mostynb@vewd.com): don't exclude these http://crbug.com/752428 - "src/builtins/builtins-async-iterator-gen.cc", - "src/builtins/builtins-async-generator-gen.cc", + jumbo_excluded_sources = [ + # TODO(mostynb@vewd.com): don't exclude these http://crbug.com/752428 + "src/builtins/builtins-async-iterator-gen.cc", + "src/builtins/builtins-async-generator-gen.cc", - # These source files take an unusually large amount of time to - # compile. Build them separately to avoid bottlenecks. - "src/builtins/builtins-regexp-gen.cc", - "src/code-stub-assembler.cc", - ] - } + # These source files take an unusually large amount of time to + # compile. Build them separately to avoid bottlenecks. + "src/builtins/builtins-regexp-gen.cc", + "src/code-stub-assembler.cc", + ] if (v8_current_cpu == "x86") { sources += [ @@ -1435,7 +1510,8 @@ v8_source_set("v8_initializers") { sources -= [ "src/builtins/builtins-intl-gen.cc" ] } - configs = [ ":internal_config" ] + remove_configs = [ v8_path_prefix + ":default_optimization" ] + configs = [ ":unoptimized_initializer" ] } v8_source_set("v8_init") { @@ -1474,6 +1550,7 @@ v8_header_set("v8_version") { # can depend upon to get basic v8 types. v8_header_set("v8_headers") { configs = [ ":internal_config" ] + public_configs = [ ":v8_header_features" ] sources = [ "include/v8-internal.h", @@ -1481,19 +1558,33 @@ v8_header_set("v8_headers") { "include/v8config.h", ] - if (is_linux || is_mac) { - sources += [ "include/v8-wasm-trap-handler-posix.h" ] - } - - if (is_win) { - sources += [ "include/v8-wasm-trap-handler-win.h" ] - } + sources += [ + # The following headers cannot be platform-specific. The include validation + # of `gn gen $dir --check` requires all header files to be available on all + # platforms. + "include/v8-wasm-trap-handler-posix.h", + "include/v8-wasm-trap-handler-win.h", + ] deps = [ ":v8_version", ] } +# This is split out to share basic headers with Torque. +v8_header_set("v8_shared_internal_headers") { + visibility = [ ":*" ] # Only targets in this file can depend on this. + configs = [ ":internal_config" ] + + sources = [ + "src/globals.h", + ] + + deps = [ + ":v8_headers", + ] +} + v8_source_set("v8_base") { visibility = [ ":*" ] # Only targets in this file can depend on this. 
@@ -1543,7 +1634,6 @@ v8_source_set("v8_base") { "src/asmjs/asm-scanner.h", "src/asmjs/asm-types.cc", "src/asmjs/asm-types.h", - "src/assembler-arch-inl.h", "src/assembler-arch.h", "src/assembler-inl.h", "src/assembler.cc", @@ -1632,6 +1722,8 @@ v8_source_set("v8_base") { "src/checks.h", "src/code-comments.cc", "src/code-comments.h", + "src/code-desc.cc", + "src/code-desc.h", "src/code-events.h", "src/code-factory.cc", "src/code-factory.h", @@ -1958,22 +2050,25 @@ v8_source_set("v8_base") { "src/flag-definitions.h", "src/flags.cc", "src/flags.h", + "src/flush-instruction-cache.cc", + "src/flush-instruction-cache.h", "src/frame-constants.h", "src/frames-inl.h", "src/frames.cc", "src/frames.h", + "src/function-kind.h", "src/futex-emulation.cc", "src/futex-emulation.h", "src/gdb-jit.cc", "src/gdb-jit.h", "src/global-handles.cc", "src/global-handles.h", - "src/globals.h", "src/handler-table.cc", "src/handler-table.h", "src/handles-inl.h", "src/handles.cc", "src/handles.h", + "src/hash-seed-inl.h", "src/heap-symbols.h", "src/heap/array-buffer-collector.cc", "src/heap/array-buffer-collector.h", @@ -2025,6 +2120,8 @@ v8_source_set("v8_base") { "src/heap/objects-visiting-inl.h", "src/heap/objects-visiting.cc", "src/heap/objects-visiting.h", + "src/heap/read-only-heap.cc", + "src/heap/read-only-heap.h", "src/heap/remembered-set.h", "src/heap/scavenge-job.cc", "src/heap/scavenge-job.h", @@ -2163,6 +2260,8 @@ v8_source_set("v8_base") { "src/objects-printer.cc", "src/objects.cc", "src/objects.h", + "src/objects/allocation-site-inl.h", + "src/objects/allocation-site.h", "src/objects/api-callbacks-inl.h", "src/objects/api-callbacks.h", "src/objects/arguments-inl.h", @@ -2173,9 +2272,13 @@ v8_source_set("v8_base") { "src/objects/cell-inl.h", "src/objects/cell.h", "src/objects/code-inl.h", + "src/objects/code.cc", "src/objects/code.h", "src/objects/compilation-cache-inl.h", "src/objects/compilation-cache.h", + "src/objects/compressed-slots-inl.h", + "src/objects/compressed-slots.h", + "src/objects/data-handler.h", "src/objects/debug-objects-inl.h", "src/objects/debug-objects.cc", "src/objects/debug-objects.h", @@ -2232,6 +2335,7 @@ v8_source_set("v8_base") { "src/objects/js-number-format.cc", "src/objects/js-number-format.h", "src/objects/js-objects-inl.h", + "src/objects/js-objects.cc", "src/objects/js-objects.h", "src/objects/js-plural-rules-inl.h", "src/objects/js-plural-rules.cc", @@ -2261,6 +2365,7 @@ v8_source_set("v8_base") { "src/objects/managed.cc", "src/objects/managed.h", "src/objects/map-inl.h", + "src/objects/map.cc", "src/objects/map.h", "src/objects/maybe-object-inl.h", "src/objects/maybe-object.h", @@ -2299,13 +2404,18 @@ v8_source_set("v8_base") { "src/objects/slots-inl.h", "src/objects/slots.h", "src/objects/stack-frame-info-inl.h", + "src/objects/stack-frame-info.cc", "src/objects/stack-frame-info.h", + "src/objects/string-comparator.cc", + "src/objects/string-comparator.h", "src/objects/string-inl.h", "src/objects/string-table-inl.h", "src/objects/string-table.h", + "src/objects/string.cc", "src/objects/string.h", "src/objects/struct-inl.h", "src/objects/struct.h", + "src/objects/template-objects-inl.h", "src/objects/template-objects.cc", "src/objects/template-objects.h", "src/objects/templates-inl.h", @@ -2326,7 +2436,6 @@ v8_source_set("v8_base") { "src/parsing/parser.h", "src/parsing/parsing.cc", "src/parsing/parsing.h", - "src/parsing/pattern-rewriter.cc", "src/parsing/preparse-data-impl.h", "src/parsing/preparse-data.cc", "src/parsing/preparse-data.h", @@ -2521,6 +2630,8 @@ 
v8_source_set("v8_base") { "src/third_party/utf8-decoder/utf8-decoder.h", "src/thread-id.cc", "src/thread-id.h", + "src/thread-local-top.cc", + "src/thread-local-top.h", "src/tracing/trace-event.cc", "src/tracing/trace-event.h", "src/tracing/traced-value.cc", @@ -2650,6 +2761,7 @@ v8_source_set("v8_base") { "src/zone/zone-list-inl.h", "src/zone/zone-segment.cc", "src/zone/zone-segment.h", + "src/zone/zone-splay-tree.h", "src/zone/zone.cc", "src/zone/zone.h", ] @@ -2661,20 +2773,18 @@ v8_source_set("v8_base") { sources += check_header_includes_sources } - if (use_jumbo_build == true) { - jumbo_excluded_sources = [ - # TODO(mostynb@vewd.com): don't exclude these http://crbug.com/752428 - "src/profiler/heap-snapshot-generator.cc", # Macro clash in mman-linux.h + jumbo_excluded_sources = [ + # TODO(mostynb@vewd.com): don't exclude these http://crbug.com/752428 + "src/profiler/heap-snapshot-generator.cc", # Macro clash in mman-linux.h - # These source files take an unusually large amount of time to - # compile. Build them separately to avoid bottlenecks. - "src/api.cc", - "src/elements.cc", - "src/heap/heap.cc", - "src/objects.cc", - "src/parsing/parser.cc", - ] - } + # These source files take an unusually large amount of time to + # compile. Build them separately to avoid bottlenecks. + "src/api.cc", + "src/elements.cc", + "src/heap/heap.cc", + "src/objects.cc", + "src/parsing/parser.cc", + ] if (v8_current_cpu == "x86") { sources += [ ### gcmole(arch:ia32) ### @@ -2817,13 +2927,11 @@ v8_source_set("v8_base") { "src/regexp/arm64/regexp-macro-assembler-arm64.h", "src/wasm/baseline/arm64/liftoff-assembler-arm64.h", ] - if (use_jumbo_build) { - jumbo_excluded_sources += [ - # TODO(mostynb@vewd.com): fix this code so it doesn't need - # to be excluded, see the comments inside. - "src/arm64/instructions-arm64-constants.cc", - ] - } + jumbo_excluded_sources += [ + # TODO(mostynb@vewd.com): fix this code so it doesn't need + # to be excluded, see the comments inside. 
+ "src/arm64/instructions-arm64-constants.cc", + ] } else if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel") { sources += [ ### gcmole(arch:mipsel) ### "src/compiler/backend/mips/code-generator-mips.cc", @@ -2938,18 +3046,21 @@ v8_source_set("v8_base") { defines = [] deps = [ - ":generate_bytecode_builtins_list", - ":run_torque", ":v8_headers", ":v8_libbase", ":v8_libsampler", + ":v8_shared_internal_headers", + ":v8_version", "src/inspector:inspector", ] + public_deps = [ + ":generate_bytecode_builtins_list", + ":run_torque", + ] + if (v8_enable_i18n_support) { - public_deps = [ - "//third_party/icu", - ] + public_deps += [ "//third_party/icu" ] if (is_win) { deps += [ "//third_party/icu:icudata" ] } @@ -3033,8 +3144,12 @@ v8_source_set("torque_base") { "src/torque/implementation-visitor.h", "src/torque/instructions.cc", "src/torque/instructions.h", + "src/torque/server-data.cc", + "src/torque/server-data.h", "src/torque/source-positions.cc", "src/torque/source-positions.h", + "src/torque/torque-compiler.cc", + "src/torque/torque-compiler.h", "src/torque/torque-parser.cc", "src/torque/torque-parser.h", "src/torque/type-oracle.cc", @@ -3047,6 +3162,31 @@ v8_source_set("torque_base") { deps = [ ":v8_libbase", + ":v8_shared_internal_headers", + ] + + configs = [ ":internal_config" ] + if (is_win && is_asan) { + remove_configs = [ "//build/config/sanitizers:default_sanitizer_flags" ] + } +} + +v8_source_set("torque_ls_base") { + sources = [ + "src/torque/ls/globals.h", + "src/torque/ls/json-parser.cc", + "src/torque/ls/json-parser.h", + "src/torque/ls/json.cc", + "src/torque/ls/json.h", + "src/torque/ls/message-handler.cc", + "src/torque/ls/message-handler.h", + "src/torque/ls/message-macros.h", + "src/torque/ls/message-pipe.h", + "src/torque/ls/message.h", + ] + + deps = [ + ":torque_base", ] configs = [ ":internal_config" ] @@ -3135,6 +3275,10 @@ v8_component("v8_libbase") { public_configs = [ ":libbase_config" ] + deps = [ + ":v8_headers", + ] + public_deps = [] data = [] @@ -3282,6 +3426,7 @@ v8_component("v8_libplatform") { public_configs = [ ":libplatform_config" ] deps = [ + ":v8_headers", ":v8_libbase", ] } @@ -3292,7 +3437,7 @@ v8_source_set("v8_libsampler") { "src/libsampler/sampler.h", ] - configs = [ ":internal_config_base" ] + configs = [ ":internal_config" ] public_configs = [ ":libsampler_config" ] @@ -3422,6 +3567,25 @@ if (current_toolchain == v8_snapshot_toolchain) { } } +v8_executable("torque-language-server") { + visibility = [ ":*" ] # Only targets in this file can depend on this. + + sources = [ + "src/torque/ls/torque-language-server.cc", + ] + + deps = [ + ":torque_base", + ":torque_ls_base", + "//build/win:default_exe_manifest", + ] + + configs = [ ":internal_config" ] + if (is_win && is_asan) { + remove_configs = [ "//build/config/sanitizers:default_sanitizer_flags" ] + } +} + ############################################################################### # Public targets # @@ -3542,7 +3706,9 @@ if (is_component_build) { public_deps = [ ":torque_base", + ":torque_ls_base", ":v8_base", + ":v8_headers", ":v8_maybe_snapshot", ] @@ -3569,6 +3735,7 @@ if (is_component_build) { public_deps = [ ":torque_base", + ":torque_ls_base", ":v8_base", ":v8_maybe_snapshot", ] diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index 62b3ace776f1c4..23725637e62b47 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,3 +1,1458 @@ +2019-03-05: Version 7.4.288 + + Performance and stability improvements on all platforms. 
+ + +2019-03-05: Version 7.4.287 + + Performance and stability improvements on all platforms. + + +2019-03-05: Version 7.4.286 + + Performance and stability improvements on all platforms. + + +2019-03-05: Version 7.4.285 + + Performance and stability improvements on all platforms. + + +2019-03-05: Version 7.4.284 + + Performance and stability improvements on all platforms. + + +2019-03-05: Version 7.4.283 + + Performance and stability improvements on all platforms. + + +2019-03-05: Version 7.4.282 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.281 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.280 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.279 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.278 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.277 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.276 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.275 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.274 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.273 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.272 + + Performance and stability improvements on all platforms. + + +2019-03-04: Version 7.4.271 + + Performance and stability improvements on all platforms. + + +2019-03-03: Version 7.4.270 + + Performance and stability improvements on all platforms. + + +2019-03-02: Version 7.4.269 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.268 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.267 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.266 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.265 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.264 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.263 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.262 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.261 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.260 + + Performance and stability improvements on all platforms. + + +2019-03-01: Version 7.4.259 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.258 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.257 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.256 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.255 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.254 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.253 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.252 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.251 + + Performance and stability improvements on all platforms. 
+ + +2019-02-28: Version 7.4.250 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.249 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.248 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.247 + + Performance and stability improvements on all platforms. + + +2019-02-28: Version 7.4.246 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.245 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.244 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.243 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.242 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.241 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.240 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.239 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.238 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.237 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.236 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.235 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.234 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.233 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.232 + + Performance and stability improvements on all platforms. + + +2019-02-27: Version 7.4.231 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.230 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.229 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.228 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.227 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.226 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.225 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.224 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.223 + + Performance and stability improvements on all platforms. + + +2019-02-26: Version 7.4.222 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.221 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.220 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.219 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.218 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.217 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.216 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.215 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.214 + + Performance and stability improvements on all platforms. 
+ + +2019-02-25: Version 7.4.213 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.212 + + Performance and stability improvements on all platforms. + + +2019-02-25: Version 7.4.211 + + Performance and stability improvements on all platforms. + + +2019-02-23: Version 7.4.210 + + Performance and stability improvements on all platforms. + + +2019-02-23: Version 7.4.209 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.208 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.207 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.206 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.205 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.204 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.203 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.202 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.201 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.200 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.199 + + Performance and stability improvements on all platforms. + + +2019-02-22: Version 7.4.198 + + Performance and stability improvements on all platforms. + + +2019-02-21: Version 7.4.197 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.196 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.195 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.194 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.193 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.192 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.191 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.190 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.189 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.188 + + Performance and stability improvements on all platforms. + + +2019-02-18: Version 7.4.187 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.186 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.185 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.184 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.183 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.182 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.181 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.180 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.179 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.178 + + Performance and stability improvements on all platforms. + + +2019-02-15: Version 7.4.177 + + Performance and stability improvements on all platforms. 
+ + +2019-02-15: Version 7.4.176 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.175 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.174 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.173 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.172 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.171 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.170 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.169 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.168 + + Performance and stability improvements on all platforms. + + +2019-02-14: Version 7.4.167 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.166 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.165 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.164 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.163 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.162 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.161 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.160 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.159 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.158 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.157 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.156 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.155 + + Performance and stability improvements on all platforms. + + +2019-02-13: Version 7.4.154 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.153 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.152 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.151 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.150 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.149 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.148 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.147 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.146 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.145 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.144 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.143 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.142 + + Performance and stability improvements on all platforms. + + +2019-02-12: Version 7.4.141 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.140 + + Performance and stability improvements on all platforms. 
+ + +2019-02-11: Version 7.4.139 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.138 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.137 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.136 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.135 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.134 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.133 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.132 + + Performance and stability improvements on all platforms. + + +2019-02-11: Version 7.4.131 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.130 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.129 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.128 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.127 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.126 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.125 + + Performance and stability improvements on all platforms. + + +2019-02-09: Version 7.4.124 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.123 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.122 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.121 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.120 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.119 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.118 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.117 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.116 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.115 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.114 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.113 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.112 + + Performance and stability improvements on all platforms. + + +2019-02-08: Version 7.4.111 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.110 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.109 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.108 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.107 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.106 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.105 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.104 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.103 + + Performance and stability improvements on all platforms. 
+ + +2019-02-07: Version 7.4.102 + + Performance and stability improvements on all platforms. + + +2019-02-07: Version 7.4.101 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.100 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.99 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.98 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.97 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.96 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.95 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.94 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.93 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.92 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.91 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.90 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.89 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.88 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.87 + + Performance and stability improvements on all platforms. + + +2019-02-06: Version 7.4.86 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.85 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.84 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.83 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.82 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.81 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.80 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.79 + + Performance and stability improvements on all platforms. + + +2019-02-05: Version 7.4.78 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.77 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.76 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.75 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.74 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.73 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.72 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.71 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.70 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.69 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.68 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.67 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.66 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.65 + + Performance and stability improvements on all platforms. 
+ + +2019-02-04: Version 7.4.64 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.63 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.62 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.61 + + Performance and stability improvements on all platforms. + + +2019-02-04: Version 7.4.60 + + Performance and stability improvements on all platforms. + + +2019-02-03: Version 7.4.59 + + Performance and stability improvements on all platforms. + + +2019-02-03: Version 7.4.58 + + Performance and stability improvements on all platforms. + + +2019-02-02: Version 7.4.57 + + Performance and stability improvements on all platforms. + + +2019-02-02: Version 7.4.56 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.55 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.54 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.53 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.52 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.51 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.50 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.49 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.48 + + Performance and stability improvements on all platforms. + + +2019-02-01: Version 7.4.47 + + Performance and stability improvements on all platforms. + + +2019-01-31: Version 7.4.46 + + Performance and stability improvements on all platforms. + + +2019-01-31: Version 7.4.45 + + Performance and stability improvements on all platforms. + + +2019-01-31: Version 7.4.44 + + Performance and stability improvements on all platforms. + + +2019-01-31: Version 7.4.43 + + Performance and stability improvements on all platforms. + + +2019-01-31: Version 7.4.42 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.41 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.40 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.39 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.38 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.37 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.36 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.35 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.34 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.33 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.32 + + Performance and stability improvements on all platforms. + + +2019-01-30: Version 7.4.31 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.30 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.29 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.28 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.27 + + Performance and stability improvements on all platforms. 
+ + +2019-01-29: Version 7.4.26 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.25 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.24 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.23 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.22 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.21 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.20 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.19 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.18 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.17 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.16 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.15 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.14 + + Performance and stability improvements on all platforms. + + +2019-01-29: Version 7.4.13 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.12 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.11 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.10 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.9 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.8 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.7 + + Performance and stability improvements on all platforms. + + +2019-01-28: Version 7.4.6 + + Performance and stability improvements on all platforms. + + +2019-01-27: Version 7.4.5 + + Performance and stability improvements on all platforms. + + +2019-01-27: Version 7.4.4 + + Performance and stability improvements on all platforms. + + +2019-01-26: Version 7.4.3 + + Performance and stability improvements on all platforms. + + +2019-01-26: Version 7.4.2 + + Performance and stability improvements on all platforms. + + +2019-01-26: Version 7.4.1 + + Performance and stability improvements on all platforms. + + +2019-01-23: Version 7.3.495 + + Performance and stability improvements on all platforms. + + +2019-01-23: Version 7.3.494 + + Performance and stability improvements on all platforms. + + +2019-01-23: Version 7.3.493 + + Performance and stability improvements on all platforms. + + 2019-01-23: Version 7.3.492 Performance and stability improvements on all platforms. 
diff --git a/deps/v8/DEPS b/deps/v8/DEPS index ec6045a90a0c0b..1837bd96bce8ad 100644 --- a/deps/v8/DEPS +++ b/deps/v8/DEPS @@ -16,27 +16,35 @@ vars = { deps = { 'v8/build': - Var('chromium_url') + '/chromium/src/build.git' + '@' + 'f2ca77c3aa839107f36fed20dac81fe8b71b060e', + Var('chromium_url') + '/chromium/src/build.git' + '@' + '80892bfe019dc854c6acdbfbb7304cca63986d4f', 'v8/third_party/depot_tools': - Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'f7971436824dd8eeb9b0cf19dabc3e32b369a904', + Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'cf661acb705fccc302901a1f8a251ad43ce2dd62', 'v8/third_party/icu': - Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '07e7295d964399ee7bee16a3ac7ca5a053b2cf0a', + Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '8c67416ccb4da42d817e7081ff83a2193b1aabe7', 'v8/third_party/instrumented_libraries': Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + 'a959e4f0cb643003f2d75d179cede449979e3e77', 'v8/buildtools': - Var('chromium_url') + '/chromium/buildtools.git' + '@' + '2f02e1f363b1af2715536f38e239853f04ec1497', + Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '3e50219fc4503f461b2176a9976891b28d80f9ab', + 'v8/buildtools/clang_format/script': + Var('chromium_url') + '/chromium/llvm-project/cfe/tools/clang-format.git' + '@' + '96636aa0e9f047f17447f2d45a094d0b59ed7917', + 'v8/buildtools/third_party/libc++/trunk': + Var('chromium_url') + '/chromium/llvm-project/libcxx.git' + '@' + '22d3f6dd25e5efc59124ba1c00b8f98b14be4201', + 'v8/buildtools/third_party/libc++abi/trunk': + Var('chromium_url') + '/chromium/llvm-project/libcxxabi.git' + '@' + '0d529660e32d77d9111912d73f2c74fc5fa2a858', + 'v8/buildtools/third_party/libunwind/trunk': + Var('chromium_url') + '/external/llvm.org/libunwind.git' + '@' + '69d9b84cca8354117b9fe9705a4430d789ee599b', 'v8/base/trace_event/common': - Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + 'e31a1706337ccb9a658b37d29a018c81695c6518', + Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '936ba8a963284a6b3737cf2f0474a7131073abee', 'v8/third_party/android_ndk': { 'url': Var('chromium_url') + '/android_ndk.git' + '@' + '4e2cea441bfd43f0863d14f57b1e1844260b9884', 'condition': 'checkout_android', }, 'v8/third_party/android_tools': { - 'url': Var('chromium_url') + '/android_tools.git' + '@' + 'e958d6ea74442d4e0849bb8a018d215a0e78981d', + 'url': Var('chromium_url') + '/android_tools.git' + '@' + '347a7c8078a009e98995985b7ab6ec6b35696dea', 'condition': 'checkout_android', }, 'v8/third_party/catapult': { - 'url': Var('chromium_url') + '/catapult.git' + '@' + 'dd2de388fc4e3e8fa97a97515ec35c5b3834b753', + 'url': Var('chromium_url') + '/catapult.git' + '@' + 'ccc29087522abefc852d1294595ae6db7e86d649', 'condition': 'checkout_android', }, 'v8/third_party/colorama/src': { @@ -44,23 +52,23 @@ deps = { 'condition': 'checkout_android', }, 'v8/third_party/fuchsia-sdk': { - 'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-sdk.git' + '@' + '5157be49c92d031a74192ee993f32a2a28c8b1c3', + 'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-sdk.git' + '@' + '8e8db13b538ecb251e5ce9d5c781fc142f9752fd', 'condition': 'checkout_fuchsia', }, 'v8/third_party/googletest/src': - Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + '9518a57428ae0a7ed450c1361768e84a2a38af5a', + Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 
'efecb0bfa687cf87836494f5d62868485c00fb66', 'v8/third_party/jinja2': Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + 'b41863e42637544c2941b574c7877d3e1f663e25', 'v8/third_party/markupsafe': Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '8f45f5cfa0009d2a70589bcda0349b8cb2b72783', 'v8/tools/swarming_client': - Var('chromium_url') + '/infra/luci/client-py.git' + '@' + 'd50a88f50782ba29076061b94c7b9d08a6c7e424', + Var('chromium_url') + '/infra/luci/client-py.git' + '@' + '7a61cf37d6a0163f0ec02d495289a1d038e62457', 'v8/test/benchmarks/data': Var('chromium_url') + '/v8/deps/third_party/benchmarks.git' + '@' + '05d7188267b4560491ff9155c5ee13e207ecd65f', 'v8/test/mozilla/data': Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be', 'v8/test/test262/data': - Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '4f1155c566a222238fd86f179c6635ecb4c289bb', + Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '565d7d5b7dd808d9267006b83ac4ea9c48f782cc', 'v8/test/test262/harness': Var('chromium_url') + '/external/github.com/test262-utils/test262-harness-py.git' + '@' + '0f2acdd882c84cff43b9d60df7574a1901e2cdcd', 'v8/third_party/qemu-linux-x64': { @@ -84,7 +92,7 @@ deps = { 'dep_type': 'cipd', }, 'v8/tools/clang': - Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '3a16568a56486d7d032b8ec7b8dae892413a9a7a', + Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '257c91cc44b07bd06ff03dde809ccbc46a22bec6', 'v8/tools/luci-go': { 'packages': [ { @@ -104,11 +112,10 @@ deps = { 'dep_type': 'cipd', }, 'v8/test/wasm-js/data': - Var('chromium_url') + '/external/github.com/WebAssembly/spec.git' + '@' + 'b42efa9b07c5544079c31f6088a66bead617559c', + Var('chromium_url') + '/external/github.com/WebAssembly/spec.git' + '@' + '44dfa15cb87b1e9bef36e033ad5d2cdc4c2008fd', } recursedeps = [ - 'v8/buildtools', 'v8/third_party/android_tools', ] diff --git a/deps/v8/base/trace_event/common/trace_event_common.h b/deps/v8/base/trace_event/common/trace_event_common.h index f9ef03f5ba82ca..6837eb95d0227c 100644 --- a/deps/v8/base/trace_event/common/trace_event_common.h +++ b/deps/v8/base/trace_event/common/trace_event_common.h @@ -984,8 +984,6 @@ INTERNAL_TRACE_EVENT_ADD_WITH_ID( \ TRACE_EVENT_PHASE_LEAVE_CONTEXT, category_group, name, context, \ TRACE_EVENT_FLAG_NONE) -#define TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context) \ - INTERNAL_TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context) // Macro to specify that two trace IDs are identical. For example, // TRACE_LINK_IDS( diff --git a/deps/v8/gni/v8.gni b/deps/v8/gni/v8.gni index 1ed8e0382ab13c..2a691dfa60d621 100644 --- a/deps/v8/gni/v8.gni +++ b/deps/v8/gni/v8.gni @@ -53,6 +53,9 @@ declare_args() { # Enable monolithic static library for embedders. v8_monolithic = false + + # Expose symbols for dynamic linking. + v8_expose_symbols = false } if (v8_use_external_startup_data == "") { @@ -78,7 +81,7 @@ if (v8_enable_backtrace == "") { # subdirectories. 
 v8_path_prefix = get_path_info("../", "abspath")
-v8_inspector_js_protocol = v8_path_prefix + "/src/inspector/js_protocol.json"
+v8_inspector_js_protocol = v8_path_prefix + "/src/inspector/js_protocol.pdl"
 
 ###############################################################################
 # Templates
@@ -91,20 +94,8 @@ v8_add_configs = [
   v8_path_prefix + ":toolchain",
 ]
 
-if (is_debug && !v8_optimized_debug) {
-  v8_remove_configs += [ "//build/config/compiler:default_optimization" ]
-  v8_add_configs += [ "//build/config/compiler:no_optimize" ]
-} else {
-  v8_remove_configs += [ "//build/config/compiler:default_optimization" ]
-
-  # TODO(crbug.com/621335) Rework this so that we don't have the confusion
-  # between "optimize_speed" and "optimize_max".
-  if (((is_posix && !is_android) || is_fuchsia) && !using_sanitizer) {
-    v8_add_configs += [ "//build/config/compiler:optimize_speed" ]
-  } else {
-    v8_add_configs += [ "//build/config/compiler:optimize_max" ]
-  }
-}
+v8_remove_configs += [ "//build/config/compiler:default_optimization" ]
+v8_add_configs += [ v8_path_prefix + ":default_optimization" ]
 
 if (v8_code_coverage && !is_clang) {
   v8_add_configs += [
@@ -113,7 +104,8 @@ if (v8_code_coverage && !is_clang) {
   ]
 }
 
-if ((is_posix || is_fuchsia) && (v8_enable_backtrace || v8_monolithic)) {
+if ((is_posix || is_fuchsia) &&
+    (v8_enable_backtrace || v8_monolithic || v8_expose_symbols)) {
   v8_remove_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
   v8_add_configs += [ "//build/config/gcc:symbol_visibility_default" ]
 }
@@ -132,15 +124,11 @@ if (!build_with_chromium && is_clang) {
 template("v8_source_set") {
   if (defined(invoker.split_count) && invoker.split_count > 1 &&
       defined(v8_static_library) && v8_static_library && is_win) {
-    link_target_type = "split_static_library"
+    link_target_type = "jumbo_split_static_library"
   } else if (defined(v8_static_library) && v8_static_library) {
-    link_target_type = "static_library"
+    link_target_type = "jumbo_static_library"
   } else {
-    if (use_jumbo_build) {
-      link_target_type = "jumbo_source_set"
-    } else {
-      link_target_type = "source_set"
-    }
+    link_target_type = "jumbo_source_set"
   }
   target(link_target_type, target_name) {
     forward_variables_from(invoker,
@@ -149,21 +137,21 @@ template("v8_source_set") {
                              "configs",
                              "remove_configs",
                            ])
+    configs -= v8_remove_configs
+    configs += v8_add_configs
     if (defined(invoker.remove_configs)) {
       configs -= invoker.remove_configs
     }
     configs += invoker.configs
-    configs -= v8_remove_configs
-    configs += v8_add_configs
   }
 }
 
 template("v8_header_set") {
   jumbo_source_set(target_name) {
     forward_variables_from(invoker, "*", [ "configs" ])
-    configs += invoker.configs
     configs -= v8_remove_configs
     configs += v8_add_configs
+    configs += invoker.configs
   }
 }
 
@@ -175,12 +163,12 @@ template("v8_executable") {
                              "configs",
                              "remove_configs",
                            ])
+    configs -= v8_remove_configs
+    configs += v8_add_configs
     if (defined(invoker.remove_configs)) {
      configs -= invoker.remove_configs
     }
     configs += invoker.configs
-    configs -= v8_remove_configs
-    configs += v8_add_configs
 
     if (is_linux) {
       # For enabling ASLR.
ldflags = [ "-pie" ] @@ -203,9 +191,9 @@ template("v8_executable") { template("v8_component") { component(target_name) { forward_variables_from(invoker, "*", [ "configs" ]) - configs += invoker.configs configs -= v8_remove_configs configs += v8_add_configs + configs += invoker.configs } } @@ -213,9 +201,9 @@ template("v8_static_library") { static_library(target_name) { complete_static_lib = true forward_variables_from(invoker, "*", [ "configs" ]) - configs += invoker.configs configs -= v8_remove_configs configs -= [ "//build/config/compiler:thin_archive" ] configs += v8_add_configs + configs += invoker.configs } } diff --git a/deps/v8/gypfiles/OWNERS b/deps/v8/gypfiles/OWNERS deleted file mode 100644 index 0e56af312942fa..00000000000000 --- a/deps/v8/gypfiles/OWNERS +++ /dev/null @@ -1 +0,0 @@ -machenbach@chromium.org diff --git a/deps/v8/gypfiles/README.txt b/deps/v8/gypfiles/README.txt deleted file mode 100644 index 5f242ada342f1c..00000000000000 --- a/deps/v8/gypfiles/README.txt +++ /dev/null @@ -1,9 +0,0 @@ -For build instructions, please refer to: - -https://code.google.com/p/v8/wiki/BuildingWithGYP - -TL;DR version on *nix: -$ make dependencies # Only needed once. -$ make ia32.release -j8 -$ make ia32.release.check # Optionally: run tests. - diff --git a/deps/v8/gypfiles/inspector.gypi b/deps/v8/gypfiles/inspector.gypi deleted file mode 100644 index cd90b14c2ede33..00000000000000 --- a/deps/v8/gypfiles/inspector.gypi +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright 2016 the V8 project authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'protocol_path': '../third_party/inspector_protocol', - 'inspector_path': '../src/inspector', - - 'inspector_generated_sources': [ - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Forward.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Protocol.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Protocol.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Console.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Console.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Debugger.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Debugger.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/HeapProfiler.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/HeapProfiler.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Profiler.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Profiler.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Runtime.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Runtime.h', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Schema.cpp', - '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Schema.h', - '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Debugger.h', - '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Runtime.h', - '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Schema.h', - ], - - 'inspector_all_sources': [ - '../include/v8-inspector.h', - '../include/v8-inspector-protocol.h', - '../src/inspector/custom-preview.cc', - '../src/inspector/custom-preview.h', - '../src/inspector/injected-script.cc', - '../src/inspector/injected-script.h', - '../src/inspector/inspected-context.cc', - '../src/inspector/inspected-context.h', - '../src/inspector/remote-object-id.cc', - '../src/inspector/remote-object-id.h', - '../src/inspector/search-util.cc', - '../src/inspector/search-util.h', - '../src/inspector/string-16.cc', - 
'../src/inspector/string-16.h', - '../src/inspector/string-util.cc', - '../src/inspector/string-util.h', - '../src/inspector/test-interface.cc', - '../src/inspector/test-interface.h', - '../src/inspector/v8-console.cc', - '../src/inspector/v8-console.h', - '../src/inspector/v8-console-agent-impl.cc', - '../src/inspector/v8-console-agent-impl.h', - '../src/inspector/v8-console-message.cc', - '../src/inspector/v8-console-message.h', - '../src/inspector/v8-debugger.cc', - '../src/inspector/v8-debugger.h', - '../src/inspector/v8-debugger-agent-impl.cc', - '../src/inspector/v8-debugger-agent-impl.h', - '../src/inspector/v8-debugger-script.cc', - '../src/inspector/v8-debugger-script.h', - '../src/inspector/v8-heap-profiler-agent-impl.cc', - '../src/inspector/v8-heap-profiler-agent-impl.h', - '../src/inspector/v8-inspector-impl.cc', - '../src/inspector/v8-inspector-impl.h', - '../src/inspector/v8-inspector-session-impl.cc', - '../src/inspector/v8-inspector-session-impl.h', - '../src/inspector/v8-profiler-agent-impl.cc', - '../src/inspector/v8-profiler-agent-impl.h', - '../src/inspector/v8-regex.cc', - '../src/inspector/v8-regex.h', - '../src/inspector/v8-runtime-agent-impl.cc', - '../src/inspector/v8-runtime-agent-impl.h', - '../src/inspector/v8-schema-agent-impl.cc', - '../src/inspector/v8-schema-agent-impl.h', - '../src/inspector/v8-stack-trace-impl.cc', - '../src/inspector/v8-stack-trace-impl.h', - '../src/inspector/v8-value-utils.cc', - '../src/inspector/v8-value-utils.h', - '../src/inspector/value-mirror.cc', - '../src/inspector/value-mirror.h', - '../src/inspector/wasm-translation.cc', - '../src/inspector/wasm-translation.h', - ] - }, - 'includes': [ - '../third_party/inspector_protocol/inspector_protocol.gypi', - ], - 'actions': [ - { - 'action_name': 'protocol_compatibility', - 'inputs': [ - '<(inspector_path)/js_protocol.pdl', - ], - 'outputs': [ - '<@(SHARED_INTERMEDIATE_DIR)/src/js_protocol.stamp', - ], - 'action': [ - 'python', - '<(protocol_path)/check_protocol_compatibility.py', - '--stamp', '<@(_outputs)', - '<@(_inputs)', - ], - 'message': 'Checking inspector protocol compatibility', - }, - { - 'action_name': 'protocol_generated_sources', - 'inputs': [ - '<(inspector_path)/js_protocol.pdl', - '<(inspector_path)/inspector_protocol_config.json', - '<@(inspector_protocol_files)', - ], - 'outputs': [ - '<@(inspector_generated_sources)', - ], - 'process_outputs_as_sources': 1, - 'action': [ - 'python', - '<(protocol_path)/code_generator.py', - '--jinja_dir', '../third_party', - '--output_base', '<(SHARED_INTERMEDIATE_DIR)/src/inspector', - '--config', '<(inspector_path)/inspector_protocol_config.json', - ], - 'message': 'Generating inspector protocol sources from protocol json', - }, - ], -} diff --git a/deps/v8/gypfiles/v8.gyp b/deps/v8/gypfiles/v8.gyp deleted file mode 100644 index eb31d7087ea35c..00000000000000 --- a/deps/v8/gypfiles/v8.gyp +++ /dev/null @@ -1,2758 +0,0 @@ -# Copyright 2012 the V8 project authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -{ - 'variables': { - 'v8_code': 1, - 'v8_random_seed%': 314159265, - 'v8_vector_stores%': 0, - 'v8_embed_script%': "", - # Placeholder. For upstream V8, this includes test files that Node.js does not need. 
- 'v8_extra_library_files%': [], - 'mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)', - 'v8_os_page_size%': 0, - 'generate_bytecode_builtins_list_output' : '<(SHARED_INTERMEDIATE_DIR)/builtins-generated/bytecodes-builtins-list.h', - 'torque_files': [ - "../src/builtins/base.tq", - "../src/builtins/frames.tq", - "../src/builtins/arguments.tq", - "../src/builtins/array.tq", - "../src/builtins/array-copywithin.tq", - "../src/builtins/array-filter.tq", - "../src/builtins/array-foreach.tq", - "../src/builtins/array-join.tq", - "../src/builtins/array-lastindexof.tq", - "../src/builtins/array-of.tq", - "../src/builtins/array-reverse.tq", - "../src/builtins/array-slice.tq", - "../src/builtins/array-splice.tq", - "../src/builtins/array-unshift.tq", - "../src/builtins/collections.tq", - "../src/builtins/data-view.tq", - "../src/builtins/extras-utils.tq", - "../src/builtins/object.tq", - "../src/builtins/object-fromentries.tq", - "../src/builtins/iterator.tq", - "../src/builtins/typed-array.tq", - "../src/builtins/typed-array-createtypedarray.tq", - "../third_party/v8/builtins/array-sort.tq", - ], - 'torque_namespaces': [ - "base", - "arguments", - "array", - "collections", - "iterator", - "object", - "typed-array", - "data-view", - "extras-utils", - ], - # Since there is no foreach in GYP we manualy unroll the following: - # foreach(namespace, torque_namespaces) { - # outputs += [ - # "$target_gen_dir/torque-generated/builtins-$namespace-from-dsl-gen.cc", - # "$target_gen_dir/torque-generated/builtins-$namespace-from-dsl-gen.h", - # ] - # } - 'torque_outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-base-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-base-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-arguments-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-arguments-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-array-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-array-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-collections-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-collections-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-iterator-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-iterator-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-object-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-object-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-typed-array-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-typed-array-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-data-view-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-data-view-from-dsl-gen.h', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-extras-utils-from-dsl-gen.cc', - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtins-extras-utils-from-dsl-gen.h', - ], - 'torque_generated_pure_headers': [ - '<(SHARED_INTERMEDIATE_DIR)/torque-generated/builtin-definitions-from-dsl.h', - ], - }, - 'includes': ['toolchain.gypi', 'features.gypi', 'v8_external_snapshot.gypi'], - 'targets': [ - { - 'target_name': 'v8', - 'dependencies': [ 'v8_maybe_snapshot' ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - 
['component=="shared_library"', { - 'type': '<(component)', - 'sources': [ - # Note: on non-Windows we still build this file so that gyp - # has some sources to link into the component. - '../src/v8dll-main.cc', - ], - 'include_dirs': [ - '..', - ], - 'defines': [ - 'BUILDING_V8_SHARED', - ], - 'direct_dependent_settings': { - 'defines': [ - 'USING_V8_SHARED', - ], - }, - 'conditions': [ - ['OS=="mac"', { - 'xcode_settings': { - 'OTHER_LDFLAGS': ['-dynamiclib', '-all_load'] - }, - }], - ['soname_version!=""', { - 'product_extension': 'so.<(soname_version)', - }], - ], - }, - { - 'type': 'none', - }], - ], - 'direct_dependent_settings': { - 'include_dirs': [ '../include/', ], - }, - 'actions': [ - { - 'action_name': 'v8_dump_build_config', - 'toolsets': ['target'], - 'inputs': [ - '../tools/testrunner/utils/dump_build_config_gyp.py', - ], - 'outputs': [ - '<(PRODUCT_DIR)/v8_build_config.json', - ], - 'variables': { - 'v8_dump_build_config_args': [ - '<(PRODUCT_DIR)/v8_build_config.json', - 'dcheck_always_on=<(dcheck_always_on)', - 'is_android=<(is_android)', - 'is_asan=<(asan)', - 'is_cfi=<(cfi_vptr)', - 'is_component_build=<(component)', - 'is_debug=<(CONFIGURATION_NAME)', - # Not available in gyp. - 'is_gcov_coverage=0', - 'is_msan=<(msan)', - 'is_tsan=<(tsan)', - # Not available in gyp. - 'is_ubsan_vptr=0', - 'target_cpu=<(target_arch)', - 'v8_enable_i18n_support=<(v8_enable_i18n_support)', - 'v8_enable_verify_predictable=<(v8_enable_verify_predictable)', - 'v8_target_cpu=<(v8_target_arch)', - 'v8_use_snapshot=<(v8_use_snapshot)', - 'v8_use_siphash=<(v8_use_siphash)', - 'v8_enable_embedded_builtins=<(v8_enable_embedded_builtins)', - 'v8_enable_verify_csa=<(v8_enable_verify_csa)', - 'v8_enable_lite_mode=<(v8_enable_lite_mode)', - 'v8_enable_pointer_compression=<(v8_enable_pointer_compression)', - ] - }, - 'conditions': [ - ['v8_target_arch=="mips" or v8_target_arch=="mipsel" \ - or v8_target_arch=="mips64" or v8_target_arch=="mips64el"', { - 'v8_dump_build_config_args':[ - 'mips_arch_variant=<(mips_arch_variant)', - 'mips_use_msa=<(mips_use_msa)', - ], - }], - ], - 'action': [ - 'python', '../tools/testrunner/utils/dump_build_config_gyp.py', - '<@(v8_dump_build_config_args)', - ], - }, - ], - }, # v8 - { - # This rule delegates to either v8_snapshot, v8_nosnapshot, or - # v8_external_snapshot, depending on the current variables. - # The intention is to make the 'calling' rules a bit simpler. - 'target_name': 'v8_maybe_snapshot', - 'type': 'none', - 'conditions': [ - ['v8_use_snapshot!="true"', { - # The dependency on v8_base should come from a transitive - # dependency however the Android toolchain requires libv8_base.a - # to appear before libv8_snapshot.a so it's listed explicitly. - 'dependencies': ['v8_base', 'v8_init', 'v8_nosnapshot'], - }], - ['v8_use_snapshot=="true" and v8_use_external_startup_data==0', { - # The dependency on v8_base should come from a transitive - # dependency however the Android toolchain requires libv8_base.a - # to appear before libv8_snapshot.a so it's listed explicitly. 
- 'dependencies': ['v8_base', 'v8_snapshot'], - }], - ['v8_use_snapshot=="true" and v8_use_external_startup_data==1 and want_separate_host_toolset==0', { - 'dependencies': ['v8_base', 'v8_external_snapshot'], - }], - ['v8_use_snapshot=="true" and v8_use_external_startup_data==1 and want_separate_host_toolset==1', { - 'dependencies': ['v8_base', 'v8_external_snapshot'], - }], - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ] - }, # v8_maybe_snapshot - { - 'target_name': 'v8_init', - 'type': 'static_library', - 'dependencies': [ - 'v8_initializers', - # We need this transitive dependency, since it also does codegen. - 'v8_base', - ], - 'variables': { - 'optimize': 'max', - }, - 'include_dirs': [ - '..', - '../include/', - # This is for `gen/builtins-generated` - '<(SHARED_INTERMEDIATE_DIR)', - ], - 'sources': [ - '../src/setup-isolate-full.cc', - '<(generate_bytecode_builtins_list_output)', - '<@(torque_generated_pure_headers)', - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ], - }], - ], - }, # v8_init - { - 'target_name': 'v8_initializers', - 'type': 'static_library', - 'dependencies': [ - 'v8_base', - ], - 'variables': { - 'optimize': 'max', - }, - 'include_dirs': [ - '..', - '../include/', - ], - 'sources': [ - '../src/builtins/builtins-arguments-gen.cc', - '../src/builtins/builtins-arguments-gen.h', - '../src/builtins/builtins-array-gen.cc', - '../src/builtins/builtins-array-gen.h', - '../src/builtins/builtins-async-function-gen.cc', - '../src/builtins/builtins-async-gen.cc', - '../src/builtins/builtins-async-gen.h', - '../src/builtins/builtins-async-generator-gen.cc', - '../src/builtins/builtins-async-iterator-gen.cc', - '../src/builtins/builtins-bigint-gen.cc', - '../src/builtins/builtins-boolean-gen.cc', - '../src/builtins/builtins-call-gen.cc', - '../src/builtins/builtins-call-gen.h', - '../src/builtins/builtins-collections-gen.cc', - '../src/builtins/builtins-console-gen.cc', - '../src/builtins/builtins-constructor-gen.cc', - '../src/builtins/builtins-constructor-gen.h', - '../src/builtins/builtins-constructor.h', - '../src/builtins/builtins-conversion-gen.cc', - '../src/builtins/builtins-data-view-gen.h', - '../src/builtins/builtins-date-gen.cc', - '../src/builtins/builtins-debug-gen.cc', - '../src/builtins/builtins-function-gen.cc', - '../src/builtins/builtins-generator-gen.cc', - '../src/builtins/builtins-global-gen.cc', - '../src/builtins/builtins-handler-gen.cc', - '../src/builtins/builtins-ic-gen.cc', - '../src/builtins/builtins-internal-gen.cc', - '../src/builtins/builtins-interpreter-gen.cc', - '../src/builtins/builtins-intl-gen.cc', - '../src/builtins/builtins-iterator-gen.cc', - '../src/builtins/builtins-iterator-gen.h', - '../src/builtins/builtins-lazy-gen.cc', - '../src/builtins/builtins-lazy-gen.h', - '../src/builtins/builtins-math-gen.cc', - '../src/builtins/builtins-math-gen.h', - '../src/builtins/builtins-microtask-queue-gen.cc', - '../src/builtins/builtins-number-gen.cc', - '../src/builtins/builtins-object-gen.cc', - '../src/builtins/builtins-promise-gen.cc', - '../src/builtins/builtins-promise-gen.h', - '../src/builtins/builtins-proxy-gen.cc', - '../src/builtins/builtins-proxy-gen.h', - '../src/builtins/builtins-reflect-gen.cc', - '../src/builtins/builtins-regexp-gen.cc', - 
'../src/builtins/builtins-regexp-gen.h', - '../src/builtins/builtins-sharedarraybuffer-gen.cc', - '../src/builtins/builtins-string-gen.cc', - '../src/builtins/builtins-string-gen.h', - '../src/builtins/builtins-symbol-gen.cc', - '../src/builtins/builtins-typed-array-gen.cc', - '../src/builtins/builtins-typed-array-gen.h', - '../src/builtins/builtins-utils-gen.h', - '../src/builtins/builtins-wasm-gen.cc', - '../src/builtins/growable-fixed-array-gen.cc', - '../src/builtins/growable-fixed-array-gen.h', - '../src/builtins/setup-builtins-internal.cc', - '../src/code-stub-assembler.cc', - '../src/code-stub-assembler.h', - '../src/heap/setup-heap-internal.cc', - '../src/ic/accessor-assembler.cc', - '../src/ic/accessor-assembler.h', - '../src/ic/binary-op-assembler.cc', - '../src/ic/binary-op-assembler.h', - '../src/ic/keyed-store-generic.cc', - '../src/ic/keyed-store-generic.h', - '../src/interpreter/interpreter-assembler.cc', - '../src/interpreter/interpreter-assembler.h', - '../src/interpreter/interpreter-generator.cc', - '../src/interpreter/interpreter-generator.h', - '../src/interpreter/interpreter-intrinsics-generator.cc', - '../src/interpreter/interpreter-intrinsics-generator.h', - '<@(torque_outputs)', - '<@(torque_generated_pure_headers)', - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ['v8_target_arch=="ia32"', { - 'sources': [ - '../src/builtins/ia32/builtins-ia32.cc', - ], - }], - ['v8_target_arch=="x64"', { - 'sources': [ - '../src/builtins/x64/builtins-x64.cc', - ], - }], - ['v8_target_arch=="arm"', { - 'sources': [ - '../src/builtins/arm/builtins-arm.cc', - ], - }], - ['v8_target_arch=="arm64"', { - 'sources': [ - '../src/builtins/arm64/builtins-arm64.cc', - ], - }], - ['v8_target_arch=="mips" or v8_target_arch=="mipsel"', { - 'sources': [ - '../src/builtins/mips/builtins-mips.cc', - ], - }], - ['v8_target_arch=="mips64" or v8_target_arch=="mips64el"', { - 'sources': [ - '../src/builtins/mips64/builtins-mips64.cc', - ], - }], - ['v8_target_arch=="ppc" or v8_target_arch=="ppc64"', { - 'sources': [ - '../src/builtins/ppc/builtins-ppc.cc', - ], - }], - ['v8_target_arch=="s390" or v8_target_arch=="s390x"', { - 'sources': [ - '../src/builtins/s390/builtins-s390.cc', - ], - }], - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ], - }, { - 'sources!': [ - '../src/builtins/builtins-intl-gen.cc', - ], - }], - # Platforms that don't have Compare-And-Swap support need to link atomic - # library to implement atomic memory access - [ 'v8_current_cpu in ["mips", "mipsel", "mips64", "mips64el", "ppc", "ppc64", "s390", "s390x"]', { - 'link_settings': { - 'libraries': [ '-latomic', ], - }, - }, - ], - ['OS=="win"', { - 'msvs_precompiled_header': '../../../tools/msvs/pch/v8_pch.h', - 'msvs_precompiled_source': '../../../tools/msvs/pch/v8_pch.cc', - 'sources': [ - '<(_msvs_precompiled_header)', - '<(_msvs_precompiled_source)', - ], - }], - ], - }, # v8_initializers - { - 'target_name': 'v8_snapshot', - 'type': 'static_library', - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - 'dependencies': [ - 'mksnapshot#host', - 'js2c_extras#host', # TODO(refack) get rid of this. - # This has effectively become a noop for Node.js, - # but the V8 code still tried to acess the code this generates. 
- # Refs: https://github.com/nodejs/node/blob/169b7f1f3b3751289f24678930e6a5731464ebc9/deps/v8/src/bootstrapper.cc#L5172-L5174 - ], - }, { - 'toolsets': ['target'], - 'dependencies': [ - 'mksnapshot', - 'js2c_extras', - ], - }], - ['component=="shared_library"', { - 'defines': [ - 'BUILDING_V8_SHARED', - ], - 'direct_dependent_settings': { - 'defines': [ - 'USING_V8_SHARED', - ], - }, - }], - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ], - }], - - ], - 'dependencies': [ - 'v8_base', - ], - 'include_dirs': [ - '..', - '<(DEPTH)', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc', - '../src/setup-isolate-deserialize.cc', - ], - 'actions': [ - { - 'action_name': 'run_mksnapshot', - 'message': 'generating: >@(_outputs)', - 'variables': { - 'mksnapshot_flags': [ '--turbo_instruction_scheduling', ], - }, - 'conditions': [ - ['v8_enable_embedded_builtins == "true"', { - # In this case we use `embedded_variant "Default"` - # and `suffix = ''` for the template `embedded${suffix}.S`. - 'outputs': [ '<(INTERMEDIATE_DIR)/embedded.S' ], - 'variables': { - 'mksnapshot_flags': [ - '--embedded_src', '<(INTERMEDIATE_DIR)/embedded.S', - '--embedded_variant', 'Default', - ], - }, - }, { - 'outputs': [ '../src/snapshot/embedded-empty.cc' ] - }], - ['v8_random_seed != 0', { - 'variables': { - 'mksnapshot_flags': [ '--random-seed', '<(v8_random_seed)' ], - }, - }], - ['v8_os_page_size != 0', { - 'variables': { - 'mksnapshot_flags': [ '--v8_os_page_size', '<(v8_os_page_size)' ], - }, - }], - ['v8_use_external_startup_data !=0 ', { - 'outputs': [ '<(INTERMEDIATE_DIR)/snapshot_blob.bin', ], - 'variables': { - 'mksnapshot_flags': [ '--startup_blob', '<(INTERMEDIATE_DIR)/snapshot_blob.bin', ], - }, - }, { - 'outputs': [ "<(INTERMEDIATE_DIR)/snapshot.cc" ], - 'variables': { - 'mksnapshot_flags': [ '--startup_src', '<(INTERMEDIATE_DIR)/snapshot.cc', ], - }, - }], - ['v8_embed_script != ""', { - 'inputs': [ '<(v8_embed_script)' ], - 'variables': { - 'mksnapshot_flags': [ '<(v8_embed_script)' ], - }, - }], - ['v8_enable_snapshot_code_comments == "true"', { - 'variables': { - 'mksnapshot_flags': [ '--code-comments' ], - }, - }], - ], - 'inputs': [ - '<(mksnapshot_exec)', - ], - 'process_outputs_as_sources': 1, - 'action': [ - '>@(_inputs)', - '>@(mksnapshot_flags)', - ], - }, - ], - }, # v8_snapshot - { - 'target_name': 'v8_nosnapshot', - 'type': 'static_library', - 'dependencies': [ - 'v8_base', - ], - 'include_dirs': [ - '..', - '<(DEPTH)', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc', - '../src/snapshot/embedded-empty.cc', - '../src/snapshot/snapshot-empty.cc', - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - 'dependencies': [ 'js2c_extras#host', ], - }, { - 'toolsets': ['target'], - 'dependencies': [ 'js2c_extras', ], - }], - ['component=="shared_library"', { - 'defines': [ - 'BUILDING_V8_SHARED', - ], - }], - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ], - }], - ] - }, # v8_nosnapshot - { - 'target_name': 'v8_base', - 'type': 'static_library', - # Since this target is a static-library, but as a side effect it generates - # header files, it needs to be a hard dependency. 
- 'hard_dependency': 1, - 'includes': [ 'inspector.gypi' ], - 'dependencies': [ - 'v8_libbase', - 'v8_libsampler', - # Code generators - 'torque#host', - 'generate_bytecode_builtins_list#host', - ], - 'direct_dependent_settings': { - 'include_dirs': ['<(SHARED_INTERMEDIATE_DIR)'], - }, - 'objs': ['foo.o'], - 'variables': { - 'optimize': 'max', - }, - 'include_dirs': [ - '..', - '<(DEPTH)', - '<(SHARED_INTERMEDIATE_DIR)' - ], - 'sources': [ - '<@(inspector_all_sources)', - '../include//v8-inspector-protocol.h', - '../include//v8-inspector.h', - '../include//v8-internal.h', - '../include//v8-platform.h', - '../include//v8-profiler.h', - '../include//v8-testing.h', - '../include//v8-util.h', - '../include//v8-value-serializer-version.h', - '../include//v8-version-string.h', - '../include//v8-version.h', - '../include//v8-wasm-trap-handler-posix.h', - '../include//v8-wasm-trap-handler-win.h', - '../include//v8.h', - '../include//v8config.h', - '../src/accessors.cc', - '../src/accessors.h', - '../src/address-map.cc', - '../src/address-map.h', - '../src/allocation-site-scopes-inl.h', - '../src/allocation-site-scopes.h', - '../src/allocation.cc', - '../src/allocation.h', - '../src/api-arguments-inl.h', - '../src/api-arguments.cc', - '../src/api-arguments.h', - '../src/api-natives.cc', - '../src/api-natives.h', - '../src/api.cc', - '../src/api.h', - '../src/arguments-inl.h', - '../src/arguments.cc', - '../src/arguments.h', - '../src/asan.h', - '../src/asmjs/asm-js.cc', - '../src/asmjs/asm-js.h', - '../src/asmjs/asm-names.h', - '../src/asmjs/asm-parser.cc', - '../src/asmjs/asm-parser.h', - '../src/asmjs/asm-scanner.cc', - '../src/asmjs/asm-scanner.h', - '../src/asmjs/asm-types.cc', - '../src/asmjs/asm-types.h', - '../src/assembler-arch.h', - '../src/assembler-inl.h', - '../src/assembler.cc', - '../src/assembler.h', - '../src/assert-scope.cc', - '../src/assert-scope.h', - '../src/ast/ast-function-literal-id-reindexer.cc', - '../src/ast/ast-function-literal-id-reindexer.h', - '../src/ast/ast-source-ranges.h', - '../src/ast/ast-traversal-visitor.h', - '../src/ast/ast-value-factory.cc', - '../src/ast/ast-value-factory.h', - '../src/ast/ast.cc', - '../src/ast/ast.h', - '../src/ast/modules.cc', - '../src/ast/modules.h', - '../src/ast/prettyprinter.cc', - '../src/ast/prettyprinter.h', - '../src/ast/scopes.cc', - '../src/ast/scopes.h', - '../src/ast/source-range-ast-visitor.cc', - '../src/ast/source-range-ast-visitor.h', - '../src/ast/variables.cc', - '../src/ast/variables.h', - '../src/bailout-reason.cc', - '../src/bailout-reason.h', - '../src/basic-block-profiler.cc', - '../src/basic-block-profiler.h', - '../src/bignum-dtoa.cc', - '../src/bignum-dtoa.h', - '../src/bignum.cc', - '../src/bignum.h', - '../src/bit-vector.cc', - '../src/bit-vector.h', - '../src/bootstrapper.cc', - '../src/bootstrapper.h', - '../src/boxed-float.h', - '../src/builtins/builtins-api.cc', - '../src/builtins/builtins-array.cc', - '../src/builtins/builtins-arraybuffer.cc', - '../src/builtins/builtins-bigint.cc', - '../src/builtins/builtins-boolean.cc', - '../src/builtins/builtins-call.cc', - '../src/builtins/builtins-callsite.cc', - '../src/builtins/builtins-collections.cc', - '../src/builtins/builtins-console.cc', - '../src/builtins/builtins-constructor.h', - '../src/builtins/builtins-dataview.cc', - '../src/builtins/builtins-date.cc', - '../src/builtins/builtins-definitions.h', - '../src/builtins/builtins-descriptors.h', - '../src/builtins/builtins-error.cc', - '../src/builtins/builtins-extras-utils.cc', - 
'../src/builtins/builtins-function.cc', - '../src/builtins/builtins-global.cc', - '../src/builtins/builtins-internal.cc', - '../src/builtins/builtins-intl.cc', - '../src/builtins/builtins-json.cc', - '../src/builtins/builtins-math.cc', - '../src/builtins/builtins-number.cc', - '../src/builtins/builtins-object.cc', - '../src/builtins/builtins-promise.cc', - '../src/builtins/builtins-promise.h', - '../src/builtins/builtins-reflect.cc', - '../src/builtins/builtins-regexp.cc', - '../src/builtins/builtins-sharedarraybuffer.cc', - '../src/builtins/builtins-string.cc', - '../src/builtins/builtins-symbol.cc', - '../src/builtins/builtins-trace.cc', - '../src/builtins/builtins-typed-array.cc', - '../src/builtins/builtins-utils-inl.h', - '../src/builtins/builtins-utils.h', - '../src/builtins/builtins-weak-refs.cc', - '../src/builtins/builtins.cc', - '../src/builtins/builtins.h', - '../src/builtins/constants-table-builder.cc', - '../src/builtins/constants-table-builder.h', - '../src/cached-powers.cc', - '../src/cached-powers.h', - '../src/callable.h', - '../src/cancelable-task.cc', - '../src/cancelable-task.h', - '../src/char-predicates-inl.h', - '../src/char-predicates.cc', - '../src/char-predicates.h', - '../src/checks.h', - '../src/code-comments.cc', - '../src/code-comments.h', - '../src/code-events.h', - '../src/code-factory.cc', - '../src/code-factory.h', - '../src/code-reference.cc', - '../src/code-reference.h', - '../src/code-tracer.h', - '../src/collector.h', - '../src/compilation-cache.cc', - '../src/compilation-cache.h', - '../src/compilation-statistics.cc', - '../src/compilation-statistics.h', - '../src/compiler-dispatcher/compiler-dispatcher.cc', - '../src/compiler-dispatcher/compiler-dispatcher.h', - '../src/compiler-dispatcher/optimizing-compile-dispatcher.cc', - '../src/compiler-dispatcher/optimizing-compile-dispatcher.h', - '../src/compiler.cc', - '../src/compiler.h', - '../src/compiler/access-builder.cc', - '../src/compiler/access-builder.h', - '../src/compiler/access-info.cc', - '../src/compiler/access-info.h', - '../src/compiler/all-nodes.cc', - '../src/compiler/all-nodes.h', - '../src/compiler/allocation-builder-inl.h', - '../src/compiler/allocation-builder.h', - '../src/compiler/backend/code-generator-impl.h', - '../src/compiler/backend/code-generator.cc', - '../src/compiler/backend/code-generator.h', - '../src/compiler/backend/frame-elider.cc', - '../src/compiler/backend/frame-elider.h', - '../src/compiler/backend/gap-resolver.cc', - '../src/compiler/backend/gap-resolver.h', - '../src/compiler/backend/instruction-codes.h', - '../src/compiler/backend/instruction-scheduler.cc', - '../src/compiler/backend/instruction-scheduler.h', - '../src/compiler/backend/instruction-selector-impl.h', - '../src/compiler/backend/instruction-selector.cc', - '../src/compiler/backend/instruction-selector.h', - '../src/compiler/backend/instruction.cc', - '../src/compiler/backend/instruction.h', - '../src/compiler/backend/jump-threading.cc', - '../src/compiler/backend/jump-threading.h', - '../src/compiler/backend/live-range-separator.cc', - '../src/compiler/backend/live-range-separator.h', - '../src/compiler/backend/move-optimizer.cc', - '../src/compiler/backend/move-optimizer.h', - '../src/compiler/backend/register-allocator-verifier.cc', - '../src/compiler/backend/register-allocator-verifier.h', - '../src/compiler/backend/register-allocator.cc', - '../src/compiler/backend/register-allocator.h', - '../src/compiler/backend/unwinding-info-writer.h', - '../src/compiler/basic-block-instrumentor.cc', - 
'../src/compiler/basic-block-instrumentor.h', - '../src/compiler/branch-elimination.cc', - '../src/compiler/branch-elimination.h', - '../src/compiler/bytecode-analysis.cc', - '../src/compiler/bytecode-analysis.h', - '../src/compiler/bytecode-graph-builder.cc', - '../src/compiler/bytecode-graph-builder.h', - '../src/compiler/bytecode-liveness-map.cc', - '../src/compiler/bytecode-liveness-map.h', - '../src/compiler/c-linkage.cc', - '../src/compiler/checkpoint-elimination.cc', - '../src/compiler/checkpoint-elimination.h', - '../src/compiler/code-assembler.cc', - '../src/compiler/code-assembler.h', - '../src/compiler/common-node-cache.cc', - '../src/compiler/common-node-cache.h', - '../src/compiler/common-operator-reducer.cc', - '../src/compiler/common-operator-reducer.h', - '../src/compiler/common-operator.cc', - '../src/compiler/common-operator.h', - '../src/compiler/compilation-dependencies.cc', - '../src/compiler/compilation-dependencies.h', - '../src/compiler/compiler-source-position-table.cc', - '../src/compiler/compiler-source-position-table.h', - '../src/compiler/constant-folding-reducer.cc', - '../src/compiler/constant-folding-reducer.h', - '../src/compiler/control-equivalence.cc', - '../src/compiler/control-equivalence.h', - '../src/compiler/control-flow-optimizer.cc', - '../src/compiler/control-flow-optimizer.h', - '../src/compiler/dead-code-elimination.cc', - '../src/compiler/dead-code-elimination.h', - '../src/compiler/diamond.h', - '../src/compiler/effect-control-linearizer.cc', - '../src/compiler/effect-control-linearizer.h', - '../src/compiler/escape-analysis-reducer.cc', - '../src/compiler/escape-analysis-reducer.h', - '../src/compiler/escape-analysis.cc', - '../src/compiler/escape-analysis.h', - '../src/compiler/frame-states.cc', - '../src/compiler/frame-states.h', - '../src/compiler/frame.cc', - '../src/compiler/frame.h', - '../src/compiler/functional-list.h', - '../src/compiler/graph-assembler.cc', - '../src/compiler/graph-assembler.h', - '../src/compiler/graph-reducer.cc', - '../src/compiler/graph-reducer.h', - '../src/compiler/graph-trimmer.cc', - '../src/compiler/graph-trimmer.h', - '../src/compiler/graph-visualizer.cc', - '../src/compiler/graph-visualizer.h', - '../src/compiler/graph.cc', - '../src/compiler/graph.h', - '../src/compiler/int64-lowering.cc', - '../src/compiler/int64-lowering.h', - '../src/compiler/js-call-reducer.cc', - '../src/compiler/js-call-reducer.h', - '../src/compiler/js-context-specialization.cc', - '../src/compiler/js-context-specialization.h', - '../src/compiler/js-create-lowering.cc', - '../src/compiler/js-create-lowering.h', - '../src/compiler/js-generic-lowering.cc', - '../src/compiler/js-generic-lowering.h', - '../src/compiler/js-graph.cc', - '../src/compiler/js-graph.h', - '../src/compiler/js-heap-broker.cc', - '../src/compiler/js-heap-broker.h', - '../src/compiler/js-heap-copy-reducer.cc', - '../src/compiler/js-heap-copy-reducer.h', - '../src/compiler/js-inlining-heuristic.cc', - '../src/compiler/js-inlining-heuristic.h', - '../src/compiler/js-inlining.cc', - '../src/compiler/js-inlining.h', - '../src/compiler/js-intrinsic-lowering.cc', - '../src/compiler/js-intrinsic-lowering.h', - '../src/compiler/js-native-context-specialization.cc', - '../src/compiler/js-native-context-specialization.h', - '../src/compiler/js-operator.cc', - '../src/compiler/js-operator.h', - '../src/compiler/js-type-hint-lowering.cc', - '../src/compiler/js-type-hint-lowering.h', - '../src/compiler/js-typed-lowering.cc', - '../src/compiler/js-typed-lowering.h', - 
'../src/compiler/linkage.cc', - '../src/compiler/linkage.h', - '../src/compiler/load-elimination.cc', - '../src/compiler/load-elimination.h', - '../src/compiler/loop-analysis.cc', - '../src/compiler/loop-analysis.h', - '../src/compiler/loop-peeling.cc', - '../src/compiler/loop-peeling.h', - '../src/compiler/loop-variable-optimizer.cc', - '../src/compiler/loop-variable-optimizer.h', - '../src/compiler/machine-graph-verifier.cc', - '../src/compiler/machine-graph-verifier.h', - '../src/compiler/machine-graph.cc', - '../src/compiler/machine-graph.h', - '../src/compiler/machine-operator-reducer.cc', - '../src/compiler/machine-operator-reducer.h', - '../src/compiler/machine-operator.cc', - '../src/compiler/machine-operator.h', - '../src/compiler/memory-optimizer.cc', - '../src/compiler/memory-optimizer.h', - '../src/compiler/node-aux-data.h', - '../src/compiler/node-cache.cc', - '../src/compiler/node-cache.h', - '../src/compiler/node-marker.cc', - '../src/compiler/node-marker.h', - '../src/compiler/node-matchers.cc', - '../src/compiler/node-matchers.h', - '../src/compiler/node-origin-table.cc', - '../src/compiler/node-origin-table.h', - '../src/compiler/node-properties.cc', - '../src/compiler/node-properties.h', - '../src/compiler/node.cc', - '../src/compiler/node.h', - '../src/compiler/opcodes.cc', - '../src/compiler/opcodes.h', - '../src/compiler/operation-typer.cc', - '../src/compiler/operation-typer.h', - '../src/compiler/operator-properties.cc', - '../src/compiler/operator-properties.h', - '../src/compiler/operator.cc', - '../src/compiler/operator.h', - '../src/compiler/osr.cc', - '../src/compiler/osr.h', - '../src/compiler/per-isolate-compiler-cache.h', - '../src/compiler/persistent-map.h', - '../src/compiler/pipeline-statistics.cc', - '../src/compiler/pipeline-statistics.h', - '../src/compiler/pipeline.cc', - '../src/compiler/pipeline.h', - '../src/compiler/property-access-builder.cc', - '../src/compiler/property-access-builder.h', - '../src/compiler/raw-machine-assembler.cc', - '../src/compiler/raw-machine-assembler.h', - '../src/compiler/redundancy-elimination.cc', - '../src/compiler/redundancy-elimination.h', - '../src/compiler/refs-map.cc', - '../src/compiler/refs-map.h', - '../src/compiler/representation-change.cc', - '../src/compiler/representation-change.h', - '../src/compiler/schedule.cc', - '../src/compiler/schedule.h', - '../src/compiler/scheduler.cc', - '../src/compiler/scheduler.h', - '../src/compiler/select-lowering.cc', - '../src/compiler/select-lowering.h', - '../src/compiler/serializer-for-background-compilation.cc', - '../src/compiler/serializer-for-background-compilation.h', - '../src/compiler/simd-scalar-lowering.cc', - '../src/compiler/simd-scalar-lowering.h', - '../src/compiler/simplified-lowering.cc', - '../src/compiler/simplified-lowering.h', - '../src/compiler/simplified-operator-reducer.cc', - '../src/compiler/simplified-operator-reducer.h', - '../src/compiler/simplified-operator.cc', - '../src/compiler/simplified-operator.h', - '../src/compiler/state-values-utils.cc', - '../src/compiler/state-values-utils.h', - '../src/compiler/store-store-elimination.cc', - '../src/compiler/store-store-elimination.h', - '../src/compiler/type-cache.cc', - '../src/compiler/type-cache.h', - '../src/compiler/type-narrowing-reducer.cc', - '../src/compiler/type-narrowing-reducer.h', - '../src/compiler/typed-optimization.cc', - '../src/compiler/typed-optimization.h', - '../src/compiler/typer.cc', - '../src/compiler/typer.h', - '../src/compiler/types.cc', - '../src/compiler/types.h', - 
'../src/compiler/value-numbering-reducer.cc', - '../src/compiler/value-numbering-reducer.h', - '../src/compiler/verifier.cc', - '../src/compiler/verifier.h', - '../src/compiler/wasm-compiler.cc', - '../src/compiler/wasm-compiler.h', - '../src/compiler/zone-stats.cc', - '../src/compiler/zone-stats.h', - '../src/constant-pool.cc', - '../src/constant-pool.h', - '../src/constants-arch.h', - '../src/contexts-inl.h', - '../src/contexts.cc', - '../src/contexts.h', - '../src/conversions-inl.h', - '../src/conversions.cc', - '../src/conversions.h', - '../src/counters-inl.h', - '../src/counters.cc', - '../src/counters.h', - '../src/cpu-features.h', - '../src/date.cc', - '../src/date.h', - '../src/dateparser-inl.h', - '../src/dateparser.cc', - '../src/dateparser.h', - '../src/debug/debug-coverage.cc', - '../src/debug/debug-coverage.h', - '../src/debug/debug-evaluate.cc', - '../src/debug/debug-evaluate.h', - '../src/debug/debug-frames.cc', - '../src/debug/debug-frames.h', - '../src/debug/debug-interface.h', - '../src/debug/debug-property-iterator.cc', - '../src/debug/debug-property-iterator.h', - '../src/debug/debug-scope-iterator.cc', - '../src/debug/debug-scope-iterator.h', - '../src/debug/debug-scopes.cc', - '../src/debug/debug-scopes.h', - '../src/debug/debug-stack-trace-iterator.cc', - '../src/debug/debug-stack-trace-iterator.h', - '../src/debug/debug-type-profile.cc', - '../src/debug/debug-type-profile.h', - '../src/debug/debug.cc', - '../src/debug/debug.h', - '../src/debug/interface-types.h', - '../src/debug/liveedit.cc', - '../src/debug/liveedit.h', - '../src/deoptimize-reason.cc', - '../src/deoptimize-reason.h', - '../src/deoptimizer.cc', - '../src/deoptimizer.h', - '../src/detachable-vector.cc', - '../src/detachable-vector.h', - '../src/disasm.h', - '../src/disassembler.cc', - '../src/disassembler.h', - '../src/diy-fp.cc', - '../src/diy-fp.h', - '../src/double.h', - '../src/dtoa.cc', - '../src/dtoa.h', - '../src/eh-frame.cc', - '../src/eh-frame.h', - '../src/elements-inl.h', - '../src/elements-kind.cc', - '../src/elements-kind.h', - '../src/elements.cc', - '../src/elements.h', - '../src/execution.cc', - '../src/execution.h', - '../src/extensions/externalize-string-extension.cc', - '../src/extensions/externalize-string-extension.h', - '../src/extensions/free-buffer-extension.cc', - '../src/extensions/free-buffer-extension.h', - '../src/extensions/gc-extension.cc', - '../src/extensions/gc-extension.h', - '../src/extensions/ignition-statistics-extension.cc', - '../src/extensions/ignition-statistics-extension.h', - '../src/extensions/statistics-extension.cc', - '../src/extensions/statistics-extension.h', - '../src/extensions/trigger-failure-extension.cc', - '../src/extensions/trigger-failure-extension.h', - '../src/external-reference-table.cc', - '../src/external-reference-table.h', - '../src/external-reference.cc', - '../src/external-reference.h', - '../src/fast-dtoa.cc', - '../src/fast-dtoa.h', - '../src/feedback-vector-inl.h', - '../src/feedback-vector.cc', - '../src/feedback-vector.h', - '../src/field-index-inl.h', - '../src/field-index.h', - '../src/field-type.cc', - '../src/field-type.h', - '../src/fixed-dtoa.cc', - '../src/fixed-dtoa.h', - '../src/flag-definitions.h', - '../src/flags.cc', - '../src/flags.h', - '../src/frame-constants.h', - '../src/frames-inl.h', - '../src/frames.cc', - '../src/frames.h', - '../src/futex-emulation.cc', - '../src/futex-emulation.h', - '../src/gdb-jit.cc', - '../src/gdb-jit.h', - '../src/global-handles.cc', - '../src/global-handles.h', - '../src/globals.h', - 
'../src/handler-table.cc', - '../src/handler-table.h', - '../src/handles-inl.h', - '../src/handles.cc', - '../src/handles.h', - '../src/heap-symbols.h', - '../src/heap/array-buffer-collector.cc', - '../src/heap/array-buffer-collector.h', - '../src/heap/array-buffer-tracker-inl.h', - '../src/heap/array-buffer-tracker.cc', - '../src/heap/array-buffer-tracker.h', - '../src/heap/barrier.h', - '../src/heap/code-stats.cc', - '../src/heap/code-stats.h', - '../src/heap/concurrent-marking.cc', - '../src/heap/concurrent-marking.h', - '../src/heap/embedder-tracing.cc', - '../src/heap/embedder-tracing.h', - '../src/heap/factory-inl.h', - '../src/heap/factory.cc', - '../src/heap/factory.h', - '../src/heap/gc-idle-time-handler.cc', - '../src/heap/gc-idle-time-handler.h', - '../src/heap/gc-tracer.cc', - '../src/heap/gc-tracer.h', - '../src/heap/heap-controller.cc', - '../src/heap/heap-controller.h', - '../src/heap/heap-inl.h', - '../src/heap/heap-write-barrier-inl.h', - '../src/heap/heap-write-barrier.h', - '../src/heap/heap.cc', - '../src/heap/heap.h', - '../src/heap/incremental-marking-inl.h', - '../src/heap/incremental-marking-job.cc', - '../src/heap/incremental-marking-job.h', - '../src/heap/incremental-marking.cc', - '../src/heap/incremental-marking.h', - '../src/heap/invalidated-slots-inl.h', - '../src/heap/invalidated-slots.cc', - '../src/heap/invalidated-slots.h', - '../src/heap/item-parallel-job.cc', - '../src/heap/item-parallel-job.h', - '../src/heap/local-allocator-inl.h', - '../src/heap/local-allocator.h', - '../src/heap/mark-compact-inl.h', - '../src/heap/mark-compact.cc', - '../src/heap/mark-compact.h', - '../src/heap/marking.cc', - '../src/heap/marking.h', - '../src/heap/memory-reducer.cc', - '../src/heap/memory-reducer.h', - '../src/heap/object-stats.cc', - '../src/heap/object-stats.h', - '../src/heap/objects-visiting-inl.h', - '../src/heap/objects-visiting.cc', - '../src/heap/objects-visiting.h', - '../src/heap/remembered-set.h', - '../src/heap/scavenge-job.cc', - '../src/heap/scavenge-job.h', - '../src/heap/scavenger-inl.h', - '../src/heap/scavenger.cc', - '../src/heap/scavenger.h', - '../src/heap/slot-set.cc', - '../src/heap/slot-set.h', - '../src/heap/spaces-inl.h', - '../src/heap/spaces.cc', - '../src/heap/spaces.h', - '../src/heap/store-buffer-inl.h', - '../src/heap/store-buffer.cc', - '../src/heap/store-buffer.h', - '../src/heap/stress-marking-observer.cc', - '../src/heap/stress-marking-observer.h', - '../src/heap/stress-scavenge-observer.cc', - '../src/heap/stress-scavenge-observer.h', - '../src/heap/sweeper.cc', - '../src/heap/sweeper.h', - '../src/heap/worklist.h', - '../src/ic/call-optimization.cc', - '../src/ic/call-optimization.h', - '../src/ic/handler-configuration-inl.h', - '../src/ic/handler-configuration.cc', - '../src/ic/handler-configuration.h', - '../src/ic/ic-inl.h', - '../src/ic/ic-stats.cc', - '../src/ic/ic-stats.h', - '../src/ic/ic.cc', - '../src/ic/ic.h', - '../src/ic/stub-cache.cc', - '../src/ic/stub-cache.h', - '../src/icu_util.cc', - '../src/icu_util.h', - '../src/identity-map.cc', - '../src/identity-map.h', - '../src/interface-descriptors.cc', - '../src/interface-descriptors.h', - '../src/interpreter/block-coverage-builder.h', - '../src/interpreter/bytecode-array-accessor.cc', - '../src/interpreter/bytecode-array-accessor.h', - '../src/interpreter/bytecode-array-builder.cc', - '../src/interpreter/bytecode-array-builder.h', - '../src/interpreter/bytecode-array-iterator.cc', - '../src/interpreter/bytecode-array-iterator.h', - 
'../src/interpreter/bytecode-array-random-iterator.cc', - '../src/interpreter/bytecode-array-random-iterator.h', - '../src/interpreter/bytecode-array-writer.cc', - '../src/interpreter/bytecode-array-writer.h', - '../src/interpreter/bytecode-decoder.cc', - '../src/interpreter/bytecode-decoder.h', - '../src/interpreter/bytecode-flags.cc', - '../src/interpreter/bytecode-flags.h', - '../src/interpreter/bytecode-generator.cc', - '../src/interpreter/bytecode-generator.h', - '../src/interpreter/bytecode-jump-table.h', - '../src/interpreter/bytecode-label.cc', - '../src/interpreter/bytecode-label.h', - '../src/interpreter/bytecode-node.cc', - '../src/interpreter/bytecode-node.h', - '../src/interpreter/bytecode-operands.cc', - '../src/interpreter/bytecode-operands.h', - '../src/interpreter/bytecode-register-allocator.h', - '../src/interpreter/bytecode-register-optimizer.cc', - '../src/interpreter/bytecode-register-optimizer.h', - '../src/interpreter/bytecode-register.cc', - '../src/interpreter/bytecode-register.h', - '../src/interpreter/bytecode-source-info.cc', - '../src/interpreter/bytecode-source-info.h', - '../src/interpreter/bytecode-traits.h', - '../src/interpreter/bytecodes.cc', - '../src/interpreter/bytecodes.h', - '../src/interpreter/constant-array-builder.cc', - '../src/interpreter/constant-array-builder.h', - '../src/interpreter/control-flow-builders.cc', - '../src/interpreter/control-flow-builders.h', - '../src/interpreter/handler-table-builder.cc', - '../src/interpreter/handler-table-builder.h', - '../src/interpreter/interpreter-generator.h', - '../src/interpreter/interpreter-intrinsics.cc', - '../src/interpreter/interpreter-intrinsics.h', - '../src/interpreter/interpreter.cc', - '../src/interpreter/interpreter.h', - '../src/isolate-allocator.cc', - '../src/isolate-allocator.h', - '../src/isolate-data.h', - '../src/isolate-inl.h', - '../src/isolate.cc', - '../src/isolate.h', - '../src/json-parser.cc', - '../src/json-parser.h', - '../src/json-stringifier.cc', - '../src/json-stringifier.h', - '../src/keys.cc', - '../src/keys.h', - '../src/label.h', - '../src/layout-descriptor-inl.h', - '../src/layout-descriptor.cc', - '../src/layout-descriptor.h', - '../src/locked-queue-inl.h', - '../src/locked-queue.h', - '../src/log-inl.h', - '../src/log-utils.cc', - '../src/log-utils.h', - '../src/log.cc', - '../src/log.h', - '../src/lookup-cache-inl.h', - '../src/lookup-cache.cc', - '../src/lookup-cache.h', - '../src/lookup-inl.h', - '../src/lookup.cc', - '../src/lookup.h', - '../src/machine-type.cc', - '../src/machine-type.h', - '../src/macro-assembler-inl.h', - '../src/macro-assembler.h', - '../src/map-updater.cc', - '../src/map-updater.h', - '../src/math-random.cc', - '../src/math-random.h', - '../src/maybe-handles-inl.h', - '../src/maybe-handles.h', - '../src/memcopy.cc', - '../src/memcopy.h', - '../src/message-template.h', - '../src/messages.cc', - '../src/messages.h', - '../src/microtask-queue.cc', - '../src/microtask-queue.h', - '../src/msan.h', - '../src/objects-body-descriptors-inl.h', - '../src/objects-body-descriptors.h', - '../src/objects-debug.cc', - '../src/objects-inl.h', - '../src/objects-printer.cc', - '../src/objects.cc', - '../src/objects.h', - '../src/objects/api-callbacks-inl.h', - '../src/objects/api-callbacks.h', - '../src/objects/arguments-inl.h', - '../src/objects/arguments.h', - '../src/objects/bigint.cc', - '../src/objects/bigint.h', - '../src/objects/builtin-function-id.h', - '../src/objects/cell-inl.h', - '../src/objects/cell.h', - '../src/objects/code-inl.h', - 
'../src/objects/code.h', - '../src/objects/compilation-cache-inl.h', - '../src/objects/compilation-cache.h', - '../src/objects/debug-objects-inl.h', - '../src/objects/debug-objects.cc', - '../src/objects/debug-objects.h', - '../src/objects/descriptor-array-inl.h', - '../src/objects/descriptor-array.h', - '../src/objects/dictionary-inl.h', - '../src/objects/dictionary.h', - '../src/objects/embedder-data-array-inl.h', - '../src/objects/embedder-data-array.cc', - '../src/objects/embedder-data-array.h', - '../src/objects/embedder-data-slot-inl.h', - '../src/objects/embedder-data-slot.h', - '../src/objects/feedback-cell-inl.h', - '../src/objects/feedback-cell.h', - '../src/objects/fixed-array-inl.h', - '../src/objects/fixed-array.h', - '../src/objects/frame-array-inl.h', - '../src/objects/frame-array.h', - '../src/objects/hash-table-inl.h', - '../src/objects/hash-table.h', - '../src/objects/heap-number-inl.h', - '../src/objects/heap-number.h', - '../src/objects/heap-object-inl.h', - '../src/objects/heap-object.h', - '../src/objects/instance-type-inl.h', - '../src/objects/instance-type.h', - '../src/objects/intl-objects.cc', - '../src/objects/intl-objects.h', - '../src/objects/js-array-buffer-inl.h', - '../src/objects/js-array-buffer.cc', - '../src/objects/js-array-buffer.h', - '../src/objects/js-array-inl.h', - '../src/objects/js-array.h', - '../src/objects/js-break-iterator-inl.h', - '../src/objects/js-break-iterator.cc', - '../src/objects/js-break-iterator.h', - '../src/objects/js-collator-inl.h', - '../src/objects/js-collator.cc', - '../src/objects/js-collator.h', - '../src/objects/js-collection-inl.h', - '../src/objects/js-collection.h', - '../src/objects/js-date-time-format-inl.h', - '../src/objects/js-date-time-format.cc', - '../src/objects/js-date-time-format.h', - '../src/objects/js-generator-inl.h', - '../src/objects/js-generator.h', - '../src/objects/js-list-format-inl.h', - '../src/objects/js-list-format.cc', - '../src/objects/js-list-format.h', - '../src/objects/js-locale-inl.h', - '../src/objects/js-locale.cc', - '../src/objects/js-locale.h', - '../src/objects/js-number-format-inl.h', - '../src/objects/js-number-format.cc', - '../src/objects/js-number-format.h', - '../src/objects/js-objects-inl.h', - '../src/objects/js-objects.h', - '../src/objects/js-plural-rules-inl.h', - '../src/objects/js-plural-rules.cc', - '../src/objects/js-plural-rules.h', - '../src/objects/js-promise-inl.h', - '../src/objects/js-promise.h', - '../src/objects/js-proxy-inl.h', - '../src/objects/js-proxy.h', - '../src/objects/js-regexp-inl.h', - '../src/objects/js-regexp-string-iterator-inl.h', - '../src/objects/js-regexp-string-iterator.h', - '../src/objects/js-regexp.h', - '../src/objects/js-relative-time-format-inl.h', - '../src/objects/js-relative-time-format.cc', - '../src/objects/js-relative-time-format.h', - '../src/objects/js-segment-iterator-inl.h', - '../src/objects/js-segment-iterator.cc', - '../src/objects/js-segment-iterator.h', - '../src/objects/js-segmenter-inl.h', - '../src/objects/js-segmenter.cc', - '../src/objects/js-segmenter.h', - '../src/objects/js-weak-refs-inl.h', - '../src/objects/js-weak-refs.h', - '../src/objects/literal-objects-inl.h', - '../src/objects/literal-objects.cc', - '../src/objects/literal-objects.h', - '../src/objects/managed.cc', - '../src/objects/managed.h', - '../src/objects/map-inl.h', - '../src/objects/map.h', - '../src/objects/maybe-object-inl.h', - '../src/objects/maybe-object.h', - '../src/objects/microtask-inl.h', - '../src/objects/microtask.h', - 
'../src/objects/module-inl.h', - '../src/objects/module.cc', - '../src/objects/module.h', - '../src/objects/name-inl.h', - '../src/objects/name.h', - '../src/objects/object-macros-undef.h', - '../src/objects/object-macros.h', - '../src/objects/oddball-inl.h', - '../src/objects/oddball.h', - '../src/objects/ordered-hash-table-inl.h', - '../src/objects/ordered-hash-table.cc', - '../src/objects/ordered-hash-table.h', - '../src/objects/promise-inl.h', - '../src/objects/promise.h', - '../src/objects/property-array-inl.h', - '../src/objects/property-array.h', - '../src/objects/property-cell-inl.h', - '../src/objects/property-cell.h', - '../src/objects/property-descriptor-object-inl.h', - '../src/objects/property-descriptor-object.h', - '../src/objects/prototype-info-inl.h', - '../src/objects/prototype-info.h', - '../src/objects/regexp-match-info.h', - '../src/objects/scope-info.cc', - '../src/objects/scope-info.h', - '../src/objects/script-inl.h', - '../src/objects/script.h', - '../src/objects/shared-function-info-inl.h', - '../src/objects/shared-function-info.h', - '../src/objects/slots-atomic-inl.h', - '../src/objects/slots-inl.h', - '../src/objects/slots.h', - '../src/objects/stack-frame-info-inl.h', - '../src/objects/stack-frame-info.h', - '../src/objects/string-inl.h', - '../src/objects/string-table-inl.h', - '../src/objects/string-table.h', - '../src/objects/string.h', - '../src/objects/struct-inl.h', - '../src/objects/struct.h', - '../src/objects/template-objects.cc', - '../src/objects/template-objects.h', - '../src/objects/templates-inl.h', - '../src/objects/templates.h', - '../src/optimized-compilation-info.cc', - '../src/optimized-compilation-info.h', - '../src/ostreams.cc', - '../src/ostreams.h', - '../src/parsing/expression-scope-reparenter.cc', - '../src/parsing/expression-scope-reparenter.h', - '../src/parsing/expression-scope.h', - '../src/parsing/func-name-inferrer.cc', - '../src/parsing/func-name-inferrer.h', - '../src/parsing/parse-info.cc', - '../src/parsing/parse-info.h', - '../src/parsing/parser-base.h', - '../src/parsing/parser.cc', - '../src/parsing/parser.h', - '../src/parsing/parsing.cc', - '../src/parsing/parsing.h', - '../src/parsing/pattern-rewriter.cc', - '../src/parsing/preparse-data-impl.h', - '../src/parsing/preparse-data.cc', - '../src/parsing/preparse-data.h', - '../src/parsing/preparser-logger.h', - '../src/parsing/preparser.cc', - '../src/parsing/preparser.h', - '../src/parsing/rewriter.cc', - '../src/parsing/rewriter.h', - '../src/parsing/scanner-character-streams.cc', - '../src/parsing/scanner-character-streams.h', - '../src/parsing/scanner.cc', - '../src/parsing/scanner.h', - '../src/parsing/token.cc', - '../src/parsing/token.h', - '../src/pending-compilation-error-handler.cc', - '../src/pending-compilation-error-handler.h', - '../src/perf-jit.cc', - '../src/perf-jit.h', - '../src/pointer-with-payload.h', - '../src/profiler/allocation-tracker.cc', - '../src/profiler/allocation-tracker.h', - '../src/profiler/circular-queue-inl.h', - '../src/profiler/circular-queue.h', - '../src/profiler/cpu-profiler-inl.h', - '../src/profiler/cpu-profiler.cc', - '../src/profiler/cpu-profiler.h', - '../src/profiler/heap-profiler.cc', - '../src/profiler/heap-profiler.h', - '../src/profiler/heap-snapshot-generator-inl.h', - '../src/profiler/heap-snapshot-generator.cc', - '../src/profiler/heap-snapshot-generator.h', - '../src/profiler/profile-generator-inl.h', - '../src/profiler/profile-generator.cc', - '../src/profiler/profile-generator.h', - 
'../src/profiler/profiler-listener.cc', - '../src/profiler/profiler-listener.h', - '../src/profiler/sampling-heap-profiler.cc', - '../src/profiler/sampling-heap-profiler.h', - '../src/profiler/strings-storage.cc', - '../src/profiler/strings-storage.h', - '../src/profiler/tick-sample.cc', - '../src/profiler/tick-sample.h', - '../src/profiler/tracing-cpu-profiler.cc', - '../src/profiler/tracing-cpu-profiler.h', - '../src/profiler/unbound-queue-inl.h', - '../src/profiler/unbound-queue.h', - '../src/property-descriptor.cc', - '../src/property-descriptor.h', - '../src/property-details.h', - '../src/property.cc', - '../src/property.h', - '../src/prototype.h', - '../src/ptr-compr-inl.h', - '../src/ptr-compr.h', - '../src/regexp/bytecodes-irregexp.h', - '../src/regexp/interpreter-irregexp.cc', - '../src/regexp/interpreter-irregexp.h', - '../src/regexp/jsregexp-inl.h', - '../src/regexp/jsregexp.cc', - '../src/regexp/jsregexp.h', - '../src/regexp/property-sequences.cc', - '../src/regexp/property-sequences.h', - '../src/regexp/regexp-ast.cc', - '../src/regexp/regexp-ast.h', - '../src/regexp/regexp-macro-assembler-irregexp-inl.h', - '../src/regexp/regexp-macro-assembler-irregexp.cc', - '../src/regexp/regexp-macro-assembler-irregexp.h', - '../src/regexp/regexp-macro-assembler-tracer.cc', - '../src/regexp/regexp-macro-assembler-tracer.h', - '../src/regexp/regexp-macro-assembler.cc', - '../src/regexp/regexp-macro-assembler.h', - '../src/regexp/regexp-parser.cc', - '../src/regexp/regexp-parser.h', - '../src/regexp/regexp-stack.cc', - '../src/regexp/regexp-stack.h', - '../src/regexp/regexp-utils.cc', - '../src/regexp/regexp-utils.h', - '../src/register-arch.h', - '../src/register-configuration.cc', - '../src/register-configuration.h', - '../src/register.h', - '../src/reglist.h', - '../src/reloc-info.cc', - '../src/reloc-info.h', - '../src/roots-inl.h', - '../src/roots.cc', - '../src/roots.h', - '../src/runtime-profiler.cc', - '../src/runtime-profiler.h', - '../src/runtime/runtime-array.cc', - '../src/runtime/runtime-atomics.cc', - '../src/runtime/runtime-bigint.cc', - '../src/runtime/runtime-classes.cc', - '../src/runtime/runtime-collections.cc', - '../src/runtime/runtime-compiler.cc', - '../src/runtime/runtime-date.cc', - '../src/runtime/runtime-debug.cc', - '../src/runtime/runtime-forin.cc', - '../src/runtime/runtime-function.cc', - '../src/runtime/runtime-futex.cc', - '../src/runtime/runtime-generator.cc', - '../src/runtime/runtime-internal.cc', - '../src/runtime/runtime-interpreter.cc', - '../src/runtime/runtime-intl.cc', - '../src/runtime/runtime-literals.cc', - '../src/runtime/runtime-module.cc', - '../src/runtime/runtime-numbers.cc', - '../src/runtime/runtime-object.cc', - '../src/runtime/runtime-operators.cc', - '../src/runtime/runtime-promise.cc', - '../src/runtime/runtime-proxy.cc', - '../src/runtime/runtime-regexp.cc', - '../src/runtime/runtime-scopes.cc', - '../src/runtime/runtime-strings.cc', - '../src/runtime/runtime-symbol.cc', - '../src/runtime/runtime-test.cc', - '../src/runtime/runtime-typedarray.cc', - '../src/runtime/runtime-utils.h', - '../src/runtime/runtime-wasm.cc', - '../src/runtime/runtime-weak-refs.cc', - '../src/runtime/runtime.cc', - '../src/runtime/runtime.h', - '../src/safepoint-table.cc', - '../src/safepoint-table.h', - '../src/setup-isolate.h', - '../src/signature.h', - '../src/simulator-base.cc', - '../src/simulator-base.h', - '../src/simulator.h', - '../src/snapshot/code-serializer.cc', - '../src/snapshot/code-serializer.h', - '../src/snapshot/deserializer-allocator.cc', 
- '../src/snapshot/deserializer-allocator.h', - '../src/snapshot/deserializer.cc', - '../src/snapshot/deserializer.h', - '../src/snapshot/embedded-data.cc', - '../src/snapshot/embedded-data.h', - '../src/snapshot/natives-common.cc', - '../src/snapshot/natives.h', - '../src/snapshot/object-deserializer.cc', - '../src/snapshot/object-deserializer.h', - '../src/snapshot/partial-deserializer.cc', - '../src/snapshot/partial-deserializer.h', - '../src/snapshot/partial-serializer.cc', - '../src/snapshot/partial-serializer.h', - '../src/snapshot/read-only-deserializer.cc', - '../src/snapshot/read-only-deserializer.h', - '../src/snapshot/read-only-serializer.cc', - '../src/snapshot/read-only-serializer.h', - '../src/snapshot/references.h', - '../src/snapshot/roots-serializer.cc', - '../src/snapshot/roots-serializer.h', - '../src/snapshot/serializer-allocator.cc', - '../src/snapshot/serializer-allocator.h', - '../src/snapshot/serializer-common.cc', - '../src/snapshot/serializer-common.h', - '../src/snapshot/serializer.cc', - '../src/snapshot/serializer.h', - '../src/snapshot/snapshot-common.cc', - '../src/snapshot/snapshot-source-sink.cc', - '../src/snapshot/snapshot-source-sink.h', - '../src/snapshot/snapshot.h', - '../src/snapshot/startup-deserializer.cc', - '../src/snapshot/startup-deserializer.h', - '../src/snapshot/startup-serializer.cc', - '../src/snapshot/startup-serializer.h', - '../src/source-position-table.cc', - '../src/source-position-table.h', - '../src/source-position.cc', - '../src/source-position.h', - '../src/splay-tree-inl.h', - '../src/splay-tree.h', - '../src/startup-data-util.cc', - '../src/startup-data-util.h', - '../src/string-builder-inl.h', - '../src/string-builder.cc', - '../src/string-case.cc', - '../src/string-case.h', - '../src/string-constants.cc', - '../src/string-constants.h', - '../src/string-hasher-inl.h', - '../src/string-hasher.h', - '../src/string-search.h', - '../src/string-stream.cc', - '../src/string-stream.h', - '../src/strtod.cc', - '../src/strtod.h', - '../src/task-utils.cc', - '../src/task-utils.h', - '../src/third_party/siphash/halfsiphash.cc', - '../src/third_party/siphash/halfsiphash.h', - '../src/third_party/utf8-decoder/utf8-decoder.h', - '../src/thread-id.cc', - '../src/thread-id.h', - '../src/tracing/trace-event.cc', - '../src/tracing/trace-event.h', - '../src/tracing/traced-value.cc', - '../src/tracing/traced-value.h', - '../src/tracing/tracing-category-observer.cc', - '../src/tracing/tracing-category-observer.h', - '../src/transitions-inl.h', - '../src/transitions.cc', - '../src/transitions.h', - '../src/trap-handler/handler-inside.cc', - '../src/trap-handler/handler-outside.cc', - '../src/trap-handler/handler-shared.cc', - '../src/trap-handler/trap-handler-internal.h', - '../src/trap-handler/trap-handler.h', - '../src/turbo-assembler.cc', - '../src/turbo-assembler.h', - '../src/type-hints.cc', - '../src/type-hints.h', - '../src/unicode-cache.h', - '../src/unicode-decoder.cc', - '../src/unicode-decoder.h', - '../src/unicode-inl.h', - '../src/unicode.cc', - '../src/unicode.h', - '../src/unoptimized-compilation-info.cc', - '../src/unoptimized-compilation-info.h', - '../src/unwinder.cc', - '../src/uri.cc', - '../src/uri.h', - '../src/utils-inl.h', - '../src/utils.cc', - '../src/utils.h', - '../src/v8.cc', - '../src/v8.h', - '../src/v8memory.h', - '../src/v8threads.cc', - '../src/v8threads.h', - '../src/value-serializer.cc', - '../src/value-serializer.h', - '../src/vector-slot-pair.cc', - '../src/vector-slot-pair.h', - '../src/vector.h', - 
'../src/version.cc', - '../src/version.h', - '../src/visitors.cc', - '../src/visitors.h', - '../src/vm-state-inl.h', - '../src/vm-state.h', - '../src/wasm/baseline/liftoff-assembler-defs.h', - '../src/wasm/baseline/liftoff-assembler.cc', - '../src/wasm/baseline/liftoff-assembler.h', - '../src/wasm/baseline/liftoff-compiler.cc', - '../src/wasm/baseline/liftoff-compiler.h', - '../src/wasm/baseline/liftoff-register.h', - '../src/wasm/compilation-environment.h', - '../src/wasm/decoder.h', - '../src/wasm/function-body-decoder-impl.h', - '../src/wasm/function-body-decoder.cc', - '../src/wasm/function-body-decoder.h', - '../src/wasm/function-compiler.cc', - '../src/wasm/function-compiler.h', - '../src/wasm/graph-builder-interface.cc', - '../src/wasm/graph-builder-interface.h', - '../src/wasm/js-to-wasm-wrapper-cache-inl.h', - '../src/wasm/jump-table-assembler.cc', - '../src/wasm/jump-table-assembler.h', - '../src/wasm/leb-helper.h', - '../src/wasm/local-decl-encoder.cc', - '../src/wasm/local-decl-encoder.h', - '../src/wasm/memory-tracing.cc', - '../src/wasm/memory-tracing.h', - '../src/wasm/module-compiler.cc', - '../src/wasm/module-compiler.h', - '../src/wasm/module-decoder.cc', - '../src/wasm/module-decoder.h', - '../src/wasm/module-instantiate.cc', - '../src/wasm/module-instantiate.h', - '../src/wasm/object-access.h', - '../src/wasm/signature-map.cc', - '../src/wasm/signature-map.h', - '../src/wasm/streaming-decoder.cc', - '../src/wasm/streaming-decoder.h', - '../src/wasm/value-type.h', - '../src/wasm/wasm-code-manager.cc', - '../src/wasm/wasm-code-manager.h', - '../src/wasm/wasm-constants.h', - '../src/wasm/wasm-debug.cc', - '../src/wasm/wasm-engine.cc', - '../src/wasm/wasm-engine.h', - '../src/wasm/wasm-external-refs.cc', - '../src/wasm/wasm-external-refs.h', - '../src/wasm/wasm-feature-flags.h', - '../src/wasm/wasm-features.cc', - '../src/wasm/wasm-features.h', - '../src/wasm/wasm-import-wrapper-cache-inl.h', - '../src/wasm/wasm-interpreter.cc', - '../src/wasm/wasm-interpreter.h', - '../src/wasm/wasm-js.cc', - '../src/wasm/wasm-js.h', - '../src/wasm/wasm-limits.h', - '../src/wasm/wasm-linkage.h', - '../src/wasm/wasm-memory.cc', - '../src/wasm/wasm-memory.h', - '../src/wasm/wasm-module-builder.cc', - '../src/wasm/wasm-module-builder.h', - '../src/wasm/wasm-module.cc', - '../src/wasm/wasm-module.h', - '../src/wasm/wasm-objects-inl.h', - '../src/wasm/wasm-objects.cc', - '../src/wasm/wasm-objects.h', - '../src/wasm/wasm-opcodes.cc', - '../src/wasm/wasm-opcodes.h', - '../src/wasm/wasm-result.cc', - '../src/wasm/wasm-result.h', - '../src/wasm/wasm-serialization.cc', - '../src/wasm/wasm-serialization.h', - '../src/wasm/wasm-text.cc', - '../src/wasm/wasm-text.h', - '../src/wasm/wasm-tier.h', - '../src/wasm/wasm-value.h', - '../src/zone/accounting-allocator.cc', - '../src/zone/accounting-allocator.h', - '../src/zone/zone-allocator.h', - '../src/zone/zone-chunk-list.h', - '../src/zone/zone-containers.h', - '../src/zone/zone-handle-set.h', - '../src/zone/zone-list-inl.h', - '../src/zone/zone-segment.cc', - '../src/zone/zone-segment.h', - '../src/zone/zone.cc', - '../src/zone/zone.h', - '<(generate_bytecode_builtins_list_output)', - '<@(torque_generated_pure_headers)', - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ['v8_target_arch=="ia32"', { - 'sources': [ - '../src/compiler/backend/ia32/code-generator-ia32.cc', - '../src/compiler/backend/ia32/instruction-codes-ia32.h', - 
'../src/compiler/backend/ia32/instruction-scheduler-ia32.cc', - '../src/compiler/backend/ia32/instruction-selector-ia32.cc', - '../src/debug/ia32/debug-ia32.cc', - '../src/ia32/assembler-ia32-inl.h', - '../src/ia32/assembler-ia32.cc', - '../src/ia32/assembler-ia32.h', - '../src/ia32/constants-ia32.h', - '../src/ia32/cpu-ia32.cc', - '../src/ia32/deoptimizer-ia32.cc', - '../src/ia32/disasm-ia32.cc', - '../src/ia32/frame-constants-ia32.cc', - '../src/ia32/frame-constants-ia32.h', - '../src/ia32/interface-descriptors-ia32.cc', - '../src/ia32/macro-assembler-ia32.cc', - '../src/ia32/macro-assembler-ia32.h', - '../src/ia32/register-ia32.h', - '../src/ia32/sse-instr.h', - '../src/regexp/ia32/regexp-macro-assembler-ia32.cc', - '../src/regexp/ia32/regexp-macro-assembler-ia32.h', - '../src/wasm/baseline/ia32/liftoff-assembler-ia32.h', - ], - }], - ['v8_target_arch=="x64"', { - 'sources': [ - '../src/compiler/backend/x64/code-generator-x64.cc', - '../src/compiler/backend/x64/instruction-codes-x64.h', - '../src/compiler/backend/x64/instruction-scheduler-x64.cc', - '../src/compiler/backend/x64/instruction-selector-x64.cc', - '../src/compiler/backend/x64/unwinding-info-writer-x64.cc', - '../src/compiler/backend/x64/unwinding-info-writer-x64.h', - '../src/debug/x64/debug-x64.cc', - '../src/regexp/x64/regexp-macro-assembler-x64.cc', - '../src/regexp/x64/regexp-macro-assembler-x64.h', - '../src/third_party/valgrind/valgrind.h', - '../src/wasm/baseline/x64/liftoff-assembler-x64.h', - '../src/x64/assembler-x64-inl.h', - '../src/x64/assembler-x64.cc', - '../src/x64/assembler-x64.h', - '../src/x64/constants-x64.h', - '../src/x64/cpu-x64.cc', - '../src/x64/deoptimizer-x64.cc', - '../src/x64/disasm-x64.cc', - '../src/x64/eh-frame-x64.cc', - '../src/x64/frame-constants-x64.cc', - '../src/x64/frame-constants-x64.h', - '../src/x64/interface-descriptors-x64.cc', - '../src/x64/macro-assembler-x64.cc', - '../src/x64/macro-assembler-x64.h', - '../src/x64/register-x64.h', - '../src/x64/sse-instr.h', - ], - }], - ['v8_target_arch=="x64" and (OS=="linux" or OS=="mac")', { - 'sources': [ - '../src/trap-handler/handler-inside-posix.cc', - '../src/trap-handler/handler-inside-posix.h', - '../src/trap-handler/handler-outside-posix.cc', - ], - }], - ['v8_target_arch=="x64" and OS=="win"', { - 'sources': [ - '../src/trap-handler/handler-inside-win.cc', - '../src/trap-handler/handler-inside-win.h', - '../src/trap-handler/handler-outside-win.cc', - ], - }], - ['v8_target_arch=="arm"', { - 'sources': [ - '../src/arm/assembler-arm-inl.h', - '../src/arm/assembler-arm.cc', - '../src/arm/assembler-arm.h', - '../src/arm/constants-arm.h', - '../src/arm/constants-arm.cc', - '../src/arm/cpu-arm.cc', - '../src/arm/deoptimizer-arm.cc', - '../src/arm/disasm-arm.cc', - '../src/arm/eh-frame-arm.cc', - '../src/arm/frame-constants-arm.cc', - '../src/arm/frame-constants-arm.h', - '../src/arm/interface-descriptors-arm.cc', - '../src/arm/macro-assembler-arm.cc', - '../src/arm/macro-assembler-arm.h', - '../src/arm/register-arm.h', - '../src/arm/simulator-arm.cc', - '../src/arm/simulator-arm.h', - '../src/compiler/backend/arm/code-generator-arm.cc', - '../src/compiler/backend/arm/instruction-codes-arm.h', - '../src/compiler/backend/arm/instruction-scheduler-arm.cc', - '../src/compiler/backend/arm/instruction-selector-arm.cc', - '../src/compiler/backend/arm/unwinding-info-writer-arm.cc', - '../src/compiler/backend/arm/unwinding-info-writer-arm.h', - '../src/debug/arm/debug-arm.cc', - '../src/regexp/arm/regexp-macro-assembler-arm.cc', - 
'../src/regexp/arm/regexp-macro-assembler-arm.h', - '../src/wasm/baseline/arm/liftoff-assembler-arm.h', - ], - }], - ['v8_target_arch=="arm64"', { - 'sources': [ - '../src/arm64/assembler-arm64-inl.h', - '../src/arm64/assembler-arm64.cc', - '../src/arm64/assembler-arm64.h', - '../src/arm64/constants-arm64.h', - '../src/arm64/cpu-arm64.cc', - '../src/arm64/decoder-arm64-inl.h', - '../src/arm64/decoder-arm64.cc', - '../src/arm64/decoder-arm64.h', - '../src/arm64/deoptimizer-arm64.cc', - '../src/arm64/disasm-arm64.cc', - '../src/arm64/disasm-arm64.h', - '../src/arm64/eh-frame-arm64.cc', - '../src/arm64/frame-constants-arm64.cc', - '../src/arm64/frame-constants-arm64.h', - '../src/arm64/instructions-arm64-constants.cc', - '../src/arm64/instructions-arm64.cc', - '../src/arm64/instructions-arm64.h', - '../src/arm64/instrument-arm64.cc', - '../src/arm64/instrument-arm64.h', - '../src/arm64/interface-descriptors-arm64.cc', - '../src/arm64/macro-assembler-arm64-inl.h', - '../src/arm64/macro-assembler-arm64.cc', - '../src/arm64/macro-assembler-arm64.h', - '../src/arm64/register-arm64.cc', - '../src/arm64/register-arm64.h', - '../src/arm64/simulator-arm64.cc', - '../src/arm64/simulator-arm64.h', - '../src/arm64/simulator-logic-arm64.cc', - '../src/arm64/utils-arm64.cc', - '../src/arm64/utils-arm64.h', - '../src/compiler/backend/arm64/code-generator-arm64.cc', - '../src/compiler/backend/arm64/instruction-codes-arm64.h', - '../src/compiler/backend/arm64/instruction-scheduler-arm64.cc', - '../src/compiler/backend/arm64/instruction-selector-arm64.cc', - '../src/compiler/backend/arm64/unwinding-info-writer-arm64.cc', - '../src/compiler/backend/arm64/unwinding-info-writer-arm64.h', - '../src/debug/arm64/debug-arm64.cc', - '../src/regexp/arm64/regexp-macro-assembler-arm64.cc', - '../src/regexp/arm64/regexp-macro-assembler-arm64.h', - '../src/wasm/baseline/arm64/liftoff-assembler-arm64.h', - ], - }], - ['v8_target_arch=="mips" or v8_target_arch=="mipsel"', { - 'sources': [ - '../src/compiler/backend/mips/code-generator-mips.cc', - '../src/compiler/backend/mips/instruction-codes-mips.h', - '../src/compiler/backend/mips/instruction-scheduler-mips.cc', - '../src/compiler/backend/mips/instruction-selector-mips.cc', - '../src/debug/mips/debug-mips.cc', - '../src/mips/assembler-mips-inl.h', - '../src/mips/assembler-mips.cc', - '../src/mips/assembler-mips.h', - '../src/mips/constants-mips.cc', - '../src/mips/constants-mips.h', - '../src/mips/cpu-mips.cc', - '../src/mips/deoptimizer-mips.cc', - '../src/mips/disasm-mips.cc', - '../src/mips/frame-constants-mips.cc', - '../src/mips/frame-constants-mips.h', - '../src/mips/interface-descriptors-mips.cc', - '../src/mips/macro-assembler-mips.cc', - '../src/mips/macro-assembler-mips.h', - '../src/mips/register-mips.h', - '../src/mips/simulator-mips.cc', - '../src/mips/simulator-mips.h', - '../src/regexp/mips/regexp-macro-assembler-mips.cc', - '../src/regexp/mips/regexp-macro-assembler-mips.h', - '../src/wasm/baseline/mips/liftoff-assembler-mips.h', - ], - }], - ['v8_target_arch=="mips64" or v8_target_arch=="mips64el"', { - 'sources': [ - '../src/compiler/backend/mips64/code-generator-mips64.cc', - '../src/compiler/backend/mips64/instruction-codes-mips64.h', - '../src/compiler/backend/mips64/instruction-scheduler-mips64.cc', - '../src/compiler/backend/mips64/instruction-selector-mips64.cc', - '../src/debug/mips64/debug-mips64.cc', - '../src/mips64/assembler-mips64-inl.h', - '../src/mips64/assembler-mips64.cc', - '../src/mips64/assembler-mips64.h', - 
'../src/mips64/constants-mips64.cc', - '../src/mips64/constants-mips64.h', - '../src/mips64/cpu-mips64.cc', - '../src/mips64/deoptimizer-mips64.cc', - '../src/mips64/disasm-mips64.cc', - '../src/mips64/frame-constants-mips64.cc', - '../src/mips64/frame-constants-mips64.h', - '../src/mips64/interface-descriptors-mips64.cc', - '../src/mips64/macro-assembler-mips64.cc', - '../src/mips64/macro-assembler-mips64.h', - '../src/mips64/register-mips64.h', - '../src/mips64/simulator-mips64.cc', - '../src/mips64/simulator-mips64.h', - '../src/regexp/mips64/regexp-macro-assembler-mips64.cc', - '../src/regexp/mips64/regexp-macro-assembler-mips64.h', - '../src/wasm/baseline/mips64/liftoff-assembler-mips64.h', - ], - }], - ['v8_target_arch=="ppc" or v8_target_arch=="ppc64"', { - 'sources': [ - '../src/compiler/backend/ppc/code-generator-ppc.cc', - '../src/compiler/backend/ppc/instruction-codes-ppc.h', - '../src/compiler/backend/ppc/instruction-scheduler-ppc.cc', - '../src/compiler/backend/ppc/instruction-selector-ppc.cc', - '../src/debug/ppc/debug-ppc.cc', - '../src/ppc/assembler-ppc-inl.h', - '../src/ppc/assembler-ppc.cc', - '../src/ppc/assembler-ppc.h', - '../src/ppc/constants-ppc.h', - '../src/ppc/constants-ppc.cc', - '../src/ppc/cpu-ppc.cc', - '../src/ppc/deoptimizer-ppc.cc', - '../src/ppc/disasm-ppc.cc', - '../src/ppc/frame-constants-ppc.cc', - '../src/ppc/frame-constants-ppc.h', - '../src/ppc/interface-descriptors-ppc.cc', - '../src/ppc/macro-assembler-ppc.cc', - '../src/ppc/macro-assembler-ppc.h', - '../src/ppc/register-ppc.h', - '../src/ppc/simulator-ppc.cc', - '../src/ppc/simulator-ppc.h', - '../src/regexp/ppc/regexp-macro-assembler-ppc.cc', - '../src/regexp/ppc/regexp-macro-assembler-ppc.h', - '../src/wasm/baseline/ppc/liftoff-assembler-ppc.h', - ], - }], - ['v8_target_arch=="s390" or v8_target_arch=="s390x"', { - 'sources': [ - '../src/compiler/backend/s390/code-generator-s390.cc', - '../src/compiler/backend/s390/instruction-codes-s390.h', - '../src/compiler/backend/s390/instruction-scheduler-s390.cc', - '../src/compiler/backend/s390/instruction-selector-s390.cc', - '../src/debug/s390/debug-s390.cc', - '../src/regexp/s390/regexp-macro-assembler-s390.cc', - '../src/regexp/s390/regexp-macro-assembler-s390.h', - '../src/s390/assembler-s390-inl.h', - '../src/s390/assembler-s390.cc', - '../src/s390/assembler-s390.h', - '../src/s390/constants-s390.cc', - '../src/s390/constants-s390.h', - '../src/s390/cpu-s390.cc', - '../src/s390/deoptimizer-s390.cc', - '../src/s390/disasm-s390.cc', - '../src/s390/frame-constants-s390.cc', - '../src/s390/frame-constants-s390.h', - '../src/s390/interface-descriptors-s390.cc', - '../src/s390/macro-assembler-s390.cc', - '../src/s390/macro-assembler-s390.h', - '../src/s390/register-s390.h', - '../src/s390/simulator-s390.cc', - '../src/s390/simulator-s390.h', - '../src/wasm/baseline/s390/liftoff-assembler-s390.h', - ], - }], - ['OS=="win"', { - 'msvs_disabled_warnings': [4351, 4355, 4800], - 'msvs_precompiled_header': '../../../tools/msvs/pch/v8_pch.h', - 'msvs_precompiled_source': '../../../tools/msvs/pch/v8_pch.cc', - 'sources': [ - '<(_msvs_precompiled_header)', - '<(_msvs_precompiled_source)', - ], - # This will prevent V8's .cc files conflicting with the inspector's - # .cpp files in the same shard. 
- 'msvs_settings': { - 'VCCLCompilerTool': { - 'ObjectFile':'$(IntDir)%(Extension)\\', - }, - }, - }], - ['component=="shared_library"', { - 'defines': [ - 'BUILDING_V8_SHARED', - ], - }], - ['v8_postmortem_support=="true"', { - 'dependencies': [ 'postmortem-metadata#target' ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc', - ], - }], - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ], - 'conditions': [ - ['icu_use_data_file_flag==1', { - 'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE'], - }, { # else icu_use_data_file_flag !=1 - 'conditions': [ - ['OS=="win"', { - 'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_SHARED'], - }, { - 'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC'], - }], - ], - }], - ], - }, { # v8_enable_i18n_support==0 - 'sources!': [ - '../src/builtins/builtins-intl.cc', - '../src/char-predicates.cc', - '../src/objects/intl-objects.cc', - '../src/objects/intl-objects.h', - '../src/objects/js-break-iterator-inl.h', - '../src/objects/js-break-iterator.cc', - '../src/objects/js-break-iterator.h', - '../src/objects/js-collator-inl.h', - '../src/objects/js-collator.cc', - '../src/objects/js-collator.h', - '../src/objects/js-date-time-format-inl.h', - '../src/objects/js-date-time-format.cc', - '../src/objects/js-date-time-format.h', - '../src/objects/js-list-format-inl.h', - '../src/objects/js-list-format.cc', - '../src/objects/js-list-format.h', - '../src/objects/js-locale-inl.h', - '../src/objects/js-locale.cc', - '../src/objects/js-locale.h', - '../src/objects/js-number-format-inl.h', - '../src/objects/js-number-format.cc', - '../src/objects/js-number-format.h', - '../src/objects/js-plural-rules-inl.h', - '../src/objects/js-plural-rules.cc', - '../src/objects/js-plural-rules.h', - '../src/objects/js-relative-time-format-inl.h', - '../src/objects/js-relative-time-format.cc', - '../src/objects/js-relative-time-format.h', - '../src/objects/js-segment-iterator-inl.h', - '../src/objects/js-segment-iterator.cc', - '../src/objects/js-segment-iterator.h', - '../src/objects/js-segmenter-inl.h', - '../src/objects/js-segmenter.cc', - '../src/objects/js-segmenter.h', - '../src/runtime/runtime-intl.cc', - ], - }], - ['OS=="win" and v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icudata', - ], - }], - ], - 'actions': [ - { - 'action_name': 'run_torque_action', - 'inputs': [ # Order matters. 
- '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)torque<(EXECUTABLE_SUFFIX)', - '<@(torque_files)', - ], - 'outputs': [ - '<@(torque_outputs)', - '<@(torque_generated_pure_headers)', - ], - 'action': [ - '<@(_inputs)', - '-o', '<(SHARED_INTERMEDIATE_DIR)/torque-generated' - ], - }, - ], - }, # v8_base - { - 'target_name': 'v8_libbase', - 'type': '<(component)', - 'toolsets': ['host', 'target'], - 'variables': { - 'optimize': 'max', - }, - 'include_dirs': [ - '..', - ], - 'direct_dependent_settings': { - 'include_dirs': ['..'], - }, - 'sources': [ - '../src/base/adapters.h', - '../src/base/address-region.h', - '../src/base/atomic-utils.h', - '../src/base/atomicops.h', - '../src/base/atomicops_internals_atomicword_compat.h', - '../src/base/atomicops_internals_portable.h', - '../src/base/atomicops_internals_std.h', - '../src/base/base-export.h', - '../src/base/bits.cc', - '../src/base/bits.h', - '../src/base/bounded-page-allocator.cc', - '../src/base/bounded-page-allocator.h', - '../src/base/build_config.h', - '../src/base/compiler-specific.h', - '../src/base/cpu.cc', - '../src/base/cpu.h', - '../src/base/debug/stack_trace.cc', - '../src/base/debug/stack_trace.h', - '../src/base/division-by-constant.cc', - '../src/base/division-by-constant.h', - '../src/base/enum-set.h', - '../src/base/export-template.h', - '../src/base/file-utils.cc', - '../src/base/file-utils.h', - '../src/base/flags.h', - '../src/base/format-macros.h', - '../src/base/free_deleter.h', - '../src/base/functional.cc', - '../src/base/functional.h', - '../src/base/hashmap-entry.h', - '../src/base/hashmap.h', - '../src/base/ieee754.cc', - '../src/base/ieee754.h', - '../src/base/iterator.h', - '../src/base/lazy-instance.h', - '../src/base/list.h', - '../src/base/logging.cc', - '../src/base/logging.h', - '../src/base/lsan-page-allocator.cc', - '../src/base/lsan-page-allocator.h', - '../src/base/macros.h', - '../src/base/once.cc', - '../src/base/once.h', - '../src/base/optional.h', - '../src/base/overflowing-math.h', - '../src/base/page-allocator.cc', - '../src/base/page-allocator.h', - '../src/base/platform/condition-variable.cc', - '../src/base/platform/condition-variable.h', - '../src/base/platform/elapsed-timer.h', - '../src/base/platform/mutex.cc', - '../src/base/platform/mutex.h', - '../src/base/platform/platform.h', - '../src/base/platform/semaphore.cc', - '../src/base/platform/semaphore.h', - '../src/base/platform/time.cc', - '../src/base/platform/time.h', - '../src/base/region-allocator.cc', - '../src/base/region-allocator.h', - '../src/base/ring-buffer.h', - '../src/base/safe_conversions.h', - '../src/base/safe_conversions_impl.h', - '../src/base/safe_math.h', - '../src/base/safe_math_impl.h', - '../src/base/small-vector.h', - '../src/base/sys-info.cc', - '../src/base/sys-info.h', - '../src/base/template-utils.h', - '../src/base/timezone-cache.h', - '../src/base/tsan.h', - '../src/base/utils/random-number-generator.cc', - '../src/base/utils/random-number-generator.h', - ], - 'target_conditions': [ - ['OS=="android" and _toolset=="target"', { - 'libraries': [ - '-llog', - ], - 'include_dirs': [ - 'src/common/android/include', - ], - }], - ], - 'conditions': [ - ['component=="shared_library"', { - 'defines': [ - 'BUILDING_V8_BASE_SHARED', - ], - 'direct_dependent_settings': { - 'defines': [ - 'USING_V8_BASE_SHARED', - ], - }, - }], - ['OS=="linux"', { - 'link_settings': { - 'libraries': [ - '-ldl', - '-lrt' - ], - }, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-linux.cc', - 
'../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix-time.cc', - '../src/base/platform/platform-posix-time.h', - ], - } - ], - ['OS=="aix"', { - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-aix.cc', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix.h', - ]}, - ], - ['OS=="android"', { - 'sources': [ - '../src/base/debug/stack_trace_android.cc', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix-time.cc', - '../src/base/platform/platform-posix-time.h', - ], - 'link_settings': { - 'target_conditions': [ - ['_toolset=="host" and host_os!="mac"', { - # Only include libdl and librt on host builds because they - # are included by default on Android target builds, and we - # don't want to re-include them here since this will change - # library order and break (see crbug.com/469973). - # These libraries do not exist on Mac hosted builds. - 'libraries': [ - '-ldl', - '-lrt' - ] - }] - ] - }, - 'conditions': [ - ['host_os=="mac"', { - 'target_conditions': [ - ['_toolset=="host"', { - 'sources': [ - '../src/base/platform/platform-macos.cc' - ] - }, { - 'sources': [ - '../src/base/platform/platform-linux.cc' - ] - }], - ], - }, { - 'sources': [ - '../src/base/platform/platform-linux.cc' - ] - }], - ], - }, - ], - ['OS=="fuchsia"', { - 'sources': [ - '../src/base/debug/stack_trace_fuchsia.cc', - '../src/base/platform/platform-fuchsia.cc', - ]}, - ], - ['OS=="mac"', { - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-macos.cc', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix-time.cc', - '../src/base/platform/platform-posix-time.h', - ]}, - ], - ['OS=="win"', { - 'defines': [ - '_CRT_RAND_S' # for rand_s() - ], - 'sources': [ - '../src/base/debug/stack_trace_win.cc', - '../src/base/platform/platform-win32.cc', - '../src/base/win32-headers.h', - ], - 'msvs_disabled_warnings': [4351, 4355, 4800], - 'link_settings': { - 'libraries': [ - '-ldbghelp.lib', - '-lshlwapi.lib', - '-lwinmm.lib', - '-lws2_32.lib' - ], - }, - }], - ['OS=="qnx"', { - 'link_settings': { - 'target_conditions': [ - ['_toolset=="host" and host_os=="linux"', { - 'libraries': [ - '-lrt' - ], - }], - ['_toolset=="target"', { - 'libraries': [ - '-lbacktrace' - ], - }], - ], - }, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix-time.h', - '../src/base/platform/platform-posix-time.cc', - '../src/base/qnx-math.h' - ], - 'target_conditions': [ - ['_toolset=="host" and host_os=="linux"', { - 'sources': [ - '../src/base/platform/platform-linux.cc' - ], - }], - ['_toolset=="host" and host_os=="mac"', { - 'sources': [ - '../src/base/platform/platform-macos.cc' - ], - }], - ['_toolset=="target"', { - 'sources': [ - '../src/base/platform/platform-qnx.cc' - ], - }], - ], - }, - ], - ['OS=="freebsd"', { - 'link_settings': { - 'libraries': [ - '-L/usr/local/lib -lexecinfo', - ]}, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-freebsd.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix-time.h', - '../src/base/platform/platform-posix-time.cc', - ], - } - ], - 
['OS=="openbsd"', { - 'link_settings': { - 'libraries': [ - '-L/usr/local/lib -lexecinfo', - ]}, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-openbsd.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix-time.h', - '../src/base/platform/platform-posix-time.cc', - ], - } - ], - ['OS=="netbsd"', { - 'link_settings': { - 'libraries': [ - '-L/usr/pkg/lib -Wl,-R/usr/pkg/lib -lexecinfo', - ]}, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-openbsd.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix.cc', - '../src/base/platform/platform-posix-time.h', - '../src/base/platform/platform-posix-time.cc', - ], - } - ], - ['OS=="solaris"', { - 'link_settings': { - 'libraries': [ - '-lnsl -lrt', - ]}, - 'sources': [ - '../src/base/debug/stack_trace_posix.cc', - '../src/base/platform/platform-solaris.cc', - '../src/base/platform/platform-posix.h', - '../src/base/platform/platform-posix.cc', - ], - } - ], - ], - }, # v8_libbase - { - 'target_name': 'v8_libplatform', - 'type': '<(component)', - 'variables': { - 'optimize': 'max', - }, - 'dependencies': [ - 'v8_libbase', - ], - 'include_dirs': [ - '..', - '<(DEPTH)', - '../include/', - ], - 'sources': [ - '../include/libplatform/libplatform-export.h', - '../include/libplatform/libplatform.h', - '../include/libplatform/v8-tracing.h', - '../src/libplatform/default-foreground-task-runner.cc', - '../src/libplatform/default-foreground-task-runner.h', - '../src/libplatform/default-platform.cc', - '../src/libplatform/default-platform.h', - '../src/libplatform/default-worker-threads-task-runner.cc', - '../src/libplatform/default-worker-threads-task-runner.h', - '../src/libplatform/task-queue.cc', - '../src/libplatform/task-queue.h', - '../src/libplatform/tracing/trace-buffer.cc', - '../src/libplatform/tracing/trace-buffer.h', - '../src/libplatform/tracing/trace-config.cc', - '../src/libplatform/tracing/trace-object.cc', - '../src/libplatform/tracing/trace-writer.cc', - '../src/libplatform/tracing/trace-writer.h', - '../src/libplatform/tracing/tracing-controller.cc', - '../src/libplatform/worker-thread.cc', - '../src/libplatform/worker-thread.h', - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ['component=="shared_library"', { - 'direct_dependent_settings': { - 'defines': [ 'USING_V8_PLATFORM_SHARED' ], - }, - 'defines': [ 'BUILDING_V8_PLATFORM_SHARED' ], - }] - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../include/', - ], - }, - }, # v8_libplatform - { - 'target_name': 'v8_libsampler', - 'type': 'static_library', - 'variables': { - 'optimize': 'max', - }, - 'dependencies': [ - 'v8_libbase', - ], - 'include_dirs': [ - '..', - '../include/', - ], - 'sources': [ - '../src/libsampler/sampler.cc', - '../src/libsampler/sampler.h' - ], - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host', 'target'], - }, { - 'toolsets': ['target'], - }], - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../include/', - ], - }, - }, # v8_libsampler - { - 'target_name': 'js2c_extras', - 'type': 'none', - 'conditions': [ - ['want_separate_host_toolset==1', { - 'toolsets': ['host'], - }, { - 'toolsets': ['target'], - }], - ], - 'actions': [ - { - 'action_name': 'js2c_extras', - 'inputs': [ - '../tools/js2c.py', - '<@(v8_extra_library_files)', - ], - 
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc'], - 'action': [ - 'python', - '../tools/js2c.py', '<@(_outputs)', - 'EXTRAS', '<@(v8_extra_library_files)', - ], - }, - ], - }, # js2c_extras - { - 'target_name': 'torque_base', - 'type': '<(component)', - 'toolsets': ['host'], - 'dependencies': ['v8_libbase#host'], - 'defines!': [ - '_HAS_EXCEPTIONS=0', - 'BUILDING_V8_SHARED=1', - ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeTypeInfo': 'true', - 'ExceptionHandling': 1, - }, - }, - 'sources': [ - '../src/torque/ast.h', - '../src/torque/cfg.cc', - '../src/torque/cfg.h', - '../src/torque/contextual.h', - '../src/torque/csa-generator.cc', - '../src/torque/csa-generator.h', - '../src/torque/declarable.cc', - '../src/torque/declarable.h', - '../src/torque/declaration-visitor.cc', - '../src/torque/declaration-visitor.h', - '../src/torque/declarations.cc', - '../src/torque/declarations.h', - '../src/torque/earley-parser.cc', - '../src/torque/earley-parser.h', - '../src/torque/file-visitor.cc', - '../src/torque/file-visitor.h', - '../src/torque/global-context.h', - '../src/torque/implementation-visitor.cc', - '../src/torque/implementation-visitor.h', - '../src/torque/instructions.cc', - '../src/torque/instructions.h', - '../src/torque/source-positions.cc', - '../src/torque/source-positions.h', - '../src/torque/torque-parser.cc', - '../src/torque/torque-parser.h', - '../src/torque/type-oracle.cc', - '../src/torque/type-oracle.h', - '../src/torque/types.cc', - '../src/torque/types.h', - '../src/torque/utils.cc', - '../src/torque/utils.h', - ], - }, # torque_base - { - 'target_name': 'torque', - 'type': 'executable', - 'toolsets': ['host'], - 'dependencies': ['torque_base'], - 'defines!': [ - '_HAS_EXCEPTIONS=0', - 'BUILDING_V8_SHARED=1', - ], - # This is defined trough `configurations` for GYP+ninja compatibility - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeTypeInfo': 'true', - 'ExceptionHandling': 1, - }, - }, - 'include_dirs': ['..'], - 'sources': [ - "../src/torque/torque.cc", - ], - }, # torque - { - 'target_name': 'postmortem-metadata', - 'type': 'none', - 'variables': { - 'heapobject_files': [ - '../src/objects.h', - '../src/objects-inl.h', - '../src/objects/allocation-site-inl.h', - '../src/objects/allocation-site.h', - '../src/objects/cell-inl.h', - '../src/objects/cell.h', - '../src/objects/code-inl.h', - '../src/objects/code.h', - '../src/objects/data-handler.h', - '../src/objects/data-handler-inl.h', - '../src/objects/feedback-cell.h', - '../src/objects/feedback-cell-inl.h', - '../src/objects/fixed-array-inl.h', - '../src/objects/fixed-array.h', - '../src/objects/heap-number-inl.h', - '../src/objects/heap-number.h', - '../src/objects/heap-object-inl.h', - '../src/objects/heap-object.h', - '../src/objects/instance-type.h', - '../src/objects/js-array-inl.h', - '../src/objects/js-array.h', - '../src/objects/js-array-buffer-inl.h', - '../src/objects/js-array-buffer.h', - '../src/objects/js-objects-inl.h', - '../src/objects/js-objects.h', - '../src/objects/js-promise-inl.h', - '../src/objects/js-promise.h', - '../src/objects/js-regexp-inl.h', - '../src/objects/js-regexp.h', - '../src/objects/js-regexp-string-iterator-inl.h', - '../src/objects/js-regexp-string-iterator.h', - '../src/objects/map.h', - '../src/objects/map-inl.h', - '../src/objects/name.h', - '../src/objects/name-inl.h', - '../src/objects/oddball-inl.h', - '../src/objects/oddball.h', - '../src/objects/scope-info.h', - '../src/objects/script.h', - '../src/objects/script-inl.h', - 
'../src/objects/shared-function-info.h', - '../src/objects/shared-function-info-inl.h', - '../src/objects/string.h', - '../src/objects/string-inl.h', - '../src/objects/struct.h', - '../src/objects/struct-inl.h', - ], - }, - 'actions': [ - { - 'action_name': 'gen-postmortem-metadata', - 'inputs': [ - '../tools/gen-postmortem-metadata.py', - '<@(heapobject_files)', - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc', - ], - 'action': [ - 'python', - '../tools/gen-postmortem-metadata.py', - '<@(_outputs)', - '<@(heapobject_files)' - ], - }, - ], - }, # postmortem-metadata - { - 'target_name': 'mksnapshot', - 'type': 'executable', - 'dependencies': [ - 'v8_base', - 'v8_init', - 'v8_libbase', - 'v8_libplatform', - 'v8_nosnapshot', - ], - 'include_dirs': [ - '..', - '<(DEPTH)', - ], - 'sources': [ - '../src/snapshot/embedded-file-writer.cc', - '../src/snapshot/embedded-file-writer.h', - '../src/snapshot/mksnapshot.cc', - ], - 'conditions': [ - ['v8_enable_i18n_support==1', { - 'dependencies': [ - '<(icu_gyp_path):icui18n', - '<(icu_gyp_path):icuuc', - ] - }], - ['want_separate_host_toolset==1', { - 'toolsets': ['host'], - }, { - 'toolsets': ['target'], - }], - ], - }, # mksnapshot - { - 'target_name': 'bytecode_builtins_list_generator', - 'type': 'executable', - 'toolsets': ['host'], - 'dependencies': [ - "v8_libbase#host" - ], - 'include_dirs': [".."], - 'sources': [ - "../src/builtins/generate-bytecodes-builtins-list.cc", - "../src/interpreter/bytecode-operands.cc", - "../src/interpreter/bytecode-operands.h", - "../src/interpreter/bytecodes.cc", - "../src/interpreter/bytecodes.h", - ], - }, # bytecode_builtins_list_generator - { - 'target_name': 'generate_bytecode_builtins_list', - 'type': 'none', - 'toolsets': ['host'], - 'dependencies': [ - "bytecode_builtins_list_generator", - ], - 'actions': [ - { - 'action_name': 'generate_bytecode_builtins_list_action', - 'inputs': [ - '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)bytecode_builtins_list_generator<(EXECUTABLE_SUFFIX)', - ], - 'outputs': [ - '<(generate_bytecode_builtins_list_output)', - ], - 'action': [ - 'python', - '../tools/run.py', - '<@(_inputs)', - '<@(_outputs)', - ], - }, - ], - }, # generate_bytecode_builtins_list - ], -} diff --git a/deps/v8/include/libplatform/libplatform.h b/deps/v8/include/libplatform/libplatform.h index 13c0db9a85487a..6908aeaa88a1b0 100644 --- a/deps/v8/include/libplatform/libplatform.h +++ b/deps/v8/include/libplatform/libplatform.h @@ -41,15 +41,6 @@ V8_PLATFORM_EXPORT std::unique_ptr NewDefaultPlatform( InProcessStackDumping::kDisabled, std::unique_ptr tracing_controller = {}); -V8_PLATFORM_EXPORT V8_DEPRECATED( - "Use NewDefaultPlatform instead", - v8::Platform* CreateDefaultPlatform( - int thread_pool_size = 0, - IdleTaskSupport idle_task_support = IdleTaskSupport::kDisabled, - InProcessStackDumping in_process_stack_dumping = - InProcessStackDumping::kDisabled, - v8::TracingController* tracing_controller = nullptr)); - /** * Pumps the message loop for the given isolate. 
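The libplatform.h hunk above removes the long-deprecated CreateDefaultPlatform() shim, leaving NewDefaultPlatform() as the only way to obtain the default platform. A minimal embedder setup against the surviving API might look like the sketch below; the Initialize/Dispose boilerplate around the call is standard V8 embedding code and is not introduced by this diff.

    #include <memory>
    #include <libplatform/libplatform.h>
    #include <v8.h>

    int main() {
      // NewDefaultPlatform() returns an owning std::unique_ptr instead of the
      // raw pointer the removed CreateDefaultPlatform() used to hand out.
      std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
      v8::V8::InitializePlatform(platform.get());
      v8::V8::Initialize();

      // ... create an Isolate and run scripts here ...

      v8::V8::Dispose();
      v8::V8::ShutdownPlatform();
      return 0;
    }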
* diff --git a/deps/v8/include/libplatform/v8-tracing.h b/deps/v8/include/libplatform/v8-tracing.h index 250d5fbdb9f07f..bc249cb9ecc378 100644 --- a/deps/v8/include/libplatform/v8-tracing.h +++ b/deps/v8/include/libplatform/v8-tracing.h @@ -5,6 +5,7 @@ #ifndef V8_LIBPLATFORM_V8_TRACING_H_ #define V8_LIBPLATFORM_V8_TRACING_H_ +#include #include #include #include @@ -221,12 +222,10 @@ class V8_PLATFORM_EXPORT TraceConfig { class V8_PLATFORM_EXPORT TracingController : public V8_PLATFORM_NON_EXPORTED_BASE(v8::TracingController) { public: - enum Mode { DISABLED = 0, RECORDING_MODE }; - - // The pointer returned from GetCategoryGroupEnabledInternal() points to a - // value with zero or more of the following bits. Used in this class only. - // The TRACE_EVENT macros should only use the value as a bool. - // These values must be in sync with macro values in TraceEvent.h in Blink. + // The pointer returned from GetCategoryGroupEnabled() points to a value with + // zero or more of the following bits. Used in this class only. The + // TRACE_EVENT macros should only use the value as a bool. These values must + // be in sync with macro values in TraceEvent.h in Blink. enum CategoryGroupEnabledFlags { // Category group enabled for the recording mode. ENABLED_FOR_RECORDING = 1 << 0, @@ -273,7 +272,6 @@ class V8_PLATFORM_EXPORT TracingController virtual int64_t CurrentCpuTimestampMicroseconds(); private: - const uint8_t* GetCategoryGroupEnabledInternal(const char* category_group); void UpdateCategoryGroupEnabledFlag(size_t category_index); void UpdateCategoryGroupEnabledFlags(); @@ -281,7 +279,7 @@ class V8_PLATFORM_EXPORT TracingController std::unique_ptr trace_config_; std::unique_ptr mutex_; std::unordered_set observers_; - Mode mode_ = DISABLED; + std::atomic_bool recording_{false}; // Disallow copy and assign TracingController(const TracingController&) = delete; diff --git a/deps/v8/include/v8-internal.h b/deps/v8/include/v8-internal.h index 7f9c27ebb96d96..5cc62f3e726f7c 100644 --- a/deps/v8/include/v8-internal.h +++ b/deps/v8/include/v8-internal.h @@ -29,9 +29,8 @@ static const Address kNullAddress = 0; * Configuration of tagging scheme. */ const int kApiSystemPointerSize = sizeof(void*); -const int kApiTaggedSize = kApiSystemPointerSize; const int kApiDoubleSize = sizeof(double); -const int kApiIntSize = sizeof(int); +const int kApiInt32Size = sizeof(int32_t); const int kApiInt64Size = sizeof(int64_t); // Tag information for HeapObject. @@ -88,16 +87,19 @@ struct SmiTagging<8> { } }; -#if defined(V8_COMPRESS_POINTERS) +#ifdef V8_COMPRESS_POINTERS static_assert( kApiSystemPointerSize == kApiInt64Size, "Pointer compression can be enabled only for 64-bit architectures"); +const int kApiTaggedSize = kApiInt32Size; +#else +const int kApiTaggedSize = kApiSystemPointerSize; #endif -#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH) -typedef SmiTagging PlatformSmiTagging; +#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH +typedef SmiTagging PlatformSmiTagging; #else -typedef SmiTagging PlatformSmiTagging; +typedef SmiTagging PlatformSmiTagging; #endif const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize; @@ -122,22 +124,16 @@ class Internals { // These values match non-compiler-dependent values defined within // the implementation of v8. 
static const int kHeapObjectMapOffset = 0; - static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiIntSize; - static const int kStringResourceOffset = 1 * kApiTaggedSize + 2 * kApiIntSize; + static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size; + static const int kStringResourceOffset = + 1 * kApiTaggedSize + 2 * kApiInt32Size; static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize; static const int kForeignAddressOffset = kApiTaggedSize; static const int kJSObjectHeaderSize = 3 * kApiTaggedSize; - static const int kJSObjectHeaderSizeForEmbedderFields = - (kJSObjectHeaderSize + kApiSystemPointerSize - 1) & - -kApiSystemPointerSize; static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize; static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize; - static const int kEmbedderDataSlotSize = -#ifdef V8_COMPRESS_POINTERS - 2 * -#endif - kApiSystemPointerSize; + static const int kEmbedderDataSlotSize = kApiSystemPointerSize; static const int kNativeContextEmbedderDataOffset = 7 * kApiTaggedSize; static const int kFullStringRepresentationMask = 0x0f; static const int kStringEncodingMask = 0x8; @@ -148,7 +144,7 @@ class Internals { static const int kIsolateEmbedderDataOffset = 0; static const int kExternalMemoryOffset = - kNumIsolateDataSlots * kApiTaggedSize; + kNumIsolateDataSlots * kApiSystemPointerSize; static const int kExternalMemoryLimitOffset = kExternalMemoryOffset + kApiInt64Size; static const int kExternalMemoryAtLastMarkCompactOffset = @@ -163,8 +159,8 @@ class Internals { static const int kFalseValueRootIndex = 8; static const int kEmptyStringRootIndex = 9; - static const int kNodeClassIdOffset = 1 * kApiTaggedSize; - static const int kNodeFlagsOffset = 1 * kApiTaggedSize + 3; + static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize; + static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3; static const int kNodeStateMask = 0x7; static const int kNodeStateIsWeakValue = 2; static const int kNodeStateIsPendingValue = 3; @@ -172,9 +168,9 @@ class Internals { static const int kNodeIsIndependentShift = 3; static const int kNodeIsActiveShift = 4; - static const int kFirstNonstringType = 0x80; - static const int kOddballType = 0x83; - static const int kForeignType = 0x87; + static const int kFirstNonstringType = 0x40; + static const int kOddballType = 0x43; + static const int kForeignType = 0x47; static const int kJSSpecialApiObjectType = 0x410; static const int kJSApiObjectType = 0x420; static const int kJSObjectType = 0x421; @@ -182,6 +178,12 @@ class Internals { static const int kUndefinedOddballKind = 5; static const int kNullOddballKind = 3; + // Constants used by PropertyCallbackInfo to check if we should throw when an + // error occurs. + static const int kThrowOnError = 0; + static const int kDontThrow = 1; + static const int kInferShouldThrowMode = 2; + // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an // incremental GC once the external memory reaches this limit. 
static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024; @@ -297,22 +299,8 @@ class Internals { #endif } - V8_INLINE static internal::Address ReadTaggedAnyField( - internal::Address heap_object_ptr, int offset) { -#ifdef V8_COMPRESS_POINTERS - int32_t value = ReadRawField(heap_object_ptr, offset); - internal::Address root_mask = static_cast( - -static_cast(value & kSmiTagMask)); - internal::Address root_or_zero = - root_mask & GetRootFromOnHeapAddress(heap_object_ptr); - return root_or_zero + - static_cast(static_cast(value)); -#else - return ReadRawField(heap_object_ptr, offset); -#endif - } - #ifdef V8_COMPRESS_POINTERS + // See v8:7703 or src/ptr-compr.* for details about pointer compression. static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32; static constexpr size_t kPtrComprIsolateRootBias = kPtrComprHeapReservationSize / 2; @@ -324,18 +312,14 @@ class Internals { -static_cast(kPtrComprIsolateRootAlignment); } -#else - - template - V8_INLINE static T ReadEmbedderData(const v8::Context* context, int index) { - typedef internal::Address A; - typedef internal::Internals I; - A ctx = *reinterpret_cast(context); - A embedder_data = - I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); - int value_offset = - I::kEmbedderDataArrayHeaderSize + (I::kEmbedderDataSlotSize * index); - return I::ReadRawField(embedder_data, value_offset); + V8_INLINE static internal::Address DecompressTaggedAnyField( + internal::Address heap_object_ptr, int32_t value) { + internal::Address root_mask = static_cast( + -static_cast(value & kSmiTagMask)); + internal::Address root_or_zero = + root_mask & GetRootFromOnHeapAddress(heap_object_ptr); + return root_or_zero + + static_cast(static_cast(value)); } #endif // V8_COMPRESS_POINTERS }; @@ -367,6 +351,11 @@ V8_INLINE void PerformCastCheck(T* data) { // that's guaranteed to never be in ReadOnlySpace. V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj); +// Returns if we need to throw when an error occurs. This infers the language +// mode based on the current context and the closure. This returns true if the +// language mode is strict. +V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate); + } // namespace internal } // namespace v8 diff --git a/deps/v8/include/v8-platform.h b/deps/v8/include/v8-platform.h index fc008979f69210..556407d8761f1b 100644 --- a/deps/v8/include/v8-platform.h +++ b/deps/v8/include/v8-platform.h @@ -70,6 +70,17 @@ class TaskRunner { virtual void PostDelayedTask(std::unique_ptr task, double delay_in_seconds) = 0; + /** + * Schedules a task to be invoked by this TaskRunner. The task is scheduled + * after the given number of seconds |delay_in_seconds|. The TaskRunner + * implementation takes ownership of |task|. The |task| cannot be nested + * within other task executions. + * + * Requires that |TaskRunner::NonNestableDelayedTasksEnabled()| is true. + */ + virtual void PostNonNestableDelayedTask(std::unique_ptr task, + double delay_in_seconds) {} + /** * Schedules an idle task to be invoked by this TaskRunner. The task is * scheduled when the embedder is idle. Requires that @@ -90,6 +101,11 @@ class TaskRunner { */ virtual bool NonNestableTasksEnabled() const { return false; } + /** + * Returns true if non-nestable delayed tasks are enabled for this TaskRunner. + */ + virtual bool NonNestableDelayedTasksEnabled() const { return false; } + TaskRunner() = default; virtual ~TaskRunner() = default; @@ -430,7 +446,7 @@ class Platform { * since epoch. 
Useful for implementing |CurrentClockTimeMillis| if * nothing special needed. */ - static double SystemClockTimeMillis(); + V8_EXPORT static double SystemClockTimeMillis(); }; } // namespace v8 diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index 94d3fcfcf63419..ada2dbbe5c1c04 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -746,33 +746,6 @@ class V8_EXPORT HeapProfiler { kSamplingForceGC = 1 << 0, }; - typedef std::unordered_set*> - RetainerChildren; - typedef std::vector> - RetainerGroups; - typedef std::vector*, - const v8::PersistentBase*>> - RetainerEdges; - - struct RetainerInfos { - RetainerGroups groups; - RetainerEdges edges; - }; - - /** - * Callback function invoked to retrieve all RetainerInfos from the embedder. - */ - typedef RetainerInfos (*GetRetainerInfosCallback)(v8::Isolate* isolate); - - /** - * Callback function invoked for obtaining RetainedObjectInfo for - * the given JavaScript wrapper object. It is prohibited to enter V8 - * while the callback is running: only getters on the handle and - * GetPointerFromInternalField on the objects are allowed. - */ - typedef RetainedObjectInfo* (*WrapperInfoCallback)(uint16_t class_id, - Local wrapper); - /** * Callback function invoked during heap snapshot generation to retrieve * the embedder object graph. The callback should use graph->AddEdge(..) to @@ -783,10 +756,6 @@ class V8_EXPORT HeapProfiler { v8::EmbedderGraph* graph, void* data); - /** TODO(addaleax): Remove */ - typedef void (*LegacyBuildEmbedderGraphCallback)(v8::Isolate* isolate, - v8::EmbedderGraph* graph); - /** Returns the number of snapshots taken. */ int GetSnapshotCount(); @@ -925,20 +894,6 @@ class V8_EXPORT HeapProfiler { */ void DeleteAllHeapSnapshots(); - /** Binds a callback to embedder's class ID. */ - V8_DEPRECATED( - "Use AddBuildEmbedderGraphCallback to provide info about embedder nodes", - void SetWrapperClassInfoProvider(uint16_t class_id, - WrapperInfoCallback callback)); - - V8_DEPRECATED( - "Use AddBuildEmbedderGraphCallback to provide info about embedder nodes", - void SetGetRetainerInfosCallback(GetRetainerInfosCallback callback)); - - V8_DEPRECATED( - "Use AddBuildEmbedderGraphCallback to provide info about embedder nodes", - void SetBuildEmbedderGraphCallback( - LegacyBuildEmbedderGraphCallback callback)); void AddBuildEmbedderGraphCallback(BuildEmbedderGraphCallback callback, void* data); void RemoveBuildEmbedderGraphCallback(BuildEmbedderGraphCallback callback, @@ -958,80 +913,6 @@ class V8_EXPORT HeapProfiler { HeapProfiler& operator=(const HeapProfiler&); }; -/** - * Interface for providing information about embedder's objects - * held by global handles. This information is reported in two ways: - * - * 1. When calling AddObjectGroup, an embedder may pass - * RetainedObjectInfo instance describing the group. To collect - * this information while taking a heap snapshot, V8 calls GC - * prologue and epilogue callbacks. - * - * 2. When a heap snapshot is collected, V8 additionally - * requests RetainedObjectInfos for persistent handles that - * were not previously reported via AddObjectGroup. - * - * Thus, if an embedder wants to provide information about native - * objects for heap snapshots, it can do it in a GC prologue - * handler, and / or by assigning wrapper class ids in the following way: - * - * 1. Bind a callback to class id by calling SetWrapperClassInfoProvider. - * 2. Call SetWrapperClassId on certain persistent handles. 
- * - * V8 takes ownership of RetainedObjectInfo instances passed to it and - * keeps them alive only during snapshot collection. Afterwards, they - * are freed by calling the Dispose class function. - */ -class V8_EXPORT RetainedObjectInfo { // NOLINT - public: - /** Called by V8 when it no longer needs an instance. */ - virtual void Dispose() = 0; - - /** Returns whether two instances are equivalent. */ - virtual bool IsEquivalent(RetainedObjectInfo* other) = 0; - - /** - * Returns hash value for the instance. Equivalent instances - * must have the same hash value. - */ - virtual intptr_t GetHash() = 0; - - /** - * Returns human-readable label. It must be a null-terminated UTF-8 - * encoded string. V8 copies its contents during a call to GetLabel. - */ - virtual const char* GetLabel() = 0; - - /** - * Returns human-readable group label. It must be a null-terminated UTF-8 - * encoded string. V8 copies its contents during a call to GetGroupLabel. - * Heap snapshot generator will collect all the group names, create - * top level entries with these names and attach the objects to the - * corresponding top level group objects. There is a default - * implementation which is required because embedders don't have their - * own implementation yet. - */ - virtual const char* GetGroupLabel() { return GetLabel(); } - - /** - * Returns element count in case if a global handle retains - * a subgraph by holding one of its nodes. - */ - virtual intptr_t GetElementCount() { return -1; } - - /** Returns embedder's object size in bytes. */ - virtual intptr_t GetSizeInBytes() { return -1; } - - protected: - RetainedObjectInfo() = default; - virtual ~RetainedObjectInfo() = default; - - private: - RetainedObjectInfo(const RetainedObjectInfo&); - RetainedObjectInfo& operator=(const RetainedObjectInfo&); -}; - - /** * A struct for exporting HeapStats data from V8, using "push" model. * See HeapProfiler::GetHeapStats. diff --git a/deps/v8/include/v8-util.h b/deps/v8/include/v8-util.h index 7f12ead16b6b84..466b99fd6b717e 100644 --- a/deps/v8/include/v8-util.h +++ b/deps/v8/include/v8-util.h @@ -198,12 +198,9 @@ class PersistentValueMapBase { * Call V8::RegisterExternallyReferencedObject with the map value for given * key. */ - void RegisterExternallyReferencedObject(K& key) { - assert(Contains(key)); - V8::RegisterExternallyReferencedObject( - reinterpret_cast(FromVal(Traits::Get(&impl_, key))), - reinterpret_cast(GetIsolate())); - } + V8_DEPRECATE_SOON( + "Used TracedGlobal and EmbedderHeapTracer::RegisterEmbedderReference", + inline void RegisterExternallyReferencedObject(K& key)); /** * Return value for key and remove it from the map. @@ -355,6 +352,15 @@ class PersistentValueMapBase { const char* label_; }; +template +inline void +PersistentValueMapBase::RegisterExternallyReferencedObject( + K& key) { + assert(Contains(key)); + V8::RegisterExternallyReferencedObject( + reinterpret_cast(FromVal(Traits::Get(&impl_, key))), + reinterpret_cast(GetIsolate())); +} template class PersistentValueMap : public PersistentValueMapBase { diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index abf640228fffb5..402da028c35054 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -9,9 +9,9 @@ // NOTE these macros are used by some of the tool scripts and the build // system so their names cannot be changed without changing the scripts. 
#define V8_MAJOR_VERSION 7 -#define V8_MINOR_VERSION 3 -#define V8_BUILD_NUMBER 492 -#define V8_PATCH_LEVEL 25 +#define V8_MINOR_VERSION 4 +#define V8_BUILD_NUMBER 288 +#define V8_PATCH_LEVEL 13 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index c5d9fc3a97525a..5635bd21b10693 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -54,6 +54,7 @@ class Integer; class Isolate; template class Maybe; +class MicrotaskQueue; class Name; class Number; class NumberObject; @@ -92,6 +93,8 @@ template > class Persistent; template class Global; +template +class TracedGlobal; template class PersistentValueMap; template class PersistentValueMapBase; @@ -117,6 +120,7 @@ class Heap; class HeapObject; class Isolate; class LocalEmbedderHeapTracer; +class MicrotaskQueue; class NeverReadOnlySpaceObject; struct ScriptStreamingData; template class CustomArguments; @@ -275,6 +279,7 @@ class Local { V8_INLINE static Local New(Isolate* isolate, Local that); V8_INLINE static Local New(Isolate* isolate, const PersistentBase& that); + V8_INLINE static Local New(Isolate* isolate, const TracedGlobal& that); private: friend class Utils; @@ -303,6 +308,8 @@ class Local { template friend class PersistentValueVector; template friend class ReturnValue; + template + friend class TracedGlobal; explicit V8_INLINE Local(T* that) : val_(that) {} V8_INLINE static Local New(Isolate* isolate, T* that); @@ -542,7 +549,9 @@ template class PersistentBase { * is alive. Only allowed when the embedder is asked to trace its heap by * EmbedderHeapTracer. */ - V8_INLINE void RegisterExternalReference(Isolate* isolate) const; + V8_DEPRECATE_SOON( + "Used TracedGlobal and EmbedderHeapTracer::RegisterEmbedderReference", + V8_INLINE void RegisterExternalReference(Isolate* isolate) const); /** * Marks the reference to this object independent. Garbage collector is free @@ -550,9 +559,10 @@ template class PersistentBase { * independent handle should not assume that it will be preceded by a global * GC prologue callback or followed by a global GC epilogue callback. */ - V8_DEPRECATE_SOON( - "Objects are always considered independent. " - "Use MarkActive to avoid collecting otherwise dead weak handles.", + V8_DEPRECATED( + "Weak objects are always considered independent. " + "Use TracedGlobal when trying to use EmbedderHeapTracer. " + "Use a strong handle when trying to keep an object alive.", V8_INLINE void MarkIndependent()); /** @@ -562,22 +572,19 @@ template class PersistentBase { * * This bit is cleared after the each garbage collection pass. */ - V8_INLINE void MarkActive(); + V8_DEPRECATE_SOON("Use TracedGlobal.", V8_INLINE void MarkActive()); - V8_DEPRECATE_SOON("See MarkIndependent.", - V8_INLINE bool IsIndependent() const); + V8_DEPRECATED("See MarkIndependent.", V8_INLINE bool IsIndependent() const); /** Checks if the handle holds the only reference to an object. */ - V8_DEPRECATE_SOON( - "Garbage collection internal state should not be relied on.", - V8_INLINE bool IsNearDeath() const); + V8_DEPRECATED("Garbage collection internal state should not be relied on.", + V8_INLINE bool IsNearDeath() const); /** Returns true if the handle's reference is weak. */ V8_INLINE bool IsWeak() const; /** - * Assigns a wrapper class ID to the handle. See RetainedObjectInfo interface - * description in v8-profiler.h for details. + * Assigns a wrapper class ID to the handle. 
*/ V8_INLINE void SetWrapperClassId(uint16_t class_id); @@ -758,6 +765,7 @@ class Global : public PersistentBase { * A Global with no storage cell. */ V8_INLINE Global() : PersistentBase(nullptr) {} + /** * Construct a Global from a Local. * When the Local is non-empty, a new storage cell is created @@ -768,6 +776,7 @@ class Global : public PersistentBase { : PersistentBase(PersistentBase::New(isolate, *that)) { TYPE_CHECK(T, S); } + /** * Construct a Global from a PersistentBase. * When the Persistent is non-empty, a new storage cell is created @@ -778,26 +787,20 @@ class Global : public PersistentBase { : PersistentBase(PersistentBase::New(isolate, that.val_)) { TYPE_CHECK(T, S); } + /** * Move constructor. */ - V8_INLINE Global(Global&& other) : PersistentBase(other.val_) { - other.val_ = nullptr; - } + V8_INLINE Global(Global&& other); + V8_INLINE ~Global() { this->Reset(); } + /** * Move via assignment. */ template - V8_INLINE Global& operator=(Global&& rhs) { // NOLINT - TYPE_CHECK(T, S); - if (this != &rhs) { - this->Reset(); - this->val_ = rhs.val_; - rhs.val_ = nullptr; - } - return *this; - } + V8_INLINE Global& operator=(Global&& rhs); + /** * Pass allows returning uniques from functions, etc. */ @@ -822,6 +825,151 @@ class Global : public PersistentBase { template using UniquePersistent = Global; +/** + * A traced handle with move semantics, similar to std::unique_ptr. The handle + * is to be used together with |v8::EmbedderHeapTracer| and specifies edges from + * the embedder into V8's heap. + * + * The exact semantics are: + * - Tracing garbage collections use |v8::EmbedderHeapTracer|. + * - Non-tracing garbage collections refer to + * |v8::EmbedderHeapTracer::IsRootForNonTracingGC()| whether the handle should + * be treated as root or not. + */ +template +class V8_EXPORT TracedGlobal { + public: + /** + * An empty TracedGlobal without storage cell. + */ + TracedGlobal() = default; + ~TracedGlobal() { Reset(); } + + /** + * Construct a TracedGlobal from a Local. + * + * When the Local is non-empty, a new storage cell is created + * pointing to the same object. + */ + template + TracedGlobal(Isolate* isolate, Local that) + : val_(New(isolate, *that, &val_)) { + TYPE_CHECK(T, S); + } + + /** + * Move constructor initializing TracedGlobal from an existing one. + */ + V8_INLINE TracedGlobal(TracedGlobal&& other); + + /** + * Move assignment operator initializing TracedGlobal from an existing one. + */ + template + V8_INLINE TracedGlobal& operator=(TracedGlobal&& rhs); + + /** + * TracedGlobal only supports move semantics and forbids copying. + */ + TracedGlobal(const TracedGlobal&) = delete; + void operator=(const TracedGlobal&) = delete; + + /** + * Returns true if this TracedGlobal is empty, i.e., has not been assigned an + * object. + */ + bool IsEmpty() const { return val_ == nullptr; } + + /** + * If non-empty, destroy the underlying storage cell. |IsEmpty| will return + * true after this call. + */ + V8_INLINE void Reset(); + + /** + * If non-empty, destroy the underlying storage cell and create a new one with + * the contents of other if other is non empty + */ + template + V8_INLINE void Reset(Isolate* isolate, const Local& other); + + /** + * Construct a Local from this handle. 
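// Illustrative sketch, not part of this patch: holding a JS object from the
// embedder with the new TracedGlobal handle. Assumes a live Isolate and a
// current HandleScope at the call sites; MyWrapper is an assumed embedder type.
struct MyWrapper {
  v8::TracedGlobal<v8::Object> js_object;  // traced edge into V8's heap
};

void AttachWrapper(v8::Isolate* isolate, v8::Local<v8::Object> obj,
                   MyWrapper* wrapper) {
  // Creates (or replaces) the underlying storage cell; the handle is reported
  // to the EmbedderHeapTracer during tracing garbage collections.
  wrapper->js_object.Reset(isolate, obj);
  wrapper->js_object.SetWrapperClassId(1);
}

v8::Local<v8::Object> Unwrap(v8::Isolate* isolate, const MyWrapper& wrapper) {
  // Converts back to a Local; requires a HandleScope on the stack.
  return wrapper.js_object.Get(isolate);
}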
+ */ + Local Get(Isolate* isolate) const { return Local::New(isolate, *this); } + + template + V8_INLINE TracedGlobal& As() const { + return reinterpret_cast&>( + const_cast&>(*this)); + } + + template + V8_INLINE bool operator==(const TracedGlobal& that) const { + internal::Address* a = reinterpret_cast(this->val_); + internal::Address* b = reinterpret_cast(that.val_); + if (a == nullptr) return b == nullptr; + if (b == nullptr) return false; + return *a == *b; + } + + template + V8_INLINE bool operator==(const Local& that) const { + internal::Address* a = reinterpret_cast(this->val_); + internal::Address* b = reinterpret_cast(that.val_); + if (a == nullptr) return b == nullptr; + if (b == nullptr) return false; + return *a == *b; + } + + template + V8_INLINE bool operator!=(const TracedGlobal& that) const { + return !operator==(that); + } + + template + V8_INLINE bool operator!=(const Local& that) const { + return !operator==(that); + } + + /** + * Assigns a wrapper class ID to the handle. + */ + V8_INLINE void SetWrapperClassId(uint16_t class_id); + + /** + * Returns the class ID previously assigned to this handle or 0 if no class ID + * was previously assigned. + */ + V8_INLINE uint16_t WrapperClassId() const; + + /** + * Adds a finalization callback to the handle. The type of this callback is + * similar to WeakCallbackType::kInternalFields, i.e., it will pass the + * parameter and the first two internal fields of the object. + * + * The callback is then supposed to reset the handle in the callback. No + * further V8 API may be called in this callback. In case additional work + * involving V8 needs to be done, a second callback can be scheduled using + * WeakCallbackInfo::SetSecondPassCallback. + */ + V8_INLINE void SetFinalizationCallback( + void* parameter, WeakCallbackInfo::Callback callback); + + private: + V8_INLINE static T* New(Isolate* isolate, T* that, T** slot); + + T* operator*() const { return this->val_; } + + T* val_ = nullptr; + + friend class EmbedderHeapTracer; + template + friend class Local; + friend class Object; + template + friend class ReturnValue; +}; /** * A stack-allocated class that governs a number of local handles. @@ -1387,7 +1535,12 @@ class V8_EXPORT ScriptCompiler { public: enum Encoding { ONE_BYTE, TWO_BYTE, UTF8 }; - StreamedSource(ExternalSourceStream* source_stream, Encoding encoding); + V8_DEPRECATE_SOON( + "This class takes ownership of source_stream, so use the constructor " + "taking a unique_ptr to make these semantics clearer", + StreamedSource(ExternalSourceStream* source_stream, Encoding encoding)); + StreamedSource(std::unique_ptr source_stream, + Encoding encoding); ~StreamedSource(); internal::ScriptStreamingData* impl() const { return impl_.get(); } @@ -1861,10 +2014,6 @@ class V8_EXPORT JSON { /** * Value serialization compatible with the HTML structured clone algorithm. * The format is backward-compatible (i.e. safe to store to disk). - * - * WARNING: This API is under development, and changes (including incompatible - * changes to the API or wire format) may occur without notice until this - * warning is removed. */ class V8_EXPORT ValueSerializer { public: @@ -1985,10 +2134,6 @@ class V8_EXPORT ValueSerializer { /** * Deserializes values from data written with ValueSerializer, or a compatible * implementation. - * - * WARNING: This API is under development, and changes (including incompatible - * changes to the API or wire format) may occur without notice until this - * warning is removed. 
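// Illustrative sketch, not part of this patch: constructing a StreamedSource
// through the new std::unique_ptr overload so stream ownership is explicit.
// NullStream and MakeStreamedSource are assumed names; assumes <memory> is
// included.
class NullStream final : public v8::ScriptCompiler::ExternalSourceStream {
 public:
  // Immediately signals end of stream; a real embedder would hand out chunks.
  size_t GetMoreData(const uint8_t** src) override {
    *src = nullptr;
    return 0;
  }
};

std::unique_ptr<v8::ScriptCompiler::StreamedSource> MakeStreamedSource() {
  return std::make_unique<v8::ScriptCompiler::StreamedSource>(
      std::make_unique<NullStream>(), v8::ScriptCompiler::StreamedSource::UTF8);
}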
*/ class V8_EXPORT ValueDeserializer { public: @@ -2416,9 +2561,9 @@ class V8_EXPORT Value : public Data { bool BooleanValue(Isolate* isolate) const; - V8_DEPRECATE_SOON("BooleanValue can never throw. Use Isolate version.", - V8_WARN_UNUSED_RESULT Maybe BooleanValue( - Local context) const); + V8_DEPRECATED("BooleanValue can never throw. Use Isolate version.", + V8_WARN_UNUSED_RESULT Maybe BooleanValue( + Local context) const); V8_WARN_UNUSED_RESULT Maybe NumberValue(Local context) const; V8_WARN_UNUSED_RESULT Maybe IntegerValue( Local context) const; @@ -2615,10 +2760,6 @@ class V8_EXPORT String : public Name { public: virtual ~ExternalStringResourceBase() = default; - V8_DEPRECATED("Use IsCacheable().", virtual bool IsCompressible() const) { - return false; - } - /** * If a string is cacheable, the value returned by * ExternalStringResource::data() may be cached, otherwise it is not @@ -3381,7 +3522,6 @@ class V8_EXPORT Object : public Value { * array returned by this method contains the same values as would * be enumerated by a for-in statement over this object. */ - V8_DEPRECATED("Use maybe version", Local GetPropertyNames()); V8_WARN_UNUSED_RESULT MaybeLocal GetPropertyNames( Local context); V8_WARN_UNUSED_RESULT MaybeLocal GetPropertyNames( @@ -3394,7 +3534,6 @@ class V8_EXPORT Object : public Value { * the returned array doesn't contain the names of properties from * prototype objects. */ - V8_DEPRECATED("Use maybe version", Local GetOwnPropertyNames()); V8_WARN_UNUSED_RESULT MaybeLocal GetOwnPropertyNames( Local context); @@ -3450,12 +3589,17 @@ class V8_EXPORT Object : public Value { /** Gets the number of internal fields for this Object. */ int InternalFieldCount(); - /** Same as above, but works for Persistents */ + /** Same as above, but works for PersistentBase. */ V8_INLINE static int InternalFieldCount( const PersistentBase& object) { return object.val_->InternalFieldCount(); } + /** Same as above, but works for TracedGlobal. */ + V8_INLINE static int InternalFieldCount(const TracedGlobal& object) { + return object.val_->InternalFieldCount(); + } + /** Gets the value from an internal field. */ V8_INLINE Local GetInternalField(int index); @@ -3469,12 +3613,18 @@ class V8_EXPORT Object : public Value { */ V8_INLINE void* GetAlignedPointerFromInternalField(int index); - /** Same as above, but works for Persistents */ + /** Same as above, but works for PersistentBase. */ V8_INLINE static void* GetAlignedPointerFromInternalField( const PersistentBase& object, int index) { return object.val_->GetAlignedPointerFromInternalField(index); } + /** Same as above, but works for TracedGlobal. */ + V8_INLINE static void* GetAlignedPointerFromInternalField( + const TracedGlobal& object, int index) { + return object.val_->GetAlignedPointerFromInternalField(index); + } + /** * Sets a 2-byte-aligned native pointer in an internal field. To retrieve such * a field, GetAlignedPointerFromInternalField must be used, everything else @@ -3493,8 +3643,6 @@ class V8_EXPORT Object : public Value { Local key); V8_WARN_UNUSED_RESULT Maybe HasOwnProperty(Local context, uint32_t index); - V8_DEPRECATED("Use maybe version", - bool HasRealNamedProperty(Local key)); /** * Use HasRealNamedProperty() if you want to check if an object has an own * property without causing side effects, i.e., without calling interceptors. 
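// Illustrative sketch, not part of this patch: querying an own property
// through the remaining context-taking overload now that the non-Maybe
// variants are gone. HasOwnFoo and the "foo" key are assumed for illustration.
bool HasOwnFoo(v8::Local<v8::Context> context, v8::Local<v8::Object> obj) {
  v8::Isolate* isolate = context->GetIsolate();
  v8::Local<v8::String> key =
      v8::String::NewFromUtf8(isolate, "foo", v8::NewStringType::kNormal)
          .ToLocalChecked();
  // HasRealNamedProperty() does not run interceptors; an empty Maybe means an
  // exception was thrown, which this sketch simply treats as "not present".
  return obj->HasRealNamedProperty(context, key).FromMaybe(false);
}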
@@ -3510,12 +3658,8 @@ class V8_EXPORT Object : public Value { */ V8_WARN_UNUSED_RESULT Maybe HasRealNamedProperty(Local context, Local key); - V8_DEPRECATED("Use maybe version", - bool HasRealIndexedProperty(uint32_t index)); V8_WARN_UNUSED_RESULT Maybe HasRealIndexedProperty( Local context, uint32_t index); - V8_DEPRECATED("Use maybe version", - bool HasRealNamedCallbackProperty(Local key)); V8_WARN_UNUSED_RESULT Maybe HasRealNamedCallbackProperty( Local context, Local key); @@ -3761,6 +3905,8 @@ class ReturnValue { template V8_INLINE void Set(const Global& handle); template + V8_INLINE void Set(const TracedGlobal& handle); + template V8_INLINE void Set(const Local handle); // Fast primitive setters V8_INLINE void Set(bool value); @@ -3984,11 +4130,6 @@ class V8_EXPORT Function : public Object { Local data = Local(), int length = 0, ConstructorBehavior behavior = ConstructorBehavior::kAllow, SideEffectType side_effect_type = SideEffectType::kHasSideEffect); - static V8_DEPRECATED("Use maybe version", - Local New(Isolate* isolate, - FunctionCallback callback, - Local data = Local(), - int length = 0)); V8_WARN_UNUSED_RESULT MaybeLocal NewInstance( Local context, int argc, Local argv[]) const; @@ -4007,9 +4148,6 @@ class V8_EXPORT Function : public Object { Local context, int argc, Local argv[], SideEffectType side_effect_type = SideEffectType::kHasSideEffect) const; - V8_DEPRECATED("Use maybe version", - Local Call(Local recv, int argc, - Local argv[])); V8_WARN_UNUSED_RESULT MaybeLocal Call(Local context, Local recv, int argc, Local argv[]); @@ -4194,14 +4332,6 @@ class V8_EXPORT PropertyDescriptor { // GenericDescriptor PropertyDescriptor(); - // DataDescriptor (implicit / DEPRECATED) - // Templatized such that the explicit constructor is chosen first. - // TODO(clemensh): Remove after 7.3 branch. - template - V8_DEPRECATED( - "Use explicit constructor", - PropertyDescriptor(Local value)); // NOLINT(runtime/explicit) - // DataDescriptor explicit PropertyDescriptor(Local value); @@ -4242,11 +4372,6 @@ class V8_EXPORT PropertyDescriptor { PrivateData* private_; }; -// TODO(clemensh): Remove after 7.3 branch. -template -PropertyDescriptor::PropertyDescriptor(Local value) - : PropertyDescriptor(value) {} - /** * An instance of the built-in Proxy constructor (ECMA-262, 6th Edition, * 26.2.1). @@ -4336,27 +4461,6 @@ class V8_EXPORT CompiledWasmModule { // An instance of WebAssembly.Module. class V8_EXPORT WasmModuleObject : public Object { public: - // TODO(clemensh): Remove after 7.3 branch. - typedef std::pair, size_t> SerializedModule; - - /** - * A unowned reference to a byte buffer. - * TODO(clemensh): Remove after 7.3 branch. - */ - struct BufferReference { - const uint8_t* start; - size_t size; - BufferReference(const uint8_t* start, size_t size) - : start(start), size(size) {} - - // Implicit conversion to and from MemorySpan. - BufferReference(MemorySpan span) // NOLINT(runtime/explicit) - : start(span.data()), size(span.size()) {} - operator MemorySpan() const { - return MemorySpan{start, size}; - } - }; - /** * An opaque, native heap object for transferring wasm modules. It * supports move semantics, and does not support copy semantics. @@ -4398,25 +4502,12 @@ class V8_EXPORT WasmModuleObject : public Object { static MaybeLocal FromTransferrableModule( Isolate* isolate, const TransferrableModule&); - /** - * Get the wasm-encoded bytes that were used to compile this module. 
- */ - V8_DEPRECATED("Use CompiledWasmModule::GetWireBytesRef()", - BufferReference GetWasmWireBytesRef()); - /** * Get the compiled module for this module object. The compiled module can be * shared by several module objects. */ CompiledWasmModule GetCompiledModule(); - /** - * Serialize the compiled module. The serialized data does not include the - * uncompiled bytes. - */ - V8_DEPRECATED("Use CompiledWasmModule::Serialize()", - SerializedModule Serialize()); - /** * If possible, deserialize the module, otherwise compile it from the provided * uncompiled bytes. @@ -4441,9 +4532,6 @@ class V8_EXPORT WasmModuleObject : public Object { static void CheckCast(Value* obj); }; -V8_DEPRECATED("Use WasmModuleObject", - typedef WasmModuleObject WasmCompiledModule); - /** * The V8 interface for WebAssembly streaming compilation. When streaming * compilation is initiated, V8 passes a {WasmStreaming} object to the embedder @@ -5079,7 +5167,8 @@ class V8_EXPORT SharedArrayBuffer : public Object { allocation_length_(0), allocation_mode_(Allocator::AllocationMode::kNormal), deleter_(nullptr), - deleter_data_(nullptr) {} + deleter_data_(nullptr), + is_growable_(false) {} void* AllocationBase() const { return allocation_base_; } size_t AllocationLength() const { return allocation_length_; } @@ -5091,12 +5180,13 @@ class V8_EXPORT SharedArrayBuffer : public Object { size_t ByteLength() const { return byte_length_; } DeleterCallback Deleter() const { return deleter_; } void* DeleterData() const { return deleter_data_; } + bool IsGrowable() const { return is_growable_; } private: Contents(void* data, size_t byte_length, void* allocation_base, size_t allocation_length, Allocator::AllocationMode allocation_mode, DeleterCallback deleter, - void* deleter_data); + void* deleter_data, bool is_growable); void* data_; size_t byte_length_; @@ -5105,6 +5195,7 @@ class V8_EXPORT SharedArrayBuffer : public Object { Allocator::AllocationMode allocation_mode_; DeleterCallback deleter_; void* deleter_data_; + bool is_growable_; friend class SharedArrayBuffer; }; @@ -5132,6 +5223,14 @@ class V8_EXPORT SharedArrayBuffer : public Object { Isolate* isolate, void* data, size_t byte_length, ArrayBufferCreationMode mode = ArrayBufferCreationMode::kExternalized); + /** + * Create a new SharedArrayBuffer over an existing memory block. Propagate + * flags to indicate whether the underlying buffer can be grown. + */ + static Local New( + Isolate* isolate, const SharedArrayBuffer::Contents&, + ArrayBufferCreationMode mode = ArrayBufferCreationMode::kExternalized); + /** * Returns true if SharedArrayBuffer is externalized, that is, does not * own its memory block. @@ -5192,6 +5291,21 @@ class V8_EXPORT Date : public Object { V8_INLINE static Date* Cast(Value* obj); + /** + * Time zone redetection indicator for + * DateTimeConfigurationChangeNotification. + * + * kSkip indicates V8 that the notification should not trigger redetecting + * host time zone. kRedetect indicates V8 that host time zone should be + * redetected, and used to set the default time zone. + * + * The host time zone detection may require file system access or similar + * operations unlikely to be available inside a sandbox. If v8 is run inside a + * sandbox, the host time zone has to be detected outside the sandbox before + * calling DateTimeConfigurationChangeNotification function. 
+ */ + enum class TimeZoneDetection { kSkip, kRedetect }; + /** * Notification that the embedder has changed the time zone, * daylight savings time, or other date / time configuration @@ -5204,7 +5318,11 @@ class V8_EXPORT Date : public Object { * This API should not be called more than needed as it will * negatively impact the performance of date operations. */ - static void DateTimeConfigurationChangeNotification(Isolate* isolate); + V8_DEPRECATE_SOON( + "Use Isolate::DateTimeConfigurationChangeNotification", + static void DateTimeConfigurationChangeNotification( + Isolate* isolate, + TimeZoneDetection time_zone_detection = TimeZoneDetection::kSkip)); private: static void CheckCast(Value* obj); @@ -5829,7 +5947,6 @@ class V8_EXPORT FunctionTemplate : public Template { SideEffectType side_effect_type = SideEffectType::kHasSideEffect); /** Returns the unique function instance in the current execution context.*/ - V8_DEPRECATED("Use maybe version", Local GetFunction()); V8_WARN_UNUSED_RESULT MaybeLocal GetFunction( Local context); @@ -5904,7 +6021,8 @@ class V8_EXPORT FunctionTemplate : public Template { * function template. By default, instances of a function template * are not ignored. */ - void SetHiddenPrototype(bool value); + V8_DEPRECATED("This feature is incompatible with ES6+.", + void SetHiddenPrototype(bool value)); /** * Sets the ReadOnly flag in the attributes of the 'prototype' property @@ -6126,7 +6244,6 @@ class V8_EXPORT ObjectTemplate : public Template { size_t index); /** Creates a new instance of this template.*/ - V8_DEPRECATED("Use maybe version", Local NewInstance()); V8_WARN_UNUSED_RESULT MaybeLocal NewInstance(Local context); /** @@ -6346,20 +6463,6 @@ class V8_EXPORT AccessorSignature : public Data { // --- Extensions --- -V8_DEPRECATED("Implementation detail", class) -V8_EXPORT ExternalOneByteStringResourceImpl - : public String::ExternalOneByteStringResource { - public: - ExternalOneByteStringResourceImpl() : data_(nullptr), length_(0) {} - ExternalOneByteStringResourceImpl(const char* data, size_t length) - : data_(data), length_(length) {} - const char* data() const override { return data_; } - size_t length() const override { return length_; } - - private: - const char* data_; - size_t length_; -}; /** * Ignore @@ -6381,8 +6484,8 @@ class V8_EXPORT Extension { // NOLINT const String::ExternalOneByteStringResource* source() const { return source_; } - int dependency_count() { return dep_count_; } - const char** dependencies() { return deps_; } + int dependency_count() const { return dep_count_; } + const char** dependencies() const { return deps_; } void set_auto_enable(bool value) { auto_enable_ = value; } bool auto_enable() { return auto_enable_; } @@ -6399,9 +6502,7 @@ class V8_EXPORT Extension { // NOLINT bool auto_enable_; }; - -void V8_EXPORT RegisterExtension(Extension* extension); - +void V8_EXPORT RegisterExtension(std::unique_ptr); // --- Statics --- @@ -6461,8 +6562,14 @@ class V8_EXPORT ResourceConstraints { void set_code_range_size(size_t limit_in_mb) { code_range_size_ = limit_in_mb; } - size_t max_zone_pool_size() const { return max_zone_pool_size_; } - void set_max_zone_pool_size(size_t bytes) { max_zone_pool_size_ = bytes; } + V8_DEPRECATE_SOON("Zone does not pool memory any more.", + size_t max_zone_pool_size() const) { + return max_zone_pool_size_; + } + V8_DEPRECATE_SOON("Zone does not pool memory any more.", + void set_max_zone_pool_size(size_t bytes)) { + max_zone_pool_size_ = bytes; + } private: // max_semi_space_size_ is in KB @@ -6615,11 
+6722,8 @@ enum PromiseRejectEvent { class PromiseRejectMessage { public: PromiseRejectMessage(Local promise, PromiseRejectEvent event, - Local value, Local stack_trace) - : promise_(promise), - event_(event), - value_(value), - stack_trace_(stack_trace) {} + Local value) + : promise_(promise), event_(event), value_(value) {} V8_INLINE Local GetPromise() const { return promise_; } V8_INLINE PromiseRejectEvent GetEvent() const { return event_; } @@ -6629,13 +6733,13 @@ class PromiseRejectMessage { Local promise_; PromiseRejectEvent event_; Local value_; - Local stack_trace_; }; typedef void (*PromiseRejectCallback)(PromiseRejectMessage message); // --- Microtasks Callbacks --- typedef void (*MicrotasksCompletedCallback)(Isolate*); +typedef void (*MicrotasksCompletedCallbackWithData)(Isolate*, void*); typedef void (*MicrotaskCallback)(void* data); @@ -6648,6 +6752,80 @@ typedef void (*MicrotaskCallback)(void* data); */ enum class MicrotasksPolicy { kExplicit, kScoped, kAuto }; +/** + * Represents the microtask queue, where microtasks are stored and processed. + * https://html.spec.whatwg.org/multipage/webappapis.html#microtask-queue + * https://html.spec.whatwg.org/multipage/webappapis.html#enqueuejob(queuename,-job,-arguments) + * https://html.spec.whatwg.org/multipage/webappapis.html#perform-a-microtask-checkpoint + * + * A MicrotaskQueue instance may be associated to multiple Contexts by passing + * it to Context::New(), and they can be detached by Context::DetachGlobal(). + * The embedder must keep the MicrotaskQueue instance alive until all associated + * Contexts are gone or detached. + * + * Use the same instance of MicrotaskQueue for all Contexts that may access each + * other synchronously. E.g. for Web embedding, use the same instance for all + * origins that share the same URL scheme and eTLD+1. + */ +class V8_EXPORT MicrotaskQueue { + public: + /** + * Creates an empty MicrotaskQueue instance. + */ + static std::unique_ptr New(Isolate* isolate); + + virtual ~MicrotaskQueue() = default; + + /** + * Enqueues the callback to the queue. + */ + virtual void EnqueueMicrotask(Isolate* isolate, + Local microtask) = 0; + + /** + * Enqueues the callback to the queue. + */ + virtual void EnqueueMicrotask(v8::Isolate* isolate, + MicrotaskCallback callback, + void* data = nullptr) = 0; + + /** + * Adds a callback to notify the embedder after microtasks were run. The + * callback is triggered by explicit RunMicrotasks call or automatic + * microtasks execution (see Isolate::SetMicrotasksPolicy). + * + * Callback will trigger even if microtasks were attempted to run, + * but the microtasks queue was empty and no single microtask was actually + * executed. + * + * Executing scripts inside the callback will not re-trigger microtasks and + * the callback. + */ + virtual void AddMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data = nullptr) = 0; + + /** + * Removes callback that was installed by AddMicrotasksCompletedCallback. + */ + virtual void RemoveMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data = nullptr) = 0; + + /** + * Runs microtasks if no microtask is running on this MicrotaskQueue instance. + */ + virtual void PerformCheckpoint(Isolate* isolate) = 0; + + /** + * Returns true if a microtask is running on this MicrotaskQueue instance. 
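// Illustrative sketch, not part of this patch: an embedder-owned queue created
// through the new MicrotaskQueue interface. RunGroupMicrotasks and MyMicrotask
// are assumed names; the queue must outlive every Context attached to it.
static void MyMicrotask(void* data) { /* embedder work */ }

void RunGroupMicrotasks(v8::Isolate* isolate) {
  std::unique_ptr<v8::MicrotaskQueue> queue = v8::MicrotaskQueue::New(isolate);
  // Contexts sharing this queue would receive queue.get() via Context::New();
  // see the new microtask_queue parameter added further below.
  queue->EnqueueMicrotask(isolate, MyMicrotask, /*data=*/nullptr);
  // Drains the queue unless a microtask is already running on it.
  queue->PerformCheckpoint(isolate);
}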
+ */ + virtual bool IsRunningMicrotasks() const = 0; + + private: + friend class internal::MicrotaskQueue; + MicrotaskQueue() = default; + MicrotaskQueue(const MicrotaskQueue&) = delete; + MicrotaskQueue& operator=(const MicrotaskQueue&) = delete; +}; /** * This scope is used to control microtasks when kScopeMicrotasksInvocation @@ -6663,6 +6841,7 @@ class V8_EXPORT MicrotasksScope { enum Type { kRunMicrotasks, kDoNotRunMicrotasks }; MicrotasksScope(Isolate* isolate, Type type); + MicrotasksScope(Isolate* isolate, MicrotaskQueue* microtask_queue, Type type); ~MicrotasksScope(); /** @@ -6686,6 +6865,7 @@ class V8_EXPORT MicrotasksScope { private: internal::Isolate* const isolate_; + internal::MicrotaskQueue* const microtask_queue_; bool run_; }; @@ -6874,8 +7054,6 @@ class V8_EXPORT HeapCodeStatistics { friend class Isolate; }; -class RetainedObjectInfo; - /** * A JIT code event is issued each time code is added, moved or removed. * @@ -6956,7 +7134,7 @@ struct JitCodeEvent { * See documentation https://developers.google.com/web/tools/chrome-devtools/ * profile/evaluate-performance/rail */ -enum RAILMode { +enum RAILMode : unsigned { // Response performance mode: In this mode very low virtual machine latency // is provided. V8 will try to avoid JavaScript execution interruptions. // Throughput may be throttled. @@ -7037,8 +7215,23 @@ class V8_EXPORT EmbedderHeapTracer { kEmpty, }; + /** + * Interface for iterating through TracedGlobal handles. + */ + class V8_EXPORT TracedGlobalHandleVisitor { + public: + virtual ~TracedGlobalHandleVisitor() = default; + virtual void VisitTracedGlobalHandle(const TracedGlobal& value) = 0; + }; + virtual ~EmbedderHeapTracer() = default; + /** + * Iterates all TracedGlobal handles created for the v8::Isolate the tracer is + * attached to. + */ + void IterateTracedGlobalHandles(TracedGlobalHandleVisitor* visitor); + /** * Called by v8 to register internal fields of found wrappers. * @@ -7048,6 +7241,8 @@ class V8_EXPORT EmbedderHeapTracer { virtual void RegisterV8References( const std::vector >& embedder_fields) = 0; + void RegisterEmbedderReference(const TracedGlobal& ref); + /** * Called at the beginning of a GC cycle. */ @@ -7084,15 +7279,6 @@ class V8_EXPORT EmbedderHeapTracer { */ virtual void EnterFinalPause(EmbedderStackState stack_state) = 0; - /** - * Called when tracing is aborted. - * - * The embedder is expected to throw away all intermediate data and reset to - * the initial state. - */ - V8_DEPRECATED("Obsolete as V8 will not abort tracing anymore.", - virtual void AbortTracing()) {} - /* * Called by the embedder to request immediate finalization of the currently * running tracing phase that has been started with TracePrologue and not @@ -7104,6 +7290,17 @@ class V8_EXPORT EmbedderHeapTracer { */ void FinalizeTracing(); + /** + * Returns true if the TracedGlobal handle should be considered as root for + * the currently running non-tracing garbage collection and false otherwise. + * + * Default implementation will keep all TracedGlobal references as roots. + */ + virtual bool IsRootForNonTracingGC( + const v8::TracedGlobal& handle) { + return true; + } + /* * Called by the embedder to immediately perform a full garbage collection. * @@ -7314,6 +7511,7 @@ class V8_EXPORT Isolate { class V8_EXPORT SuppressMicrotaskExecutionScope { public: explicit SuppressMicrotaskExecutionScope(Isolate* isolate); + explicit SuppressMicrotaskExecutionScope(MicrotaskQueue* microtask_queue); ~SuppressMicrotaskExecutionScope(); // Prevent copying of Scope objects. 
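// Illustrative sketch, not part of this patch: walking every TracedGlobal of
// the attached isolate with the new TracedGlobalHandleVisitor, e.g. for
// debugging or bookkeeping. HandleCounter and CountTracedHandles are assumed
// names; `tracer` is the embedder's existing EmbedderHeapTracer instance.
class HandleCounter final
    : public v8::EmbedderHeapTracer::TracedGlobalHandleVisitor {
 public:
  void VisitTracedGlobalHandle(const v8::TracedGlobal<v8::Value>& value) override {
    ++count_;
  }
  size_t count_ = 0;
};

size_t CountTracedHandles(v8::EmbedderHeapTracer* tracer) {
  HandleCounter counter;
  tracer->IterateTracedGlobalHandles(&counter);
  return counter.count_;
}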
@@ -7324,6 +7522,7 @@ class V8_EXPORT Isolate { private: internal::Isolate* const isolate_; + internal::MicrotaskQueue* const microtask_queue_; }; /** @@ -7434,6 +7633,7 @@ class V8_EXPORT Isolate { kRegExpMatchIsTrueishOnNonJSRegExp = 72, kRegExpMatchIsFalseishOnJSRegExp = 73, kDateGetTimezoneOffset = 74, + kStringNormalize = 75, // If you add new values here, you'll also need to update Chromium's: // web_feature.mojom, UseCounterCallback.cpp, and enums.xml. V8 changes to @@ -7987,18 +8187,18 @@ class V8_EXPORT Isolate { void SetPromiseRejectCallback(PromiseRejectCallback callback); /** - * Runs the Microtask Work Queue until empty + * Runs the default MicrotaskQueue until it gets empty. * Any exceptions thrown by microtask callbacks are swallowed. */ void RunMicrotasks(); /** - * Enqueues the callback to the Microtask Work Queue + * Enqueues the callback to the default MicrotaskQueue */ void EnqueueMicrotask(Local microtask); /** - * Enqueues the callback to the Microtask Work Queue + * Enqueues the callback to the default MicrotaskQueue */ void EnqueueMicrotask(MicrotaskCallback callback, void* data = nullptr); @@ -8014,22 +8214,31 @@ class V8_EXPORT Isolate { /** * Adds a callback to notify the host application after - * microtasks were run. The callback is triggered by explicit RunMicrotasks - * call or automatic microtasks execution (see SetAutorunMicrotasks). + * microtasks were run on the default MicrotaskQueue. The callback is + * triggered by explicit RunMicrotasks call or automatic microtasks execution + * (see SetMicrotaskPolicy). * * Callback will trigger even if microtasks were attempted to run, * but the microtasks queue was empty and no single microtask was actually * executed. * - * Executing scriptsinside the callback will not re-trigger microtasks and + * Executing scripts inside the callback will not re-trigger microtasks and * the callback. */ - void AddMicrotasksCompletedCallback(MicrotasksCompletedCallback callback); + V8_DEPRECATE_SOON("Use *WithData version.", + void AddMicrotasksCompletedCallback( + MicrotasksCompletedCallback callback)); + void AddMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data = nullptr); /** * Removes callback that was installed by AddMicrotasksCompletedCallback. */ - void RemoveMicrotasksCompletedCallback(MicrotasksCompletedCallback callback); + V8_DEPRECATE_SOON("Use *WithData version.", + void RemoveMicrotasksCompletedCallback( + MicrotasksCompletedCallback callback)); + void RemoveMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data = nullptr); /** * Sets a callback for counting the number of times a feature of V8 is used. @@ -8244,10 +8453,6 @@ class V8_EXPORT Isolate { void SetWasmModuleCallback(ExtensionCallback callback); void SetWasmInstanceCallback(ExtensionCallback callback); - V8_DEPRECATED( - "The callback set in SetWasmStreamingCallback is used now", - void SetWasmCompileStreamingCallback(ApiImplementationCallback callback)); - void SetWasmStreamingCallback(WasmStreamingCallback callback); void SetWasmThreadsEnabledCallback(WasmThreadsEnabledCallback callback); @@ -8321,7 +8526,7 @@ class V8_EXPORT Isolate { * garbage collection but is free to visit an arbitrary superset of these * objects. 
*/ - V8_DEPRECATE_SOON( + V8_DEPRECATED( "Use VisitHandlesWithClassIds", void VisitHandlesForPartialDependence(PersistentHandleVisitor* visitor)); @@ -8345,6 +8550,45 @@ class V8_EXPORT Isolate { */ void SetAllowAtomicsWait(bool allow); + /** + * Time zone redetection indicator for + * DateTimeConfigurationChangeNotification. + * + * kSkip indicates V8 that the notification should not trigger redetecting + * host time zone. kRedetect indicates V8 that host time zone should be + * redetected, and used to set the default time zone. + * + * The host time zone detection may require file system access or similar + * operations unlikely to be available inside a sandbox. If v8 is run inside a + * sandbox, the host time zone has to be detected outside the sandbox before + * calling DateTimeConfigurationChangeNotification function. + */ + enum class TimeZoneDetection { kSkip, kRedetect }; + + /** + * Notification that the embedder has changed the time zone, daylight savings + * time or other date / time configuration parameters. V8 keeps a cache of + * various values used for date / time computation. This notification will + * reset those cached values for the current context so that date / time + * configuration changes would be reflected. + * + * This API should not be called more than needed as it will negatively impact + * the performance of date operations. + */ + void DateTimeConfigurationChangeNotification( + TimeZoneDetection time_zone_detection = TimeZoneDetection::kSkip); + + /** + * Notification that the embedder has changed the locale. V8 keeps a cache of + * various values used for locale computation. This notification will reset + * those cached values for the current context so that locale configuration + * changes would be reflected. + * + * This API should not be called more than needed as it will negatively impact + * the performance of locale operations. + */ + void LocaleConfigurationChangeNotification(); + Isolate() = delete; ~Isolate() = delete; Isolate(const Isolate&) = delete; @@ -8547,13 +8791,6 @@ class V8_EXPORT V8 { void* context)); #endif // V8_OS_POSIX - /** - * Enable the default signal handler rather than using one provided by the - * embedder. - */ - V8_DEPRECATED("Use EnableWebAssemblyTrapHandler", - static bool RegisterDefaultSignalHandler()); - /** * Activate trap-based bounds checking for WebAssembly. 
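// Illustrative sketch, not part of this patch: reacting to a host time zone
// change with the new Isolate-level notification that supersedes the
// deprecated Date::DateTimeConfigurationChangeNotification() above.
// OnHostTimeZoneChanged is an assumed name.
void OnHostTimeZoneChanged(v8::Isolate* isolate) {
  // Ask V8 to redetect the host time zone and flush its date/time caches.
  isolate->DateTimeConfigurationChangeNotification(
      v8::Isolate::TimeZoneDetection::kRedetect);
}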
* @@ -8567,13 +8804,24 @@ class V8_EXPORT V8 { static internal::Address* GlobalizeReference(internal::Isolate* isolate, internal::Address* handle); - static internal::Address* CopyPersistent(internal::Address* handle); + static internal::Address* GlobalizeTracedReference(internal::Isolate* isolate, + internal::Address* handle, + internal::Address* slot); + static void MoveGlobalReference(internal::Address** from, + internal::Address** to); + static void MoveTracedGlobalReference(internal::Address** from, + internal::Address** to); + static internal::Address* CopyGlobalReference(internal::Address* from); static void DisposeGlobal(internal::Address* global_handle); + static void DisposeTracedGlobal(internal::Address* global_handle); static void MakeWeak(internal::Address* location, void* data, WeakCallbackInfo::Callback weak_callback, WeakCallbackType type); static void MakeWeak(internal::Address** location_addr); static void* ClearWeak(internal::Address* location); + static void SetFinalizationCallbackTraced( + internal::Address* location, void* parameter, + WeakCallbackInfo::Callback callback); static void AnnotateStrongRetainer(internal::Address* location, const char* label); static Value* Eternalize(Isolate* isolate, Value* handle); @@ -8587,12 +8835,16 @@ class V8_EXPORT V8 { static void FromJustIsNothing(); static void ToLocalEmpty(); static void InternalFieldOutOfBounds(int index); + template + friend class Global; template friend class Local; template friend class MaybeLocal; template friend class Maybe; template + friend class TracedGlobal; + template friend class WeakCallbackInfo; template friend class Eternal; template friend class PersistentBase; @@ -9048,7 +9300,8 @@ class V8_EXPORT Context { MaybeLocal global_template = MaybeLocal(), MaybeLocal global_object = MaybeLocal(), DeserializeInternalFieldsCallback internal_fields_deserializer = - DeserializeInternalFieldsCallback()); + DeserializeInternalFieldsCallback(), + MicrotaskQueue* microtask_queue = nullptr); /** * Create a new context from a (non-default) context snapshot. There @@ -9068,13 +9321,13 @@ class V8_EXPORT Context { * * \param global_object See v8::Context::New. */ - static MaybeLocal FromSnapshot( Isolate* isolate, size_t context_snapshot_index, DeserializeInternalFieldsCallback embedder_fields_deserializer = DeserializeInternalFieldsCallback(), ExtensionConfiguration* extensions = nullptr, - MaybeLocal global_object = MaybeLocal()); + MaybeLocal global_object = MaybeLocal(), + MicrotaskQueue* microtask_queue = nullptr); /** * Returns an global object that isn't backed by an actual context. 
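// Illustrative sketch, not part of this patch: binding a context to an
// embedder-owned MicrotaskQueue through the new trailing parameter of
// Context::New(). NewContextWithQueue is an assumed name; the queue must stay
// alive until the context is gone or detached.
v8::Local<v8::Context> NewContextWithQueue(v8::Isolate* isolate,
                                           v8::MicrotaskQueue* queue) {
  return v8::Context::New(isolate,
                          /*extensions=*/nullptr,
                          v8::MaybeLocal<v8::ObjectTemplate>(),
                          v8::MaybeLocal<v8::Value>(),
                          v8::DeserializeInternalFieldsCallback(),
                          queue);
}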
@@ -9452,6 +9705,10 @@ Local Local::New(Isolate* isolate, const PersistentBase& that) { return New(isolate, that.val_); } +template +Local Local::New(Isolate* isolate, const TracedGlobal& that) { + return New(isolate, that.val_); +} template Local Local::New(Isolate* isolate, T* that) { @@ -9514,7 +9771,7 @@ void Persistent::Copy(const Persistent& that) { this->Reset(); if (that.IsEmpty()) return; internal::Address* p = reinterpret_cast(that.val_); - this->val_ = reinterpret_cast(V8::CopyPersistent(p)); + this->val_ = reinterpret_cast(V8::CopyGlobalReference(p)); M::Copy(that, this); } @@ -9647,6 +9904,109 @@ uint16_t PersistentBase::WrapperClassId() const { return *reinterpret_cast(addr); } +template +Global::Global(Global&& other) : PersistentBase(other.val_) { + if (other.val_ != nullptr) { + V8::MoveGlobalReference(reinterpret_cast(&other.val_), + reinterpret_cast(&this->val_)); + other.val_ = nullptr; + } +} + +template +template +Global& Global::operator=(Global&& rhs) { + TYPE_CHECK(T, S); + if (this != &rhs) { + this->Reset(); + if (rhs.val_ != nullptr) { + this->val_ = rhs.val_; + V8::MoveGlobalReference( + reinterpret_cast(&rhs.val_), + reinterpret_cast(&this->val_)); + rhs.val_ = nullptr; + } + } + return *this; +} + +template +T* TracedGlobal::New(Isolate* isolate, T* that, T** slot) { + if (that == nullptr) return nullptr; + internal::Address* p = reinterpret_cast(that); + return reinterpret_cast(V8::GlobalizeTracedReference( + reinterpret_cast(isolate), p, + reinterpret_cast(slot))); +} + +template +void TracedGlobal::Reset() { + if (IsEmpty()) return; + V8::DisposeTracedGlobal(reinterpret_cast(val_)); + val_ = nullptr; +} + +template +template +void TracedGlobal::Reset(Isolate* isolate, const Local& other) { + TYPE_CHECK(T, S); + Reset(); + if (other.IsEmpty()) return; + this->val_ = New(isolate, other.val_, &val_); +} + +template +TracedGlobal::TracedGlobal(TracedGlobal&& other) : val_(other.val_) { + if (other.val_ != nullptr) { + V8::MoveTracedGlobalReference( + reinterpret_cast(&other.val_), + reinterpret_cast(&this->val_)); + other.val_ = nullptr; + } +} + +template +template +TracedGlobal& TracedGlobal::operator=(TracedGlobal&& rhs) { + TYPE_CHECK(T, S); + if (this != &rhs) { + this->Reset(); + if (rhs.val_ != nullptr) { + this->val_ = rhs.val_; + V8::MoveTracedGlobalReference( + reinterpret_cast(&rhs.val_), + reinterpret_cast(&this->val_)); + rhs.val_ = nullptr; + } + } + return *this; +} + +template +void TracedGlobal::SetWrapperClassId(uint16_t class_id) { + typedef internal::Internals I; + if (IsEmpty()) return; + internal::Address* obj = reinterpret_cast(this->val_); + uint8_t* addr = reinterpret_cast(obj) + I::kNodeClassIdOffset; + *reinterpret_cast(addr) = class_id; +} + +template +uint16_t TracedGlobal::WrapperClassId() const { + typedef internal::Internals I; + if (IsEmpty()) return 0; + internal::Address* obj = reinterpret_cast(this->val_); + uint8_t* addr = reinterpret_cast(obj) + I::kNodeClassIdOffset; + return *reinterpret_cast(addr); +} + +template +void TracedGlobal::SetFinalizationCallback( + void* parameter, typename WeakCallbackInfo::Callback callback) { + V8::SetFinalizationCallbackTraced( + reinterpret_cast(this->val_), parameter, callback); +} + template ReturnValue::ReturnValue(internal::Address* slot) : value_(slot) {} @@ -9672,6 +10032,17 @@ void ReturnValue::Set(const Global& handle) { } } +template +template +void ReturnValue::Set(const TracedGlobal& handle) { + TYPE_CHECK(T, S); + if (V8_UNLIKELY(handle.IsEmpty())) { + *value_ = 
GetDefaultValue(); + } else { + *value_ = *reinterpret_cast(*handle); + } +} + template template void ReturnValue::Set(const Local handle) { @@ -9944,7 +10315,7 @@ AccessorSignature* AccessorSignature::Cast(Data* data) { } Local Object::GetInternalField(int index) { -#if !defined(V8_ENABLE_CHECKS) && !defined(V8_COMPRESS_POINTERS) +#ifndef V8_ENABLE_CHECKS typedef internal::Address A; typedef internal::Internals I; A obj = *reinterpret_cast(this); @@ -9954,9 +10325,13 @@ Local Object::GetInternalField(int index) { if (instance_type == I::kJSObjectType || instance_type == I::kJSApiObjectType || instance_type == I::kJSSpecialApiObjectType) { - int offset = I::kJSObjectHeaderSizeForEmbedderFields + - (I::kEmbedderDataSlotSize * index); - A value = I::ReadTaggedAnyField(obj, offset); + int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index); + A value = I::ReadRawField(obj, offset); +#ifdef V8_COMPRESS_POINTERS + // We read the full pointer value and then decompress it in order to avoid + // dealing with potential endiannes issues. + value = I::DecompressTaggedAnyField(obj, static_cast(value)); +#endif internal::Isolate* isolate = internal::IsolateFromNeverReadOnlySpaceObject(obj); A* result = HandleScope::CreateHandle(isolate, value); @@ -9968,7 +10343,7 @@ Local Object::GetInternalField(int index) { void* Object::GetAlignedPointerFromInternalField(int index) { -#if !defined(V8_ENABLE_CHECKS) && !defined(V8_COMPRESS_POINTERS) +#ifndef V8_ENABLE_CHECKS typedef internal::Address A; typedef internal::Internals I; A obj = *reinterpret_cast(this); @@ -9978,8 +10353,7 @@ void* Object::GetAlignedPointerFromInternalField(int index) { if (V8_LIKELY(instance_type == I::kJSObjectType || instance_type == I::kJSApiObjectType || instance_type == I::kJSSpecialApiObjectType)) { - int offset = I::kJSObjectHeaderSizeForEmbedderFields + - (I::kEmbedderDataSlotSize * index); + int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index); return I::ReadRawField(obj, offset); } #endif @@ -10478,10 +10852,14 @@ ReturnValue PropertyCallbackInfo::GetReturnValue() const { template bool PropertyCallbackInfo::ShouldThrowOnError() const { typedef internal::Internals I; - return args_[kShouldThrowOnErrorIndex] != I::IntToSmi(0); + if (args_[kShouldThrowOnErrorIndex] != + I::IntToSmi(I::kInferShouldThrowMode)) { + return args_[kShouldThrowOnErrorIndex] != I::IntToSmi(I::kDontThrow); + } + return v8::internal::ShouldThrowOnError( + reinterpret_cast(GetIsolate())); } - Local Undefined(Isolate* isolate) { typedef internal::Address S; typedef internal::Internals I; @@ -10554,7 +10932,11 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory( reinterpret_cast(reinterpret_cast(this) + I::kExternalMemoryAtLastMarkCompactOffset); - const int64_t amount = *external_memory + change_in_bytes; + // Embedders are weird: we see both over- and underflows here. Perform the + // addition with unsigned types to avoid undefined behavior. 
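// Illustrative sketch, not part of this patch: the wrap-around-safe addition
// pattern used in the lines below, shown standalone (assumes <cstdint>).
// Signed int64_t overflow is undefined behavior in C++, while unsigned
// arithmetic wraps modulo 2^64; converting the wrapped result back to int64_t
// is implementation-defined (two's complement on the platforms V8 supports)
// rather than undefined.
int64_t WrappingAdd(int64_t a, int64_t b) {
  return static_cast<int64_t>(static_cast<uint64_t>(a) +
                              static_cast<uint64_t>(b));
}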
+ const int64_t amount = + static_cast(static_cast(change_in_bytes) + + static_cast(*external_memory)); *external_memory = amount; int64_t allocation_diff_since_last_mc = @@ -10576,13 +10958,24 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory( } Local Context::GetEmbedderData(int index) { -#if !defined(V8_ENABLE_CHECKS) && !defined(V8_COMPRESS_POINTERS) +#ifndef V8_ENABLE_CHECKS typedef internal::Address A; typedef internal::Internals I; + A ctx = *reinterpret_cast(this); + A embedder_data = + I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); + int value_offset = + I::kEmbedderDataArrayHeaderSize + (I::kEmbedderDataSlotSize * index); + A value = I::ReadRawField(embedder_data, value_offset); +#ifdef V8_COMPRESS_POINTERS + // We read the full pointer value and then decompress it in order to avoid + // dealing with potential endiannes issues. + value = + I::DecompressTaggedAnyField(embedder_data, static_cast(value)); +#endif internal::Isolate* isolate = internal::IsolateFromNeverReadOnlySpaceObject( *reinterpret_cast(this)); - A* result = - HandleScope::CreateHandle(isolate, I::ReadEmbedderData(this, index)); + A* result = HandleScope::CreateHandle(isolate, value); return Local(reinterpret_cast(result)); #else return SlowGetEmbedderData(index); @@ -10591,9 +10984,15 @@ Local Context::GetEmbedderData(int index) { void* Context::GetAlignedPointerFromEmbedderData(int index) { -#if !defined(V8_ENABLE_CHECKS) && !defined(V8_COMPRESS_POINTERS) +#ifndef V8_ENABLE_CHECKS + typedef internal::Address A; typedef internal::Internals I; - return I::ReadEmbedderData(this, index); + A ctx = *reinterpret_cast(this); + A embedder_data = + I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); + int value_offset = + I::kEmbedderDataArrayHeaderSize + (I::kEmbedderDataSlotSize * index); + return I::ReadRawField(embedder_data, value_offset); #else return SlowGetAlignedPointerFromEmbedderData(index); #endif diff --git a/deps/v8/infra/OWNERS b/deps/v8/infra/OWNERS index ea6f5446ee7e90..c05d1d39218a42 100644 --- a/deps/v8/infra/OWNERS +++ b/deps/v8/infra/OWNERS @@ -1,3 +1,4 @@ machenbach@chromium.org sergiyb@chromium.org tandrii@chromium.org +tmrts@chromium.org \ No newline at end of file diff --git a/deps/v8/infra/config/OWNERS b/deps/v8/infra/config/OWNERS deleted file mode 100644 index 1d89078df75ea6..00000000000000 --- a/deps/v8/infra/config/OWNERS +++ /dev/null @@ -1 +0,0 @@ -sergiyb@chromium.org diff --git a/deps/v8/infra/config/PRESUBMIT.py b/deps/v8/infra/config/PRESUBMIT.py deleted file mode 100644 index 3d20f403f68eb9..00000000000000 --- a/deps/v8/infra/config/PRESUBMIT.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2018 the V8 project authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Presubmit script for changes in the infrastructure configs. - -See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts -for more details about the presubmit API built into gcl. 
-""" - - -def _CommonChecks(input_api, output_api): - """Checks common to both upload and commit.""" - results = [] - results.extend( - input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api)) - return results - - -def CheckChangeOnUpload(input_api, output_api): - results = [] - results.extend(_CommonChecks(input_api, output_api)) - return results - - -def CheckChangeOnCommit(input_api, output_api): - results = [] - results.extend(_CommonChecks(input_api, output_api)) - return results diff --git a/deps/v8/infra/config/cq.cfg b/deps/v8/infra/config/cq.cfg deleted file mode 100644 index e58723719e6769..00000000000000 --- a/deps/v8/infra/config/cq.cfg +++ /dev/null @@ -1,158 +0,0 @@ -# See http://luci-config.appspot.com/schemas/projects/refs:cq.cfg for the -# documentation of this file format. - -version: 1 -cq_status_url: "https://chromium-cq-status.appspot.com" -git_repo_url: "https://chromium.googlesource.com/v8/v8" -commit_burst_delay: 60 -max_commit_burst: 1 - -gerrit {} - -verifiers { - gerrit_cq_ability { - committer_list: "project-v8-committers" - dry_run_access_list: "project-v8-tryjob-access" - } - - tree_status { - tree_status_url: "https://v8-status.appspot.com" - } - - try_job { - buckets { - name: "luci.v8.try" - builders { name: "v8_android_arm_compile_rel" } - builders { name: "v8_fuchsia_rel_ng" } - builders { name: "v8_linux64_asan_rel_ng" } - builders { - name: "v8_linux64_asan_rel_ng_triggered" - triggered_by: "v8_linux64_asan_rel_ng" - } - builders { name: "v8_linux64_dbg_ng" } - builders { - name: "v8_linux64_dbg_ng_triggered" - triggered_by: "v8_linux64_dbg_ng" - } - builders { name: "v8_linux64_gcc_compile_dbg" } - builders { name: "v8_linux64_header_includes_dbg" } - builders { name: "v8_linux64_jumbo_compile_rel" } - builders { name: "v8_linux64_rel_ng" } - builders { - name: "v8_linux64_rel_ng_triggered" - triggered_by: "v8_linux64_rel_ng" - } - # TODO(machenbach): Figure out if bot should be removed or if - # functionality should be revived. 
- builders { - name: "v8_linux64_sanitizer_coverage_rel" - experiment_percentage: 10 - } - builders { name: "v8_linux64_shared_compile_rel" } - builders { name: "v8_linux64_verify_csa_rel_ng" } - builders { - name: "v8_linux64_verify_csa_rel_ng_triggered" - triggered_by: "v8_linux64_verify_csa_rel_ng" - } - builders { name: "v8_linux_arm64_rel_ng" } - builders { - name: "v8_linux_arm64_rel_ng_triggered" - triggered_by: "v8_linux_arm64_rel_ng" - } - builders { name: "v8_linux_arm_rel_ng" } - builders { - name: "v8_linux_arm_rel_ng_triggered" - triggered_by: "v8_linux_arm_rel_ng" - } - builders { - name: "v8_linux_blink_rel" - experiment_percentage: 100 - } - builders { name: "v8_linux_chromium_gn_rel" } - builders { name: "v8_linux_gcc_compile_rel" } - builders { name: "v8_linux_nodcheck_rel_ng" } - builders { - name: "v8_linux_nodcheck_rel_ng_triggered" - triggered_by: "v8_linux_nodcheck_rel_ng" - } - builders { name: "v8_linux_rel_ng" } - builders { - name: "v8_linux_rel_ng_triggered" - triggered_by: "v8_linux_rel_ng" - } - builders { name: "v8_linux_verify_csa_rel_ng" } - builders { - name: "v8_linux_verify_csa_rel_ng_triggered" - triggered_by: "v8_linux_verify_csa_rel_ng" - } - builders { name: "v8_mac64_rel_ng" } - builders { - name: "v8_mac64_rel_ng_triggered" - triggered_by: "v8_mac64_rel_ng" - } - builders { name: "v8_node_linux64_rel" } - builders { - name: "v8_presubmit" - disable_reuse: true - } - builders { name: "v8_win64_msvc_compile_rel" } - builders { name: "v8_win64_rel_ng" } - builders { - name: "v8_win64_rel_ng_triggered" - triggered_by: "v8_win64_rel_ng" - } - builders { name: "v8_win_compile_dbg" } - builders { name: "v8_win_nosnap_shared_rel_ng" } - builders { - name: "v8_win_nosnap_shared_rel_ng_triggered" - triggered_by: "v8_win_nosnap_shared_rel_ng" - } - builders { name: "v8_win_rel_ng" } - builders { - name: "v8_win_rel_ng_triggered" - triggered_by: "v8_win_rel_ng" - } - builders { - name: "v8_linux_noi18n_rel_ng" - path_regexp: ".*intl.*" - path_regexp: ".*test262.*" - } - } - - buckets { - name: "luci.chromium.try" - builders { - name: "linux_chromium_rel_ng" - path_regexp: "include/.+\\.h" - path_regexp: "src/api\\.cc" - path_regexp: "src/message-template\\.h" - } - builders { - name: "linux_chromium_headless_rel" - path_regexp: "src/inspector/.+" - path_regexp: "test/inspector/.+" - } - builders { - name: "linux-blink-rel" - path_regexp: "src/inspector/.+" - path_regexp: "test/inspector/.+" - } - # TODO(machenbach): Uncomment path_regexp after testing, as currently, - # path_regexp can not be combined with experiment_percentage. See more - # details at crbug.com/v8/8058. - builders { - name: "cast_shell_android" - #path_regexp: "include/.+\\.h" - #path_regexp: "src/api\\.cc" - experiment_percentage: 20 - } - builders { - name: "cast_shell_linux" - #path_regexp: "include/.+\\.h" - #path_regexp: "src/api\\.cc" - experiment_percentage: 20 - } - } - } -} - diff --git a/deps/v8/infra/mb/mb_config.pyl b/deps/v8/infra/mb/mb_config.pyl index 8a6cba71e06234..cdf4b81a41eb56 100644 --- a/deps/v8/infra/mb/mb_config.pyl +++ b/deps/v8/infra/mb/mb_config.pyl @@ -89,6 +89,9 @@ 'V8 Linux64 TSAN - builder': 'release_x64_tsan', 'V8 Linux - arm64 - sim - MSAN': 'release_simulate_arm64_msan', # Misc. + 'V8 Linux gcc': 'release_x86_gcc_no_goma', + 'V8 Linux64 gcc - debug': 'debug_x64_gcc_no_goma', + # TODO(machenbach): Remove after switching. 'V8 Linux gcc 4.8': 'release_x86_gcc', 'V8 Linux64 gcc 4.8 - debug': 'debug_x64_gcc', # FYI. 
@@ -101,12 +104,10 @@ 'V8 Fuchsia - debug': 'debug_x64_fuchsia', 'V8 Linux64 - cfi': 'release_x64_cfi', 'V8 Linux64 UBSan': 'release_x64_ubsan', - 'V8 Linux64 UBSanVptr': 'release_x64_ubsan_vptr', 'V8 Linux - vtunejit': 'debug_x86_vtunejit', 'V8 Linux64 - gcov coverage': 'release_x64_gcc_coverage', 'V8 Linux - predictable': 'release_x86_predictable', 'V8 Linux - full debug': 'full_debug_x86', - 'V8 Linux - interpreted regexp': 'release_x86_interpreted_regexp', 'V8 Random Deopt Fuzzer - debug': 'debug_x64', }, 'client.v8.clusterfuzz': { @@ -140,8 +141,6 @@ 'V8 Clusterfuzz Linux64 TSAN - release builder': 'release_x64_tsan', 'V8 Clusterfuzz Linux64 UBSan - release builder': 'release_x64_ubsan_recover_edge', - 'V8 Clusterfuzz Linux64 UBSanVptr - release builder': - 'release_x64_ubsan_vptr_recover_edge', }, 'client.v8.ports': { # Arm. @@ -212,7 +211,8 @@ 'release_simulate_arm64_pointer_compression', 'v8_linux64_compile_rel_xg': 'release_x64_test_features_trybot', 'v8_linux64_dbg_ng': 'debug_x64_trybot', - 'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc', + 'v8_linux64_gc_stress_custom_snapshot_dbg_ng': 'debug_x64_trybot_custom', + 'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc_no_goma', 'v8_linux64_header_includes_dbg': 'debug_x64_header_includes', 'v8_linux64_fyi_rel_ng': 'release_x64_test_features_trybot', 'v8_linux64_pointer_compression_rel_ng': 'release_x64_pointer_compression', @@ -230,7 +230,6 @@ 'v8_linux64_tsan_isolates_rel_ng': 'release_x64_tsan_minimal_symbols', 'v8_linux64_ubsan_rel_ng': 'release_x64_ubsan_minimal_symbols', - 'v8_linux64_ubsan_vptr_rel_ng': 'release_x64_ubsan_vptr_minimal_symbols', 'v8_odroid_arm_rel_ng': 'release_arm', # TODO(machenbach): Remove after switching to x64 on infra side. 'v8_win_dbg': 'debug_x86_trybot', @@ -465,12 +464,6 @@ 'release_bot', 'x64', 'ubsan', 'minimal_symbols'], 'release_x64_ubsan_recover_edge': [ 'release_bot', 'x64', 'edge', 'ubsan_recover'], - 'release_x64_ubsan_vptr': [ - 'release_bot', 'x64', 'ubsan_vptr'], - 'release_x64_ubsan_vptr_minimal_symbols': [ - 'release_bot', 'x64', 'ubsan_vptr', 'minimal_symbols'], - 'release_x64_ubsan_vptr_recover_edge': [ - 'release_bot', 'x64', 'edge', 'ubsan_vptr_recover'], 'release_x64_shared_verify_heap': [ 'release_bot', 'x64', 'shared', 'v8_verify_heap'], 'release_x64_verify_csa': [ @@ -491,6 +484,9 @@ 'debug_bot', 'x64', 'fuchsia'], 'debug_x64_gcc': [ 'debug_bot', 'x64', 'gcc'], + 'debug_x64_gcc_no_goma': [ + 'debug', 'shared', 'v8_enable_slow_dchecks', 'v8_optimized_debug', 'x64', + 'gcc', 'no_goma'], 'debug_x64_header_includes': [ 'debug_bot', 'x64', 'v8_check_header_includes'], 'debug_x64_jumbo': [ @@ -501,6 +497,8 @@ 'debug_bot', 'x64', 'minimal_symbols'], 'debug_x64_trybot': [ 'debug_trybot', 'x64'], + 'debug_x64_trybot_custom': [ + 'debug_trybot', 'x64', 'v8_snapshot_custom'], # Debug configs for x86. 'debug_x86': [ @@ -532,14 +530,15 @@ 'release_trybot', 'x86', 'v8_no_enable_embedded_builtins'], 'release_x86_gcc': [ 'release_bot', 'x86', 'gcc'], + # TODO(machenbach): Enable back goma once supported. 
'release_x86_gcc_minimal_symbols': [ - 'release_bot', 'x86', 'gcc', 'minimal_symbols'], + 'release', 'static', 'x86', 'gcc', 'minimal_symbols', 'no_goma'], + 'release_x86_gcc_no_goma': [ + 'release', 'static', 'x86', 'gcc', 'no_goma'], 'release_x86_gcmole': [ 'release_bot', 'x86', 'gcmole'], 'release_x86_gcmole_trybot': [ 'release_trybot', 'x86', 'gcmole'], - 'release_x86_interpreted_regexp': [ - 'release_bot', 'x86', 'v8_interpreted_regexp'], 'release_x86_minimal_symbols': [ 'release_bot', 'x86', 'minimal_symbols'], 'release_x86_no_i18n_trybot': [ @@ -705,6 +704,10 @@ 'gn_args': 'use_custom_libcxx=false', }, + 'no_goma': { + 'gn_args': 'use_goma=false', + }, + 'no_sysroot': { 'gn_args': 'use_sysroot=false', }, @@ -778,20 +781,6 @@ 'gn_args': 'is_ubsan=true is_ubsan_no_recover=false', }, - 'ubsan_vptr': { - 'mixins': ['v8_enable_test_features'], - # TODO(krasin): Remove is_ubsan_no_recover=true when - # https://llvm.org/bugs/show_bug.cgi?id=25569 is fixed and just use - # ubsan_vptr instead. - 'gn_args': 'is_ubsan_vptr=true is_ubsan_no_recover=true', - }, - - 'ubsan_vptr_recover': { - 'mixins': ['v8_enable_test_features'], - # Ubsan vptr with recovery. - 'gn_args': 'is_ubsan_vptr=true is_ubsan_no_recover=false', - }, - 'v8_check_header_includes': { 'gn_args': 'v8_check_header_includes=true', }, @@ -833,10 +822,6 @@ 'gn_args': 'v8_optimized_debug=false', }, - 'v8_interpreted_regexp': { - 'gn_args': 'v8_interpreted_regexp=true', - }, - 'v8_optimized_debug': { # This is the default in gn for debug. }, diff --git a/deps/v8/infra/testing/OWNERS b/deps/v8/infra/testing/OWNERS index f0129f758ec386..c8693c972c7225 100644 --- a/deps/v8/infra/testing/OWNERS +++ b/deps/v8/infra/testing/OWNERS @@ -1,4 +1,5 @@ set noparent machenbach@chromium.org -sergiyb@chromium.org \ No newline at end of file +sergiyb@chromium.org +tmrts@chromium.org \ No newline at end of file diff --git a/deps/v8/infra/testing/PRESUBMIT.py b/deps/v8/infra/testing/PRESUBMIT.py index d8047bc244c1ac..b8e059724e0805 100644 --- a/deps/v8/infra/testing/PRESUBMIT.py +++ b/deps/v8/infra/testing/PRESUBMIT.py @@ -11,6 +11,10 @@ import ast import os +try: + basestring # Python 2 +except NameError: # Python 3 + basestring = str SUPPORTED_BUILDER_SPEC_KEYS = [ 'swarming_dimensions', diff --git a/deps/v8/infra/testing/builders.pyl b/deps/v8/infra/testing/builders.pyl index f15358405ca8f2..86349a48a486c3 100644 --- a/deps/v8/infra/testing/builders.pyl +++ b/deps/v8/infra/testing/builders.pyl @@ -41,8 +41,9 @@ 'tests': [ {'name': 'benchmarks', 'variant': 'default'}, {'name': 'v8testing', 'variant': 'default', 'shards': 4}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 4}, {'name': 'mozilla', 'variant': 'default'}, - {'name': 'test262_variants', 'variant': 'default', 'shards': 6}, + {'name': 'test262_variants', 'variant': 'default', 'shards': 7}, ], }, ############################################################################## @@ -70,6 +71,9 @@ ], }, 'v8_linux_gcc_rel': { + 'swarming_dimensions' : { + 'os': 'Ubuntu-16.04', + }, 'tests': [ {'name': 'v8testing'}, ], @@ -103,7 +107,7 @@ }, 'v8_linux_nosnap_rel': { 'tests': [ - {'name': 'v8testing', 'variant': 'default', 'shards': 4}, + {'name': 'v8testing', 'variant': 'default', 'shards': 6}, ], }, 'v8_linux_nosnap_dbg': { @@ -202,6 +206,7 @@ {'name': 'test262'}, {'name': 'v8testing', 'shards': 7}, {'name': 'v8testing', 'variant': 'extra', 'shards': 3}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 2}, ], }, 'v8_linux_arm_lite_rel_ng_triggered': { @@ -215,7 +220,8 @@ {'name': 
'mozilla', 'shards': 2}, {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 8}, - {'name': 'v8testing', 'variant': 'extra', 'shards': 3}, + {'name': 'v8testing', 'variant': 'extra', 'shards': 6}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 2}, ], }, ############################################################################## @@ -254,6 +260,15 @@ {'name': 'v8testing', 'variant': 'minor_mc', 'shards': 1}, ], }, + 'v8_linux64_gc_stress_custom_snapshot_dbg_ng_triggered': { + 'tests': [ + { + 'name': 'mjsunit', + 'test_args': ['--gc-stress', '--no-harness'], + 'shards': 3, + }, + ], + }, 'v8_linux64_fyi_rel_ng_triggered': { 'tests': [ # Stress sampling. @@ -329,11 +344,6 @@ {'name': 'v8testing', 'shards': 2}, ], }, - 'v8_linux64_ubsan_vptr_rel_ng_triggered': { - 'tests': [ - {'name': 'v8testing', 'shards': 2}, - ], - }, 'v8_linux64_verify_csa_rel_ng_triggered': { 'tests': [ {'name': 'v8testing', 'shards': 2}, @@ -348,6 +358,7 @@ {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 10}, {'name': 'v8testing', 'variant': 'extra', 'shards': 6}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 3}, ], }, 'v8_linux_arm64_gc_stress_dbg': { @@ -362,6 +373,7 @@ {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 9}, {'name': 'v8testing', 'variant': 'extra', 'shards': 6}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 3}, ], }, 'v8_linux64_arm64_pointer_compression_rel_ng_triggered': { @@ -419,6 +431,7 @@ 'os': 'Windows-7-SP1', }, 'tests': [ + {'name': 'mozilla'}, {'name': 'test262'}, {'name': 'v8testing', 'shards': 2}, ], @@ -462,6 +475,7 @@ 'os': 'Windows-7-SP1', }, 'tests': [ + {'name': 'mozilla'}, {'name': 'test262'}, {'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra'}, @@ -485,7 +499,7 @@ }, 'tests': [ {'name': 'mozilla'}, - {'name': 'test262'}, + {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'variant': 'extra', 'shards': 2}, ], @@ -669,16 +683,6 @@ }, ], }, - 'V8 Linux - interpreted regexp': { - 'swarming_task_attrs': { - 'expiration': 14400, - 'hard_timeout': 3600, - 'priority': 35, - }, - 'tests': [ - {'name': 'v8testing'}, - ], - }, 'V8 Linux - noi18n - debug': { 'tests': [ {'name': 'mozilla', 'variant': 'default'}, @@ -694,7 +698,7 @@ }, 'tests': [ {'name': 'mozilla', 'variant': 'default'}, - {'name': 'test262', 'variant': 'default', 'shards': 3}, + {'name': 'test262', 'variant': 'default', 'shards': 4}, {'name': 'v8testing', 'variant': 'default', 'shards': 3}, ], }, @@ -727,7 +731,19 @@ {'name': 'v8testing'}, ], }, + 'V8 Linux gcc': { + 'swarming_dimensions' : { + 'os': 'Ubuntu-16.04', + }, + 'tests': [ + {'name': 'v8testing'}, + ], + }, + # TODO(machenbach): Remove after switching. 
'V8 Linux gcc 4.8': { + 'swarming_dimensions' : { + 'os': 'Ubuntu-16.04', + }, 'tests': [ {'name': 'v8testing'}, ], @@ -929,11 +945,6 @@ {'name': 'v8testing', 'variant': 'extra'}, ], }, - 'V8 Linux64 UBSanVptr': { - 'tests': [ - {'name': 'v8testing'}, - ], - }, 'V8 Mac64': { 'swarming_dimensions': { 'cpu': 'x86-64', @@ -941,7 +952,7 @@ }, 'tests': [ {'name': 'mozilla'}, - {'name': 'test262'}, + {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra'}, ], @@ -1057,8 +1068,9 @@ }, 'tests': [ {'name': 'mozilla', 'variant': 'default'}, - {'name': 'test262', 'variant': 'default', 'shards': 5}, + {'name': 'test262', 'variant': 'default', 'shards': 6}, {'name': 'v8testing', 'variant': 'default', 'shards': 3}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 3}, ], }, 'V8 Arm': { @@ -1193,6 +1205,7 @@ {'name': 'test262'}, {'name': 'v8testing', 'shards': 4}, {'name': 'v8testing', 'variant': 'extra'}, + {'name': 'v8testing', 'variant': 'trusted'}, # Armv8-a. { 'name': 'mozilla', @@ -1227,7 +1240,8 @@ {'name': 'mozilla'}, {'name': 'test262'}, {'name': 'v8testing', 'shards': 8}, - {'name': 'v8testing', 'variant': 'extra', 'shards': 3}, + {'name': 'v8testing', 'variant': 'extra', 'shards': 4}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 2}, # Armv8-a. { 'name': 'mozilla', @@ -1284,6 +1298,7 @@ {'name': 'test262'}, {'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'variant': 'extra'}, + {'name': 'v8testing', 'variant': 'trusted'}, ], }, 'V8 Linux - arm64 - sim - debug': { @@ -1297,6 +1312,7 @@ {'name': 'test262', 'shards': 2}, {'name': 'v8testing', 'shards': 10}, {'name': 'v8testing', 'variant': 'extra', 'shards': 6}, + {'name': 'v8testing', 'variant': 'trusted', 'shards': 2}, ], }, 'V8 Linux - arm64 - sim - gc stress': { diff --git a/deps/v8/src/DEPS b/deps/v8/src/DEPS index f8190e8fd9d896..74c48a6dddab58 100644 --- a/deps/v8/src/DEPS +++ b/deps/v8/src/DEPS @@ -8,12 +8,14 @@ include_rules = [ "+src/compiler/code-assembler.h", "+src/compiler/wasm-compiler.h", "-src/heap", + "+src/heap/embedder-tracing.h", "+src/heap/factory.h", "+src/heap/factory-inl.h", "+src/heap/heap.h", "+src/heap/heap-inl.h", "+src/heap/heap-write-barrier-inl.h", "+src/heap/heap-write-barrier.h", + "+src/heap/read-only-heap.h", "-src/inspector", "-src/interpreter", "+src/interpreter/bytecode-array-accessor.h", diff --git a/deps/v8/src/accessors.cc b/deps/v8/src/accessors.cc index a368bbd81ead80..bf8db184f40406 100644 --- a/deps/v8/src/accessors.cc +++ b/deps/v8/src/accessors.cc @@ -9,6 +9,7 @@ #include "src/counters.h" #include "src/deoptimizer.h" #include "src/execution.h" +#include "src/field-index-inl.h" #include "src/frames-inl.h" #include "src/heap/factory.h" #include "src/isolate-inl.h" @@ -796,7 +797,8 @@ MaybeHandle ClearInternalStackTrace(Isolate* isolate, isolate, Object::SetProperty( isolate, error, isolate->factory()->stack_trace_symbol(), - isolate->factory()->undefined_value(), LanguageMode::kStrict), + isolate->factory()->undefined_value(), StoreOrigin::kMaybeKeyed, + Just(ShouldThrow::kThrowOnError)), JSReceiver); return error; } diff --git a/deps/v8/src/allocation.cc b/deps/v8/src/allocation.cc index 4be8fb408463b4..09d07920b38819 100644 --- a/deps/v8/src/allocation.cc +++ b/deps/v8/src/allocation.cc @@ -64,7 +64,7 @@ class PageAllocatorInitializer { }; DEFINE_LAZY_LEAKY_OBJECT_GETTER(PageAllocatorInitializer, - GetPageTableInitializer); + GetPageTableInitializer) // We will attempt allocation this many times. 
After each failure, we call // OnCriticalMemoryPressure to try to free some memory. diff --git a/deps/v8/src/api-arguments-inl.h b/deps/v8/src/api-arguments-inl.h index 1e2e9ed8073c12..7f83708b969d67 100644 --- a/deps/v8/src/api-arguments-inl.h +++ b/deps/v8/src/api-arguments-inl.h @@ -17,6 +17,17 @@ namespace v8 { namespace internal { +void Object::VerifyApiCallResultType() { +#if DEBUG + if (IsSmi()) return; + DCHECK(IsHeapObject()); + if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() || + IsBigInt() || IsUndefined() || IsTrue() || IsFalse() || IsNull())) { + FATAL("API call returned invalid object"); + } +#endif // DEBUG +} + CustomArgumentsBase::CustomArgumentsBase(Isolate* isolate) : Relocatable(isolate) {} diff --git a/deps/v8/src/api-arguments.cc b/deps/v8/src/api-arguments.cc index b706050b3064d8..76e821cad7a2d7 100644 --- a/deps/v8/src/api-arguments.cc +++ b/deps/v8/src/api-arguments.cc @@ -9,17 +9,19 @@ namespace v8 { namespace internal { -PropertyCallbackArguments::PropertyCallbackArguments(Isolate* isolate, - Object data, Object self, - JSObject holder, - ShouldThrow should_throw) +PropertyCallbackArguments::PropertyCallbackArguments( + Isolate* isolate, Object data, Object self, JSObject holder, + Maybe should_throw) : Super(isolate) { slot_at(T::kThisIndex).store(self); slot_at(T::kHolderIndex).store(holder); slot_at(T::kDataIndex).store(data); slot_at(T::kIsolateIndex).store(Object(reinterpret_cast
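The accessors.cc and api-arguments changes above replace bare ShouldThrow/LanguageMode arguments with a Maybe<ShouldThrow>, where an empty Maybe means "infer the mode" (Internals::kInferShouldThrowMode). The following is a minimal sketch of that pattern using only the public v8::Maybe helpers; ThrowMode and ResolveThrowMode are invented names for illustration, not V8 API.

#include <v8.h>

// ThrowMode stands in for the internal ShouldThrow enum in this sketch.
enum class ThrowMode { kDontThrow, kThrowOnError };

// Nothing<ThrowMode>() means "not specified here, fall back to the inferred
// mode"; Just(...) pins the behaviour explicitly, as Just(kThrowOnError)
// does in the hunks above.
ThrowMode ResolveThrowMode(v8::Maybe<ThrowMode> requested, ThrowMode inferred) {
  return requested.IsJust() ? requested.FromJust() : inferred;
}

// Usage:
//   ResolveThrowMode(v8::Just(ThrowMode::kThrowOnError), inferred);  // explicit
//   ResolveThrowMode(v8::Nothing<ThrowMode>(), inferred);            // infer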
(isolate))); - slot_at(T::kShouldThrowOnErrorIndex) - .store(Smi::FromInt(should_throw == kThrowOnError ? 1 : 0)); + int value = Internals::kInferShouldThrowMode; + if (should_throw.IsJust()) { + value = should_throw.FromJust(); + } + slot_at(T::kShouldThrowOnErrorIndex).store(Smi::FromInt(value)); // Here the hole is set as default value. // It cannot escape into js as it's removed in Call below. diff --git a/deps/v8/src/api-arguments.h b/deps/v8/src/api-arguments.h index 6b025bdbb358b1..4f1ea8c85a5fbd 100644 --- a/deps/v8/src/api-arguments.h +++ b/deps/v8/src/api-arguments.h @@ -72,7 +72,7 @@ class PropertyCallbackArguments static const int kShouldThrowOnErrorIndex = T::kShouldThrowOnErrorIndex; PropertyCallbackArguments(Isolate* isolate, Object data, Object self, - JSObject holder, ShouldThrow should_throw); + JSObject holder, Maybe should_throw); // ------------------------------------------------------------------------- // Accessor Callbacks diff --git a/deps/v8/src/api-inl.h b/deps/v8/src/api-inl.h index 024dc88537cc6f..9ccb9e4a6a2522 100644 --- a/deps/v8/src/api-inl.h +++ b/deps/v8/src/api-inl.h @@ -94,11 +94,11 @@ MAKE_TO_LOCAL(AccessorSignatureToLocal, FunctionTemplateInfo, AccessorSignature) MAKE_TO_LOCAL(MessageToLocal, Object, Message) MAKE_TO_LOCAL(PromiseToLocal, JSObject, Promise) MAKE_TO_LOCAL(StackTraceToLocal, FixedArray, StackTrace) -MAKE_TO_LOCAL(StackFrameToLocal, StackFrameInfo, StackFrame) +MAKE_TO_LOCAL(StackFrameToLocal, StackTraceFrame, StackFrame) MAKE_TO_LOCAL(NumberToLocal, Object, Number) MAKE_TO_LOCAL(IntegerToLocal, Object, Integer) MAKE_TO_LOCAL(Uint32ToLocal, Object, Uint32) -MAKE_TO_LOCAL(ToLocal, BigInt, BigInt); +MAKE_TO_LOCAL(ToLocal, BigInt, BigInt) MAKE_TO_LOCAL(ExternalToLocal, JSObject, External) MAKE_TO_LOCAL(CallableToLocal, JSReceiver, Function) MAKE_TO_LOCAL(ToLocalPrimitive, Object, Primitive) diff --git a/deps/v8/src/api-natives.cc b/deps/v8/src/api-natives.cc index d0088bbf1c8f93..2e34595ab5989a 100644 --- a/deps/v8/src/api-natives.cc +++ b/deps/v8/src/api-natives.cc @@ -115,8 +115,9 @@ MaybeHandle DefineDataProperty(Isolate* isolate, } #endif - MAYBE_RETURN_NULL(Object::AddDataProperty( - &it, value, attributes, kThrowOnError, StoreOrigin::kNamed)); + MAYBE_RETURN_NULL(Object::AddDataProperty(&it, value, attributes, + Just(ShouldThrow::kThrowOnError), + StoreOrigin::kNamed)); return value; } diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 85306decd7d764..360bb7840757e6 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -45,6 +45,7 @@ #include "src/gdb-jit.h" #include "src/global-handles.h" #include "src/globals.h" +#include "src/heap/heap-inl.h" #include "src/icu_util.h" #include "src/isolate-inl.h" #include "src/json-parser.h" @@ -55,6 +56,7 @@ #include "src/objects/api-callbacks.h" #include "src/objects/embedder-data-array-inl.h" #include "src/objects/embedder-data-slot-inl.h" +#include "src/objects/frame-array-inl.h" #include "src/objects/hash-table-inl.h" #include "src/objects/heap-object.h" #include "src/objects/js-array-inl.h" @@ -251,11 +253,9 @@ class InternalEscapableScope : public v8::EscapableHandleScope { // TODO(jochen): This should be #ifdef DEBUG #ifdef V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY -void CheckMicrotasksScopesConsistency(i::Isolate* isolate) { - auto handle_scope_implementer = isolate->handle_scope_implementer(); - auto* microtask_queue = isolate->default_microtask_queue(); - if (handle_scope_implementer->microtasks_policy() == - v8::MicrotasksPolicy::kScoped) { +void 
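On the embedder side, the should-throw slot stored above surfaces as PropertyCallbackInfo::ShouldThrowOnError(). A hedged sketch of a named-property setter interceptor consulting it; MySetter and InstallInterceptor are invented names, while the callback signatures and ShouldThrowOnError() are public V8 API.

#include <v8.h>

void MySetter(v8::Local<v8::Name> name, v8::Local<v8::Value> value,
              const v8::PropertyCallbackInfo<v8::Value>& info) {
  v8::Isolate* isolate = info.GetIsolate();
  bool stored = false;  // pretend the store was rejected for the example
  if (!stored && info.ShouldThrowOnError()) {
    isolate->ThrowException(v8::Exception::TypeError(
        v8::String::NewFromUtf8(isolate, "store rejected",
                                v8::NewStringType::kNormal)
            .ToLocalChecked()));
  }
  // A real interceptor would call info.GetReturnValue().Set(value) once the
  // store has been handled; in sloppy mode the failure above stays silent.
}

void InstallInterceptor(v8::Local<v8::ObjectTemplate> templ) {
  templ->SetHandler(v8::NamedPropertyHandlerConfiguration(nullptr, MySetter));
}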
CheckMicrotasksScopesConsistency(i::MicrotaskQueue* microtask_queue) { + if (microtask_queue && + microtask_queue->microtasks_policy() == v8::MicrotasksPolicy::kScoped) { DCHECK(microtask_queue->GetMicrotasksScopeDepth() || !microtask_queue->DebugMicrotasksScopeDepthIsZero()); } @@ -292,15 +292,19 @@ class CallDepthScope { if (do_callback) isolate_->FireBeforeCallEnteredCallback(); } ~CallDepthScope() { + i::MicrotaskQueue* microtask_queue = isolate_->default_microtask_queue(); if (!context_.IsEmpty()) { i::HandleScopeImplementer* impl = isolate_->handle_scope_implementer(); isolate_->set_context(impl->RestoreContext()); + + i::Handle env = Utils::OpenHandle(*context_); + microtask_queue = env->native_context()->microtask_queue(); } if (!escaped_) isolate_->handle_scope_implementer()->DecrementCallDepth(); - if (do_callback) isolate_->FireCallCompletedCallback(); + if (do_callback) isolate_->FireCallCompletedCallback(microtask_queue); // TODO(jochen): This should be #ifdef DEBUG #ifdef V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY - if (do_callback) CheckMicrotasksScopesConsistency(isolate_); + if (do_callback) CheckMicrotasksScopesConsistency(microtask_queue); #endif isolate_->set_next_v8_call_is_safe_for_termination(safe_for_termination_); } @@ -312,7 +316,7 @@ class CallDepthScope { handle_scope_implementer->DecrementCallDepth(); bool clear_exception = handle_scope_implementer->CallDepthIsZero() && - isolate_->thread_local_top()->try_catch_handler() == nullptr; + isolate_->thread_local_top()->try_catch_handler_ == nullptr; isolate_->OptionalRescheduleException(clear_exception); } @@ -580,7 +584,7 @@ SnapshotCreator::SnapshotCreator(Isolate* isolate, internal_isolate->set_snapshot_blob(blob); i::Snapshot::Initialize(internal_isolate); } else { - internal_isolate->Init(nullptr); + internal_isolate->InitWithoutSnapshot(); } data_ = data; } @@ -894,16 +898,18 @@ void V8::SetFlagsFromCommandLine(int* argc, char** argv, bool remove_flags) { RegisteredExtension* RegisteredExtension::first_extension_ = nullptr; -RegisteredExtension::RegisteredExtension(Extension* extension) - : extension_(extension) { } +RegisteredExtension::RegisteredExtension(std::unique_ptr extension) + : extension_(std::move(extension)) {} - -void RegisteredExtension::Register(RegisteredExtension* that) { - that->next_ = first_extension_; - first_extension_ = that; +// static +void RegisteredExtension::Register(std::unique_ptr extension) { + RegisteredExtension* new_extension = + new RegisteredExtension(std::move(extension)); + new_extension->next_ = first_extension_; + first_extension_ = new_extension; } - +// static void RegisteredExtension::UnregisterAll() { RegisteredExtension* re = first_extension_; while (re != nullptr) { @@ -930,12 +936,10 @@ class ExtensionResource : public String::ExternalOneByteStringResource { }; } // anonymous namespace -void RegisterExtension(Extension* that) { - RegisteredExtension* extension = new RegisteredExtension(that); - RegisteredExtension::Register(extension); +void RegisterExtension(std::unique_ptr extension) { + RegisteredExtension::Register(std::move(extension)); } - Extension::Extension(const char* name, const char* source, int dep_count, @@ -964,7 +968,6 @@ void ResourceConstraints::ConfigureDefaults(uint64_t physical_memory, set_max_semi_space_size_in_kb( i::Heap::ComputeMaxSemiSpaceSize(physical_memory)); set_max_old_space_size(i::Heap::ComputeMaxOldGenerationSize(physical_memory)); - set_max_zone_pool_size(i::AccountingAllocator::kMaxPoolSize); if (virtual_memory_limit > 0 && 
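RegisterExtension above now takes ownership through std::unique_ptr. A small sketch of the registration call; the extension name and source are invented for the example.

#include <memory>
#include <v8.h>

// Registers a pure-script extension; V8 now owns the Extension object.
void RegisterHelpersExtension() {
  v8::RegisterExtension(std::make_unique<v8::Extension>(
      "my/helpers", "function twice(x) { return x * 2; }"));
}

// Contexts opt in by name:
//   const char* names[] = {"my/helpers"};
//   v8::ExtensionConfiguration config(1, names);
//   v8::Local<v8::Context> context = v8::Context::New(isolate, &config);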
i::kRequiresCodeRange) { // Reserve no more than 1/8 of the memory for the code range, but at most @@ -980,12 +983,10 @@ void SetResourceConstraints(i::Isolate* isolate, size_t semi_space_size = constraints.max_semi_space_size_in_kb(); size_t old_space_size = constraints.max_old_space_size(); size_t code_range_size = constraints.code_range_size(); - size_t max_pool_size = constraints.max_zone_pool_size(); if (semi_space_size != 0 || old_space_size != 0 || code_range_size != 0) { isolate->heap()->ConfigureHeap(semi_space_size, old_space_size, code_range_size); } - isolate->allocator()->ConfigureSegmentPool(max_pool_size); if (constraints.stack_limit() != nullptr) { uintptr_t limit = reinterpret_cast(constraints.stack_limit()); @@ -1004,11 +1005,33 @@ i::Address* V8::GlobalizeReference(i::Isolate* isolate, i::Address* obj) { return result.location(); } -i::Address* V8::CopyPersistent(i::Address* obj) { - i::Handle result = i::GlobalHandles::CopyGlobal(obj); +i::Address* V8::GlobalizeTracedReference(i::Isolate* isolate, i::Address* obj, + internal::Address* slot) { + LOG_API(isolate, TracedGlobal, New); + i::Handle result = + isolate->global_handles()->CreateTraced(*obj, slot); +#ifdef VERIFY_HEAP + if (i::FLAG_verify_heap) { + i::Object(*obj)->ObjectVerify(isolate); + } +#endif // VERIFY_HEAP + return result.location(); +} + +i::Address* V8::CopyGlobalReference(i::Address* from) { + i::Handle result = i::GlobalHandles::CopyGlobal(from); return result.location(); } +void V8::MoveGlobalReference(internal::Address** from, internal::Address** to) { + i::GlobalHandles::MoveGlobal(from, to); +} + +void V8::MoveTracedGlobalReference(internal::Address** from, + internal::Address** to) { + i::GlobalHandles::MoveTracedGlobal(from, to); +} + void V8::RegisterExternallyReferencedObject(i::Address* location, i::Isolate* isolate) { isolate->heap()->RegisterExternallyReferencedObject(location); @@ -1036,6 +1059,17 @@ void V8::DisposeGlobal(i::Address* location) { i::GlobalHandles::Destroy(location); } +void V8::DisposeTracedGlobal(internal::Address* location) { + i::GlobalHandles::DestroyTraced(location); +} + +void V8::SetFinalizationCallbackTraced( + internal::Address* location, void* parameter, + WeakCallbackInfo::Callback callback) { + i::GlobalHandles::SetFinalizationCallbackForTraced(location, parameter, + callback); +} + Value* V8::Eternalize(Isolate* v8_isolate, Value* value) { i::Isolate* isolate = reinterpret_cast(v8_isolate); i::Object object = *Utils::OpenHandle(value); @@ -2037,7 +2071,11 @@ void ScriptCompiler::ExternalSourceStream::ResetToBookmark() { UNREACHABLE(); } ScriptCompiler::StreamedSource::StreamedSource(ExternalSourceStream* stream, Encoding encoding) - : impl_(new i::ScriptStreamingData(stream, encoding)) {} + : StreamedSource(std::unique_ptr(stream), encoding) {} + +ScriptCompiler::StreamedSource::StreamedSource( + std::unique_ptr stream, Encoding encoding) + : impl_(new i::ScriptStreamingData(std::move(stream), encoding)) {} ScriptCompiler::StreamedSource::~StreamedSource() = default; @@ -2901,8 +2939,8 @@ Local StackTrace::GetFrame(Isolate* v8_isolate, ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate); EscapableHandleScope scope(v8_isolate); auto obj = handle(Utils::OpenHandle(this)->get(index), isolate); - auto info = i::Handle::cast(obj); - return scope.Escape(Utils::StackFrameToLocal(info)); + auto frame = i::Handle::cast(obj); + return scope.Escape(Utils::StackFrameToLocal(frame)); } int StackTrace::GetFrameCount() const { @@ -2925,29 +2963,26 @@ Local 
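The GlobalizeReference/CopyGlobalReference/MoveGlobalReference entry points above are the plumbing behind the public persistent handle types such as v8::Global (and the new traced variants). A minimal sketch with v8::Global; MakeGlobal and UseGlobal are invented helpers.

#include <v8.h>

// Construction goes through GlobalizeReference; returning by value relies on
// the move support that MoveGlobalReference provides.
v8::Global<v8::Object> MakeGlobal(v8::Isolate* isolate,
                                  v8::Local<v8::Object> obj) {
  return v8::Global<v8::Object>(isolate, obj);
}

void UseGlobal(v8::Isolate* isolate, v8::Local<v8::Object> obj) {
  v8::Global<v8::Object> owner = MakeGlobal(isolate, obj);
  v8::HandleScope scope(isolate);
  v8::Local<v8::Object> again = owner.Get(isolate);  // re-materialize a Local
  (void)again;
  owner.Reset();  // releases the slot (DisposeGlobal)
}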
StackTrace::CurrentStackTrace( // --- S t a c k F r a m e --- int StackFrame::GetLineNumber() const { - int v = Utils::OpenHandle(this)->line_number(); - return v ? v : Message::kNoLineNumberInfo; + return i::StackTraceFrame::GetLineNumber(Utils::OpenHandle(this)); } int StackFrame::GetColumn() const { - int v = Utils::OpenHandle(this)->column_number(); - return v ? v : Message::kNoLineNumberInfo; + return i::StackTraceFrame::GetColumnNumber(Utils::OpenHandle(this)); } int StackFrame::GetScriptId() const { - int v = Utils::OpenHandle(this)->script_id(); - return v ? v : Message::kNoScriptIdInfo; + return i::StackTraceFrame::GetScriptId(Utils::OpenHandle(this)); } Local StackFrame::GetScriptName() const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); EscapableHandleScope scope(reinterpret_cast(isolate)); - i::Handle self = Utils::OpenHandle(this); - i::Handle obj(self->script_name(), isolate); - return obj->IsString() - ? scope.Escape(Local::Cast(Utils::ToLocal(obj))) + i::Handle name = + i::StackTraceFrame::GetFileName(Utils::OpenHandle(this)); + return name->IsString() + ? scope.Escape(Local::Cast(Utils::ToLocal(name))) : Local(); } @@ -2955,10 +2990,10 @@ Local StackFrame::GetScriptName() const { Local StackFrame::GetScriptNameOrSourceURL() const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); EscapableHandleScope scope(reinterpret_cast(isolate)); - i::Handle self = Utils::OpenHandle(this); - i::Handle obj(self->script_name_or_source_url(), isolate); - return obj->IsString() - ? scope.Escape(Local::Cast(Utils::ToLocal(obj))) + i::Handle name = + i::StackTraceFrame::GetScriptNameOrSourceUrl(Utils::OpenHandle(this)); + return name->IsString() + ? scope.Escape(Local::Cast(Utils::ToLocal(name))) : Local(); } @@ -2966,21 +3001,24 @@ Local StackFrame::GetScriptNameOrSourceURL() const { Local StackFrame::GetFunctionName() const { i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); EscapableHandleScope scope(reinterpret_cast(isolate)); - i::Handle self = Utils::OpenHandle(this); - i::Handle obj(self->function_name(), isolate); - return obj->IsString() - ? scope.Escape(Local::Cast(Utils::ToLocal(obj))) + i::Handle name = + i::StackTraceFrame::GetFunctionName(Utils::OpenHandle(this)); + return name->IsString() + ? 
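StackFrame data is now read through i::StackTraceFrame above, but the public StackTrace/StackFrame API is unchanged. A sketch of capturing and inspecting the top frame; DumpTopFrame is an invented helper.

#include <v8.h>

void DumpTopFrame(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::StackTrace> trace = v8::StackTrace::CurrentStackTrace(
      isolate, /*frame_limit=*/5, v8::StackTrace::kDetailed);
  if (trace->GetFrameCount() == 0) return;
  v8::Local<v8::StackFrame> frame = trace->GetFrame(isolate, 0);
  int line = frame->GetLineNumber();  // Message::kNoLineNumberInfo if absent
  int column = frame->GetColumn();
  v8::Local<v8::String> name = frame->GetScriptName();  // may be empty
  (void)line;
  (void)column;
  if (!name.IsEmpty()) {
    // ... log the script name, line and column ...
  }
}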
scope.Escape(Local::Cast(Utils::ToLocal(name))) : Local(); } -bool StackFrame::IsEval() const { return Utils::OpenHandle(this)->is_eval(); } +bool StackFrame::IsEval() const { + return i::StackTraceFrame::IsEval(Utils::OpenHandle(this)); +} bool StackFrame::IsConstructor() const { - return Utils::OpenHandle(this)->is_constructor(); + return i::StackTraceFrame::IsConstructor(Utils::OpenHandle(this)); } -bool StackFrame::IsWasm() const { return Utils::OpenHandle(this)->is_wasm(); } - +bool StackFrame::IsWasm() const { + return i::StackTraceFrame::IsWasm(Utils::OpenHandle(this)); +} // --- J S O N --- @@ -3615,6 +3653,11 @@ i::Isolate* i::IsolateFromNeverReadOnlySpaceObject(i::Address obj) { i::HeapObject::cast(i::Object(obj))); } +bool i::ShouldThrowOnError(i::Isolate* isolate) { + return i::GetShouldThrow(isolate, Nothing()) == + i::ShouldThrow::kThrowOnError; +} + void i::Internals::CheckInitializedImpl(v8::Isolate* external_isolate) { i::Isolate* isolate = reinterpret_cast(external_isolate); Utils::ApiCheck(isolate != nullptr && !isolate->IsDead(), @@ -3993,8 +4036,8 @@ Maybe v8::Object::Set(v8::Local context, auto value_obj = Utils::OpenHandle(*value); has_pending_exception = i::Runtime::SetObjectProperty(isolate, self, key_obj, value_obj, - i::LanguageMode::kSloppy, - i::StoreOrigin::kMaybeKeyed) + i::StoreOrigin::kMaybeKeyed, + Just(i::ShouldThrow::kDontThrow)) .is_null(); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return Just(true); @@ -4014,7 +4057,7 @@ Maybe v8::Object::Set(v8::Local context, uint32_t index, auto self = Utils::OpenHandle(this); auto value_obj = Utils::OpenHandle(*value); has_pending_exception = i::Object::SetElement(isolate, self, index, value_obj, - i::LanguageMode::kSloppy) + i::ShouldThrow::kDontThrow) .is_null(); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return Just(true); @@ -4038,7 +4081,7 @@ Maybe v8::Object::CreateDataProperty(v8::Local context, i::Handle value_obj = Utils::OpenHandle(*value); Maybe result = i::JSReceiver::CreateDataProperty( - isolate, self, key_obj, value_obj, i::kDontThrow); + isolate, self, key_obj, value_obj, Just(i::kDontThrow)); has_pending_exception = result.IsNothing(); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return result; @@ -4056,7 +4099,7 @@ Maybe v8::Object::CreateDataProperty(v8::Local context, i::LookupIterator it(isolate, self, index, self, i::LookupIterator::OWN); Maybe result = - i::JSReceiver::CreateDataProperty(&it, value_obj, i::kDontThrow); + i::JSReceiver::CreateDataProperty(&it, value_obj, Just(i::kDontThrow)); has_pending_exception = result.IsNothing(); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return result; @@ -4174,7 +4217,7 @@ Maybe v8::Object::DefineOwnProperty(v8::Local context, ENTER_V8(isolate, context, Object, DefineOwnProperty, Nothing(), i::HandleScope); Maybe success = i::JSReceiver::DefineOwnProperty( - isolate, self, key_obj, &desc, i::kDontThrow); + isolate, self, key_obj, &desc, Just(i::kDontThrow)); // Even though we said kDontThrow, there might be accessors that do throw. 
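The Just(kDontThrow) plumbing above is internal; on the public side Object::Set and CreateDataProperty still report the outcome through Maybe<bool>. A small sketch; StoreAnswer and the "answer" key are invented.

#include <v8.h>

bool StoreAnswer(v8::Local<v8::Context> context, v8::Local<v8::Object> obj) {
  v8::Isolate* isolate = context->GetIsolate();
  v8::Local<v8::String> key =
      v8::String::NewFromUtf8(isolate, "answer", v8::NewStringType::kNormal)
          .ToLocalChecked();
  v8::Maybe<bool> result =
      obj->CreateDataProperty(context, key, v8::Integer::New(isolate, 42));
  // IsNothing() means an exception is pending; FromJust() is the outcome.
  return !result.IsNothing() && result.FromJust();
}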
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return success; @@ -4184,7 +4227,7 @@ Maybe v8::Object::DefineOwnProperty(v8::Local context, ENTER_V8_NO_SCRIPT(isolate, context, Object, DefineOwnProperty, Nothing(), i::HandleScope); Maybe success = i::JSReceiver::DefineOwnProperty( - isolate, self, key_obj, &desc, i::kDontThrow); + isolate, self, key_obj, &desc, Just(i::kDontThrow)); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return success; } @@ -4200,7 +4243,8 @@ Maybe v8::Object::DefineProperty(v8::Local context, i::Handle key_obj = Utils::OpenHandle(*key); Maybe success = i::JSReceiver::DefineOwnProperty( - isolate, self, key_obj, &descriptor.get_private()->desc, i::kDontThrow); + isolate, self, key_obj, &descriptor.get_private()->desc, + Just(i::kDontThrow)); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); return success; } @@ -4221,7 +4265,7 @@ Maybe v8::Object::SetPrivate(Local context, Local key, desc.set_value(value_obj); return i::JSProxy::SetPrivateSymbol( isolate, i::Handle::cast(self), - i::Handle::cast(key_obj), &desc, i::kDontThrow); + i::Handle::cast(key_obj), &desc, Just(i::kDontThrow)); } auto js_object = i::Handle::cast(self); i::LookupIterator it(js_object, key_obj, js_object); @@ -4386,22 +4430,11 @@ MaybeLocal v8::Object::GetPropertyNames( RETURN_ESCAPED(Utils::ToLocal(result)); } - -Local v8::Object::GetPropertyNames() { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - RETURN_TO_LOCAL_UNCHECKED(GetPropertyNames(context), Array); -} - MaybeLocal v8::Object::GetOwnPropertyNames(Local context) { return GetOwnPropertyNames( context, static_cast(ONLY_ENUMERABLE | SKIP_SYMBOLS)); } -Local v8::Object::GetOwnPropertyNames() { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - RETURN_TO_LOCAL_UNCHECKED(GetOwnPropertyNames(context), Array); -} - MaybeLocal v8::Object::GetOwnPropertyNames( Local context, PropertyFilter filter, KeyConversionMode key_conversion) { @@ -4663,12 +4696,6 @@ Maybe v8::Object::HasRealNamedProperty(Local context, } -bool v8::Object::HasRealNamedProperty(Local key) { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - return HasRealNamedProperty(context, key).FromMaybe(false); -} - - Maybe v8::Object::HasRealIndexedProperty(Local context, uint32_t index) { auto isolate = reinterpret_cast(context->GetIsolate()); @@ -4683,13 +4710,6 @@ Maybe v8::Object::HasRealIndexedProperty(Local context, return result; } - -bool v8::Object::HasRealIndexedProperty(uint32_t index) { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - return HasRealIndexedProperty(context, index).FromMaybe(false); -} - - Maybe v8::Object::HasRealNamedCallbackProperty(Local context, Local key) { auto isolate = reinterpret_cast(context->GetIsolate()); @@ -4705,13 +4725,6 @@ Maybe v8::Object::HasRealNamedCallbackProperty(Local context, return result; } - -bool v8::Object::HasRealNamedCallbackProperty(Local key) { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - return HasRealNamedCallbackProperty(context, key).FromMaybe(false); -} - - bool v8::Object::HasNamedLookupInterceptor() { auto self = Utils::OpenHandle(this); return self->IsJSObject() && @@ -4823,7 +4836,8 @@ Local v8::Object::Clone() { Local v8::Object::CreationContext() { auto self = Utils::OpenHandle(this); - return Utils::ToLocal(self->GetCreationContext()); + i::Handle context = self->GetCreationContext(); + return Utils::ToLocal(context); } @@ -4897,14 +4911,6 @@ MaybeLocal 
Function::New(Local context, return templ->GetFunction(context); } - -Local Function::New(Isolate* v8_isolate, FunctionCallback callback, - Local data, int length) { - return Function::New(v8_isolate->GetCurrentContext(), callback, data, length, - ConstructorBehavior::kAllow) - .FromMaybe(Local()); -} - MaybeLocal Function::NewInstance(Local context, int argc, v8::Local argv[]) const { return NewInstanceWithSideEffectType(context, argc, argv, @@ -4980,14 +4986,6 @@ MaybeLocal Function::Call(Local context, RETURN_ESCAPED(result); } - -Local Function::Call(v8::Local recv, int argc, - v8::Local argv[]) { - auto context = ContextFromNeverReadOnlySpaceObject(Utils::OpenHandle(this)); - RETURN_TO_LOCAL_UNCHECKED(Call(context, recv, argc, argv), Value); -} - - void Function::SetName(v8::Local name) { auto self = Utils::OpenHandle(this); if (!self->IsJSFunction()) return; @@ -5147,7 +5145,7 @@ int String::Length() const { bool String::IsOneByte() const { i::Handle str = Utils::OpenHandle(this); - return str->HasOnlyOneByteChars(); + return str->IsOneByteRepresentation(); } @@ -5260,7 +5258,7 @@ class ContainsOnlyOneByteHelper { bool String::ContainsOnlyOneByte() const { i::Handle str = Utils::OpenHandle(this); - if (str->HasOnlyOneByteChars()) return true; + if (str->IsOneByteRepresentation()) return true; ContainsOnlyOneByteHelper helper; return helper.Check(*str); } @@ -5588,7 +5586,7 @@ Local Symbol::Name() const { i::Handle sym = Utils::OpenHandle(this); i::Isolate* isolate; - if (!i::Isolate::FromWritableHeapObject(*sym, &isolate)) { + if (!i::GetIsolateFromWritableObject(*sym, &isolate)) { // If the Symbol is in RO_SPACE, then its name must be too. Since RO_SPACE // objects are immovable we can use the Handle(Address*) constructor with // the address of the name field in the Symbol object without needing an @@ -5784,10 +5782,6 @@ bool TryHandleWebAssemblyTrapWindows(EXCEPTION_POINTERS* exception) { } #endif -bool V8::RegisterDefaultSignalHandler() { - return v8::internal::trap_handler::RegisterDefaultTrapHandler(); -} - bool V8::EnableWebAssemblyTrapHandler(bool use_v8_signal_handler) { return v8::internal::trap_handler::EnableTrapHandler(use_v8_signal_handler); } @@ -5875,10 +5869,11 @@ struct InvokeBootstrapper { i::Isolate* isolate, i::MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer) { + v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { return isolate->bootstrapper()->CreateEnvironment( maybe_global_proxy, global_proxy_template, extensions, - context_snapshot_index, embedder_fields_deserializer); + context_snapshot_index, embedder_fields_deserializer, microtask_queue); } }; @@ -5888,7 +5883,8 @@ struct InvokeBootstrapper { i::Isolate* isolate, i::MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer) { + v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { USE(extensions); USE(context_snapshot_index); return isolate->bootstrapper()->NewRemoteContext(maybe_global_proxy, @@ -5901,7 +5897,8 @@ static i::Handle CreateEnvironment( i::Isolate* isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal maybe_global_template, v8::MaybeLocal maybe_global_proxy, size_t 
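The context-less Function::Call overload removed above (like the other removed overloads nearby) is replaced by the context-taking form that returns a MaybeLocal. Sketch of the migration; CallNoArgs is an invented helper.

#include <v8.h>

v8::MaybeLocal<v8::Value> CallNoArgs(v8::Local<v8::Context> context,
                                     v8::Local<v8::Function> fn,
                                     v8::Local<v8::Value> recv) {
  // Old (removed): fn->Call(recv, 0, nullptr);
  // New: pass the context; an empty MaybeLocal signals a pending exception.
  return fn->Call(context, recv, 0, nullptr);
}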
context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer) { + v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { i::Handle result; { @@ -5977,9 +5974,9 @@ static i::Handle CreateEnvironment( } // Create the environment. InvokeBootstrapper invoke; - result = - invoke.Invoke(isolate, maybe_proxy, proxy_template, extensions, - context_snapshot_index, embedder_fields_deserializer); + result = invoke.Invoke(isolate, maybe_proxy, proxy_template, extensions, + context_snapshot_index, embedder_fields_deserializer, + microtask_queue); // Restore the access check info and interceptors on the global template. if (!maybe_global_template.IsEmpty()) { @@ -6005,7 +6002,8 @@ Local NewContext( v8::Isolate* external_isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal global_template, v8::MaybeLocal global_object, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer) { + v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { i::Isolate* isolate = reinterpret_cast(external_isolate); // TODO(jkummerow): This is for crbug.com/713699. Remove it if it doesn't // fail. @@ -6019,7 +6017,7 @@ Local NewContext( if (extensions == nullptr) extensions = &no_extensions; i::Handle env = CreateEnvironment( isolate, extensions, global_template, global_object, - context_snapshot_index, embedder_fields_deserializer); + context_snapshot_index, embedder_fields_deserializer, microtask_queue); if (env.is_null()) { if (isolate->has_pending_exception()) isolate->clear_pending_exception(); return Local(); @@ -6031,15 +6029,18 @@ Local v8::Context::New( v8::Isolate* external_isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal global_template, v8::MaybeLocal global_object, - DeserializeInternalFieldsCallback internal_fields_deserializer) { + DeserializeInternalFieldsCallback internal_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { return NewContext(external_isolate, extensions, global_template, - global_object, 0, internal_fields_deserializer); + global_object, 0, internal_fields_deserializer, + microtask_queue); } MaybeLocal v8::Context::FromSnapshot( v8::Isolate* external_isolate, size_t context_snapshot_index, v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, - v8::ExtensionConfiguration* extensions, MaybeLocal global_object) { + v8::ExtensionConfiguration* extensions, MaybeLocal global_object, + v8::MicrotaskQueue* microtask_queue) { size_t index_including_default_context = context_snapshot_index + 1; if (!i::Snapshot::HasContextSnapshot( reinterpret_cast(external_isolate), @@ -6048,7 +6049,7 @@ MaybeLocal v8::Context::FromSnapshot( } return NewContext(external_isolate, extensions, MaybeLocal(), global_object, index_including_default_context, - embedder_fields_deserializer); + embedder_fields_deserializer, microtask_queue); } MaybeLocal v8::Context::NewRemoteContext( @@ -6069,9 +6070,9 @@ MaybeLocal v8::Context::NewRemoteContext( "v8::Context::NewRemoteContext", "Global template needs to have access check handlers."); i::Handle global_proxy = - CreateEnvironment(isolate, nullptr, global_template, - global_object, 0, - DeserializeInternalFieldsCallback()); + CreateEnvironment( + isolate, nullptr, global_template, global_object, 0, + DeserializeInternalFieldsCallback(), nullptr); if (global_proxy.is_null()) { if (isolate->has_pending_exception()) isolate->clear_pending_exception(); 
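Context::New and Context::FromSnapshot above gained a trailing MicrotaskQueue* so a context can be bound to a non-default queue. A sketch that spells out the intermediate default arguments; NewContextWithQueue is an invented helper.

#include <memory>
#include <v8.h>

v8::Local<v8::Context> NewContextWithQueue(v8::Isolate* isolate,
                                           v8::MicrotaskQueue* queue) {
  return v8::Context::New(isolate,
                          /*extensions=*/nullptr,
                          v8::MaybeLocal<v8::ObjectTemplate>(),
                          v8::MaybeLocal<v8::Value>(),
                          v8::DeserializeInternalFieldsCallback(),
                          queue);
}

// Usage:
//   std::unique_ptr<v8::MicrotaskQueue> queue = v8::MicrotaskQueue::New(isolate);
//   v8::Local<v8::Context> context = NewContextWithQueue(isolate, queue.get());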
return MaybeLocal(); @@ -6198,14 +6199,6 @@ MaybeLocal ObjectTemplate::NewInstance(Local context) { RETURN_ESCAPED(result); } - -Local ObjectTemplate::NewInstance() { - Local context = - reinterpret_cast(Utils::OpenHandle(this)->GetIsolate()) - ->GetCurrentContext(); - RETURN_TO_LOCAL_UNCHECKED(NewInstance(context), Object); -} - void v8::ObjectTemplate::CheckCast(Data* that) { i::Handle obj = Utils::OpenHandle(that); Utils::ApiCheck(obj->IsObjectTemplateInfo(), "v8::ObjectTemplate::Cast", @@ -6240,14 +6233,6 @@ MaybeLocal FunctionTemplate::GetFunction(Local context) { RETURN_ESCAPED(result); } - -Local FunctionTemplate::GetFunction() { - Local context = - reinterpret_cast(Utils::OpenHandle(this)->GetIsolate()) - ->GetCurrentContext(); - RETURN_TO_LOCAL_UNCHECKED(GetFunction(context), Function); -} - MaybeLocal FunctionTemplate::NewRemoteInstance() { auto self = Utils::OpenHandle(this); i::Isolate* isolate = self->GetIsolate(); @@ -6508,10 +6493,10 @@ bool v8::String::MakeExternal(v8::String::ExternalStringResource* resource) { return false; } - // It is safe to call FromWritable because SupportsExternalization already - // checked that the object is writable. + // It is safe to call GetIsolateFromWritableHeapObject because + // SupportsExternalization already checked that the object is writable. i::Isolate* isolate; - i::Isolate::FromWritableHeapObject(obj, &isolate); + i::GetIsolateFromWritableObject(obj, &isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate); CHECK(resource && resource->data()); @@ -6537,17 +6522,16 @@ bool v8::String::MakeExternal( return false; } - // It is safe to call FromWritable because SupportsExternalization already - // checked that the object is writable. + // It is safe to call GetIsolateFromWritableHeapObject because + // SupportsExternalization already checked that the object is writable. i::Isolate* isolate; - i::Isolate::FromWritableHeapObject(obj, &isolate); + i::GetIsolateFromWritableObject(obj, &isolate); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate); CHECK(resource && resource->data()); bool result = obj->MakeExternal(resource); - DCHECK(result); - DCHECK(obj->IsExternalString()); + DCHECK_IMPLIES(result, obj->IsExternalString()); return result; } @@ -6565,7 +6549,7 @@ bool v8::String::CanMakeExternal() { } // Only old space strings should be externalized. - return !i::Heap::InNewSpace(obj); + return !i::Heap::InYoungGeneration(obj); } bool v8::String::StringEquals(Local that) { @@ -6774,23 +6758,25 @@ double v8::Date::ValueOf() const { return jsdate->value()->Number(); } +// Assert that the static TimeZoneDetection cast in +// DateTimeConfigurationChangeNotification is valid. 
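String::MakeExternal above may now legitimately return false (the DCHECK became DCHECK_IMPLIES), and CanMakeExternal rejects strings still in the young generation, so embedders should check both results. A sketch; SameCharsResource and TryExternalize are invented, and the resource must wrap the same characters the string already holds.

#include <cstring>
#include <v8.h>

class SameCharsResource : public v8::String::ExternalOneByteStringResource {
 public:
  // `chars` must outlive the resource and match the string's current contents.
  explicit SameCharsResource(const char* chars)
      : chars_(chars), length_(std::strlen(chars)) {}
  const char* data() const override { return chars_; }
  size_t length() const override { return length_; }

 private:
  const char* chars_;
  size_t length_;
};

bool TryExternalize(v8::Local<v8::String> str, const char* chars) {
  if (!str->CanMakeExternal()) return false;     // e.g. still in the young generation
  auto* resource = new SameCharsResource(chars);
  if (str->MakeExternal(resource)) return true;  // V8 now owns the resource
  delete resource;                               // not adopted; clean up ourselves
  return false;
}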
+#define TIME_ZONE_DETECTION_ASSERT_EQ(value) \ + STATIC_ASSERT( \ + static_cast(v8::Isolate::TimeZoneDetection::value) == \ + static_cast(base::TimezoneCache::TimeZoneDetection::value)); \ + STATIC_ASSERT(static_cast(v8::Isolate::TimeZoneDetection::value) == \ + static_cast(v8::Date::TimeZoneDetection::value)); +TIME_ZONE_DETECTION_ASSERT_EQ(kSkip) +TIME_ZONE_DETECTION_ASSERT_EQ(kRedetect) +#undef TIME_ZONE_DETECTION_ASSERT_EQ -void v8::Date::DateTimeConfigurationChangeNotification(Isolate* isolate) { - i::Isolate* i_isolate = reinterpret_cast(isolate); - LOG_API(i_isolate, Date, DateTimeConfigurationChangeNotification); - ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - i_isolate->date_cache()->ResetDateCache(); -#ifdef V8_INTL_SUPPORT - i_isolate->clear_cached_icu_object( - i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormat); - i_isolate->clear_cached_icu_object( - i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormatForTime); - i_isolate->clear_cached_icu_object( - i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormatForDate); -#endif // V8_INTL_SUPPORT +// static +void v8::Date::DateTimeConfigurationChangeNotification( + Isolate* isolate, TimeZoneDetection time_zone_detection) { + isolate->DateTimeConfigurationChangeNotification( + static_cast(time_zone_detection)); } - MaybeLocal v8::RegExp::New(Local context, Local pattern, Flags flags) { PREPARE_FOR_EXECUTION(context, RegExp, New, RegExp); @@ -7301,10 +7287,6 @@ MemorySpan CompiledWasmModule::GetWireBytesRef() { return {bytes_vec.start(), bytes_vec.size()}; } -WasmModuleObject::BufferReference WasmModuleObject::GetWasmWireBytesRef() { - return GetCompiledModule().GetWireBytesRef(); -} - WasmModuleObject::TransferrableModule WasmModuleObject::GetTransferrableModule() { if (i::FLAG_wasm_shared_code) { @@ -7346,12 +7328,6 @@ MaybeLocal WasmModuleObject::FromTransferrableModule( } } -WasmModuleObject::SerializedModule WasmModuleObject::Serialize() { - // TODO(clemensh): Deprecated; remove after M-73 branch. - OwnedBuffer serialized = GetCompiledModule().Serialize(); - return {std::move(serialized.buffer), serialized.size}; -} - MaybeLocal WasmModuleObject::Deserialize( Isolate* isolate, MemorySpan serialized_module, MemorySpan wire_bytes) { @@ -7701,6 +7677,27 @@ Local DataView::New(Local shared_array_buffer, return Utils::ToLocal(obj); } +namespace { +i::Handle SetupSharedArrayBuffer( + Isolate* isolate, void* data, size_t byte_length, + ArrayBufferCreationMode mode) { + CHECK(i::FLAG_harmony_sharedarraybuffer); + // Embedders must guarantee that the external backing store is valid. 
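Date::DateTimeConfigurationChangeNotification above is now a thin static wrapper over the Isolate-level method and takes a TimeZoneDetection hint (the STATIC_ASSERTs keep the three enums in sync). Sketch of the embedder call after a host time-zone change; OnHostTimeZoneChanged is an invented name.

#include <v8.h>

void OnHostTimeZoneChanged(v8::Isolate* isolate) {
  // kSkip only drops the date/ICU caches; kRedetect additionally asks the
  // platform layer to re-detect the current time zone.
  isolate->DateTimeConfigurationChangeNotification(
      v8::Isolate::TimeZoneDetection::kRedetect);
}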
+ CHECK(byte_length == 0 || data != nullptr); + i::Isolate* i_isolate = reinterpret_cast(isolate); + LOG_API(i_isolate, SharedArrayBuffer, New); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + i::Handle obj = + i_isolate->factory()->NewJSArrayBuffer(i::SharedFlag::kShared); + bool is_wasm_memory = + i_isolate->wasm_engine()->memory_tracker()->IsWasmMemory(data); + i::JSArrayBuffer::Setup(obj, i_isolate, + mode == ArrayBufferCreationMode::kExternalized, data, + byte_length, i::SharedFlag::kShared, is_wasm_memory); + return obj; +} + +} // namespace bool v8::SharedArrayBuffer::IsExternal() const { return Utils::OpenHandle(this)->is_external(); @@ -7723,14 +7720,15 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::Externalize() { v8::SharedArrayBuffer::Contents::Contents( void* data, size_t byte_length, void* allocation_base, size_t allocation_length, Allocator::AllocationMode allocation_mode, - DeleterCallback deleter, void* deleter_data) + DeleterCallback deleter, void* deleter_data, bool is_growable) : data_(data), byte_length_(byte_length), allocation_base_(allocation_base), allocation_length_(allocation_length), allocation_mode_(allocation_mode), deleter_(deleter), - deleter_data_(deleter_data) { + deleter_data_(deleter_data), + is_growable_(is_growable) { DCHECK_LE(allocation_base_, data_); DCHECK_LE(byte_length_, allocation_length_); } @@ -7748,7 +7746,8 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::GetContents() { : reinterpret_cast(ArrayBufferDeleter), self->is_wasm_memory() ? static_cast(self->GetIsolate()->wasm_engine()) - : static_cast(self->GetIsolate()->array_buffer_allocator())); + : static_cast(self->GetIsolate()->array_buffer_allocator()), + self->is_growable()); return contents; } @@ -7778,22 +7777,19 @@ Local v8::SharedArrayBuffer::New(Isolate* isolate, Local v8::SharedArrayBuffer::New( Isolate* isolate, void* data, size_t byte_length, ArrayBufferCreationMode mode) { - CHECK(i::FLAG_harmony_sharedarraybuffer); - // Embedders must guarantee that the external backing store is valid. 
- CHECK(byte_length == 0 || data != nullptr); - i::Isolate* i_isolate = reinterpret_cast(isolate); - LOG_API(i_isolate, SharedArrayBuffer, New); - ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - i::Handle obj = - i_isolate->factory()->NewJSArrayBuffer(i::SharedFlag::kShared); - bool is_wasm_memory = - i_isolate->wasm_engine()->memory_tracker()->IsWasmMemory(data); - i::JSArrayBuffer::Setup(obj, i_isolate, - mode == ArrayBufferCreationMode::kExternalized, data, - byte_length, i::SharedFlag::kShared, is_wasm_memory); - return Utils::ToLocalShared(obj); + i::Handle buffer = + SetupSharedArrayBuffer(isolate, data, byte_length, mode); + return Utils::ToLocalShared(buffer); } +Local v8::SharedArrayBuffer::New( + Isolate* isolate, const SharedArrayBuffer::Contents& contents, + ArrayBufferCreationMode mode) { + i::Handle buffer = SetupSharedArrayBuffer( + isolate, contents.Data(), contents.ByteLength(), mode); + buffer->set_is_growable(contents.IsGrowable()); + return Utils::ToLocalShared(buffer); +} Local v8::Symbol::New(Isolate* isolate, Local name) { i::Isolate* i_isolate = reinterpret_cast(isolate); @@ -8194,7 +8190,7 @@ void Isolate::Initialize(Isolate* isolate, } base::ElapsedTimer timer; if (i::FLAG_profile_deserialization) timer.Start(); - i_isolate->Init(nullptr); + i_isolate->InitWithoutSnapshot(); if (i::FLAG_profile_deserialization) { double ms = timer.Elapsed().InMillisecondsF(); i::PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms); @@ -8328,17 +8324,16 @@ Isolate::AllowJavascriptExecutionScope::~AllowJavascriptExecutionScope() { delete reinterpret_cast(internal_dump_); } - Isolate::SuppressMicrotaskExecutionScope::SuppressMicrotaskExecutionScope( Isolate* isolate) - : isolate_(reinterpret_cast(isolate)) { + : isolate_(reinterpret_cast(isolate)), + microtask_queue_(isolate_->default_microtask_queue()) { isolate_->handle_scope_implementer()->IncrementCallDepth(); - isolate_->default_microtask_queue()->IncrementMicrotasksSuppressions(); + microtask_queue_->IncrementMicrotasksSuppressions(); } - Isolate::SuppressMicrotaskExecutionScope::~SuppressMicrotaskExecutionScope() { - isolate_->default_microtask_queue()->DecrementMicrotasksSuppressions(); + microtask_queue_->DecrementMicrotasksSuppressions(); isolate_->handle_scope_implementer()->DecrementCallDepth(); } @@ -8536,11 +8531,14 @@ void Isolate::RunMicrotasks() { isolate->default_microtask_queue()->RunMicrotasks(isolate); } -void Isolate::EnqueueMicrotask(Local function) { +void Isolate::EnqueueMicrotask(Local v8_function) { i::Isolate* isolate = reinterpret_cast(this); - i::Handle microtask = isolate->factory()->NewCallableTask( - Utils::OpenHandle(*function), isolate->native_context()); - isolate->default_microtask_queue()->EnqueueMicrotask(*microtask); + i::Handle function = Utils::OpenHandle(*v8_function); + i::Handle handler_context; + if (!i::JSReceiver::GetContextForMicrotask(function).ToHandle( + &handler_context)) + handler_context = isolate->native_context(); + handler_context->microtask_queue()->EnqueueMicrotask(this, v8_function); } void Isolate::EnqueueMicrotask(MicrotaskCallback callback, void* data) { @@ -8555,32 +8553,54 @@ void Isolate::EnqueueMicrotask(MicrotaskCallback callback, void* data) { void Isolate::SetMicrotasksPolicy(MicrotasksPolicy policy) { i::Isolate* isolate = reinterpret_cast(this); - isolate->handle_scope_implementer()->set_microtasks_policy(policy); + isolate->default_microtask_queue()->set_microtasks_policy(policy); } MicrotasksPolicy Isolate::GetMicrotasksPolicy() const { i::Isolate* 
isolate = reinterpret_cast(const_cast(this)); - return isolate->handle_scope_implementer()->microtasks_policy(); + return isolate->default_microtask_queue()->microtasks_policy(); } +namespace { + +void MicrotasksCompletedCallbackAdapter(v8::Isolate* isolate, void* data) { + auto callback = reinterpret_cast(data); + callback(isolate); +} + +} // namespace void Isolate::AddMicrotasksCompletedCallback( MicrotasksCompletedCallback callback) { DCHECK(callback); i::Isolate* isolate = reinterpret_cast(this); - isolate->default_microtask_queue()->AddMicrotasksCompletedCallback(callback); + isolate->default_microtask_queue()->AddMicrotasksCompletedCallback( + &MicrotasksCompletedCallbackAdapter, reinterpret_cast(callback)); } +void Isolate::AddMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data) { + DCHECK(callback); + i::Isolate* isolate = reinterpret_cast(this); + isolate->default_microtask_queue()->AddMicrotasksCompletedCallback(callback, + data); +} void Isolate::RemoveMicrotasksCompletedCallback( MicrotasksCompletedCallback callback) { i::Isolate* isolate = reinterpret_cast(this); isolate->default_microtask_queue()->RemoveMicrotasksCompletedCallback( - callback); + &MicrotasksCompletedCallbackAdapter, reinterpret_cast(callback)); } +void Isolate::RemoveMicrotasksCompletedCallback( + MicrotasksCompletedCallbackWithData callback, void* data) { + i::Isolate* isolate = reinterpret_cast(this); + isolate->default_microtask_queue()->RemoveMicrotasksCompletedCallback( + callback, data); +} void Isolate::SetUseCounterCallback(UseCounterCallback callback) { reinterpret_cast(this)->SetUseCounterCallback(callback); @@ -8666,7 +8686,6 @@ void Isolate::MemoryPressureNotification(MemoryPressureLevel level) { ? isolate->thread_manager()->IsLockedByCurrentThread() : i::ThreadId::Current().Equals(isolate->thread_id()); isolate->heap()->MemoryPressureNotification(level, on_isolate_thread); - isolate->allocator()->MemoryPressureNotification(level); } void Isolate::EnableMemorySavingsMode() { @@ -8753,9 +8772,6 @@ CALLBACK_SETTER(AllowWasmCodeGenerationCallback, CALLBACK_SETTER(WasmModuleCallback, ExtensionCallback, wasm_module_callback) CALLBACK_SETTER(WasmInstanceCallback, ExtensionCallback, wasm_instance_callback) -CALLBACK_SETTER(WasmCompileStreamingCallback, ApiImplementationCallback, - wasm_compile_streaming_callback) - CALLBACK_SETTER(WasmStreamingCallback, WasmStreamingCallback, wasm_streaming_callback) @@ -8865,14 +8881,14 @@ void Isolate::VisitHandlesForPartialDependence( PersistentHandleVisitor* visitor) { i::Isolate* isolate = reinterpret_cast(this); i::DisallowHeapAllocation no_allocation; - isolate->global_handles()->IterateAllRootsInNewSpaceWithClassIds(visitor); + isolate->global_handles()->IterateAllYoungRootsWithClassIds(visitor); } void Isolate::VisitWeakHandles(PersistentHandleVisitor* visitor) { i::Isolate* isolate = reinterpret_cast(this); i::DisallowHeapAllocation no_allocation; - isolate->global_handles()->IterateWeakRootsInNewSpaceWithClassIds(visitor); + isolate->global_handles()->IterateYoungWeakRootsWithClassIds(visitor); } void Isolate::SetAllowAtomicsWait(bool allow) { @@ -8880,51 +8896,84 @@ void Isolate::SetAllowAtomicsWait(bool allow) { isolate->set_allow_atomics_wait(allow); } +void v8::Isolate::DateTimeConfigurationChangeNotification( + TimeZoneDetection time_zone_detection) { + i::Isolate* i_isolate = reinterpret_cast(this); + LOG_API(i_isolate, Isolate, DateTimeConfigurationChangeNotification); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + 
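The microtasks policy and the completed callbacks above now live on the (default) MicrotaskQueue internally, and a data-carrying callback variant was added. A sketch of the Isolate-level usage, which is unchanged for embedders; OnMicrotasksDone and the tag pointer are invented.

#include <v8.h>

// Runs after the queue finishes a round of microtasks; `data` is the pointer
// registered below.
void OnMicrotasksDone(v8::Isolate* isolate, void* data) {}

void SetUpMicrotasks(v8::Isolate* isolate, void* tag) {
  isolate->SetMicrotasksPolicy(v8::MicrotasksPolicy::kExplicit);
  isolate->AddMicrotasksCompletedCallback(OnMicrotasksDone, tag);
  // ... enqueue callable or callback microtasks, then drain explicitly:
  isolate->RunMicrotasks();
}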
i_isolate->date_cache()->ResetDateCache( + static_cast(time_zone_detection)); +#ifdef V8_INTL_SUPPORT + i_isolate->clear_cached_icu_object( + i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormat); + i_isolate->clear_cached_icu_object( + i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormatForTime); + i_isolate->clear_cached_icu_object( + i::Isolate::ICUObjectCacheType::kDefaultSimpleDateFormatForDate); +#endif // V8_INTL_SUPPORT +} + +void v8::Isolate::LocaleConfigurationChangeNotification() { + i::Isolate* i_isolate = reinterpret_cast(this); + LOG_API(i_isolate, Isolate, LocaleConfigurationChangeNotification); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + +#ifdef V8_INTL_SUPPORT + i_isolate->ResetDefaultLocale(); +#endif // V8_INTL_SUPPORT +} + +// static +std::unique_ptr MicrotaskQueue::New(Isolate* isolate) { + return i::MicrotaskQueue::New(reinterpret_cast(isolate)); +} + MicrotasksScope::MicrotasksScope(Isolate* isolate, MicrotasksScope::Type type) + : MicrotasksScope( + isolate, + reinterpret_cast(isolate)->default_microtask_queue(), + type) {} + +MicrotasksScope::MicrotasksScope(Isolate* isolate, + MicrotaskQueue* microtask_queue, + MicrotasksScope::Type type) : isolate_(reinterpret_cast(isolate)), + microtask_queue_(static_cast(microtask_queue)), run_(type == MicrotasksScope::kRunMicrotasks) { - auto* microtask_queue = isolate_->default_microtask_queue(); - if (run_) microtask_queue->IncrementMicrotasksScopeDepth(); + if (run_) microtask_queue_->IncrementMicrotasksScopeDepth(); #ifdef DEBUG - if (!run_) microtask_queue->IncrementDebugMicrotasksScopeDepth(); + if (!run_) microtask_queue_->IncrementDebugMicrotasksScopeDepth(); #endif } - MicrotasksScope::~MicrotasksScope() { - auto handle_scope_implementer = isolate_->handle_scope_implementer(); - auto* microtask_queue = isolate_->default_microtask_queue(); if (run_) { - microtask_queue->DecrementMicrotasksScopeDepth(); - if (MicrotasksPolicy::kScoped == - handle_scope_implementer->microtasks_policy()) { + microtask_queue_->DecrementMicrotasksScopeDepth(); + if (MicrotasksPolicy::kScoped == microtask_queue_->microtasks_policy()) { PerformCheckpoint(reinterpret_cast(isolate_)); } } #ifdef DEBUG - if (!run_) microtask_queue->DecrementDebugMicrotasksScopeDepth(); + if (!run_) microtask_queue_->DecrementDebugMicrotasksScopeDepth(); #endif } - -void MicrotasksScope::PerformCheckpoint(Isolate* v8Isolate) { - i::Isolate* isolate = reinterpret_cast(v8Isolate); +void MicrotasksScope::PerformCheckpoint(Isolate* v8_isolate) { + i::Isolate* isolate = reinterpret_cast(v8_isolate); auto* microtask_queue = isolate->default_microtask_queue(); - if (!microtask_queue->GetMicrotasksScopeDepth() && - !microtask_queue->HasMicrotasksSuppressions()) { - microtask_queue->RunMicrotasks(isolate); - } + microtask_queue->PerformCheckpoint(v8_isolate); } - -int MicrotasksScope::GetCurrentDepth(Isolate* v8Isolate) { - i::Isolate* isolate = reinterpret_cast(v8Isolate); - return isolate->default_microtask_queue()->GetMicrotasksScopeDepth(); +int MicrotasksScope::GetCurrentDepth(Isolate* v8_isolate) { + i::Isolate* isolate = reinterpret_cast(v8_isolate); + auto* microtask_queue = isolate->default_microtask_queue(); + return microtask_queue->GetMicrotasksScopeDepth(); } -bool MicrotasksScope::IsRunningMicrotasks(Isolate* v8Isolate) { - i::Isolate* isolate = reinterpret_cast(v8Isolate); - return isolate->default_microtask_queue()->IsRunningMicrotasks(); +bool MicrotasksScope::IsRunningMicrotasks(Isolate* v8_isolate) { + i::Isolate* isolate = 
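MicrotasksScope above gained a constructor that names the queue explicitly, and PerformCheckpoint now simply delegates to the queue's own checkpoint. A sketch of scoped draining against a non-default queue; RunWithQueue is an invented helper.

#include <v8.h>

void RunWithQueue(v8::Isolate* isolate, v8::Local<v8::Context> context,
                  v8::MicrotaskQueue* queue, v8::Local<v8::Function> fn) {
  v8::MicrotasksScope scope(isolate, queue,
                            v8::MicrotasksScope::kRunMicrotasks);
  v8::Context::Scope context_scope(context);
  v8::MaybeLocal<v8::Value> result =
      fn->Call(context, context->Global(), 0, nullptr);
  (void)result;
  // With MicrotasksPolicy::kScoped the queue is drained when `scope` unwinds;
  // otherwise MicrotasksScope::PerformCheckpoint(isolate) drains the default
  // queue at a safe point.
}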
reinterpret_cast(v8_isolate); + auto* microtask_queue = isolate->default_microtask_queue(); + return microtask_queue->IsRunningMicrotasks(); } String::Utf8Value::Utf8Value(v8::Isolate* isolate, v8::Local obj) @@ -9053,6 +9102,18 @@ MaybeLocal debug::GetInternalProperties(Isolate* v8_isolate, return Utils::ToLocal(result); } +MaybeLocal debug::GetPrivateFields(Local context, + Local value) { + PREPARE_FOR_EXECUTION(context, debug, GetPrivateFields, Array); + i::Handle val = Utils::OpenHandle(*value); + i::Handle result; + i::Isolate* internal_isolate = reinterpret_cast(isolate); + has_pending_exception = + !(internal_isolate->debug()->GetPrivateFields(val).ToHandle(&result)); + RETURN_ON_FAILED_EXECUTION(Array); + RETURN_ESCAPED(Utils::ToLocal(result)); +} + void debug::ChangeBreakOnException(Isolate* isolate, ExceptionBreakState type) { i::Isolate* internal_isolate = reinterpret_cast(isolate); internal_isolate->debug()->ChangeBreakOnException( @@ -9625,8 +9686,11 @@ debug::Location debug::GeneratorObject::SuspendedLocation() { CHECK(obj->is_suspended()); i::Object maybe_script = obj->function()->shared()->script(); if (!maybe_script->IsScript()) return debug::Location(); - i::Handle script(i::Script::cast(maybe_script), obj->GetIsolate()); + i::Isolate* isolate = obj->GetIsolate(); + i::Handle script(i::Script::cast(maybe_script), isolate); i::Script::PositionInfo info; + i::SharedFunctionInfo::EnsureSourcePositionsAvailable( + isolate, i::handle(obj->function()->shared(), isolate)); i::Script::GetPositionInfo(script, obj->source_position(), &info, i::Script::WITH_OFFSET); return debug::Location(info.line, info.column); @@ -9801,7 +9865,7 @@ debug::Coverage debug::Coverage::CollectBestEffort(Isolate* isolate) { i::Coverage::CollectBestEffort(reinterpret_cast(isolate))); } -void debug::Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) { +void debug::Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) { i::Coverage::SelectMode(reinterpret_cast(isolate), mode); } @@ -9841,7 +9905,7 @@ debug::TypeProfile debug::TypeProfile::Collect(Isolate* isolate) { } void debug::TypeProfile::SelectMode(Isolate* isolate, - debug::TypeProfile::Mode mode) { + debug::TypeProfileMode mode) { i::TypeProfile::SelectMode(reinterpret_cast(isolate), mode); } @@ -10004,13 +10068,13 @@ const CpuProfileNode* CpuProfile::GetTopDownRoot() const { const CpuProfileNode* CpuProfile::GetSample(int index) const { const i::CpuProfile* profile = reinterpret_cast(this); - return reinterpret_cast(profile->sample(index)); + return reinterpret_cast(profile->sample(index).node); } int64_t CpuProfile::GetSampleTimestamp(int index) const { const i::CpuProfile* profile = reinterpret_cast(this); - return (profile->sample_timestamp(index) - base::TimeTicks()) + return (profile->sample(index).timestamp - base::TimeTicks()) .InMicroseconds(); } @@ -10371,29 +10435,6 @@ void HeapProfiler::DeleteAllHeapSnapshots() { reinterpret_cast(this)->DeleteAllSnapshots(); } - -void HeapProfiler::SetWrapperClassInfoProvider(uint16_t class_id, - WrapperInfoCallback callback) { - reinterpret_cast(this)->DefineWrapperClass(class_id, - callback); -} - -void HeapProfiler::SetGetRetainerInfosCallback( - GetRetainerInfosCallback callback) { - reinterpret_cast(this)->SetGetRetainerInfosCallback( - callback); -} - -void HeapProfiler::SetBuildEmbedderGraphCallback( - LegacyBuildEmbedderGraphCallback callback) { - reinterpret_cast(this)->AddBuildEmbedderGraphCallback( - [](v8::Isolate* isolate, v8::EmbedderGraph* graph, void* data) { - 
reinterpret_cast(data)(isolate, - graph); - }, - reinterpret_cast(callback)); -} - void HeapProfiler::AddBuildEmbedderGraphCallback( BuildEmbedderGraphCallback callback, void* data) { reinterpret_cast(this)->AddBuildEmbedderGraphCallback( @@ -10493,6 +10534,22 @@ void EmbedderHeapTracer::GarbageCollectionForTesting( kGCCallbackFlagForced); } +void EmbedderHeapTracer::RegisterEmbedderReference( + const TracedGlobal& ref) { + if (ref.IsEmpty()) return; + + i::Heap* const heap = reinterpret_cast(isolate_)->heap(); + heap->RegisterExternallyReferencedObject( + reinterpret_cast(ref.val_)); +} + +void EmbedderHeapTracer::IterateTracedGlobalHandles( + TracedGlobalHandleVisitor* visitor) { + i::Isolate* isolate = reinterpret_cast(isolate_); + i::DisallowHeapAllocation no_allocation; + isolate->global_handles()->IterateTracedNodes(visitor); +} + namespace internal { const size_t HandleScopeImplementer::kEnteredContextsOffset = diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h index d9a0efbf2a7ec5..5491a9ece5cc01 100644 --- a/deps/v8/src/api.h +++ b/deps/v8/src/api.h @@ -7,7 +7,6 @@ #include "include/v8-testing.h" #include "src/contexts.h" -#include "src/debug/debug-interface.h" #include "src/detachable-vector.h" #include "src/heap/factory.h" #include "src/isolate.h" @@ -28,6 +27,12 @@ namespace internal { class JSArrayBufferView; } // namespace internal +namespace debug { +class GeneratorObject; +class Script; +class WeakMap; +} // namespace debug + // Constants used in the implementation of the API. The most natural thing // would usually be to place these with the classes that use them, but // we want to keep them out of v8.h because it is an externally @@ -66,15 +71,16 @@ class ApiFunction { class RegisteredExtension { public: - explicit RegisteredExtension(Extension* extension); - static void Register(RegisteredExtension* that); + static void Register(std::unique_ptr); static void UnregisterAll(); - Extension* extension() { return extension_; } - RegisteredExtension* next() { return next_; } + Extension* extension() const { return extension_.get(); } + RegisteredExtension* next() const { return next_; } static RegisteredExtension* first_extension() { return first_extension_; } private: - Extension* extension_; - RegisteredExtension* next_; + explicit RegisteredExtension(Extension*); + explicit RegisteredExtension(std::unique_ptr); + std::unique_ptr extension_; + RegisteredExtension* next_ = nullptr; static RegisteredExtension* first_extension_; }; @@ -116,7 +122,7 @@ class RegisteredExtension { V(Context, Context) \ V(External, Object) \ V(StackTrace, FixedArray) \ - V(StackFrame, StackFrameInfo) \ + V(StackFrame, StackTraceFrame) \ V(Proxy, JSProxy) \ V(debug::GeneratorObject, JSGeneratorObject) \ V(debug::Script, Script) \ @@ -207,7 +213,7 @@ class Utils { static inline Local StackTraceToLocal( v8::internal::Handle obj); static inline Local StackFrameToLocal( - v8::internal::Handle obj); + v8::internal::Handle obj); static inline Local NumberToLocal( v8::internal::Handle obj); static inline Local IntegerToLocal( @@ -355,7 +361,6 @@ class HandleScopeImplementer { : isolate_(isolate), spare_(nullptr), call_depth_(0), - microtasks_policy_(v8::MicrotasksPolicy::kAuto), last_handle_before_deferred_block_(nullptr) { } @@ -388,9 +393,6 @@ class HandleScopeImplementer { inline void EnterMicrotaskContext(Context context); - inline void set_microtasks_policy(v8::MicrotasksPolicy policy); - inline v8::MicrotasksPolicy microtasks_policy() const; - // Returns the last entered context or an empty handle 
if no // contexts have been entered. inline Handle LastEnteredContext(); @@ -459,8 +461,6 @@ class HandleScopeImplementer { Address* spare_; int call_depth_; - v8::MicrotasksPolicy microtasks_policy_; - Address* last_handle_before_deferred_block_; // This is only used for threading support. HandleScopeData handle_scope_data_; @@ -478,17 +478,6 @@ class HandleScopeImplementer { const int kHandleBlockSize = v8::internal::KB - 2; // fit in one page - -void HandleScopeImplementer::set_microtasks_policy( - v8::MicrotasksPolicy policy) { - microtasks_policy_ = policy; -} - - -v8::MicrotasksPolicy HandleScopeImplementer::microtasks_policy() const { - return microtasks_policy_; -} - void HandleScopeImplementer::SaveContext(Context context) { saved_contexts_.push_back(context); } diff --git a/deps/v8/src/arguments-inl.h b/deps/v8/src/arguments-inl.h index ad2b5ca87ccbd8..e596d44117c3ea 100644 --- a/deps/v8/src/arguments-inl.h +++ b/deps/v8/src/arguments-inl.h @@ -8,7 +8,7 @@ #include "src/arguments.h" #include "src/handles-inl.h" -#include "src/objects-inl.h" +#include "src/objects-inl.h" // TODO(jkummerow): Just smi-inl.h. namespace v8 { namespace internal { diff --git a/deps/v8/src/arm/assembler-arm-inl.h b/deps/v8/src/arm/assembler-arm-inl.h index 041c0309334097..17a38cbbfe2628 100644 --- a/deps/v8/src/arm/assembler-arm-inl.h +++ b/deps/v8/src/arm/assembler-arm-inl.h @@ -184,24 +184,6 @@ Handle Assembler::relative_code_target_object_handle_at( return GetCodeTarget(code_target_index); } -template -void RelocInfo::Visit(ObjectVisitor* visitor) { - RelocInfo::Mode mode = rmode(); - if (mode == RelocInfo::EMBEDDED_OBJECT) { - visitor->VisitEmbeddedPointer(host(), this); - } else if (RelocInfo::IsCodeTargetMode(mode)) { - visitor->VisitCodeTarget(host(), this); - } else if (mode == RelocInfo::EXTERNAL_REFERENCE) { - visitor->VisitExternalReference(host(), this); - } else if (mode == RelocInfo::INTERNAL_REFERENCE) { - visitor->VisitInternalReference(host(), this); - } else if (RelocInfo::IsRuntimeEntry(mode)) { - visitor->VisitRuntimeEntry(host(), this); - } else if (RelocInfo::IsOffHeapTarget(mode)) { - visitor->VisitOffHeapTarget(host(), this); - } -} - Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) : rmode_(rmode) { value_.immediate = immediate; } @@ -366,7 +348,7 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool, Memory
(constant_pool_entry_address(pc, constant_pool)) = target; // Intuitively, we would think it is necessary to always flush the // instruction cache after patching a target address in the code as follows: - // Assembler::FlushICache(pc, sizeof(target)); + // FlushInstructionCache(pc, sizeof(target)); // However, on ARM, no instruction is actually patched in the case // of embedded constants of the form: // ldr ip, [pp, #...] @@ -384,7 +366,7 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool, DCHECK(IsMovW(Memory(pc))); DCHECK(IsMovT(Memory(pc + kInstrSize))); if (icache_flush_mode != SKIP_ICACHE_FLUSH) { - Assembler::FlushICache(pc, 2 * kInstrSize); + FlushInstructionCache(pc, 2 * kInstrSize); } } else if (IsMovImmed(Memory(pc))) { // This is an mov / orr immediate load. Patch the immediate embedded in @@ -404,14 +386,14 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool, IsOrrImmed(Memory(pc + 2 * kInstrSize)) && IsOrrImmed(Memory(pc + 3 * kInstrSize))); if (icache_flush_mode != SKIP_ICACHE_FLUSH) { - Assembler::FlushICache(pc, 4 * kInstrSize); + FlushInstructionCache(pc, 4 * kInstrSize); } } else { intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta; Instruction* branch = Instruction::At(pc); branch->SetBranchOffset(branch_offset); if (icache_flush_mode != SKIP_ICACHE_FLUSH) { - Assembler::FlushICache(pc, kInstrSize); + FlushInstructionCache(pc, kInstrSize); } } } diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc index a994b6907dd065..ce6209c5925c60 100644 --- a/deps/v8/src/arm/assembler-arm.cc +++ b/deps/v8/src/arm/assembler-arm.cc @@ -559,7 +559,9 @@ Assembler::~Assembler() { DCHECK_EQ(const_pool_blocked_nesting_, 0); } -void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { +void Assembler::GetCode(Isolate* isolate, CodeDesc* desc, + SafepointTableBuilder* safepoint_table_builder, + int handler_table_offset) { // Emit constant pool if necessary. CheckConstPool(true, false); DCHECK(pending_32_bit_constants_.empty()); @@ -569,19 +571,27 @@ void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { AllocateAndInstallRequestedHeapObjects(isolate); // Set up code descriptor. - desc->buffer = buffer_start_; - desc->buffer_size = buffer_->size(); - desc->instr_size = pc_offset(); - desc->reloc_size = - (buffer_start_ + desc->buffer_size) - reloc_info_writer.pos(); - desc->constant_pool_size = 0; - desc->origin = this; - desc->unwinding_info_size = 0; - desc->unwinding_info = nullptr; - desc->code_comments_size = code_comments_size; + // TODO(jgruber): Reconsider how these offsets and sizes are maintained up to + // this point to make CodeDesc initialization less fiddly. + + static constexpr int kConstantPoolSize = 0; + const int instruction_size = pc_offset(); + const int code_comments_offset = instruction_size - code_comments_size; + const int constant_pool_offset = code_comments_offset - kConstantPoolSize; + const int handler_table_offset2 = (handler_table_offset == kNoHandlerTable) + ? constant_pool_offset + : handler_table_offset; + const int safepoint_table_offset = + (safepoint_table_builder == kNoSafepointTable) + ? 
handler_table_offset2 + : safepoint_table_builder->GetCodeOffset(); + const int reloc_info_offset = + static_cast(reloc_info_writer.pos() - buffer_->start()); + CodeDesc::Initialize(desc, this, safepoint_table_offset, + handler_table_offset2, constant_pool_offset, + code_comments_offset, reloc_info_offset); } - void Assembler::Align(int m) { DCHECK(m >= 4 && base::bits::IsPowerOfTwo(m)); DCHECK_EQ(pc_offset() & (kInstrSize - 1), 0); diff --git a/deps/v8/src/arm/assembler-arm.h b/deps/v8/src/arm/assembler-arm.h index 0c14a6770742b7..4528b8efb17126 100644 --- a/deps/v8/src/arm/assembler-arm.h +++ b/deps/v8/src/arm/assembler-arm.h @@ -53,6 +53,8 @@ namespace v8 { namespace internal { +class SafepointTableBuilder; + // Coprocessor number enum Coprocessor { p0 = 0, @@ -306,10 +308,17 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { pending_32_bit_constants_.clear(); } - // GetCode emits any pending (non-emitted) code and fills the descriptor - // desc. GetCode() is idempotent; it returns the same result if no other - // Assembler functions are invoked in between GetCode() calls. - void GetCode(Isolate* isolate, CodeDesc* desc); + // GetCode emits any pending (non-emitted) code and fills the descriptor desc. + static constexpr int kNoHandlerTable = 0; + static constexpr SafepointTableBuilder* kNoSafepointTable = nullptr; + void GetCode(Isolate* isolate, CodeDesc* desc, + SafepointTableBuilder* safepoint_table_builder, + int handler_table_offset); + + // Convenience wrapper for code without safepoint or handler tables. + void GetCode(Isolate* isolate, CodeDesc* desc) { + GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable); + } // Label operations & relative jumps (PPUM Appendix D) // @@ -1092,6 +1101,9 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope); }; + // Unused on this architecture. + void MaybeEmitOutOfLineConstantPool() {} + // Record a deoptimization reason that can be used by a log or cpu profiler. // Use --trace-deopt to enable. void RecordDeoptReason(DeoptimizeReason reason, SourcePosition position, diff --git a/deps/v8/src/arm/constants-arm.h b/deps/v8/src/arm/constants-arm.h index fa9791a0e00654..48eaa3484a4189 100644 --- a/deps/v8/src/arm/constants-arm.h +++ b/deps/v8/src/arm/constants-arm.h @@ -522,16 +522,16 @@ class Instruction { inline Condition ConditionField() const { return static_cast(BitField(31, 28)); } - DECLARE_STATIC_TYPED_ACCESSOR(int, ConditionValue); - DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionField); + DECLARE_STATIC_TYPED_ACCESSOR(int, ConditionValue) + DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionField) inline int TypeValue() const { return Bits(27, 25); } inline int SpecialValue() const { return Bits(27, 23); } inline int RnValue() const { return Bits(19, 16); } - DECLARE_STATIC_ACCESSOR(RnValue); + DECLARE_STATIC_ACCESSOR(RnValue) inline int RdValue() const { return Bits(15, 12); } - DECLARE_STATIC_ACCESSOR(RdValue); + DECLARE_STATIC_ACCESSOR(RdValue) inline int CoprocessorValue() const { return Bits(11, 8); } // Support for VFP. 
@@ -573,7 +573,7 @@ class Instruction { inline int SValue() const { return Bit(20); } // with register inline int RmValue() const { return Bits(3, 0); } - DECLARE_STATIC_ACCESSOR(RmValue); + DECLARE_STATIC_ACCESSOR(RmValue) inline int ShiftValue() const { return static_cast(Bits(6, 5)); } inline ShiftOp ShiftField() const { return static_cast(BitField(6, 5)); @@ -583,13 +583,13 @@ class Instruction { inline int ShiftAmountValue() const { return Bits(11, 7); } // with immediate inline int RotateValue() const { return Bits(11, 8); } - DECLARE_STATIC_ACCESSOR(RotateValue); + DECLARE_STATIC_ACCESSOR(RotateValue) inline int Immed8Value() const { return Bits(7, 0); } - DECLARE_STATIC_ACCESSOR(Immed8Value); + DECLARE_STATIC_ACCESSOR(Immed8Value) inline int Immed4Value() const { return Bits(19, 16); } inline int ImmedMovwMovtValue() const { return Immed4Value() << 12 | Offset12Value(); } - DECLARE_STATIC_ACCESSOR(ImmedMovwMovtValue); + DECLARE_STATIC_ACCESSOR(ImmedMovwMovtValue) // Fields used in Load/Store instructions inline int PUValue() const { return Bits(24, 23); } diff --git a/deps/v8/src/arm/disasm-arm.cc b/deps/v8/src/arm/disasm-arm.cc index 3f82f43e84494c..35ff085e326086 100644 --- a/deps/v8/src/arm/disasm-arm.cc +++ b/deps/v8/src/arm/disasm-arm.cc @@ -35,6 +35,7 @@ #include "src/base/bits.h" #include "src/base/platform/platform.h" #include "src/disasm.h" +#include "src/vector.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/arm/interface-descriptors-arm.cc b/deps/v8/src/arm/interface-descriptors-arm.cc index 887a1831827762..c4140bdaf91efa 100644 --- a/deps/v8/src/arm/interface-descriptors-arm.cc +++ b/deps/v8/src/arm/interface-descriptors-arm.cc @@ -99,6 +99,14 @@ void CallForwardVarargsDescriptor::InitializePlatformSpecific( data->InitializePlatformSpecific(arraysize(registers), registers); } +void CallFunctionTemplateDescriptor::InitializePlatformSpecific( + CallInterfaceDescriptorData* data) { + // r1 : function template info + // r2 : number of arguments (on the stack, not including receiver) + Register registers[] = {r1, r2}; + data->InitializePlatformSpecific(arraysize(registers), registers); +} + void CallWithSpreadDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { // r0 : number of arguments (on the stack, not including receiver) @@ -203,9 +211,10 @@ void ArgumentsAdaptorDescriptor::InitializePlatformSpecific( void ApiCallbackDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { Register registers[] = { - JavaScriptFrame::context_register(), // kTargetContext - r1, // kApiFunctionAddress - r2, // kArgc + r1, // kApiFunctionAddress + r2, // kArgc + r3, // kCallData + r0, // kHolder }; data->InitializePlatformSpecific(arraysize(registers), registers); } diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index acf96b31c2a4d9..4690fa7a1e26d3 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -18,6 +18,7 @@ #include "src/double.h" #include "src/external-reference-table.h" #include "src/frames-inl.h" +#include "src/heap/heap-inl.h" // For MemoryChunk. 
#include "src/macro-assembler.h" #include "src/objects-inl.h" #include "src/register-configuration.h" @@ -475,6 +476,22 @@ void TurboAssembler::Move(QwNeonRegister dst, QwNeonRegister src) { } } +void TurboAssembler::MovePair(Register dst0, Register src0, Register dst1, + Register src1) { + DCHECK_NE(dst0, dst1); + if (dst0 != src1) { + Move(dst0, src0); + Move(dst1, src1); + } else if (dst1 != src0) { + // Swap the order of the moves to resolve the overlap. + Move(dst1, src1); + Move(dst0, src0); + } else { + // Worse case scenario, this is a swap. + Swap(dst0, src0); + } +} + void TurboAssembler::Swap(Register srcdst0, Register srcdst1) { DCHECK(srcdst0 != srcdst1); UseScratchRegisterScope temps(this); @@ -588,41 +605,6 @@ void TurboAssembler::Bfc(Register dst, Register src, int lsb, int width, } } -void MacroAssembler::Load(Register dst, - const MemOperand& src, - Representation r) { - DCHECK(!r.IsDouble()); - if (r.IsInteger8()) { - ldrsb(dst, src); - } else if (r.IsUInteger8()) { - ldrb(dst, src); - } else if (r.IsInteger16()) { - ldrsh(dst, src); - } else if (r.IsUInteger16()) { - ldrh(dst, src); - } else { - ldr(dst, src); - } -} - -void MacroAssembler::Store(Register src, - const MemOperand& dst, - Representation r) { - DCHECK(!r.IsDouble()); - if (r.IsInteger8() || r.IsUInteger8()) { - strb(src, dst); - } else if (r.IsInteger16() || r.IsUInteger16()) { - strh(src, dst); - } else { - if (r.IsHeapObject()) { - AssertNotSmi(src); - } else if (r.IsSmi()) { - AssertSmi(src); - } - str(src, dst); - } -} - void TurboAssembler::LoadRoot(Register destination, RootIndex index, Condition cond) { ldr(destination, @@ -735,11 +717,7 @@ void TurboAssembler::CallRecordWriteStub( Register fp_mode_parameter( descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode)); - Push(object); - Push(address); - - Pop(slot_parameter); - Pop(object_parameter); + MovePair(object_parameter, object, slot_parameter, address); Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action)); Move(fp_mode_parameter, Smi::FromEnum(fp_mode)); @@ -1173,8 +1151,7 @@ void TurboAssembler::LslPair(Register dst_low, Register dst_high, Register src_low, Register src_high, uint32_t shift) { DCHECK(!AreAliased(dst_high, src_low)); - Label less_than_32; - Label done; + if (shift == 0) { Move(dst_high, src_high); Move(dst_low, src_low); @@ -1222,8 +1199,7 @@ void TurboAssembler::LsrPair(Register dst_low, Register dst_high, Register src_low, Register src_high, uint32_t shift) { DCHECK(!AreAliased(dst_low, src_high)); - Label less_than_32; - Label done; + if (shift == 32) { mov(dst_low, src_high); mov(dst_high, Operand(0)); @@ -1270,8 +1246,7 @@ void TurboAssembler::AsrPair(Register dst_low, Register dst_high, Register src_low, Register src_high, uint32_t shift) { DCHECK(!AreAliased(dst_low, src_high)); - Label less_than_32; - Label done; + if (shift == 32) { mov(dst_low, src_high); asr(dst_high, src_high, Operand(31)); @@ -1765,6 +1740,20 @@ void MacroAssembler::CompareRoot(Register obj, RootIndex index) { cmp(obj, scratch); } +void MacroAssembler::JumpIfIsInRange(Register value, unsigned lower_limit, + unsigned higher_limit, + Label* on_in_range) { + if (lower_limit != 0) { + UseScratchRegisterScope temps(this); + Register scratch = temps.Acquire(); + sub(scratch, value, Operand(lower_limit)); + cmp(scratch, Operand(higher_limit - lower_limit)); + } else { + cmp(value, Operand(higher_limit)); + } + b(ls, on_in_range); +} + void MacroAssembler::TryDoubleToInt32Exact(Register result, DwVfpRegister double_input, 
LowDwVfpRegister double_scratch) { diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h index 29fa10cfeac9cd..64aabea84de599 100644 --- a/deps/v8/src/arm/macro-assembler-arm.h +++ b/deps/v8/src/arm/macro-assembler-arm.h @@ -430,6 +430,9 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { mov(dst, src, sbit, cond); } } + // Move src0 to dst0 and src1 to dst1, handling possible overlaps. + void MovePair(Register dst0, Register src0, Register dst1, Register src1); + void Move(SwVfpRegister dst, SwVfpRegister src, Condition cond = al); void Move(DwVfpRegister dst, DwVfpRegister src, Condition cond = al); void Move(QwNeonRegister dst, QwNeonRegister src); @@ -560,36 +563,9 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void Sbfx(Register dst, Register src, int lsb, int width, Condition cond = al); - void Load(Register dst, const MemOperand& src, Representation r); - void Store(Register src, const MemOperand& dst, Representation r); - // --------------------------------------------------------------------------- // GC Support - // Check if object is in new space. Jumps if the object is not in new space. - // The register scratch can be object itself, but scratch will be clobbered. - void JumpIfNotInNewSpace(Register object, Register scratch, Label* branch) { - InNewSpace(object, scratch, eq, branch); - } - - // Check if object is in new space. Jumps if the object is in new space. - // The register scratch can be object itself, but it will be clobbered. - void JumpIfInNewSpace(Register object, Register scratch, Label* branch) { - InNewSpace(object, scratch, ne, branch); - } - - // Check if an object has a given incremental marking color. - void HasColor(Register object, Register scratch0, Register scratch1, - Label* has_color, int first_bit, int second_bit); - - void JumpIfBlack(Register object, Register scratch0, Register scratch1, - Label* on_black); - - // Checks the color of an object. If the object is white we jump to the - // incremental marker. - void JumpIfWhite(Register value, Register scratch1, Register scratch2, - Register scratch3, Label* value_is_white); - // Notify the garbage collector that we wrote a pointer into an object. // |object| is the object being stored into, |value| is the object being // stored. value and scratch registers are clobbered by the operation. @@ -709,6 +685,11 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { b(ne, if_not_equal); } + // Checks if value is in range [lower_limit, higher_limit] using a single + // comparison. + void JumpIfIsInRange(Register value, unsigned lower_limit, + unsigned higher_limit, Label* on_in_range); + // Try to convert a double to a signed 32-bit integer. // Z flag set to one and result assigned if the conversion is exact. void TryDoubleToInt32Exact(Register result, @@ -813,12 +794,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { const ParameterCount& actual, Label* done, bool* definitely_mismatches, InvokeFlag flag); - // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. - void InNewSpace(Register object, - Register scratch, - Condition cond, // eq for new space, ne otherwise. - Label* branch); - // Compute memory operands for safepoint stack slots. 
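Editorial sketch, not part of the patch: JumpIfIsInRange above uses the standard unsigned trick that lower <= value <= higher can be decided with a single comparison after subtracting the lower bound. A stand-alone check of that identity in plain C++:

#include <cassert>
#include <cstdint>

// Single-comparison range check: bias by the lower bound, then compare the
// (wrapping) unsigned result against the width of the range.
bool IsInRange(uint32_t value, uint32_t lower, uint32_t higher) {
  return (value - lower) <= (higher - lower);  // unsigned wrap-around is well defined
}

int main() {
  for (uint32_t v = 0; v <= 300; ++v) {
    const bool naive = (v >= 100 && v <= 200);
    assert(IsInRange(v, 100, 200) == naive);
  }
  // When the lower limit is 0 no bias is needed, matching the
  // `lower_limit != 0` fast path in the assembler helper.
  assert(IsInRange(0, 0, 5) && !IsInRange(6, 0, 5));
}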
static int SafepointRegisterStackIndex(int reg_code); diff --git a/deps/v8/src/arm/register-arm.h b/deps/v8/src/arm/register-arm.h index 4767e50661be7d..3584a6b19f0d1f 100644 --- a/deps/v8/src/arm/register-arm.h +++ b/deps/v8/src/arm/register-arm.h @@ -326,12 +326,12 @@ C_REGISTERS(DECLARE_C_REGISTER) #undef DECLARE_C_REGISTER // Define {RegisterName} methods for the register types. -DEFINE_REGISTER_NAMES(Register, GENERAL_REGISTERS); -DEFINE_REGISTER_NAMES(SwVfpRegister, FLOAT_REGISTERS); -DEFINE_REGISTER_NAMES(DwVfpRegister, DOUBLE_REGISTERS); -DEFINE_REGISTER_NAMES(LowDwVfpRegister, LOW_DOUBLE_REGISTERS); -DEFINE_REGISTER_NAMES(QwNeonRegister, SIMD128_REGISTERS); -DEFINE_REGISTER_NAMES(CRegister, C_REGISTERS); +DEFINE_REGISTER_NAMES(Register, GENERAL_REGISTERS) +DEFINE_REGISTER_NAMES(SwVfpRegister, FLOAT_REGISTERS) +DEFINE_REGISTER_NAMES(DwVfpRegister, DOUBLE_REGISTERS) +DEFINE_REGISTER_NAMES(LowDwVfpRegister, LOW_DOUBLE_REGISTERS) +DEFINE_REGISTER_NAMES(QwNeonRegister, SIMD128_REGISTERS) +DEFINE_REGISTER_NAMES(CRegister, C_REGISTERS) // Give alias names to registers for calling conventions. constexpr Register kReturnRegister0 = r0; diff --git a/deps/v8/src/arm/simulator-arm.cc b/deps/v8/src/arm/simulator-arm.cc index 0ee54c8f5b70e2..ef5215c78d61c1 100644 --- a/deps/v8/src/arm/simulator-arm.cc +++ b/deps/v8/src/arm/simulator-arm.cc @@ -19,13 +19,14 @@ #include "src/objects-inl.h" #include "src/ostreams.h" #include "src/runtime/runtime-utils.h" +#include "src/vector.h" // Only build the simulator if not compiling for real ARM hardware. namespace v8 { namespace internal { DEFINE_LAZY_LEAKY_OBJECT_GETTER(Simulator::GlobalMonitor, - Simulator::GlobalMonitor::Get); + Simulator::GlobalMonitor::Get) // This macro provides a platform independent use of sscanf. The reason for // SScanF not being implemented in a platform independent way through diff --git a/deps/v8/src/arm64/assembler-arm64-inl.h b/deps/v8/src/arm64/assembler-arm64-inl.h index 253fb984f44edf..fc8e31aac39de8 100644 --- a/deps/v8/src/arm64/assembler-arm64-inl.h +++ b/deps/v8/src/arm64/assembler-arm64-inl.h @@ -583,7 +583,7 @@ int Assembler::deserialization_special_target_size(Address location) { return kSpecialTargetSize; } else { DCHECK_EQ(instr->InstructionBits(), 0); - return kPointerSize; + return kSystemPointerSize; } } @@ -598,7 +598,7 @@ void Assembler::deserialization_set_special_target_at(Address location, target = location; } instr->SetBranchImmTarget(reinterpret_cast(target)); - Assembler::FlushICache(location, kInstrSize); + FlushInstructionCache(location, kInstrSize); } else { DCHECK_EQ(instr->InstructionBits(), 0); Memory
(location) = target; @@ -635,7 +635,7 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool, } instr->SetBranchImmTarget(reinterpret_cast(target)); if (icache_flush_mode != SKIP_ICACHE_FLUSH) { - Assembler::FlushICache(pc, kInstrSize); + FlushInstructionCache(pc, kInstrSize); } } } @@ -645,7 +645,7 @@ int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; } else { DCHECK(reinterpret_cast(pc_)->IsLdrLiteralX()); - return kPointerSize; + return kSystemPointerSize; } } @@ -765,24 +765,6 @@ void RelocInfo::WipeOut() { } } -template -void RelocInfo::Visit(ObjectVisitor* visitor) { - RelocInfo::Mode mode = rmode(); - if (mode == RelocInfo::EMBEDDED_OBJECT) { - visitor->VisitEmbeddedPointer(host(), this); - } else if (RelocInfo::IsCodeTargetMode(mode)) { - visitor->VisitCodeTarget(host(), this); - } else if (mode == RelocInfo::EXTERNAL_REFERENCE) { - visitor->VisitExternalReference(host(), this); - } else if (mode == RelocInfo::INTERNAL_REFERENCE) { - visitor->VisitInternalReference(host(), this); - } else if (RelocInfo::IsRuntimeEntry(mode)) { - visitor->VisitRuntimeEntry(host(), this); - } else if (RelocInfo::IsOffHeapTarget(mode)) { - visitor->VisitOffHeapTarget(host(), this); - } -} - LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { DCHECK(rt.IsValid()); if (rt.IsRegister()) { diff --git a/deps/v8/src/arm64/assembler-arm64.cc b/deps/v8/src/arm64/assembler-arm64.cc index be0a4a95191f59..8a5a82fba8b53c 100644 --- a/deps/v8/src/arm64/assembler-arm64.cc +++ b/deps/v8/src/arm64/assembler-arm64.cc @@ -377,7 +377,7 @@ int ConstPool::WorstCaseSize() { // blr xzr // nop // All entries are 64-bit for now. - return 4 * kInstrSize + EntryCount() * kPointerSize; + return 4 * kInstrSize + EntryCount() * kSystemPointerSize; } @@ -395,7 +395,7 @@ int ConstPool::SizeIfEmittedAtCurrentPc(bool require_jump) { IsAligned(assm_->pc_offset() + prologue_size, 8) ? 0 : kInstrSize; // All entries are 64-bit for now. - return prologue_size + EntryCount() * kPointerSize; + return prologue_size + EntryCount() * kSystemPointerSize; } @@ -549,6 +549,7 @@ Assembler::~Assembler() { DCHECK_EQ(veneer_pool_blocked_nesting_, 0); } +void Assembler::AbortedCodeGeneration() { constpool_.Clear(); } void Assembler::Reset() { #ifdef DEBUG @@ -589,7 +590,9 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { } } -void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { +void Assembler::GetCode(Isolate* isolate, CodeDesc* desc, + SafepointTableBuilder* safepoint_table_builder, + int handler_table_offset) { // Emit constant pool if necessary. CheckConstPool(true, false); DCHECK(constpool_.IsEmpty()); @@ -599,21 +602,27 @@ void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { AllocateAndInstallRequestedHeapObjects(isolate); // Set up code descriptor. - if (desc) { - desc->buffer = buffer_start_; - desc->buffer_size = buffer_->size(); - desc->instr_size = pc_offset(); - desc->reloc_size = static_cast((buffer_start_ + desc->buffer_size) - - reloc_info_writer.pos()); - desc->origin = this; - desc->constant_pool_size = 0; - desc->unwinding_info_size = 0; - desc->unwinding_info = nullptr; - desc->code_comments_size = code_comments_size; - } + // TODO(jgruber): Reconsider how these offsets and sizes are maintained up to + // this point to make CodeDesc initialization less fiddly. 
+ + static constexpr int kConstantPoolSize = 0; + const int instruction_size = pc_offset(); + const int code_comments_offset = instruction_size - code_comments_size; + const int constant_pool_offset = code_comments_offset - kConstantPoolSize; + const int handler_table_offset2 = (handler_table_offset == kNoHandlerTable) + ? constant_pool_offset + : handler_table_offset; + const int safepoint_table_offset = + (safepoint_table_builder == kNoSafepointTable) + ? handler_table_offset2 + : safepoint_table_builder->GetCodeOffset(); + const int reloc_info_offset = + static_cast(reloc_info_writer.pos() - buffer_->start()); + CodeDesc::Initialize(desc, this, safepoint_table_offset, + handler_table_offset2, constant_pool_offset, + code_comments_offset, reloc_info_offset); } - void Assembler::Align(int m) { DCHECK(m >= 4 && base::bits::IsPowerOfTwo(m)); while ((pc_offset() & (m - 1)) != 0) { @@ -4887,7 +4896,9 @@ void Assembler::EmitVeneers(bool force_emit, bool need_protection, int margin) { EmitVeneersGuard(); +#ifdef DEBUG Label veneer_size_check; +#endif std::multimap::iterator it, it_to_delete; diff --git a/deps/v8/src/arm64/assembler-arm64.h b/deps/v8/src/arm64/assembler-arm64.h index 54e46c74dd6617..586eff1241fed0 100644 --- a/deps/v8/src/arm64/assembler-arm64.h +++ b/deps/v8/src/arm64/assembler-arm64.h @@ -28,6 +28,8 @@ namespace v8 { namespace internal { +class SafepointTableBuilder; + // ----------------------------------------------------------------------------- // Immediates. class Immediate { @@ -260,9 +262,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { virtual ~Assembler(); - virtual void AbortedCodeGeneration() { - constpool_.Clear(); - } + virtual void AbortedCodeGeneration(); // System functions --------------------------------------------------------- // Start generating code from the beginning of the buffer, discarding any code @@ -272,13 +272,17 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // constant pool is not blocked. void Reset(); - // GetCode emits any pending (non-emitted) code and fills the descriptor - // desc. GetCode() is idempotent; it returns the same result if no other - // Assembler functions are invoked in between GetCode() calls. - // - // The descriptor (desc) can be nullptr. In that case, the code is finalized - // as usual, but the descriptor is not populated. - void GetCode(Isolate* isolate, CodeDesc* desc); + // GetCode emits any pending (non-emitted) code and fills the descriptor desc. + static constexpr int kNoHandlerTable = 0; + static constexpr SafepointTableBuilder* kNoSafepointTable = nullptr; + void GetCode(Isolate* isolate, CodeDesc* desc, + SafepointTableBuilder* safepoint_table_builder, + int handler_table_offset); + + // Convenience wrapper for code without safepoint or handler tables. + void GetCode(Isolate* isolate, CodeDesc* desc) { + GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable); + } // Insert the smallest number of nop instructions // possible to align the pc offset to a multiple @@ -2504,6 +2508,9 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope); }; + // Unused on this architecture. + void MaybeEmitOutOfLineConstantPool() {} + // Check if is time to emit a constant pool. void CheckConstPool(bool force_emit, bool require_jump); @@ -2844,7 +2851,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // Functions used for testing. int GetConstantPoolEntriesSizeForTesting() const { // Do not include branch over the pool. 
- return constpool_.EntryCount() * kPointerSize; + return constpool_.EntryCount() * kSystemPointerSize; } static constexpr int GetCheckConstPoolIntervalForTesting() { diff --git a/deps/v8/src/arm64/cpu-arm64.cc b/deps/v8/src/arm64/cpu-arm64.cc index 379d7647d75e19..66ce3f9da43e4e 100644 --- a/deps/v8/src/arm64/cpu-arm64.cc +++ b/deps/v8/src/arm64/cpu-arm64.cc @@ -15,7 +15,7 @@ namespace internal { class CacheLineSizes { public: CacheLineSizes() { -#if defined(USE_SIMULATOR) || defined(V8_OS_WIN) +#if !defined(V8_HOST_ARCH_ARM64) || defined(V8_OS_WIN) cache_type_register_ = 0; #else // Copy the content of the cache type register to a core register. @@ -38,9 +38,10 @@ class CacheLineSizes { }; void CpuFeatures::FlushICache(void* address, size_t length) { +#if defined(V8_HOST_ARCH_ARM64) #if defined(V8_OS_WIN) - FlushInstructionCache(GetCurrentProcess(), address, length); -#elif defined(V8_HOST_ARCH_ARM64) + ::FlushInstructionCache(GetCurrentProcess(), address, length); +#else // The code below assumes user space cache operations are allowed. The goal // of this routine is to make sure the code generated is visible to the I // side of the CPU. @@ -109,6 +110,7 @@ void CpuFeatures::FlushICache(void* address, size_t length) { // move this code before the code is generated. : "cc", "memory" ); // NOLINT +#endif // V8_OS_WIN #endif // V8_HOST_ARCH_ARM64 } diff --git a/deps/v8/src/arm64/deoptimizer-arm64.cc b/deps/v8/src/arm64/deoptimizer-arm64.cc index 4b6aa1bf932aa9..9e239886246224 100644 --- a/deps/v8/src/arm64/deoptimizer-arm64.cc +++ b/deps/v8/src/arm64/deoptimizer-arm64.cc @@ -215,7 +215,7 @@ void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, // frame description. __ Add(x3, x1, FrameDescription::frame_content_offset()); __ SlotAddress(x1, 0); - __ Lsr(unwind_limit, unwind_limit, kPointerSizeLog2); + __ Lsr(unwind_limit, unwind_limit, kSystemPointerSizeLog2); __ Mov(x5, unwind_limit); __ CopyDoubleWords(x3, x1, x5); __ Drop(unwind_limit); @@ -237,19 +237,18 @@ void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm, } // Replace the current (input) frame with the output frames. - Label outer_push_loop, inner_push_loop, - outer_loop_header, inner_loop_header; + Label outer_push_loop, outer_loop_header; __ Ldrsw(x1, MemOperand(x4, Deoptimizer::output_count_offset())); __ Ldr(x0, MemOperand(x4, Deoptimizer::output_offset())); - __ Add(x1, x0, Operand(x1, LSL, kPointerSizeLog2)); + __ Add(x1, x0, Operand(x1, LSL, kSystemPointerSizeLog2)); __ B(&outer_loop_header); __ Bind(&outer_push_loop); Register current_frame = x2; Register frame_size = x3; - __ Ldr(current_frame, MemOperand(x0, kPointerSize, PostIndex)); + __ Ldr(current_frame, MemOperand(x0, kSystemPointerSize, PostIndex)); __ Ldr(x3, MemOperand(current_frame, FrameDescription::frame_size_offset())); - __ Lsr(frame_size, x3, kPointerSizeLog2); + __ Lsr(frame_size, x3, kSystemPointerSizeLog2); __ Claim(frame_size); __ Add(x7, current_frame, FrameDescription::frame_content_offset()); diff --git a/deps/v8/src/arm64/frame-constants-arm64.h b/deps/v8/src/arm64/frame-constants-arm64.h index 13a879e8bd374d..3fb21ed8bcfbf3 100644 --- a/deps/v8/src/arm64/frame-constants-arm64.h +++ b/deps/v8/src/arm64/frame-constants-arm64.h @@ -37,8 +37,8 @@ class EntryFrameConstants : public AllStatic { public: // This is the offset to where JSEntry pushes the current value of // Isolate::c_entry_fp onto the stack. 
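Editorial sketch, not part of the patch: in cpu-arm64.cc above, the Windows call is now spelled ::FlushInstructionCache so that the OS function in the global namespace is used rather than the FlushInstructionCache helper that now exists inside V8's own namespace. A minimal stand-alone illustration of that name-lookup rule; the functions below are stand-ins, not the real signatures.

#include <cassert>

int g_global_calls = 0;
int g_internal_calls = 0;

// Stand-in for the OS-level API that lives in the global namespace.
void FlushInstructionCache(void* /*start*/, unsigned /*size*/) { ++g_global_calls; }

namespace v8_internal_like {

// Stand-in for a same-named helper inside the project namespace, which would
// otherwise shadow the global function here.
void FlushInstructionCache(void* /*start*/, unsigned /*size*/) { ++g_internal_calls; }

void PatchCode(void* start, unsigned size) {
  FlushInstructionCache(start, size);    // unqualified: picks the namespace-local helper
  ::FlushInstructionCache(start, size);  // qualified: explicitly picks the global (OS) one
}

}  // namespace v8_internal_like

int main() {
  int dummy = 0;
  v8_internal_like::PatchCode(&dummy, sizeof(dummy));
  assert(g_internal_calls == 1 && g_global_calls == 1);
}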
- static constexpr int kCallerFPOffset = -3 * kPointerSize; - static constexpr int kFixedFrameSize = 6 * kPointerSize; + static constexpr int kCallerFPOffset = -3 * kSystemPointerSize; + static constexpr int kFixedFrameSize = 6 * kSystemPointerSize; }; class ExitFrameConstants : public TypedFrameConstants { @@ -62,7 +62,7 @@ class WasmCompileLazyFrameConstants : public TypedFrameConstants { static constexpr int kFixedFrameSizeFromFp = // Header is padded to 16 byte (see {MacroAssembler::EnterFrame}). RoundUp<16>(TypedFrameConstants::kFixedFrameSizeFromFp) + - kNumberOfSavedGpParamRegs * kPointerSize + + kNumberOfSavedGpParamRegs * kSystemPointerSize + kNumberOfSavedFpParamRegs * kDoubleSize; }; @@ -74,7 +74,7 @@ class JavaScriptFrameConstants : public AllStatic { // There are two words on the stack (saved fp and saved lr) between fp and // the arguments. - static constexpr int kLastParameterOffset = 2 * kPointerSize; + static constexpr int kLastParameterOffset = 2 * kSystemPointerSize; static constexpr int kFunctionOffset = StandardFrameConstants::kFunctionOffset; diff --git a/deps/v8/src/arm64/interface-descriptors-arm64.cc b/deps/v8/src/arm64/interface-descriptors-arm64.cc index ad79b1ec2b379b..4dc7b4f54ab2cb 100644 --- a/deps/v8/src/arm64/interface-descriptors-arm64.cc +++ b/deps/v8/src/arm64/interface-descriptors-arm64.cc @@ -99,6 +99,14 @@ void CallForwardVarargsDescriptor::InitializePlatformSpecific( data->InitializePlatformSpecific(arraysize(registers), registers); } +void CallFunctionTemplateDescriptor::InitializePlatformSpecific( + CallInterfaceDescriptorData* data) { + // x1 : function template info + // x2 : number of arguments (on the stack, not including receiver) + Register registers[] = {x1, x2}; + data->InitializePlatformSpecific(arraysize(registers), registers); +} + void CallWithSpreadDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { // x0 : number of arguments (on the stack, not including receiver) @@ -207,9 +215,10 @@ void ArgumentsAdaptorDescriptor::InitializePlatformSpecific( void ApiCallbackDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { Register registers[] = { - JavaScriptFrame::context_register(), // kTargetContext - x1, // kApiFunctionAddress - x2, // kArgc + x1, // kApiFunctionAddress + x2, // kArgc + x3, // kCallData + x0, // kHolder }; data->InitializePlatformSpecific(arraysize(registers), registers); } diff --git a/deps/v8/src/arm64/macro-assembler-arm64-inl.h b/deps/v8/src/arm64/macro-assembler-arm64-inl.h index ae055f40abd420..be6cd4c933ad28 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64-inl.h +++ b/deps/v8/src/arm64/macro-assembler-arm64-inl.h @@ -1060,7 +1060,11 @@ void TurboAssembler::SmiUntag(Register dst, const MemOperand& src) { } } else { DCHECK(SmiValuesAre31Bits()); +#ifdef V8_COMPRESS_POINTERS + Ldrsw(dst, src); +#else Ldr(dst, src); +#endif SmiUntag(dst); } } diff --git a/deps/v8/src/arm64/macro-assembler-arm64.cc b/deps/v8/src/arm64/macro-assembler-arm64.cc index 48cd13d5fc9a57..79688d709b3d88 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64.cc +++ b/deps/v8/src/arm64/macro-assembler-arm64.cc @@ -15,6 +15,7 @@ #include "src/external-reference-table.h" #include "src/frame-constants.h" #include "src/frames-inl.h" +#include "src/heap/heap-inl.h" // For MemoryChunk. 
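Editorial sketch, not part of the patch: with 31-bit smis under pointer compression, the SmiUntag change above can load only the low 32 bits of the slot sign-extended (Ldrsw) before dropping the tag, instead of reading the full 64-bit word. A plain-integer model of that layout; the helper names are invented and the scheme shown is the 31-bit-smi layout.

#include <cassert>
#include <cstdint>

// 31-bit smi layout: the value sits in the low 32 bits, shifted left by the
// one-bit tag (tag == 0 for smis); the upper 32 bits of the slot are unused.
uint64_t TagSmi31(int32_t value) {
  return static_cast<uint32_t>(value) << 1;
}

int32_t UntagViaLdrsw(uint64_t slot) {
  // Ldrsw step: load only the low 32 bits and sign-extend them to 64 bits...
  const int64_t loaded = static_cast<int32_t>(static_cast<uint32_t>(slot));
  // ...then SmiUntag: arithmetic shift right by the tag size (1 bit).
  return static_cast<int32_t>(loaded >> 1);
}

int main() {
  const int32_t cases[] = {0, 1, -1, 42, -123456, (1 << 30) - 1, -(1 << 30)};
  for (int32_t v : cases) {
    assert(UntagViaLdrsw(TagSmi31(v)) == v);
  }
}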
#include "src/macro-assembler-inl.h" #include "src/register-configuration.h" #include "src/runtime/runtime.h" @@ -1470,7 +1471,7 @@ void TurboAssembler::CopyDoubleWords(Register dst, Register src, Register count, Bind(&pointer1_below_pointer2); Add(pointer1, pointer1, pointer2); } - static_assert(kPointerSize == kDRegSize, + static_assert(kSystemPointerSize == kDRegSize, "pointers must be the same size as doubles"); int direction = (mode == kDstLessThanSrc) ? 1 : -1; @@ -1481,21 +1482,23 @@ void TurboAssembler::CopyDoubleWords(Register dst, Register src, Register count, Label pairs, loop, done; Tbz(count, 0, &pairs); - Ldr(temp0, MemOperand(src, direction * kPointerSize, PostIndex)); + Ldr(temp0, MemOperand(src, direction * kSystemPointerSize, PostIndex)); Sub(count, count, 1); - Str(temp0, MemOperand(dst, direction * kPointerSize, PostIndex)); + Str(temp0, MemOperand(dst, direction * kSystemPointerSize, PostIndex)); Bind(&pairs); if (mode == kSrcLessThanDst) { // Adjust pointers for post-index ldp/stp with negative offset: - Sub(dst, dst, kPointerSize); - Sub(src, src, kPointerSize); + Sub(dst, dst, kSystemPointerSize); + Sub(src, src, kSystemPointerSize); } Bind(&loop); Cbz(count, &done); - Ldp(temp0, temp1, MemOperand(src, 2 * direction * kPointerSize, PostIndex)); + Ldp(temp0, temp1, + MemOperand(src, 2 * direction * kSystemPointerSize, PostIndex)); Sub(count, count, 2); - Stp(temp0, temp1, MemOperand(dst, 2 * direction * kPointerSize, PostIndex)); + Stp(temp0, temp1, + MemOperand(dst, 2 * direction * kSystemPointerSize, PostIndex)); B(&loop); // TODO(all): large copies may benefit from using temporary Q registers @@ -1505,11 +1508,11 @@ void TurboAssembler::CopyDoubleWords(Register dst, Register src, Register count, } void TurboAssembler::SlotAddress(Register dst, int slot_offset) { - Add(dst, sp, slot_offset << kPointerSizeLog2); + Add(dst, sp, slot_offset << kSystemPointerSizeLog2); } void TurboAssembler::SlotAddress(Register dst, Register slot_offset) { - Add(dst, sp, Operand(slot_offset, LSL, kPointerSizeLog2)); + Add(dst, sp, Operand(slot_offset, LSL, kSystemPointerSizeLog2)); } void TurboAssembler::AssertFPCRState(Register fpcr) { @@ -1565,6 +1568,22 @@ void MacroAssembler::LoadObject(Register result, Handle object) { void TurboAssembler::Move(Register dst, Smi src) { Mov(dst, src); } +void TurboAssembler::MovePair(Register dst0, Register src0, Register dst1, + Register src1) { + DCHECK_NE(dst0, dst1); + if (dst0 != src1) { + Mov(dst0, src0); + Mov(dst1, src1); + } else if (dst1 != src0) { + // Swap the order of the moves to resolve the overlap. + Mov(dst1, src1); + Mov(dst0, src0); + } else { + // Worse case scenario, this is a swap. 
+ Swap(dst0, src0); + } +} + void TurboAssembler::Swap(Register lhs, Register rhs) { DCHECK(lhs.IsSameSizeAndType(rhs)); DCHECK(!lhs.Is(rhs)); @@ -1616,7 +1635,8 @@ void MacroAssembler::AssertConstructor(Register object) { UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); - Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); + LoadTaggedPointerField(temp, + FieldMemOperand(object, HeapObject::kMapOffset)); Ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset)); Tst(temp, Operand(Map::IsConstructorBit::kMask)); @@ -1656,7 +1676,7 @@ void MacroAssembler::AssertGeneratorObject(Register object) { // Load map UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); - Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); + LoadTaggedPointerField(temp, FieldMemOperand(object, HeapObject::kMapOffset)); Label do_check; // Load instance type and check if JSGeneratorObject @@ -1682,7 +1702,8 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { Label done_checking; AssertNotSmi(object); JumpIfRoot(object, RootIndex::kUndefinedValue, &done_checking); - Ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); + LoadTaggedPointerField(scratch, + FieldMemOperand(object, HeapObject::kMapOffset)); CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE); Assert(eq, AbortReason::kExpectedUndefinedOrCell); Bind(&done_checking); @@ -1848,9 +1869,9 @@ void TurboAssembler::LoadFromConstantsTable(Register destination, int constant_index) { DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable)); LoadRoot(destination, RootIndex::kBuiltinsConstantsTable); - Ldr(destination, - FieldMemOperand(destination, - FixedArray::kHeaderSize + constant_index * kPointerSize)); + LoadTaggedPointerField( + destination, FieldMemOperand(destination, FixedArray::OffsetOfElementAt( + constant_index))); } void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) { @@ -2002,13 +2023,18 @@ void TurboAssembler::Call(ExternalReference target) { void TurboAssembler::CallBuiltinPointer(Register builtin_pointer) { STATIC_ASSERT(kSystemPointerSize == 8); - STATIC_ASSERT(kSmiShiftSize == 31); STATIC_ASSERT(kSmiTagSize == 1); STATIC_ASSERT(kSmiTag == 0); // The builtin_pointer register contains the builtin index as a Smi. // Untagging is folded into the indexing operand below. +#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH) + STATIC_ASSERT(kSmiShiftSize == 0); + Lsl(builtin_pointer, builtin_pointer, kSystemPointerSizeLog2 - kSmiShift); +#else + STATIC_ASSERT(kSmiShiftSize == 31); Asr(builtin_pointer, builtin_pointer, kSmiShift - kSystemPointerSizeLog2); +#endif Add(builtin_pointer, builtin_pointer, IsolateData::builtin_entry_table_offset()); Ldr(builtin_pointer, MemOperand(kRootRegister, builtin_pointer)); @@ -2163,23 +2189,25 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count, #endif // Calculate the end of destination area where we will put the arguments - // after we drop current frame. We add kPointerSize to count the receiver - // argument which is not included into formal parameters count. + // after we drop current frame. We add kSystemPointerSize to count the + // receiver argument which is not included into formal parameters count. 
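Editorial sketch, not part of the patch: MovePair, added above for arm and arm64 and used in CallRecordWriteStub instead of a push/pop shuffle, orders the two moves so that a destination is never overwritten before it is read, and falls back to a swap when the pair is fully crossed. The same three-case logic on plain integers, with variable identity standing in for register aliasing:

#include <cassert>
#include <utility>

void MovePairModel(int& dst0, int& src0, int& dst1, int& src1) {
  assert(&dst0 != &dst1);
  if (&dst0 != &src1) {
    dst0 = src0;  // writing dst0 cannot clobber src1
    dst1 = src1;
  } else if (&dst1 != &src0) {
    dst1 = src1;  // swapped order resolves the single overlap
    dst0 = src0;
  } else {
    std::swap(dst0, src0);  // fully crossed pair: behaves like a register swap
  }
}

int main() {
  // Disjoint: r2 <- r0, r3 <- r1.
  int r0 = 1, r1 = 2, r2 = 0, r3 = 0;
  MovePairModel(r2, r0, r3, r1);
  assert(r2 == 1 && r3 == 2);

  // Single overlap: r1 <- r0 and r2 <- r1 (the old value of r1 must survive).
  r0 = 1; r1 = 2; r2 = 0;
  MovePairModel(r1, r0, r2, r1);
  assert(r1 == 1 && r2 == 2);

  // Fully crossed: r1 <- r0 and r0 <- r1 is a swap.
  r0 = 1; r1 = 2;
  MovePairModel(r1, r0, r0, r1);
  assert(r0 == 2 && r1 == 1);
}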
Register dst_reg = scratch0; - Add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kPointerSizeLog2)); - Add(dst_reg, dst_reg, StandardFrameConstants::kCallerSPOffset + kPointerSize); + Add(dst_reg, fp, Operand(caller_args_count_reg, LSL, kSystemPointerSizeLog2)); + Add(dst_reg, dst_reg, + StandardFrameConstants::kCallerSPOffset + kSystemPointerSize); // Round dst_reg up to a multiple of 16 bytes, so that we overwrite any // potential padding. Add(dst_reg, dst_reg, 15); Bic(dst_reg, dst_reg, 15); Register src_reg = caller_args_count_reg; - // Calculate the end of source area. +kPointerSize is for the receiver. + // Calculate the end of source area. +kSystemPointerSize is for the receiver. if (callee_args_count.is_reg()) { - Add(src_reg, sp, Operand(callee_args_count.reg(), LSL, kPointerSizeLog2)); - Add(src_reg, src_reg, kPointerSize); + Add(src_reg, sp, + Operand(callee_args_count.reg(), LSL, kSystemPointerSizeLog2)); + Add(src_reg, src_reg, kSystemPointerSize); } else { - Add(src_reg, sp, (callee_args_count.immediate() + 1) * kPointerSize); + Add(src_reg, sp, (callee_args_count.immediate() + 1) * kSystemPointerSize); } // Round src_reg up to a multiple of 16 bytes, so we include any potential @@ -2206,8 +2234,8 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count, Label loop, entry; B(&entry); bind(&loop); - Ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex)); - Str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex)); + Ldr(tmp_reg, MemOperand(src_reg, -kSystemPointerSize, PreIndex)); + Str(tmp_reg, MemOperand(dst_reg, -kSystemPointerSize, PreIndex)); bind(&entry); Cmp(sp, src_reg); B(ne, &loop); @@ -2298,7 +2326,7 @@ void MacroAssembler::CheckDebugHook(Register fun, Register new_target, Operand actual_op = actual.is_immediate() ? Operand(actual.immediate()) : Operand(actual.reg()); Mov(x4, actual_op); - Ldr(x4, MemOperand(sp, x4, LSL, kPointerSizeLog2)); + Ldr(x4, MemOperand(sp, x4, LSL, kSystemPointerSizeLog2)); FrameScope frame(this, has_frame() ? StackFrame::NONE : StackFrame::INTERNAL); @@ -2352,7 +2380,8 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target, // allow recompilation to take effect without changing any of the // call sites. Register code = kJavaScriptCallCodeStartRegister; - Ldr(code, FieldMemOperand(function, JSFunction::kCodeOffset)); + LoadTaggedPointerField(code, + FieldMemOperand(function, JSFunction::kCodeOffset)); if (flag == CALL_FUNCTION) { CallCodeObject(code); } else { @@ -2378,12 +2407,14 @@ void MacroAssembler::InvokeFunction(Register function, Register new_target, Register expected_reg = x2; - Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); + LoadTaggedPointerField(cp, + FieldMemOperand(function, JSFunction::kContextOffset)); // The number of arguments is stored as an int32_t, and -1 is a marker // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign // extension to correctly handle it. - Ldr(expected_reg, FieldMemOperand(function, - JSFunction::kSharedFunctionInfoOffset)); + LoadTaggedPointerField( + expected_reg, + FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); Ldrh(expected_reg, FieldMemOperand(expected_reg, SharedFunctionInfo::kFormalParameterCountOffset)); @@ -2404,7 +2435,8 @@ void MacroAssembler::InvokeFunction(Register function, DCHECK(function.Is(x1)); // Set up the context. 
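Editorial sketch, not part of the patch: PrepareForTailCall above rounds the copy boundaries up to 16 bytes by adding 15 and then clearing the low four bits with Bic. The integer identity behind that, checked in plain C++:

#include <cassert>
#include <cstdint>

// Add 15, then clear the low 4 bits == round up to the next multiple of 16.
uint64_t RoundUp16(uint64_t x) { return (x + 15) & ~uint64_t{15}; }

int main() {
  for (uint64_t x = 0; x <= 64; ++x) {
    const uint64_t expected = ((x + 15) / 16) * 16;  // naive round-up
    assert(RoundUp16(x) == expected);
    assert(RoundUp16(x) % 16 == 0 && RoundUp16(x) >= x);
  }
}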
- Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); + LoadTaggedPointerField(cp, + FieldMemOperand(function, JSFunction::kContextOffset)); InvokeFunctionCode(function, no_reg, expected, actual, flag); } @@ -2476,7 +2508,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) { // type_reg pushed twice for alignment. Push(lr, fp, type_reg, type_reg); const int kFrameSize = - TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize; + TypedFrameConstants::kFixedFrameSizeFromFp + kSystemPointerSize; Add(fp, sp, kFrameSize); // sp[3] : lr // sp[2] : fp @@ -2504,7 +2536,8 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) { // The context pointer isn't part of the fixed frame, so add an extra slot // to account for it. - Add(fp, sp, TypedFrameConstants::kFixedFrameSizeFromFp + kPointerSize); + Add(fp, sp, + TypedFrameConstants::kFixedFrameSizeFromFp + kSystemPointerSize); // sp[3] : lr // sp[2] : fp // sp[1] : type @@ -2560,12 +2593,16 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch, // fp[-16]: Space reserved for SPOffset. // fp[-24]: CodeObject() // sp -> fp[-32]: padding - STATIC_ASSERT((2 * kPointerSize) == ExitFrameConstants::kCallerSPOffset); - STATIC_ASSERT((1 * kPointerSize) == ExitFrameConstants::kCallerPCOffset); - STATIC_ASSERT((0 * kPointerSize) == ExitFrameConstants::kCallerFPOffset); - STATIC_ASSERT((-2 * kPointerSize) == ExitFrameConstants::kSPOffset); - STATIC_ASSERT((-3 * kPointerSize) == ExitFrameConstants::kCodeOffset); - STATIC_ASSERT((-4 * kPointerSize) == ExitFrameConstants::kPaddingOffset); + STATIC_ASSERT((2 * kSystemPointerSize) == + ExitFrameConstants::kCallerSPOffset); + STATIC_ASSERT((1 * kSystemPointerSize) == + ExitFrameConstants::kCallerPCOffset); + STATIC_ASSERT((0 * kSystemPointerSize) == + ExitFrameConstants::kCallerFPOffset); + STATIC_ASSERT((-2 * kSystemPointerSize) == ExitFrameConstants::kSPOffset); + STATIC_ASSERT((-3 * kSystemPointerSize) == ExitFrameConstants::kCodeOffset); + STATIC_ASSERT((-4 * kSystemPointerSize) == + ExitFrameConstants::kPaddingOffset); // Save the frame pointer and context pointer in the top frame. 
Mov(scratch, @@ -2575,7 +2612,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch, ExternalReference::Create(IsolateAddressId::kContextAddress, isolate())); Str(cp, MemOperand(scratch)); - STATIC_ASSERT((-4 * kPointerSize) == ExitFrameConstants::kLastExitFrameField); + STATIC_ASSERT((-4 * kSystemPointerSize) == + ExitFrameConstants::kLastExitFrameField); if (save_doubles) { ExitFramePreserveFPRegs(); } @@ -2692,7 +2730,7 @@ void MacroAssembler::CompareObjectType(Register object, Register map, Register type_reg, InstanceType type) { - Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); + LoadTaggedPointerField(map, FieldMemOperand(object, HeapObject::kMapOffset)); CompareInstanceType(map, type_reg, type); } @@ -2733,6 +2771,87 @@ void MacroAssembler::JumpIfNotRoot(const Register& obj, RootIndex index, B(ne, if_not_equal); } +void MacroAssembler::JumpIfIsInRange(const Register& value, + unsigned lower_limit, + unsigned higher_limit, + Label* on_in_range) { + if (lower_limit != 0) { + UseScratchRegisterScope temps(this); + Register scratch = temps.AcquireW(); + Sub(scratch, value, Operand(lower_limit)); + CompareAndBranch(scratch, Operand(higher_limit - lower_limit), ls, + on_in_range); + } else { + CompareAndBranch(value, Operand(higher_limit - lower_limit), ls, + on_in_range); + } +} + +void TurboAssembler::LoadTaggedPointerField(const Register& destination, + const MemOperand& field_operand) { +#ifdef V8_COMPRESS_POINTERS + DecompressTaggedPointer(destination, field_operand); +#else + Ldr(destination, field_operand); +#endif +} + +void TurboAssembler::LoadAnyTaggedField(const Register& destination, + const MemOperand& field_operand) { +#ifdef V8_COMPRESS_POINTERS + DecompressAnyTagged(destination, field_operand); +#else + Ldr(destination, field_operand); +#endif +} + +void TurboAssembler::SmiUntagField(Register dst, const MemOperand& src) { + SmiUntag(dst, src); +} + +void TurboAssembler::StoreTaggedField(const Register& value, + const MemOperand& dst_field_operand) { +#ifdef V8_COMPRESS_POINTERS + RecordComment("[ StoreTagged"); + Str(value.W(), dst_field_operand); + RecordComment("]"); +#else + Str(value, dst_field_operand); +#endif +} + +void TurboAssembler::DecompressTaggedSigned(const Register& destination, + const MemOperand& field_operand) { + RecordComment("[ DecompressTaggedSigned"); + Ldrsw(destination, field_operand); + RecordComment("]"); +} + +void TurboAssembler::DecompressTaggedPointer(const Register& destination, + const MemOperand& field_operand) { + RecordComment("[ DecompressTaggedPointer"); + Ldrsw(destination, field_operand); + Add(destination, kRootRegister, destination); + RecordComment("]"); +} + +void TurboAssembler::DecompressAnyTagged(const Register& destination, + const MemOperand& field_operand) { + RecordComment("[ DecompressAnyTagged"); + UseScratchRegisterScope temps(this); + Ldrsw(destination, field_operand); + // Branchlessly compute |masked_root|: + // masked_root = HAS_SMI_TAG(destination) ? 0 : kRootRegister; + STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0)); + Register masked_root = temps.AcquireX(); + // Sign extend tag bit to entire register. + Sbfx(masked_root, destination, 0, kSmiTagSize); + And(masked_root, masked_root, kRootRegister); + // Now this add operation will either leave the value unchanged if it is a smi + // or add the isolate root if it is a heap object. 
+ Add(destination, masked_root, destination); + RecordComment("]"); +} void MacroAssembler::CompareAndSplit(const Register& lhs, const Operand& rhs, @@ -2863,13 +2982,13 @@ void MacroAssembler::RecordWriteField(Register object, int offset, } // Although the object register is tagged, the offset is relative to the start - // of the object, so offset must be a multiple of kPointerSize. - DCHECK(IsAligned(offset, kPointerSize)); + // of the object, so offset must be a multiple of kTaggedSize. + DCHECK(IsAligned(offset, kTaggedSize)); Add(scratch, object, offset - kHeapObjectTag); if (emit_debug_code()) { Label ok; - Tst(scratch, kPointerSize - 1); + Tst(scratch, kTaggedSize - 1); B(eq, &ok); Abort(AbortReason::kUnalignedCellInWriteBarrier); Bind(&ok); @@ -2953,9 +3072,7 @@ void TurboAssembler::CallRecordWriteStub( Register fp_mode_parameter( descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode)); - Push(object, address); - - Pop(slot_parameter, object_parameter); + MovePair(object_parameter, object, slot_parameter, address); Mov(remembered_set_parameter, Smi::FromEnum(remembered_set_action)); Mov(fp_mode_parameter, Smi::FromEnum(fp_mode)); @@ -2985,7 +3102,7 @@ void MacroAssembler::RecordWrite(Register object, Register address, UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); - Ldr(temp, MemOperand(address)); + LoadTaggedPointerField(temp, MemOperand(address)); Cmp(temp, value); Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite); } @@ -3100,8 +3217,8 @@ void TurboAssembler::Abort(AbortReason reason) { } void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { - Ldr(dst, NativeContextMemOperand()); - Ldr(dst, ContextMemOperand(dst, index)); + LoadTaggedPointerField(dst, NativeContextMemOperand()); + LoadTaggedPointerField(dst, ContextMemOperand(dst, index)); } diff --git a/deps/v8/src/arm64/macro-assembler-arm64.h b/deps/v8/src/arm64/macro-assembler-arm64.h index ba1885a2484e69..ab8ac0851122bf 100644 --- a/deps/v8/src/arm64/macro-assembler-arm64.h +++ b/deps/v8/src/arm64/macro-assembler-arm64.h @@ -213,6 +213,9 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { // Remove if not needed. void Move(Register dst, Smi src); + // Move src0 to dst0 and src1 to dst1, handling possible overlaps. + void MovePair(Register dst0, Register src0, Register dst1, Register src1); + // Register swap. Note that the register operands should be distinct. void Swap(Register lhs, Register rhs); void Swap(VRegister lhs, VRegister rhs); @@ -1175,6 +1178,32 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { void ResetSpeculationPoisonRegister(); + // --------------------------------------------------------------------------- + // Pointer compression Support + + // Loads a field containing a HeapObject and decompresses it if pointer + // compression is enabled. + void LoadTaggedPointerField(const Register& destination, + const MemOperand& field_operand); + + // Loads a field containing any tagged value and decompresses it if necessary. + void LoadAnyTaggedField(const Register& destination, + const MemOperand& field_operand); + + // Loads a field containing smi value and untags it. + void SmiUntagField(Register dst, const MemOperand& src); + + // Compresses and stores tagged value to given on-heap location. 
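
DecompressAnyTagged above avoids a branch by sign-extending the low tag bit and using the result as a mask for the isolate root. A standalone sketch of that arithmetic on plain integers, assuming a one-bit smi tag equal to 0 (so heap objects have the low bit set) and a 32-bit compressed value sign-extended the way Ldrsw loads it; this is illustration only, not V8 code:

#include <cstdint>

uint64_t DecompressAnyTaggedSketch(uint64_t isolate_root, int32_t compressed) {
  int64_t value = compressed;  // Ldrsw: sign-extended 32-bit load
  // 0 for smis (low bit 0), all-ones for heap objects (low bit 1);
  // this plays the role of the Sbfx of the tag bit in the assembly above.
  uint64_t masked_root = static_cast<uint64_t>(-(value & 1));
  masked_root &= isolate_root;  // And(masked_root, masked_root, kRootRegister)
  // Smis come out unchanged; heap objects get the isolate root added back.
  return masked_root + static_cast<uint64_t>(value);
}
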
+ void StoreTaggedField(const Register& value, + const MemOperand& dst_field_operand); + + void DecompressTaggedSigned(const Register& destination, + const MemOperand& field_operand); + void DecompressTaggedPointer(const Register& destination, + const MemOperand& field_operand); + void DecompressAnyTagged(const Register& destination, + const MemOperand& field_operand); + protected: // The actual Push and Pop implementations. These don't generate any code // other than that required for the push or pop. This allows @@ -1811,6 +1840,11 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { // Compare the object in a register to a value and jump if they are not equal. void JumpIfNotRoot(const Register& obj, RootIndex index, Label* if_not_equal); + // Checks if value is in range [lower_limit, higher_limit] using a single + // comparison. + void JumpIfIsInRange(const Register& value, unsigned lower_limit, + unsigned higher_limit, Label* on_in_range); + // Compare the contents of a register with an operand, and branch to true, // false or fall through, depending on condition. void CompareAndSplit(const Register& lhs, @@ -1962,11 +1996,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { const CPURegister& arg3 = NoCPUReg); private: - // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace. - void InNewSpace(Register object, - Condition cond, // eq for new space, ne otherwise. - Label* branch); - // Try to represent a double as an int so that integer fast-paths may be // used. Not every valid integer value is guaranteed to be caught. // It supports both 32-bit and 64-bit integers depending whether 'as_int' diff --git a/deps/v8/src/arm64/register-arm64.h b/deps/v8/src/arm64/register-arm64.h index 77310213f23873..008268de590b50 100644 --- a/deps/v8/src/arm64/register-arm64.h +++ b/deps/v8/src/arm64/register-arm64.h @@ -61,6 +61,12 @@ namespace internal { V(q16) V(q17) V(q18) V(q19) V(q20) V(q21) V(q22) V(q23) \ V(q24) V(q25) V(q26) V(q27) V(q28) V(q29) V(q30) V(q31) +#define VECTOR_REGISTERS(V) \ + V(v0) V(v1) V(v2) V(v3) V(v4) V(v5) V(v6) V(v7) \ + V(v8) V(v9) V(v10) V(v11) V(v12) V(v13) V(v14) V(v15) \ + V(v16) V(v17) V(v18) V(v19) V(v20) V(v21) V(v22) V(v23) \ + V(v24) V(v25) V(v26) V(v27) V(v28) V(v29) V(v30) V(v31) + // Register d29 could be allocated, but we keep an even length list here, in // order to make stack alignment easier for save and restore. #define ALLOCATABLE_DOUBLE_REGISTERS(R) \ @@ -710,8 +716,9 @@ class CPURegList { #define kCallerSaved CPURegList::GetCallerSaved() #define kCallerSavedV CPURegList::GetCallerSavedV() -// Define a {RegisterName} method for {CPURegister}. -DEFINE_REGISTER_NAMES(CPURegister, GENERAL_REGISTERS); +// Define a {RegisterName} method for {Register} and {VRegister}. +DEFINE_REGISTER_NAMES(Register, GENERAL_REGISTERS) +DEFINE_REGISTER_NAMES(VRegister, VECTOR_REGISTERS) // Give alias names to registers for calling conventions. constexpr Register kReturnRegister0 = x0; diff --git a/deps/v8/src/arm64/simulator-arm64.cc b/deps/v8/src/arm64/simulator-arm64.cc index aa36de4afabd16..be2c6cdec60d2c 100644 --- a/deps/v8/src/arm64/simulator-arm64.cc +++ b/deps/v8/src/arm64/simulator-arm64.cc @@ -58,7 +58,7 @@ TEXT_COLOUR clr_debug_message = FLAG_log_colour ? COLOUR(YELLOW) : ""; TEXT_COLOUR clr_printf = FLAG_log_colour ? 
COLOUR(GREEN) : ""; DEFINE_LAZY_LEAKY_OBJECT_GETTER(Simulator::GlobalMonitor, - Simulator::GlobalMonitor::Get); + Simulator::GlobalMonitor::Get) // This is basically the same as PrintF, with a guard for FLAG_trace_sim. void Simulator::TraceSim(const char* format, ...) { @@ -252,9 +252,9 @@ uintptr_t Simulator::PushAddress(uintptr_t address) { intptr_t new_sp = sp() - 2 * kXRegSize; uintptr_t* alignment_slot = reinterpret_cast(new_sp + kXRegSize); - memcpy(alignment_slot, &kSlotsZapValue, kPointerSize); + memcpy(alignment_slot, &kSlotsZapValue, kSystemPointerSize); uintptr_t* stack_slot = reinterpret_cast(new_sp); - memcpy(stack_slot, &address, kPointerSize); + memcpy(stack_slot, &address, kSystemPointerSize); set_sp(new_sp); return new_sp; } @@ -2278,7 +2278,8 @@ void Simulator::VisitMoveWideImmediate(Instruction* instr) { unsigned reg_code = instr->Rd(); int64_t prev_xn_val = is_64_bits ? xreg(reg_code) : wreg(reg_code); - new_xn_val = (prev_xn_val & ~(0xFFFFL << shift)) | shifted_imm16; + new_xn_val = + (prev_xn_val & ~(INT64_C(0xFFFF) << shift)) | shifted_imm16; break; } case MOVZ_w: diff --git a/deps/v8/src/arm64/simulator-arm64.h b/deps/v8/src/arm64/simulator-arm64.h index 586d65b341f2ce..e4e3e09e45ad2e 100644 --- a/deps/v8/src/arm64/simulator-arm64.h +++ b/deps/v8/src/arm64/simulator-arm64.h @@ -860,13 +860,13 @@ class Simulator : public DecoderVisitor, public SimulatorBase { // Commonly-used special cases. template void set_lr(T value) { - DCHECK_EQ(sizeof(T), static_cast(kPointerSize)); + DCHECK_EQ(sizeof(T), static_cast(kSystemPointerSize)); set_reg(kLinkRegCode, value); } template void set_sp(T value) { - DCHECK_EQ(sizeof(T), static_cast(kPointerSize)); + DCHECK_EQ(sizeof(T), static_cast(kSystemPointerSize)); set_reg(31, value, Reg31IsStackPointer); } diff --git a/deps/v8/src/asan.h b/deps/v8/src/asan.h index fc0add016e1ca9..07133926724fd9 100644 --- a/deps/v8/src/asan.h +++ b/deps/v8/src/asan.h @@ -16,12 +16,10 @@ #else // !V8_USE_ADDRESS_SANITIZER -#define ASAN_POISON_MEMORY_REGION(start, size) \ - static_assert( \ - (std::is_pointer::value || \ - std::is_same::value) && \ - std::is_convertible::value, \ - "static type violation") +#define ASAN_POISON_MEMORY_REGION(start, size) \ + static_assert(std::is_pointer::value && \ + std::is_convertible::value, \ + "static type violation") #define ASAN_UNPOISON_MEMORY_REGION(start, size) \ ASAN_POISON_MEMORY_REGION(start, size) diff --git a/deps/v8/src/asmjs/asm-js.cc b/deps/v8/src/asmjs/asm-js.cc index c242c56389271a..a84c88546ef605 100644 --- a/deps/v8/src/asmjs/asm-js.cc +++ b/deps/v8/src/asmjs/asm-js.cc @@ -23,6 +23,7 @@ #include "src/parsing/scanner-character-streams.h" #include "src/parsing/scanner.h" #include "src/unoptimized-compilation-info.h" +#include "src/vector.h" #include "src/wasm/wasm-engine.h" #include "src/wasm/wasm-js.h" diff --git a/deps/v8/src/asmjs/asm-parser.cc b/deps/v8/src/asmjs/asm-parser.cc index df86bf5b9a7251..8352ec02e23c18 100644 --- a/deps/v8/src/asmjs/asm-parser.cc +++ b/deps/v8/src/asmjs/asm-parser.cc @@ -353,6 +353,8 @@ void AsmJsParser::ValidateModule() { RECURSE(ValidateFunctionTable()); } RECURSE(ValidateExport()); + RECURSE(SkipSemicolon()); + EXPECT_TOKEN('}'); // Check that all functions were eventually defined. 
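
The MoveWideImmediate fix above replaces `0xFFFFL << shift` with `INT64_C(0xFFFF) << shift`: on LLP64 targets `long` is only 32 bits wide, so shifting it by 32 or 48 is undefined behaviour and cannot clear the intended field of a 64-bit register value. A small sketch of the corrected masking, simplified to an unsigned 64-bit constant:

#include <cstdint>

// Insert a 16-bit immediate into one of the four half-word fields of a
// 64-bit value while keeping the rest, as MOVK does; shift is 0, 16, 32 or 48.
uint64_t InsertImm16Sketch(uint64_t prev, uint64_t imm16, int shift) {
  uint64_t mask = UINT64_C(0xFFFF) << shift;  // a genuine 64-bit constant
  return (prev & ~mask) | ((imm16 & 0xFFFF) << shift);
}
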
for (auto& info : global_var_info_) { diff --git a/deps/v8/src/asmjs/asm-parser.h b/deps/v8/src/asmjs/asm-parser.h index dd8392ddcf600f..202bac718b4ad3 100644 --- a/deps/v8/src/asmjs/asm-parser.h +++ b/deps/v8/src/asmjs/asm-parser.h @@ -11,6 +11,7 @@ #include "src/asmjs/asm-scanner.h" #include "src/asmjs/asm-types.h" #include "src/base/enum-set.h" +#include "src/vector.h" #include "src/wasm/wasm-module-builder.h" #include "src/zone/zone-containers.h" diff --git a/deps/v8/src/asmjs/asm-scanner.cc b/deps/v8/src/asmjs/asm-scanner.cc index 448f8a77d3b6c4..f249f2c7240f7d 100644 --- a/deps/v8/src/asmjs/asm-scanner.cc +++ b/deps/v8/src/asmjs/asm-scanner.cc @@ -16,7 +16,7 @@ namespace { // Cap number of identifiers to ensure we can assign both global and // local ones a token id in the range of an int32_t. static const int kMaxIdentifierCount = 0xF000000; -}; +} // namespace AsmJsScanner::AsmJsScanner(Utf16CharacterStream* stream) : stream_(stream), diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index 383d6f67fe2ebe..04567377bd4917 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -37,9 +37,9 @@ #include "src/assembler-inl.h" #include "src/deoptimizer.h" #include "src/disassembler.h" +#include "src/heap/heap-inl.h" // For MemoryAllocator. TODO(jkummerow): Drop. #include "src/isolate.h" #include "src/ostreams.h" -#include "src/simulator.h" // For flushing instruction cache. #include "src/snapshot/embedded-data.h" #include "src/snapshot/serializer-common.h" #include "src/snapshot/snapshot.h" @@ -68,7 +68,7 @@ AssemblerOptions AssemblerOptions::Default( const bool serializer = isolate->serializer_enabled() || explicitly_support_serialization; const bool generating_embedded_builtin = - isolate->ShouldLoadConstantsFromRootList(); + isolate->IsGeneratingEmbeddedBuiltins(); options.record_reloc_info_for_serialization = serializer; options.enable_root_array_delta_access = !serializer && !generating_embedded_builtin; @@ -161,17 +161,6 @@ AssemblerBase::AssemblerBase(const AssemblerOptions& options, AssemblerBase::~AssemblerBase() = default; -void AssemblerBase::FlushICache(void* start, size_t size) { - if (size == 0) return; - -#if defined(USE_SIMULATOR) - base::MutexGuard lock_guard(Simulator::i_cache_mutex()); - Simulator::FlushICache(Simulator::i_cache(), start, size); -#else - CpuFeatures::FlushICache(start, size); -#endif // USE_SIMULATOR -} - void AssemblerBase::Print(Isolate* isolate) { StdoutStream os; v8::internal::Disassembler::Decode(isolate, &os, buffer_start_, pc_); diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 69ab58cdb4fe51..7efaf6a7daa47c 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -247,6 +247,10 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced { int pc_offset() const { return static_cast(pc_ - buffer_start_); } + byte* buffer_start() const { return buffer_->start(); } + int buffer_size() const { return buffer_->size(); } + int instruction_size() const { return pc_offset(); } + // This function is called when code generation is aborted, so that // the assembler could clean up internal data structures. 
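
The JumpIfIsInRange helper added to the arm64 macro assembler above folds a two-sided range test into a single unsigned comparison: subtract the lower bound, then compare against the width of the range. The equivalent check in plain C++ (a sketch, not V8 code):

#include <cstdint>

bool IsInRangeSketch(uint32_t value, uint32_t lower, uint32_t higher) {
  // If value < lower, the subtraction wraps around to a large number and the
  // single unsigned comparison fails, so both bounds are covered at once.
  return (value - lower) <= (higher - lower);
}
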
virtual void AbortedCodeGeneration() { } @@ -264,11 +268,6 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced { static const int kMinimalBufferSize = 4*KB; - static void FlushICache(void* start, size_t size); - static void FlushICache(Address start, size_t size) { - return FlushICache(reinterpret_cast(start), size); - } - protected: // Add 'target' to the {code_targets_} vector, if necessary, and return the // offset at which it is stored. diff --git a/deps/v8/src/assert-scope.cc b/deps/v8/src/assert-scope.cc index 114942f1d33a05..3655a5e5991e64 100644 --- a/deps/v8/src/assert-scope.cc +++ b/deps/v8/src/assert-scope.cc @@ -16,7 +16,7 @@ namespace { DEFINE_LAZY_LEAKY_OBJECT_GETTER(base::Thread::LocalStorageKey, GetPerThreadAssertKey, - base::Thread::CreateThreadLocalKey()); + base::Thread::CreateThreadLocalKey()) } // namespace diff --git a/deps/v8/src/assert-scope.h b/deps/v8/src/assert-scope.h index 0a41af7f6a1ae4..8d0ad5e0c07bae 100644 --- a/deps/v8/src/assert-scope.h +++ b/deps/v8/src/assert-scope.h @@ -133,11 +133,9 @@ typedef PerThreadAssertScopeDebugOnly typedef PerThreadAssertScopeDebugOnly DisallowHeapAllocation; #ifdef DEBUG -#define DISALLOW_HEAP_ALLOCATION(name) DisallowHeapAllocation name -#define DISALLOW_HEAP_ALLOCATION_REF(name) const DisallowHeapAllocation& name +#define DISALLOW_HEAP_ALLOCATION(name) DisallowHeapAllocation name; #else #define DISALLOW_HEAP_ALLOCATION(name) -#define DISALLOW_HEAP_ALLOCATION_REF(name) #endif // Scope to introduce an exception to DisallowHeapAllocation. @@ -232,6 +230,35 @@ typedef PerIsolateAssertScopeDebugOnly // Scope to introduce an exception to DisallowExceptions. typedef PerIsolateAssertScopeDebugOnly AllowExceptions; + +// Explicit instantiation declarations. +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; +extern template class PerThreadAssertScope; + +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; +extern template class PerIsolateAssertScope; + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/ast/ast-traversal-visitor.h b/deps/v8/src/ast/ast-traversal-visitor.h index 21986789ba7a80..b4836ff7847488 100644 --- a/deps/v8/src/ast/ast-traversal-visitor.h +++ b/deps/v8/src/ast/ast-traversal-visitor.h @@ -467,7 +467,7 @@ void AstTraversalVisitor::VisitCompareOperation( } template -void AstTraversalVisitor::VisitThisFunction(ThisFunction* expr) { +void AstTraversalVisitor::VisitThisExpression(ThisExpression* expr) { PROCESS_EXPRESSION(expr); } @@ -555,7 +555,6 @@ template void AstTraversalVisitor::VisitSuperPropertyReference( SuperPropertyReference* expr) { PROCESS_EXPRESSION(expr); - RECURSE_EXPRESSION(VisitVariableProxy(expr->this_var())); RECURSE_EXPRESSION(Visit(expr->home_object())); } @@ -563,7 +562,6 @@ 
template void AstTraversalVisitor::VisitSuperCallReference( SuperCallReference* expr) { PROCESS_EXPRESSION(expr); - RECURSE_EXPRESSION(VisitVariableProxy(expr->this_var())); RECURSE_EXPRESSION(VisitVariableProxy(expr->new_target_var())); RECURSE_EXPRESSION(VisitVariableProxy(expr->this_function_var())); } diff --git a/deps/v8/src/ast/ast-value-factory.cc b/deps/v8/src/ast/ast-value-factory.cc index 2a35097f9c721a..94d500c07de3f4 100644 --- a/deps/v8/src/ast/ast-value-factory.cc +++ b/deps/v8/src/ast/ast-value-factory.cc @@ -124,6 +124,7 @@ bool AstRawString::Compare(void* a, void* b) { DCHECK_EQ(lhs->Hash(), rhs->Hash()); if (lhs->length() != rhs->length()) return false; + if (lhs->length() == 0) return true; const unsigned char* l = lhs->raw_data(); const unsigned char* r = rhs->raw_data(); size_t length = rhs->length(); diff --git a/deps/v8/src/ast/ast-value-factory.h b/deps/v8/src/ast/ast-value-factory.h index 472527bebe25a2..dd557f5ac95d0d 100644 --- a/deps/v8/src/ast/ast-value-factory.h +++ b/deps/v8/src/ast/ast-value-factory.h @@ -202,10 +202,12 @@ class AstBigInt { F(await, "await") \ F(bigint, "bigint") \ F(boolean, "boolean") \ + F(computed, "") \ F(constructor, "constructor") \ F(default, "default") \ F(done, "done") \ F(dot, ".") \ + F(dot_default, ".default") \ F(dot_for, ".for") \ F(dot_generator_object, ".generator_object") \ F(dot_iterator, ".iterator") \ @@ -235,7 +237,6 @@ class AstBigInt { F(return, "return") \ F(set, "set") \ F(set_space, "set ") \ - F(star_default_star, "*default*") \ F(string, "string") \ F(symbol, "symbol") \ F(target, "target") \ diff --git a/deps/v8/src/ast/ast.cc b/deps/v8/src/ast/ast.cc index 1c1802d602f48e..f70579bd69c61c 100644 --- a/deps/v8/src/ast/ast.cc +++ b/deps/v8/src/ast/ast.cc @@ -23,6 +23,7 @@ #include "src/property-details.h" #include "src/property.h" #include "src/string-stream.h" +#include "src/zone/zone-list-inl.h" namespace v8 { namespace internal { @@ -155,8 +156,8 @@ VariableProxy::VariableProxy(Variable* var, int start_position) : Expression(start_position, kVariableProxy), raw_name_(var->raw_name()), next_unresolved_(nullptr) { - bit_field_ |= IsThisField::encode(var->is_this()) | - IsAssignedField::encode(false) | + DCHECK(!var->is_this()); + bit_field_ |= IsAssignedField::encode(false) | IsResolvedField::encode(false) | HoleCheckModeField::encode(HoleCheckMode::kElided); BindTo(var); @@ -171,7 +172,7 @@ VariableProxy::VariableProxy(const VariableProxy* copy_from) } void VariableProxy::BindTo(Variable* var) { - DCHECK((is_this() && var->is_this()) || raw_name() == var->raw_name()); + DCHECK_EQ(raw_name(), var->raw_name()); set_var(var); set_is_resolved(); var->set_is_used(); @@ -213,6 +214,18 @@ bool FunctionLiteral::AllowsLazyCompilation() { return scope()->AllowsLazyCompilation(); } +bool FunctionLiteral::SafeToSkipArgumentsAdaptor() const { + // TODO(bmeurer,verwaest): The --fast_calls_with_arguments_mismatches + // is mostly here for checking the real-world impact of the calling + // convention. There's not really a point in turning off this flag + // otherwise, so we should remove it at some point, when we're done + // with the experiments (https://crbug.com/v8/8895). + return FLAG_fast_calls_with_arguments_mismatches && + language_mode() == LanguageMode::kStrict && + scope()->arguments() == nullptr && + scope()->rest_parameter() == nullptr; +} + Handle FunctionLiteral::name(Isolate* isolate) const { return raw_name_ ? 
raw_name_->string() : isolate->factory()->empty_string(); } @@ -457,15 +470,10 @@ void ObjectLiteral::BuildBoilerplateDescription(Isolate* isolate) { has_seen_proto = true; continue; } - if (property->is_computed_name()) { - continue; - } + if (property->is_computed_name()) continue; Literal* key = property->key()->AsLiteral(); - - if (!key->IsPropertyName()) { - index_keys++; - } + if (!key->IsPropertyName()) index_keys++; } Handle boilerplate_description = diff --git a/deps/v8/src/ast/ast.h b/deps/v8/src/ast/ast.h index 4f9f083d123ab3..80f76bd6e4bf7c 100644 --- a/deps/v8/src/ast/ast.h +++ b/deps/v8/src/ast/ast.h @@ -11,6 +11,7 @@ #include "src/ast/modules.h" #include "src/ast/variables.h" #include "src/bailout-reason.h" +#include "src/base/threaded-list.h" #include "src/globals.h" #include "src/heap/factory.h" #include "src/isolate.h" @@ -100,7 +101,7 @@ namespace internal { V(SuperCallReference) \ V(SuperPropertyReference) \ V(TemplateLiteral) \ - V(ThisFunction) \ + V(ThisExpression) \ V(Throw) \ V(UnaryOperation) \ V(VariableProxy) \ @@ -483,26 +484,14 @@ inline NestedVariableDeclaration* VariableDeclaration::AsNested() { class FunctionDeclaration final : public Declaration { public: FunctionLiteral* fun() const { return fun_; } - bool declares_sloppy_block_function() const { - return DeclaresSloppyBlockFunction::decode(bit_field_); - } private: friend class AstNodeFactory; - class DeclaresSloppyBlockFunction - : public BitField {}; - - FunctionDeclaration(FunctionLiteral* fun, bool declares_sloppy_block_function, - int pos) - : Declaration(pos, kFunctionDeclaration), fun_(fun) { - bit_field_ = DeclaresSloppyBlockFunction::update( - bit_field_, declares_sloppy_block_function); - } + FunctionDeclaration(FunctionLiteral* fun, int pos) + : Declaration(pos, kFunctionDeclaration), fun_(fun) {} FunctionLiteral* fun_; - - static const uint8_t kNextBitFieldIndex = DeclaresSloppyBlockFunction::kNext; }; @@ -977,14 +966,30 @@ class SloppyBlockFunctionStatement final : public Statement { public: Statement* statement() const { return statement_; } void set_statement(Statement* statement) { statement_ = statement; } + Scope* scope() const { return var_->scope(); } + Variable* var() const { return var_; } + Token::Value init() const { return TokenField::decode(bit_field_); } + const AstRawString* name() const { return var_->raw_name(); } + SloppyBlockFunctionStatement** next() { return &next_; } private: friend class AstNodeFactory; - SloppyBlockFunctionStatement(int pos, Statement* statement) - : Statement(pos, kSloppyBlockFunctionStatement), statement_(statement) {} + class TokenField + : public BitField {}; + + SloppyBlockFunctionStatement(int pos, Variable* var, Token::Value init, + Statement* statement) + : Statement(pos, kSloppyBlockFunctionStatement), + var_(var), + statement_(statement), + next_(nullptr) { + bit_field_ = TokenField::update(bit_field_, init); + } + Variable* var_; Statement* statement_; + SloppyBlockFunctionStatement* next_; }; @@ -1495,11 +1500,15 @@ class ArrayLiteral final : public AggregateLiteral { enum class HoleCheckMode { kRequired, kElided }; +class ThisExpression final : public Expression { + private: + friend class AstNodeFactory; + ThisExpression() : Expression(kNoSourcePosition, kThisExpression) {} +}; + class VariableProxy final : public Expression { public: - bool IsValidReferenceExpression() const { - return !is_this() && !is_new_target(); - } + bool IsValidReferenceExpression() const { return !is_new_target(); } Handle name() const { return 
raw_name()->string(); } const AstRawString* raw_name() const { @@ -1520,8 +1529,6 @@ class VariableProxy final : public Expression { return Scanner::Location(position(), position() + raw_name()->length()); } - bool is_this() const { return IsThisField::decode(bit_field_); } - bool is_assigned() const { return IsAssignedField::decode(bit_field_); } void set_is_assigned() { bit_field_ = IsAssignedField::update(bit_field_, true); @@ -1594,8 +1601,8 @@ class VariableProxy final : public Expression { : Expression(start_position, kVariableProxy), raw_name_(name), next_unresolved_(nullptr) { - bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) | - IsAssignedField::encode(false) | + DCHECK_NE(THIS_VARIABLE, variable_kind); + bit_field_ |= IsAssignedField::encode(false) | IsResolvedField::encode(false) | IsRemovedFromUnresolvedField::encode(false) | HoleCheckModeField::encode(HoleCheckMode::kElided); @@ -1603,9 +1610,8 @@ class VariableProxy final : public Expression { explicit VariableProxy(const VariableProxy* copy_from); - class IsThisField : public BitField { - }; - class IsAssignedField : public BitField {}; + class IsAssignedField + : public BitField {}; class IsResolvedField : public BitField {}; class IsRemovedFromUnresolvedField : public BitField {}; @@ -2190,8 +2196,6 @@ class FunctionLiteral final : public Expression { kWrapped, }; - enum IdType { kIdTypeInvalid = -1, kIdTypeTopLevel = 0 }; - enum ParameterFlag : uint8_t { kNoDuplicateParameters, kHasDuplicateParameters @@ -2226,7 +2230,7 @@ class FunctionLiteral final : public Expression { } bool is_oneshot_iife() const { return OneshotIIFEBit::decode(bit_field_); } bool is_toplevel() const { - return function_literal_id() == FunctionLiteral::kIdTypeTopLevel; + return function_literal_id() == kFunctionLiteralIdTopLevel; } bool is_wrapped() const { return function_type() == kWrapped; } LanguageMode language_mode() const; @@ -2251,6 +2255,18 @@ class FunctionLiteral final : public Expression { return false; } + // We can safely skip the arguments adaptor frame setup even + // in case of arguments mismatches for strict mode functions, + // as long as there's + // + // 1. no use of the arguments object (either explicitly or + // potentially implicitly via a direct eval() call), and + // 2. rest parameters aren't being used in the function. + // + // See http://bit.ly/v8-faster-calls-with-arguments-mismatch + // for the details here (https://crbug.com/v8/8895). + bool SafeToSkipArgumentsAdaptor() const; + // Returns either name or inferred name as a cstring. std::unique_ptr GetDebugName() const; @@ -2550,56 +2566,41 @@ class NativeFunctionLiteral final : public Expression { }; -class ThisFunction final : public Expression { - private: - friend class AstNodeFactory; - explicit ThisFunction(int pos) : Expression(pos, kThisFunction) {} -}; - - class SuperPropertyReference final : public Expression { public: - VariableProxy* this_var() const { return this_var_; } Expression* home_object() const { return home_object_; } private: friend class AstNodeFactory; - SuperPropertyReference(VariableProxy* this_var, Expression* home_object, - int pos) - : Expression(pos, kSuperPropertyReference), - this_var_(this_var), - home_object_(home_object) { - DCHECK(this_var->is_this()); + // We take in ThisExpression* only as a proof that it was accessed. 
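
FunctionLiteral::SafeToSkipArgumentsAdaptor() above (gated behind --fast_calls_with_arguments_mismatches) requires exactly the conditions spelled out in the comment: strict mode, no use of the arguments object (including via a direct eval), and no rest parameter. Restated as a sketch over a hypothetical simplified descriptor, not V8's real types:

struct FunctionSketch {
  bool is_strict_mode;
  bool uses_arguments_object;  // explicit use, or reachable via direct eval()
  bool has_rest_parameter;
};

// The adaptor frame can only be skipped if the callee can never observe a
// mismatch between declared and actually passed argument counts.
bool SafeToSkipArgumentsAdaptorSketch(const FunctionSketch& f) {
  return f.is_strict_mode && !f.uses_arguments_object && !f.has_rest_parameter;
}
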
+ SuperPropertyReference(Expression* home_object, int pos) + : Expression(pos, kSuperPropertyReference), home_object_(home_object) { DCHECK(home_object->IsProperty()); } - VariableProxy* this_var_; Expression* home_object_; }; class SuperCallReference final : public Expression { public: - VariableProxy* this_var() const { return this_var_; } VariableProxy* new_target_var() const { return new_target_var_; } VariableProxy* this_function_var() const { return this_function_var_; } private: friend class AstNodeFactory; - SuperCallReference(VariableProxy* this_var, VariableProxy* new_target_var, + // We take in ThisExpression* only as a proof that it was accessed. + SuperCallReference(VariableProxy* new_target_var, VariableProxy* this_function_var, int pos) : Expression(pos, kSuperCallReference), - this_var_(this_var), new_target_var_(new_target_var), this_function_var_(this_function_var) { - DCHECK(this_var->is_this()); DCHECK(new_target_var->raw_name()->IsOneByteEqualTo(".new.target")); DCHECK(this_function_var->raw_name()->IsOneByteEqualTo(".this_function")); } - VariableProxy* this_var_; VariableProxy* new_target_var_; VariableProxy* this_function_var_; }; @@ -2780,6 +2781,7 @@ class AstNodeFactory final { : zone_(zone), ast_value_factory_(ast_value_factory), empty_statement_(new (zone) class EmptyStatement()), + this_expression_(new (zone) class ThisExpression()), failure_expression_(new (zone) class FailureExpression()) {} AstNodeFactory* ast_node_factory() { return this; } @@ -2794,10 +2796,8 @@ class AstNodeFactory final { return new (zone_) NestedVariableDeclaration(scope, pos); } - FunctionDeclaration* NewFunctionDeclaration(FunctionLiteral* fun, - bool is_sloppy_block_function, - int pos) { - return new (zone_) FunctionDeclaration(fun, is_sloppy_block_function, pos); + FunctionDeclaration* NewFunctionDeclaration(FunctionLiteral* fun, int pos) { + return new (zone_) FunctionDeclaration(fun, pos); } Block* NewBlock(int capacity, bool ignore_completion_value) { @@ -2936,12 +2936,18 @@ class AstNodeFactory final { return empty_statement_; } + class ThisExpression* ThisExpression() { + return this_expression_; + } + class FailureExpression* FailureExpression() { return failure_expression_; } - SloppyBlockFunctionStatement* NewSloppyBlockFunctionStatement(int pos) { - return new (zone_) SloppyBlockFunctionStatement(pos, EmptyStatement()); + SloppyBlockFunctionStatement* NewSloppyBlockFunctionStatement( + int pos, Variable* var, Token::Value init) { + return new (zone_) + SloppyBlockFunctionStatement(pos, var, init, EmptyStatement()); } CaseClause* NewCaseClause(Expression* label, @@ -3143,6 +3149,8 @@ class AstNodeFactory final { Expression* value, int pos) { DCHECK(Token::IsAssignmentOp(op)); + DCHECK_NOT_NULL(target); + DCHECK_NOT_NULL(value); if (op != Token::INIT && target->IsVariableProxy()) { target->AsVariableProxy()->set_is_assigned(); @@ -3206,7 +3214,7 @@ class AstNodeFactory final { FunctionLiteral::kAnonymousExpression, FunctionLiteral::kNoDuplicateParameters, FunctionLiteral::kShouldLazyCompile, 0, /* has_braces */ false, - FunctionLiteral::kIdTypeTopLevel); + kFunctionLiteralIdTopLevel); } ClassLiteral::Property* NewClassLiteralProperty( @@ -3242,22 +3250,16 @@ class AstNodeFactory final { return new (zone_) DoExpression(block, result, pos); } - ThisFunction* NewThisFunction(int pos) { - return new (zone_) ThisFunction(pos); - } - - SuperPropertyReference* NewSuperPropertyReference(VariableProxy* this_var, - Expression* home_object, + SuperPropertyReference* 
NewSuperPropertyReference(Expression* home_object, int pos) { - return new (zone_) SuperPropertyReference(this_var, home_object, pos); + return new (zone_) SuperPropertyReference(home_object, pos); } - SuperCallReference* NewSuperCallReference(VariableProxy* this_var, - VariableProxy* new_target_var, + SuperCallReference* NewSuperCallReference(VariableProxy* new_target_var, VariableProxy* this_function_var, int pos) { return new (zone_) - SuperCallReference(this_var, new_target_var, this_function_var, pos); + SuperCallReference(new_target_var, this_function_var, pos); } EmptyParentheses* NewEmptyParentheses(int pos) { @@ -3295,6 +3297,7 @@ class AstNodeFactory final { Zone* zone_; AstValueFactory* ast_value_factory_; class EmptyStatement* empty_statement_; + class ThisExpression* this_expression_; class FailureExpression* failure_expression_; }; diff --git a/deps/v8/src/ast/modules.cc b/deps/v8/src/ast/modules.cc index 0f66ac91ecde74..d1be965a4abd69 100644 --- a/deps/v8/src/ast/modules.cc +++ b/deps/v8/src/ast/modules.cc @@ -91,20 +91,11 @@ void ModuleDescriptor::AddStarExport(const AstRawString* module_request, } namespace { - Handle ToStringOrUndefined(Isolate* isolate, const AstRawString* s) { return (s == nullptr) ? Handle::cast(isolate->factory()->undefined_value()) : Handle::cast(s->string()); } - -const AstRawString* FromStringOrUndefined(Isolate* isolate, - AstValueFactory* avfactory, - Handle object) { - if (object->IsUndefined(isolate)) return nullptr; - return avfactory->GetString(Handle::cast(object)); -} - } // namespace Handle ModuleDescriptor::Entry::Serialize( @@ -117,21 +108,6 @@ Handle ModuleDescriptor::Entry::Serialize( location.beg_pos, location.end_pos); } -ModuleDescriptor::Entry* ModuleDescriptor::Entry::Deserialize( - Isolate* isolate, AstValueFactory* avfactory, - Handle entry) { - Entry* result = new (avfactory->zone()) Entry(Scanner::Location::invalid()); - result->export_name = FromStringOrUndefined( - isolate, avfactory, handle(entry->export_name(), isolate)); - result->local_name = FromStringOrUndefined( - isolate, avfactory, handle(entry->local_name(), isolate)); - result->import_name = FromStringOrUndefined( - isolate, avfactory, handle(entry->import_name(), isolate)); - result->module_request = entry->module_request(); - result->cell_index = entry->cell_index(); - return result; -} - Handle ModuleDescriptor::SerializeRegularExports(Isolate* isolate, Zone* zone) const { // We serialize regular exports in a way that lets us later iterate over their @@ -183,29 +159,6 @@ Handle ModuleDescriptor::SerializeRegularExports(Isolate* isolate, return result; } -void ModuleDescriptor::DeserializeRegularExports( - Isolate* isolate, AstValueFactory* avfactory, - Handle module_info) { - for (int i = 0, count = module_info->RegularExportCount(); i < count; ++i) { - Handle local_name(module_info->RegularExportLocalName(i), isolate); - int cell_index = module_info->RegularExportCellIndex(i); - Handle export_names(module_info->RegularExportExportNames(i), - isolate); - - for (int j = 0, length = export_names->length(); j < length; ++j) { - Handle export_name(String::cast(export_names->get(j)), isolate); - - Entry* entry = - new (avfactory->zone()) Entry(Scanner::Location::invalid()); - entry->local_name = avfactory->GetString(local_name); - entry->export_name = avfactory->GetString(export_name); - entry->cell_index = cell_index; - - AddRegularExport(entry); - } - } -} - void ModuleDescriptor::MakeIndirectExportsExplicit(Zone* zone) { for (auto it = regular_exports_.begin(); 
it != regular_exports_.end();) { Entry* entry = it->second; diff --git a/deps/v8/src/ast/modules.h b/deps/v8/src/ast/modules.h index 44e86dce42b480..ebc3e3a28847c0 100644 --- a/deps/v8/src/ast/modules.h +++ b/deps/v8/src/ast/modules.h @@ -107,12 +107,7 @@ class ModuleDescriptor : public ZoneObject { module_request(-1), cell_index(0) {} - // (De-)serialization support. - // Note that the location value is not preserved as it's only needed by the - // parser. (A Deserialize'd entry has an invalid location.) Handle Serialize(Isolate* isolate) const; - static Entry* Deserialize(Isolate* isolate, AstValueFactory* avfactory, - Handle entry); }; enum CellIndexKind { kInvalid, kExport, kImport }; @@ -191,8 +186,6 @@ class ModuleDescriptor : public ZoneObject { Handle SerializeRegularExports(Isolate* isolate, Zone* zone) const; - void DeserializeRegularExports(Isolate* isolate, AstValueFactory* avfactory, - Handle module_info); private: ModuleRequestMap module_requests_; diff --git a/deps/v8/src/ast/prettyprinter.cc b/deps/v8/src/ast/prettyprinter.cc index a53d07064d5847..c7f6e3d9f03cab 100644 --- a/deps/v8/src/ast/prettyprinter.cc +++ b/deps/v8/src/ast/prettyprinter.cc @@ -12,6 +12,7 @@ #include "src/globals.h" #include "src/objects-inl.h" #include "src/string-builder-inl.h" +#include "src/vector.h" namespace v8 { namespace internal { @@ -500,8 +501,7 @@ void CallPrinter::VisitImportCallExpression(ImportCallExpression* node) { Print(")"); } -void CallPrinter::VisitThisFunction(ThisFunction* node) {} - +void CallPrinter::VisitThisExpression(ThisExpression* node) { Print("this"); } void CallPrinter::VisitSuperPropertyReference(SuperPropertyReference* node) {} @@ -1391,11 +1391,10 @@ void AstPrinter::VisitImportCallExpression(ImportCallExpression* node) { Visit(node->argument()); } -void AstPrinter::VisitThisFunction(ThisFunction* node) { - IndentedScope indent(this, "THIS-FUNCTION", node->position()); +void AstPrinter::VisitThisExpression(ThisExpression* node) { + IndentedScope indent(this, "THIS-EXPRESSION", node->position()); } - void AstPrinter::VisitSuperPropertyReference(SuperPropertyReference* node) { IndentedScope indent(this, "SUPER-PROPERTY-REFERENCE", node->position()); } diff --git a/deps/v8/src/ast/prettyprinter.h b/deps/v8/src/ast/prettyprinter.h index e6f2766915893c..e1efdbfb88664d 100644 --- a/deps/v8/src/ast/prettyprinter.h +++ b/deps/v8/src/ast/prettyprinter.h @@ -8,6 +8,7 @@ #include "src/allocation.h" #include "src/ast/ast.h" #include "src/base/compiler-specific.h" +#include "src/function-kind.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/ast/scopes.cc b/deps/v8/src/ast/scopes.cc index 28869cd94ae045..e625865a110bbb 100644 --- a/deps/v8/src/ast/scopes.cc +++ b/deps/v8/src/ast/scopes.cc @@ -83,28 +83,6 @@ Variable* VariableMap::Lookup(const AstRawString* name) { return nullptr; } -void SloppyBlockFunctionMap::Delegate::set_statement(Statement* statement) { - if (statement_ != nullptr) { - statement_->set_statement(statement); - } -} - -SloppyBlockFunctionMap::SloppyBlockFunctionMap(Zone* zone) - : ZoneHashMap(8, ZoneAllocationPolicy(zone)), count_(0) {} - -void SloppyBlockFunctionMap::Declare(Zone* zone, const AstRawString* name, - Scope* scope, - SloppyBlockFunctionStatement* statement) { - auto* delegate = new (zone) Delegate(scope, statement, count_++); - // AstRawStrings are unambiguous, i.e., the same string is always represented - // by the same AstRawString*. 
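
The removed SloppyBlockFunctionMap::Declare relied on the property noted in its comment: each distinct string has exactly one AstRawString*, so the pointer itself can serve as a hash-map key and equal names always land on the same entry. A minimal interner illustrating that property (hypothetical, not V8's AstValueFactory):

#include <memory>
#include <string>
#include <unordered_map>

class InternerSketch {
 public:
  // Equal strings always return the same pointer, so callers may compare and
  // hash interned names by identity instead of by content.
  const std::string* Intern(const std::string& s) {
    auto it = table_.find(s);
    if (it == table_.end()) {
      it = table_.emplace(s, std::make_unique<std::string>(s)).first;
    }
    return it->second.get();
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<std::string>> table_;
};
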
- Entry* p = - ZoneHashMap::LookupOrInsert(const_cast(name), name->Hash(), - ZoneAllocationPolicy(zone)); - delegate->set_next(static_cast(p->value)); - p->value = delegate; -} - // ---------------------------------------------------------------------------- // Implementation of Scope @@ -132,10 +110,8 @@ DeclarationScope::DeclarationScope(Zone* zone, : Scope(zone), function_kind_(kNormalFunction), params_(4, zone) { DCHECK_EQ(scope_type_, SCRIPT_SCOPE); SetDefaults(); - - // Make sure that if we don't find the global 'this', it won't be declared as - // a regular dynamic global by predeclaring it with the right variable kind. - DeclareDynamicGlobal(ast_value_factory->this_string(), THIS_VARIABLE, this); + receiver_ = DeclareDynamicGlobal(ast_value_factory->this_string(), + THIS_VARIABLE, this); } DeclarationScope::DeclarationScope(Zone* zone, Scope* outer_scope, @@ -149,59 +125,19 @@ DeclarationScope::DeclarationScope(Zone* zone, Scope* outer_scope, } ModuleScope::ModuleScope(DeclarationScope* script_scope, - AstValueFactory* ast_value_factory) - : DeclarationScope(ast_value_factory->zone(), script_scope, MODULE_SCOPE, - kModule) { - Zone* zone = ast_value_factory->zone(); - module_descriptor_ = new (zone) ModuleDescriptor(zone); + AstValueFactory* avfactory) + : DeclarationScope(avfactory->zone(), script_scope, MODULE_SCOPE, kModule), + module_descriptor_(new (avfactory->zone()) + ModuleDescriptor(avfactory->zone())) { set_language_mode(LanguageMode::kStrict); - DeclareThis(ast_value_factory); + DeclareThis(avfactory); } ModuleScope::ModuleScope(Isolate* isolate, Handle scope_info, AstValueFactory* avfactory) - : DeclarationScope(avfactory->zone(), MODULE_SCOPE, scope_info) { - Zone* zone = avfactory->zone(); - Handle module_info(scope_info->ModuleDescriptorInfo(), isolate); - + : DeclarationScope(avfactory->zone(), MODULE_SCOPE, scope_info), + module_descriptor_(nullptr) { set_language_mode(LanguageMode::kStrict); - module_descriptor_ = new (zone) ModuleDescriptor(zone); - - // Deserialize special exports. - Handle special_exports(module_info->special_exports(), isolate); - for (int i = 0, n = special_exports->length(); i < n; ++i) { - Handle serialized_entry( - ModuleInfoEntry::cast(special_exports->get(i)), isolate); - module_descriptor_->AddSpecialExport( - ModuleDescriptor::Entry::Deserialize(isolate, avfactory, - serialized_entry), - avfactory->zone()); - } - - // Deserialize regular exports. - module_descriptor_->DeserializeRegularExports(isolate, avfactory, - module_info); - - // Deserialize namespace imports. - Handle namespace_imports(module_info->namespace_imports(), - isolate); - for (int i = 0, n = namespace_imports->length(); i < n; ++i) { - Handle serialized_entry( - ModuleInfoEntry::cast(namespace_imports->get(i)), isolate); - module_descriptor_->AddNamespaceImport( - ModuleDescriptor::Entry::Deserialize(isolate, avfactory, - serialized_entry), - avfactory->zone()); - } - - // Deserialize regular imports. 
- Handle regular_imports(module_info->regular_imports(), isolate); - for (int i = 0, n = regular_imports->length(); i < n; ++i) { - Handle serialized_entry( - ModuleInfoEntry::cast(regular_imports->get(i)), isolate); - module_descriptor_->AddRegularImport(ModuleDescriptor::Entry::Deserialize( - isolate, avfactory, serialized_entry)); - } } Scope::Scope(Zone* zone, ScopeType scope_type, Handle scope_info) @@ -262,8 +198,11 @@ void DeclarationScope::SetDefaults() { force_eager_compilation_ = false; has_arguments_parameter_ = false; scope_uses_super_property_ = false; + has_checked_syntax_ = false; + has_this_reference_ = false; + has_this_declaration_ = + (is_function_scope() && !is_arrow_scope()) || is_module_scope(); has_rest_ = false; - sloppy_block_function_map_ = nullptr; receiver_ = nullptr; new_target_ = nullptr; function_ = nullptr; @@ -319,10 +258,6 @@ bool Scope::HasSimpleParameters() { return !scope->is_function_scope() || scope->has_simple_parameters(); } -bool DeclarationScope::ShouldEagerCompile() const { - return force_eager_compilation_ || should_eager_compile_; -} - void DeclarationScope::set_should_eager_compile() { should_eager_compile_ = !was_lazily_parsed_; } @@ -359,15 +294,16 @@ Scope* Scope::DeserializeScopeChain(Isolate* isolate, Zone* zone, Scope* outer_scope = nullptr; while (!scope_info.is_null()) { if (scope_info->scope_type() == WITH_SCOPE) { - // For scope analysis, debug-evaluate is equivalent to a with scope. - outer_scope = - new (zone) Scope(zone, WITH_SCOPE, handle(scope_info, isolate)); - - // TODO(yangguo): Remove once debug-evaluate properly keeps track of the - // function scope in which we are evaluating. if (scope_info->IsDebugEvaluateScope()) { + outer_scope = new (zone) + DeclarationScope(zone, FUNCTION_SCOPE, handle(scope_info, isolate)); outer_scope->set_is_debug_evaluate_scope(); + } else { + // For scope analysis, debug-evaluate is equivalent to a with scope. + outer_scope = + new (zone) Scope(zone, WITH_SCOPE, handle(scope_info, isolate)); } + } else if (scope_info->scope_type() == SCRIPT_SCOPE) { // If we reach a script scope, it's the outermost scope. Install the // scope info of this script context onto the existing script scope to @@ -455,19 +391,9 @@ const ModuleScope* Scope::AsModuleScope() const { return static_cast(this); } -int Scope::num_parameters() const { - return is_declaration_scope() ? AsDeclarationScope()->num_parameters() : 0; -} - void DeclarationScope::DeclareSloppyBlockFunction( - const AstRawString* name, Scope* scope, - SloppyBlockFunctionStatement* statement) { - if (sloppy_block_function_map_ == nullptr) { - sloppy_block_function_map_ = - new (zone()->New(sizeof(SloppyBlockFunctionMap))) - SloppyBlockFunctionMap(zone()); - } - sloppy_block_function_map_->Declare(zone(), name, scope, statement); + SloppyBlockFunctionStatement* sloppy_block_function) { + sloppy_block_functions_.Add(sloppy_block_function); } void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) { @@ -477,8 +403,7 @@ void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) { DCHECK(HasSimpleParameters() || is_block_scope() || is_being_lazily_parsed_); DCHECK_EQ(factory == nullptr, is_being_lazily_parsed_); - SloppyBlockFunctionMap* map = sloppy_block_function_map(); - if (map == nullptr) return; + if (sloppy_block_functions_.is_empty()) return; // In case of complex parameters the current scope is the body scope and the // parameters are stored in the outer scope. 
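
DeclareSloppyBlockFunction above now just appends to sloppy_block_functions_, an intrusive list in which each SloppyBlockFunctionStatement carries its own next_ link (the ast.h hunk adds src/base/threaded-list.h for this). A sketch of that list shape, simplified and not the actual base::ThreadedList interface:

// T must expose `T** next()`, returning the address of its intrusive link,
// like SloppyBlockFunctionStatement::next() in the hunk above.
template <typename T>
class ThreadedListSketch {
 public:
  void Add(T* node) {   // constant-time append at the tail
    *tail_ = node;
    tail_ = node->next();
  }
  bool is_empty() const { return head_ == nullptr; }
  T* head() const { return head_; }

 private:
  T* head_ = nullptr;
  T** tail_ = &head_;   // always points at the link to patch next
};
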
@@ -486,14 +411,17 @@ void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) { DCHECK(parameter_scope->is_function_scope() || is_eval_scope() || is_script_scope()); - // The declarations need to be added in the order they were seen, - // so accumulate declared names sorted by index. - ZoneMap names_to_declare(zone()); + DeclarationScope* decl_scope = this; + while (decl_scope->is_eval_scope()) { + decl_scope = decl_scope->outer_scope()->GetDeclarationScope(); + } + Scope* outer_scope = decl_scope->outer_scope(); // For each variable which is used as a function declaration in a sloppy // block, - for (ZoneHashMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) { - const AstRawString* name = static_cast(p->key); + for (SloppyBlockFunctionStatement* sloppy_block_function : + sloppy_block_functions_) { + const AstRawString* name = sloppy_block_function->name(); // If the variable wouldn't conflict with a lexical declaration // or parameter, @@ -504,79 +432,52 @@ void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) { continue; } - bool declaration_queued = false; - - // Write in assignments to var for each block-scoped function declaration - auto delegates = static_cast(p->value); - - DeclarationScope* decl_scope = this; - while (decl_scope->is_eval_scope()) { - decl_scope = decl_scope->outer_scope()->GetDeclarationScope(); - } - Scope* outer_scope = decl_scope->outer_scope(); - - for (SloppyBlockFunctionMap::Delegate* delegate = delegates; - delegate != nullptr; delegate = delegate->next()) { - // Check if there's a conflict with a lexical declaration - Scope* query_scope = delegate->scope()->outer_scope(); - Variable* var = nullptr; - bool should_hoist = true; - - // Note that we perform this loop for each delegate named 'name', - // which may duplicate work if those delegates share scopes. 
- // It is not sufficient to just do a Lookup on query_scope: for - // example, that does not prevent hoisting of the function in - // `{ let e; try {} catch (e) { function e(){} } }` - do { - var = query_scope->LookupInScopeOrScopeInfo(name); - if (var != nullptr && IsLexicalVariableMode(var->mode())) { - should_hoist = false; - break; - } - query_scope = query_scope->outer_scope(); - } while (query_scope != outer_scope); - - if (!should_hoist) continue; - - if (!declaration_queued) { - declaration_queued = true; - names_to_declare.insert({delegate->index(), name}); - } - - if (factory) { - DCHECK(!is_being_lazily_parsed_); - int pos = delegate->position(); - Assignment* assignment = factory->NewAssignment( - Token::ASSIGN, NewUnresolved(factory, name, pos), - delegate->scope()->NewUnresolved(factory, name, pos), pos); - assignment->set_lookup_hoisting_mode(LookupHoistingMode::kLegacySloppy); - Statement* statement = factory->NewExpressionStatement(assignment, pos); - delegate->set_statement(statement); + // Check if there's a conflict with a lexical declaration + Scope* query_scope = sloppy_block_function->scope()->outer_scope(); + Variable* var = nullptr; + bool should_hoist = true; + + // It is not sufficient to just do a Lookup on query_scope: for + // example, that does not prevent hoisting of the function in + // `{ let e; try {} catch (e) { function e(){} } }` + do { + var = query_scope->LookupInScopeOrScopeInfo(name); + if (var != nullptr && IsLexicalVariableMode(var->mode())) { + should_hoist = false; + break; } - } - } + query_scope = query_scope->outer_scope(); + } while (query_scope != outer_scope); - if (names_to_declare.empty()) return; + if (!should_hoist) continue; - for (const auto& index_and_name : names_to_declare) { - const AstRawString* name = index_and_name.second; if (factory) { DCHECK(!is_being_lazily_parsed_); - VariableProxy* proxy = factory->NewVariableProxy(name, NORMAL_VARIABLE); - auto declaration = factory->NewVariableDeclaration(kNoSourcePosition); + int pos = sloppy_block_function->position(); + bool ok = true; bool was_added; + auto declaration = factory->NewVariableDeclaration(pos); // Based on the preceding checks, it doesn't matter what we pass as // sloppy_mode_block_scope_function_redefinition. - bool ok = true; - DeclareVariable(declaration, proxy, VariableMode::kVar, NORMAL_VARIABLE, - Variable::DefaultInitializationFlag(VariableMode::kVar), - &was_added, nullptr, &ok); + Variable* var = DeclareVariable( + declaration, name, pos, VariableMode::kVar, NORMAL_VARIABLE, + Variable::DefaultInitializationFlag(VariableMode::kVar), &was_added, + nullptr, &ok); DCHECK(ok); + VariableProxy* source = + factory->NewVariableProxy(sloppy_block_function->var()); + VariableProxy* target = factory->NewVariableProxy(var); + Assignment* assignment = factory->NewAssignment( + sloppy_block_function->init(), target, source, pos); + assignment->set_lookup_hoisting_mode(LookupHoistingMode::kLegacySloppy); + Statement* statement = factory->NewExpressionStatement(assignment, pos); + sloppy_block_function->set_statement(statement); } else { DCHECK(is_being_lazily_parsed_); bool was_added; Variable* var = DeclareVariableName(name, VariableMode::kVar, &was_added); - var->set_maybe_assigned(); + if (sloppy_block_function->init() == Token::ASSIGN) + var->set_maybe_assigned(); } } } @@ -605,8 +506,7 @@ bool DeclarationScope::Analyze(ParseInfo* info) { // 1) top-level code, // 2) a function/eval/module on the top-level // 3) a function/eval in a scope that was already resolved. 
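
When hoisting a sloppy-mode block function, the rewritten loop above walks from the function's enclosing scope out to the hoisting target and refuses to hoist if any intermediate scope already has a lexical binding of the same name (which is why `{ let e; try {} catch (e) { function e(){} } }` does not hoist `e`). A sketch of that walk over simplified stand-in types:

#include <string>
#include <unordered_set>

struct ScopeSketch {
  ScopeSketch* outer = nullptr;
  std::unordered_set<std::string> lexical_names;  // let / const / class
};

// block_outer: the scope just outside the block containing the function.
// target_outer: the scope just outside the declaration scope hoisted into.
bool ShouldHoistSketch(ScopeSketch* block_outer, ScopeSketch* target_outer,
                       const std::string& name) {
  for (ScopeSketch* s = block_outer; s != target_outer; s = s->outer) {
    if (s->lexical_names.count(name) != 0) return false;  // lexical conflict
  }
  return true;
}
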
- DCHECK(scope->scope_type() == SCRIPT_SCOPE || - scope->outer_scope()->scope_type() == SCRIPT_SCOPE || + DCHECK(scope->is_script_scope() || scope->outer_scope()->is_script_scope() || scope->outer_scope()->already_resolved_); // The outer scope is never lazy. @@ -633,20 +533,16 @@ bool DeclarationScope::Analyze(ParseInfo* info) { } void DeclarationScope::DeclareThis(AstValueFactory* ast_value_factory) { - DCHECK(!already_resolved_); - DCHECK(is_declaration_scope()); DCHECK(has_this_declaration()); bool derived_constructor = IsDerivedConstructor(function_kind_); - bool was_added; - Variable* var = - Declare(zone(), ast_value_factory->this_string(), - derived_constructor ? VariableMode::kConst : VariableMode::kVar, - THIS_VARIABLE, - derived_constructor ? kNeedsInitialization : kCreatedInitialized, - kNotAssigned, &was_added); - DCHECK(was_added); - receiver_ = var; + + receiver_ = new (zone()) + Variable(this, ast_value_factory->this_string(), + derived_constructor ? VariableMode::kConst : VariableMode::kVar, + THIS_VARIABLE, + derived_constructor ? kNeedsInitialization : kCreatedInitialized, + kNotAssigned); } void DeclarationScope::DeclareArguments(AstValueFactory* ast_value_factory) { @@ -844,8 +740,9 @@ void Scope::ReplaceOuterScope(Scope* outer) { Variable* Scope::LookupInScopeInfo(const AstRawString* name, Scope* cache) { DCHECK(!scope_info_.is_null()); DCHECK_NULL(cache->variables_.Lookup(name)); + DisallowHeapAllocation no_gc; - Handle name_handle = name->string(); + String name_handle = *name->string(); // The Scope is backed up by ScopeInfo. This means it cannot operate in a // heap-independent mode, and all strings must be internalized immediately. So // it's ok to get the Handle here. @@ -859,12 +756,12 @@ Variable* Scope::LookupInScopeInfo(const AstRawString* name, Scope* cache) { { location = VariableLocation::CONTEXT; - index = ScopeInfo::ContextSlotIndex(scope_info_, name_handle, &mode, + index = ScopeInfo::ContextSlotIndex(*scope_info_, name_handle, &mode, &init_flag, &maybe_assigned_flag); found = index >= 0; } - if (!found && scope_type() == MODULE_SCOPE) { + if (!found && is_module_scope()) { location = VariableLocation::MODULE; index = scope_info_->ModuleIndex(name_handle, &mode, &init_flag, &maybe_assigned_flag); @@ -872,7 +769,7 @@ Variable* Scope::LookupInScopeInfo(const AstRawString* name, Scope* cache) { } if (!found) { - index = scope_info_->FunctionContextSlotIndex(*name_handle); + index = scope_info_->FunctionContextSlotIndex(name_handle); if (index < 0) return nullptr; // Nowhere found. Variable* var = AsDeclarationScope()->DeclareFunctionVar(name, cache); DCHECK_EQ(VariableMode::kConst, var->mode()); @@ -880,18 +777,14 @@ Variable* Scope::LookupInScopeInfo(const AstRawString* name, Scope* cache) { return cache->variables_.Lookup(name); } - VariableKind kind = NORMAL_VARIABLE; - if (location == VariableLocation::CONTEXT && - index == scope_info_->ReceiverContextSlotIndex()) { - kind = THIS_VARIABLE; + if (!is_module_scope()) { + DCHECK_NE(index, scope_info_->ReceiverContextSlotIndex()); } - // TODO(marja, rossberg): Correctly declare FUNCTION, CLASS, NEW_TARGET, and - // ARGUMENTS bindings as their corresponding VariableKind. 
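
LookupInScopeInfo above now works on a raw, unhandlified String under a `DisallowHeapAllocation no_gc` scope: while the raw pointer is live, nothing may allocate, and therefore nothing may trigger a GC that moves the object. A minimal stand-in for that kind of debug-only guard, not V8's actual PerThreadAssertScope:

#include <cassert>

class NoAllocationScopeSketch {
 public:
  NoAllocationScopeSketch() { ++depth_; }
  ~NoAllocationScopeSketch() { --depth_; }
  static bool IsAllowed() { return depth_ == 0; }

 private:
  static thread_local int depth_;
};
thread_local int NoAllocationScopeSketch::depth_ = 0;

// A debug allocator hook would then reject GC-unsafe allocations:
//   assert(NoAllocationScopeSketch::IsAllowed());
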
bool was_added; Variable* var = - cache->variables_.Declare(zone(), this, name, mode, kind, init_flag, - maybe_assigned_flag, &was_added); + cache->variables_.Declare(zone(), this, name, mode, NORMAL_VARIABLE, + init_flag, maybe_assigned_flag, &was_added); DCHECK(was_added); var->AllocateTo(location, index); return var; @@ -953,15 +846,29 @@ Variable* Scope::DeclareLocal(const AstRawString* name, VariableMode mode, mode == VariableMode::kVar || mode == VariableMode::kLet || mode == VariableMode::kConst); DCHECK(!GetDeclarationScope()->was_lazily_parsed()); - return Declare(zone(), name, mode, kind, init_flag, kNotAssigned, was_added); + Variable* var = + Declare(zone(), name, mode, kind, init_flag, kNotAssigned, was_added); + + // Pessimistically assume that top-level variables will be assigned and used. + // + // Top-level variables in a script can be accessed by other scripts or even + // become global properties. While this does not apply to top-level variables + // in a module (assuming they are not exported), we must still mark these as + // assigned because they might be accessed by a lazily parsed top-level + // function, which, for efficiency, we preparse without variable tracking. + if (is_script_scope() || is_module_scope()) { + if (mode != VariableMode::kConst) var->set_maybe_assigned(); + var->set_is_used(); + } + + return var; } -// TODO(leszeks): Avoid passing the proxy into here, passing the raw_name alone -// instead. Variable* Scope::DeclareVariable( - Declaration* declaration, VariableProxy* proxy, VariableMode mode, - VariableKind kind, InitializationFlag init, bool* was_added, - bool* sloppy_mode_block_scope_function_redefinition, bool* ok) { + Declaration* declaration, const AstRawString* name, int pos, + VariableMode mode, VariableKind kind, InitializationFlag init, + bool* was_added, bool* sloppy_mode_block_scope_function_redefinition, + bool* ok) { DCHECK(IsDeclaredVariableMode(mode)); DCHECK(!already_resolved_); DCHECK(!GetDeclarationScope()->is_being_lazily_parsed()); @@ -969,7 +876,7 @@ Variable* Scope::DeclareVariable( if (mode == VariableMode::kVar && !is_declaration_scope()) { return GetDeclarationScope()->DeclareVariable( - declaration, proxy, mode, kind, init, was_added, + declaration, name, pos, mode, kind, init, was_added, sloppy_mode_block_scope_function_redefinition, ok); } DCHECK(!is_catch_scope()); @@ -977,19 +884,7 @@ Variable* Scope::DeclareVariable( DCHECK(is_declaration_scope() || (IsLexicalVariableMode(mode) && is_block_scope())); - DCHECK_NOT_NULL(proxy->raw_name()); - const AstRawString* name = proxy->raw_name(); - - // Pessimistically assume that top-level variables will be assigned. - // - // Top-level variables in a script can be accessed by other scripts or even - // become global properties. While this does not apply to top-level variables - // in a module (assuming they are not exported), we must still mark these as - // assigned because they might be accessed by a lazily parsed top-level - // function, which, for efficiency, we preparse without variable tracking. - if (is_script_scope() || is_module_scope()) { - if (mode != VariableMode::kConst) proxy->set_is_assigned(); - } + DCHECK_NOT_NULL(name); Variable* var = LookupLocal(name); // Declare the variable in the declaration scope. @@ -1002,7 +897,9 @@ Variable* Scope::DeclareVariable( // The proxy is bound to a lookup variable to force a dynamic declaration // using the DeclareEvalVar or DeclareEvalFunction runtime functions. 
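
DeclareLocal above pessimistically marks script- and module-level variables as used and, unless they are const, as maybe-assigned, because other scripts or lazily parsed top-level functions can touch them without the parser ever seeing it. The effective rule, restated as a sketch over a stand-in variable type:

struct VariableSketch {
  bool is_const = false;
  bool maybe_assigned = false;
  bool is_used = false;
};

void MarkTopLevelPessimisticallySketch(VariableSketch* var,
                                       bool in_script_or_module_scope) {
  if (!in_script_or_module_scope) return;
  if (!var->is_const) var->maybe_assigned = true;  // may be written elsewhere
  var->is_used = true;                             // may be read elsewhere
}
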
DCHECK_EQ(NORMAL_VARIABLE, kind); - var = NonLocal(proxy->raw_name(), VariableMode::kDynamic); + var = NonLocal(name, VariableMode::kDynamic); + // Mark the var as used in case anyone outside the eval wants to use it. + var->set_is_used(); } else { // Declare the name. var = DeclareLocal(name, mode, kind, was_added, init); @@ -1029,16 +926,9 @@ Variable* Scope::DeclareVariable( // In harmony we treat re-declarations as early errors. See ES5 16 for a // definition of early errors. // - // Allow duplicate function decls for web compat, see bug 4693. If the - // duplication is allowed, then the var will show up in the - // SloppyBlockFunctionMap. - SloppyBlockFunctionMap* map = - GetDeclarationScope()->sloppy_block_function_map(); - *ok = - map != nullptr && declaration->IsFunctionDeclaration() && - declaration->AsFunctionDeclaration() - ->declares_sloppy_block_function() && - map->Lookup(const_cast(name), name->Hash()) != nullptr; + // Allow duplicate function decls for web compat, see bug 4693. + *ok = var->is_sloppy_block_function() && + kind == SLOPPY_BLOCK_FUNCTION_VARIABLE; *sloppy_mode_block_scope_function_redefinition = *ok; } } @@ -1055,7 +945,6 @@ Variable* Scope::DeclareVariable( // lead to repeated DeclareEvalVar or DeclareEvalFunction calls. decls_.Add(declaration); declaration->set_var(var); - proxy->BindTo(var); return var; } @@ -1079,12 +968,16 @@ Variable* Scope::DeclareVariableName(const AstRawString* name, Variable* var = DeclareLocal(name, mode, kind, was_added); if (!*was_added) { if (IsLexicalVariableMode(mode) || IsLexicalVariableMode(var->mode())) { - // Duplicate functions are allowed in the sloppy mode, but if this is not - // a function declaration, it's an error. This is an error PreParser - // hasn't previously detected. - return nullptr; + if (!var->is_sloppy_block_function() || + kind != SLOPPY_BLOCK_FUNCTION_VARIABLE) { + // Duplicate functions are allowed in the sloppy mode, but if this is + // not a function declaration, it's an error. This is an error PreParser + // hasn't previously detected. + return nullptr; + } + // Sloppy block function redefinition. } - if (mode == VariableMode::kVar) var->set_maybe_assigned(); + var->set_maybe_assigned(); } var->set_is_used(); return var; @@ -1142,36 +1035,61 @@ Variable* Scope::NewTemporary(const AstRawString* name, return var; } -Declaration* Scope::CheckConflictingVarDeclarations() { +Declaration* DeclarationScope::CheckConflictingVarDeclarations() { + if (has_checked_syntax_) return nullptr; for (Declaration* decl : decls_) { // Lexical vs lexical conflicts within the same scope have already been // captured in Parser::Declare. The only conflicts we still need to check // are lexical vs nested var. - Scope* current = nullptr; if (decl->IsVariableDeclaration() && decl->AsVariableDeclaration()->AsNested() != nullptr) { - current = decl->AsVariableDeclaration()->AsNested()->scope(); - } else if (is_eval_scope() && is_sloppy(language_mode())) { - if (IsLexicalVariableMode(decl->var()->mode())) continue; - current = outer_scope_; + Scope* current = decl->AsVariableDeclaration()->AsNested()->scope(); + DCHECK(decl->var()->mode() == VariableMode::kVar || + decl->var()->mode() == VariableMode::kDynamic); + // Iterate through all scopes until the declaration scope. + do { + // There is a conflict if there exists a non-VAR binding. 
+ if (current->is_catch_scope()) { + current = current->outer_scope(); + continue; + } + Variable* other_var = current->LookupLocal(decl->var()->raw_name()); + if (other_var != nullptr) { + DCHECK(IsLexicalVariableMode(other_var->mode())); + return decl; + } + current = current->outer_scope(); + } while (current != this); } - if (current == nullptr) continue; - DCHECK(decl->var()->mode() == VariableMode::kVar || - decl->var()->mode() == VariableMode::kDynamic); + } + + if (V8_LIKELY(!is_eval_scope())) return nullptr; + if (!is_sloppy(language_mode())) return nullptr; + + // Var declarations in sloppy eval are hoisted to the first non-eval + // declaration scope. Check for conflicts between the eval scope that + // declaration scope. + Scope* end = this; + do { + end = end->outer_scope_->GetDeclarationScope(); + } while (end->is_eval_scope()); + end = end->outer_scope_; + + for (Declaration* decl : decls_) { + if (IsLexicalVariableMode(decl->var()->mode())) continue; + Scope* current = outer_scope_; // Iterate through all scopes until and including the declaration scope. - while (true) { - // There is a conflict if there exists a non-VAR binding. + do { + // There is a conflict if there exists a non-VAR binding up to the + // declaration scope in which this sloppy-eval runs. Variable* other_var = current->LookupInScopeOrScopeInfo(decl->var()->raw_name()); if (other_var != nullptr && IsLexicalVariableMode(other_var->mode())) { + DCHECK(!current->is_catch_scope()); return decl; } - if (current->is_declaration_scope() && - !(current->is_eval_scope() && is_sloppy(current->language_mode()))) { - break; - } current = current->outer_scope(); - } + } while (current != end); } return nullptr; } @@ -1188,6 +1106,21 @@ const AstRawString* Scope::FindVariableDeclaredIn(Scope* scope, return nullptr; } +void DeclarationScope::DeserializeReceiver(AstValueFactory* ast_value_factory) { + if (is_script_scope()) { + DCHECK_NOT_NULL(receiver_); + return; + } + DCHECK(has_this_declaration()); + DeclareThis(ast_value_factory); + if (is_debug_evaluate_scope()) { + receiver_->AllocateTo(VariableLocation::LOOKUP, -1); + } else { + receiver_->AllocateTo(VariableLocation::CONTEXT, + scope_info_->ReceiverContextSlotIndex()); + } +} + bool DeclarationScope::AllocateVariables(ParseInfo* info) { // Module variables must be allocated before variable resolution // to ensure that UpdateNeedsHoleCheck() can detect import variables. @@ -1197,11 +1130,28 @@ bool DeclarationScope::AllocateVariables(ParseInfo* info) { DCHECK(info->pending_error_handler()->has_pending_error()); return false; } - AllocateVariablesRecursively(); + + // // Don't allocate variables of preparsed scopes. 
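For readers less familiar with the hoisting rules this hunk implements, here is a hedged sketch of the chain walk it performs (ScopeNode and HasVarConflict are made-up names, not V8 code): starting at the scope that syntactically contains a nested `var`, walk outward toward the scope the `var` hoists to, skip catch scopes, and report a conflict as soon as a lexical binding of the same name appears, as in `{ let x; { var x; } }`.

```cpp
#include <set>
#include <string>

// Hedged illustration of the conflict check above; types are hypothetical.
struct ScopeNode {
  ScopeNode* outer = nullptr;
  bool is_catch = false;
  std::set<std::string> lexical_names;  // let/const/class bindings
};

// `from` is the scope that contains the nested `var`; `hoist_target` is the
// declaration scope the `var` is hoisted to.
bool HasVarConflict(const ScopeNode* from, const ScopeNode* hoist_target,
                    const std::string& name) {
  for (const ScopeNode* s = from; s != hoist_target; s = s->outer) {
    if (s->is_catch) continue;  // catch scopes are skipped, as in the hunk above
    if (s->lexical_names.count(name) > 0) return true;
  }
  return false;
}
```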
+ if (!was_lazily_parsed()) AllocateVariablesRecursively(); return true; } +bool Scope::HasThisReference() const { + if (is_declaration_scope() && AsDeclarationScope()->has_this_reference()) { + return true; + } + + for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { + if (!scope->is_declaration_scope() || + !scope->AsDeclarationScope()->has_this_declaration()) { + if (scope->HasThisReference()) return true; + } + } + + return false; +} + bool Scope::AllowsLazyParsingWithoutUnresolvedVariables( const Scope* outer) const { // If none of the outer scopes need to decide whether to context allocate @@ -1227,7 +1177,10 @@ bool Scope::AllowsLazyParsingWithoutUnresolvedVariables( } bool DeclarationScope::AllowsLazyCompilation() const { - return !force_eager_compilation_; + // Functions which force eager compilation and class member initializer + // functions are not lazily compilable. + return !force_eager_compilation_ && + !IsClassMembersInitializerFunction(function_kind()); } int Scope::ContextChainLength(Scope* scope) const { @@ -1294,9 +1247,9 @@ bool Scope::ShouldBanArguments() { DeclarationScope* Scope::GetReceiverScope() { Scope* scope = this; - while (!scope->is_script_scope() && - (!scope->is_function_scope() || - scope->AsDeclarationScope()->is_arrow_scope())) { + while (!scope->is_declaration_scope() || + (!scope->is_script_scope() && + !scope->AsDeclarationScope()->has_this_declaration())) { scope = scope->outer_scope(); } return scope->AsDeclarationScope(); @@ -1310,77 +1263,103 @@ Scope* Scope::GetOuterScopeWithContext() { return scope; } -void Scope::CollectNonLocals(DeclarationScope* max_outer_scope, - Isolate* isolate, ParseInfo* info, - Handle* non_locals) { - // Module variables must be allocated before variable resolution - // to ensure that UpdateNeedsHoleCheck() can detect import variables. - if (is_module_scope()) AsModuleScope()->AllocateModuleVariables(); +namespace { +bool WasLazilyParsed(Scope* scope) { + return scope->is_declaration_scope() && + scope->AsDeclarationScope()->was_lazily_parsed(); +} - // Lazy parsed declaration scopes are already partially analyzed. If there are - // unresolved references remaining, they just need to be resolved in outer - // scopes. - Scope* lookup = - is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed() - ? outer_scope() - : this; - - for (VariableProxy* proxy : unresolved_list_) { - DCHECK(!proxy->is_resolved()); - Variable* var = - Lookup(proxy, lookup, max_outer_scope->outer_scope()); - if (var == nullptr) { - *non_locals = StringSet::Add(isolate, *non_locals, proxy->name()); +} // namespace + +template +void Scope::ForEach(FunctionType callback) { + Scope* scope = this; + while (true) { + Iteration iteration = callback(scope); + // Try to descend into inner scopes first. + if ((iteration == Iteration::kDescend) && scope->inner_scope_ != nullptr) { + scope = scope->inner_scope_; } else { - // In this case we need to leave scopes in a way that they can be - // allocated. If we resolved variables from lazy parsed scopes, we need - // to context allocate the var. - ResolveTo(info, proxy, var); - if (!var->is_dynamic() && lookup != this) var->ForceContextAllocation(); + // Find the next outer scope with a sibling. + while (scope->sibling_ == nullptr) { + if (scope == this) return; + scope = scope->outer_scope_; + } + if (scope == this) return; + scope = scope->sibling_; } } +} - // Clear unresolved_list_ as it's in an inconsistent state. 
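The new Scope::ForEach above replaces recursive scope walks with an explicit loop over first-child/next-sibling links. Below is a self-contained, hedged sketch of the same traversal pattern over a generic tree; the Node type and the SumTree demo are illustrative stand-ins, not V8 types.

```cpp
enum class Iteration { kContinue, kDescend };

// A first-child / next-sibling tree; the builder is expected to link parents.
struct Node {
  Node* parent = nullptr;
  Node* first_child = nullptr;
  Node* next_sibling = nullptr;
  int value = 0;
};

// Depth-first walk without recursion: descend into children when the callback
// asks for it, otherwise climb back up until a sibling is available.
template <typename Fn>
void ForEachNode(Node* root, Fn callback) {
  Node* node = root;
  while (true) {
    Iteration it = callback(node);
    if (it == Iteration::kDescend && node->first_child != nullptr) {
      node = node->first_child;
    } else {
      while (node->next_sibling == nullptr) {
        if (node == root) return;
        node = node->parent;
      }
      if (node == root) return;
      node = node->next_sibling;
    }
  }
}

// Example: sum every node's value while still visiting the whole tree.
int SumTree(Node* root) {
  int sum = 0;
  ForEachNode(root, [&sum](Node* n) {
    sum += n->value;
    return Iteration::kDescend;
  });
  return sum;
}
```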
- unresolved_list_.Clear(); +void Scope::CollectNonLocals(DeclarationScope* max_outer_scope, + Isolate* isolate, ParseInfo* info, + Handle* non_locals) { + this->ForEach([max_outer_scope, isolate, info, non_locals](Scope* scope) { + // Module variables must be allocated before variable resolution + // to ensure that UpdateNeedsHoleCheck() can detect import variables. + if (scope->is_module_scope()) { + scope->AsModuleScope()->AllocateModuleVariables(); + } - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - scope->CollectNonLocals(max_outer_scope, isolate, info, non_locals); - } + // Lazy parsed declaration scopes are already partially analyzed. If there + // are unresolved references remaining, they just need to be resolved in + // outer scopes. + Scope* lookup = WasLazilyParsed(scope) ? scope->outer_scope() : scope; + + for (VariableProxy* proxy : scope->unresolved_list_) { + DCHECK(!proxy->is_resolved()); + Variable* var = + Lookup(proxy, lookup, max_outer_scope->outer_scope()); + if (var == nullptr) { + *non_locals = StringSet::Add(isolate, *non_locals, proxy->name()); + } else { + // In this case we need to leave scopes in a way that they can be + // allocated. If we resolved variables from lazy parsed scopes, we need + // to context allocate the var. + scope->ResolveTo(info, proxy, var); + if (!var->is_dynamic() && lookup != scope) + var->ForceContextAllocation(); + } + } + + // Clear unresolved_list_ as it's in an inconsistent state. + scope->unresolved_list_.Clear(); + return Iteration::kDescend; + }); } void Scope::AnalyzePartially(DeclarationScope* max_outer_scope, AstNodeFactory* ast_node_factory, UnresolvedList* new_unresolved_list) { - DCHECK_IMPLIES(is_declaration_scope(), - !AsDeclarationScope()->was_lazily_parsed()); - - for (VariableProxy* proxy = unresolved_list_.first(); proxy != nullptr; - proxy = proxy->next_unresolved()) { - DCHECK(!proxy->is_resolved()); - Variable* var = - Lookup(proxy, this, max_outer_scope->outer_scope()); - if (var == nullptr) { - // Don't copy unresolved references to the script scope, unless it's a - // reference to a private name or method. In that case keep it so we - // can fail later. - if (!max_outer_scope->outer_scope()->is_script_scope() || - proxy->IsPrivateName()) { - VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy); - new_unresolved_list->Add(copy); + this->ForEach([max_outer_scope, ast_node_factory, + new_unresolved_list](Scope* scope) { + DCHECK_IMPLIES(scope->is_declaration_scope(), + !scope->AsDeclarationScope()->was_lazily_parsed()); + + for (VariableProxy* proxy = scope->unresolved_list_.first(); + proxy != nullptr; proxy = proxy->next_unresolved()) { + DCHECK(!proxy->is_resolved()); + Variable* var = + Lookup(proxy, scope, max_outer_scope->outer_scope()); + if (var == nullptr) { + // Don't copy unresolved references to the script scope, unless it's a + // reference to a private name or method. In that case keep it so we + // can fail later. + if (!max_outer_scope->outer_scope()->is_script_scope() || + proxy->IsPrivateName()) { + VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy); + new_unresolved_list->Add(copy); + } + } else { + var->set_is_used(); + if (proxy->is_assigned()) var->set_maybe_assigned(); } - } else { - var->set_is_used(); - if (proxy->is_assigned()) var->set_maybe_assigned(); } - } - // Clear unresolved_list_ as it's in an inconsistent state. 
- unresolved_list_.Clear(); - - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - scope->AnalyzePartially(max_outer_scope, ast_node_factory, - new_unresolved_list); - } + // Clear unresolved_list_ as it's in an inconsistent state. + scope->unresolved_list_.Clear(); + return Iteration::kDescend; + }); } Handle DeclarationScope::CollectNonLocals( @@ -1399,7 +1378,7 @@ void DeclarationScope::ResetAfterPreparsing(AstValueFactory* ast_value_factory, locals_.Clear(); inner_scope_ = nullptr; unresolved_list_.Clear(); - sloppy_block_function_map_ = nullptr; + sloppy_block_functions_.Clear(); rare_data_ = nullptr; has_rest_ = false; @@ -1428,14 +1407,25 @@ void DeclarationScope::ResetAfterPreparsing(AstValueFactory* ast_value_factory, was_lazily_parsed_ = !aborted; } -void Scope::SavePreparseData(Parser* parser) { - if (PreparseDataBuilder::ScopeIsSkippableFunctionScope(this)) { - AsDeclarationScope()->SavePreparseDataForDeclarationScope(parser); - } +bool Scope::IsSkippableFunctionScope() { + // Lazy non-arrow function scopes are skippable. Lazy functions are exactly + // those Scopes which have their own PreparseDataBuilder object. This + // logic ensures that the scope allocation data is consistent with the + // skippable function data (both agree on where the lazy function boundaries + // are). + if (!is_function_scope()) return false; + DeclarationScope* declaration_scope = AsDeclarationScope(); + return !declaration_scope->is_arrow_scope() && + declaration_scope->preparse_data_builder() != nullptr; +} - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - scope->SavePreparseData(parser); - } +void Scope::SavePreparseData(Parser* parser) { + this->ForEach([parser](Scope* scope) { + if (scope->IsSkippableFunctionScope()) { + scope->AsDeclarationScope()->SavePreparseDataForDeclarationScope(parser); + } + return Iteration::kDescend; + }); } void DeclarationScope::SavePreparseDataForDeclarationScope(Parser* parser) { @@ -1685,27 +1675,26 @@ void Scope::Print(int n) { } void Scope::CheckScopePositions() { - // Visible leaf scopes must have real positions. - if (!is_hidden() && inner_scope_ == nullptr) { - DCHECK_NE(kNoSourcePosition, start_position()); - DCHECK_NE(kNoSourcePosition, end_position()); - } - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - scope->CheckScopePositions(); - } + this->ForEach([](Scope* scope) { + // Visible leaf scopes must have real positions. + if (!scope->is_hidden() && scope->inner_scope_ == nullptr) { + DCHECK_NE(kNoSourcePosition, scope->start_position()); + DCHECK_NE(kNoSourcePosition, scope->end_position()); + } + return Iteration::kDescend; + }); } void Scope::CheckZones() { DCHECK(!needs_migration_); - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - if (scope->is_declaration_scope() && - scope->AsDeclarationScope()->was_lazily_parsed()) { + this->ForEach([](Scope* scope) { + if (WasLazilyParsed(scope)) { DCHECK_NULL(scope->zone()); DCHECK_NULL(scope->inner_scope_); - continue; + return Iteration::kContinue; } - scope->CheckZones(); - } + return Iteration::kDescend; + }); } #endif // DEBUG @@ -1803,16 +1792,6 @@ template Variable* Scope::Lookup( VariableProxy* proxy, Scope* scope, Scope* outer_scope_end, Scope* entry_point, bool force_context_allocation); -namespace { -bool CanBeShadowed(Scope* scope, Variable* var) { - if (var == nullptr) return false; - - // "this" can't be shadowed by "eval"-introduced bindings or by "with" scopes. 
- // TODO(wingo): There are other variables in this category; add them. - return !var->is_this(); -} -}; // namespace - Variable* Scope::LookupWith(VariableProxy* proxy, Scope* scope, Scope* outer_scope_end, Scope* entry_point, bool force_context_allocation) { @@ -1825,7 +1804,7 @@ Variable* Scope::LookupWith(VariableProxy* proxy, Scope* scope, : Lookup(proxy, scope->outer_scope_, outer_scope_end, entry_point); - if (!CanBeShadowed(scope, var)) return var; + if (var == nullptr) return var; // The current scope is a with scope, so the variable binding can not be // statically resolved. However, note that it was necessary to do a lookup @@ -1859,7 +1838,7 @@ Variable* Scope::LookupSloppyEval(VariableProxy* proxy, Scope* scope, nullptr, force_context_allocation) : Lookup(proxy, scope->outer_scope_, outer_scope_end, entry); - if (!CanBeShadowed(scope, var)) return var; + if (var == nullptr) return var; // A variable binding may have been found in an outer scope, but the current // scope makes a sloppy 'eval' call, so the found variable may not be the @@ -1949,12 +1928,6 @@ void UpdateNeedsHoleCheck(Variable* var, VariableProxy* proxy, Scope* scope) { return SetNeedsHoleCheck(var, proxy); } - if (var->is_this()) { - DCHECK(IsDerivedConstructor(scope->GetClosureScope()->function_kind())); - // TODO(littledan): implement 'this' hole check elimination. - return SetNeedsHoleCheck(var, proxy); - } - // We should always have valid source positions. DCHECK_NE(var->initializer_position(), kNoSourcePosition); DCHECK_NE(proxy->position(), kNoSourcePosition); @@ -1994,16 +1967,50 @@ void Scope::ResolveTo(ParseInfo* info, VariableProxy* proxy, Variable* var) { proxy->BindTo(var); } +bool Scope::ResolvePreparsedVariable(VariableProxy* proxy, Scope* scope, + Scope* end) { + // Resolve the variable in all parsed scopes to force context allocation. + for (; scope != end; scope = scope->outer_scope_) { + Variable* var = scope->LookupLocal(proxy->raw_name()); + if (var != nullptr) { + var->set_is_used(); + if (!var->is_dynamic()) { + var->ForceContextAllocation(); + if (proxy->is_assigned()) var->set_maybe_assigned(); + } + return true; + } + } + + if (!proxy->IsPrivateName()) return true; + + // If we're resolving a private name, throw an exception of we didn't manage + // to resolve. In case of eval, also look in all outer scope-info backed + // scopes except for the script scope. Don't throw an exception if a reference + // was found. + Scope* start = scope; + for (; !scope->is_script_scope(); scope = scope->outer_scope_) { + if (scope->LookupInScopeInfo(proxy->raw_name(), start) != nullptr) { + return true; + } + } + + return false; +} + bool Scope::ResolveVariablesRecursively(ParseInfo* info) { DCHECK(info->script_scope()->is_script_scope()); // Lazy parsed declaration scopes are already partially analyzed. If there are // unresolved references remaining, they just need to be resolved in outer // scopes. - if (is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()) { + if (WasLazilyParsed(this)) { DCHECK_EQ(variables_.occupancy(), 0); + Scope* end = info->scope(); + // Resolve in all parsed scopes except for the script scope. 
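The new ResolvePreparsedVariable above only has fully parsed scopes to work with, so all it can do is find the binding in one of them and force it into a context slot so the skipped (preparsed) inner function can still reach it at runtime. A hedged, self-contained sketch of that idea, using hypothetical types rather than V8's:

```cpp
#include <map>
#include <string>

// Illustration only: ParsedScope and ForceContextSlot are invented names.
struct ParsedScope {
  ParsedScope* outer = nullptr;
  std::map<std::string, bool /*context_allocated*/> locals;
};

// Walk the parsed scopes from `scope` up to (but not including) `end`. If the
// name is found, force it into the context so a lazily parsed inner function
// can look it up at runtime; report whether resolution succeeded.
bool ForceContextSlot(ParsedScope* scope, ParsedScope* end,
                      const std::string& name) {
  for (; scope != end; scope = scope->outer) {
    auto it = scope->locals.find(name);
    if (it != scope->locals.end()) {
      it->second = true;  // context-allocate instead of using a stack slot
      return true;
    }
  }
  // Unresolved: per the hunk above, V8 only treats this as an error for
  // private names; ordinary names may still resolve at runtime.
  return false;
}
```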
+ if (!end->is_script_scope()) end = end->outer_scope(); + for (VariableProxy* proxy : unresolved_list_) { - Variable* var = Lookup(proxy, outer_scope(), nullptr); - if (var == nullptr) { + if (!ResolvePreparsedVariable(proxy, outer_scope(), end)) { info->pending_error_handler()->ReportMessageAt( proxy->position(), proxy->position() + 1, MessageTemplate::kInvalidPrivateFieldResolution, proxy->raw_name(), @@ -2011,11 +2018,6 @@ bool Scope::ResolveVariablesRecursively(ParseInfo* info) { DCHECK(proxy->IsPrivateName()); return false; } - if (!var->is_dynamic()) { - var->set_is_used(); - var->ForceContextAllocation(); - if (proxy->is_assigned()) var->set_maybe_assigned(); - } } } else { // Resolve unresolved variables for this scope. @@ -2037,7 +2039,7 @@ bool Scope::MustAllocate(Variable* var) { // Give var a read/write use if there is a chance it might be accessed // via an eval() call. This is only possible if the variable has a // visible name. - if ((var->is_this() || !var->raw_name()->IsEmpty()) && + if (!var->raw_name()->IsEmpty() && (inner_scope_calls_eval_ || is_catch_scope() || is_script_scope())) { var->set_is_used(); if (inner_scope_calls_eval_) var->set_maybe_assigned(); @@ -2118,18 +2120,15 @@ void DeclarationScope::AllocateParameterLocals() { } void DeclarationScope::AllocateParameter(Variable* var, int index) { - if (MustAllocate(var)) { - if (has_forced_context_allocation_for_parameters() || - MustAllocateInContext(var)) { - DCHECK(var->IsUnallocated() || var->IsContextSlot()); - if (var->IsUnallocated()) { - AllocateHeapSlot(var); - } - } else { - DCHECK(var->IsUnallocated() || var->IsParameter()); - if (var->IsUnallocated()) { - var->AllocateTo(VariableLocation::PARAMETER, index); - } + if (!MustAllocate(var)) return; + if (has_forced_context_allocation_for_parameters() || + MustAllocateInContext(var)) { + DCHECK(var->IsUnallocated() || var->IsContextSlot()); + if (var->IsUnallocated()) AllocateHeapSlot(var); + } else { + DCHECK(var->IsUnallocated() || var->IsParameter()); + if (var->IsUnallocated()) { + var->AllocateTo(VariableLocation::PARAMETER, index); } } } @@ -2142,7 +2141,7 @@ void DeclarationScope::AllocateReceiver() { } void Scope::AllocateNonParameterLocal(Variable* var) { - DCHECK(var->scope() == this); + DCHECK_EQ(var->scope(), this); if (var->IsUnallocated() && MustAllocate(var)) { if (MustAllocateInContext(var)) { AllocateHeapSlot(var); @@ -2201,51 +2200,47 @@ void ModuleScope::AllocateModuleVariables() { } void Scope::AllocateVariablesRecursively() { - DCHECK(!already_resolved_); - - // Don't allocate variables of preparsed scopes. - if (is_declaration_scope() && AsDeclarationScope()->was_lazily_parsed()) { - return; - } - - // Allocate variables for inner scopes. - for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) { - scope->AllocateVariablesRecursively(); - } - - DCHECK(!already_resolved_); - DCHECK_EQ(Context::MIN_CONTEXT_SLOTS, num_heap_slots_); + this->ForEach([](Scope* scope) -> Iteration { + DCHECK(!scope->already_resolved_); + if (WasLazilyParsed(scope)) return Iteration::kContinue; + DCHECK_EQ(Context::MIN_CONTEXT_SLOTS, scope->num_heap_slots_); + + // Allocate variables for this scope. + // Parameters must be allocated first, if any. 
+ if (scope->is_declaration_scope()) { + if (scope->is_function_scope()) { + scope->AsDeclarationScope()->AllocateParameterLocals(); + } + scope->AsDeclarationScope()->AllocateReceiver(); + } + scope->AllocateNonParameterLocalsAndDeclaredGlobals(); + + // Force allocation of a context for this scope if necessary. For a 'with' + // scope and for a function scope that makes an 'eval' call we need a + // context, even if no local variables were statically allocated in the + // scope. Likewise for modules and function scopes representing asm.js + // modules. Also force a context, if the scope is stricter than the outer + // scope. + bool must_have_context = + scope->is_with_scope() || scope->is_module_scope() || + scope->IsAsmModule() || scope->ForceContextForLanguageMode() || + (scope->is_function_scope() && + scope->AsDeclarationScope()->calls_sloppy_eval()) || + (scope->is_block_scope() && scope->is_declaration_scope() && + scope->AsDeclarationScope()->calls_sloppy_eval()); - // Allocate variables for this scope. - // Parameters must be allocated first, if any. - if (is_declaration_scope()) { - if (is_function_scope()) { - AsDeclarationScope()->AllocateParameterLocals(); + // If we didn't allocate any locals in the local context, then we only + // need the minimal number of slots if we must have a context. + if (scope->num_heap_slots_ == Context::MIN_CONTEXT_SLOTS && + !must_have_context) { + scope->num_heap_slots_ = 0; } - AsDeclarationScope()->AllocateReceiver(); - } - AllocateNonParameterLocalsAndDeclaredGlobals(); - - // Force allocation of a context for this scope if necessary. For a 'with' - // scope and for a function scope that makes an 'eval' call we need a context, - // even if no local variables were statically allocated in the scope. - // Likewise for modules and function scopes representing asm.js modules. - // Also force a context, if the scope is stricter than the outer scope. - bool must_have_context = - is_with_scope() || is_module_scope() || IsAsmModule() || - ForceContextForLanguageMode() || - (is_function_scope() && AsDeclarationScope()->calls_sloppy_eval()) || - (is_block_scope() && is_declaration_scope() && - AsDeclarationScope()->calls_sloppy_eval()); - - // If we didn't allocate any locals in the local context, then we only - // need the minimal number of slots if we must have a context. - if (num_heap_slots_ == Context::MIN_CONTEXT_SLOTS && !must_have_context) { - num_heap_slots_ = 0; - } - - // Allocation done. - DCHECK(num_heap_slots_ == 0 || num_heap_slots_ >= Context::MIN_CONTEXT_SLOTS); + + // Allocation done. 
+ DCHECK(scope->num_heap_slots_ == 0 || + scope->num_heap_slots_ >= Context::MIN_CONTEXT_SLOTS); + return Iteration::kDescend; + }); } void Scope::AllocateScopeInfosRecursively(Isolate* isolate, diff --git a/deps/v8/src/ast/scopes.h b/deps/v8/src/ast/scopes.h index 971cfc519b06b0..732d759757fd44 100644 --- a/deps/v8/src/ast/scopes.h +++ b/deps/v8/src/ast/scopes.h @@ -8,6 +8,8 @@ #include "src/ast/ast.h" #include "src/base/compiler-specific.h" #include "src/base/hashmap.h" +#include "src/base/threaded-list.h" +#include "src/function-kind.h" #include "src/globals.h" #include "src/objects.h" #include "src/pointer-with-payload.h" @@ -43,37 +45,6 @@ class VariableMap: public ZoneHashMap { void Add(Zone* zone, Variable* var); }; - -// Sloppy block-scoped function declarations to var-bind -class SloppyBlockFunctionMap : public ZoneHashMap { - public: - class Delegate : public ZoneObject { - public: - Delegate(Scope* scope, SloppyBlockFunctionStatement* statement, int index) - : scope_(scope), statement_(statement), next_(nullptr), index_(index) {} - void set_statement(Statement* statement); - - void set_next(Delegate* next) { next_ = next; } - Delegate* next() const { return next_; } - Scope* scope() const { return scope_; } - int index() const { return index_; } - int position() const { return statement_->position(); } - - private: - Scope* scope_; - SloppyBlockFunctionStatement* statement_; - Delegate* next_; - int index_; - }; - - explicit SloppyBlockFunctionMap(Zone* zone); - void Declare(Zone* zone, const AstRawString* name, Scope* scope, - SloppyBlockFunctionStatement* statement); - - private: - int count_; -}; - class Scope; template <> @@ -111,9 +82,6 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { typedef base::ThreadedList UnresolvedList; - // TODO(verwaest): Is this needed on Scope? - int num_parameters() const; - DeclarationScope* AsDeclarationScope(); const DeclarationScope* AsDeclarationScope() const; ModuleScope* AsModuleScope(); @@ -225,8 +193,8 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { VariableKind kind, bool* was_added, InitializationFlag init_flag = kCreatedInitialized); - Variable* DeclareVariable(Declaration* declaration, VariableProxy* proxy, - VariableMode mode, VariableKind kind, + Variable* DeclareVariable(Declaration* declaration, const AstRawString* name, + int pos, VariableMode mode, VariableKind kind, InitializationFlag init, bool* was_added, bool* sloppy_mode_block_scope_function_redefinition, bool* ok); @@ -280,14 +248,6 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { // TODO(verwaest): Move to DeclarationScope? Variable* NewTemporary(const AstRawString* name); - // --------------------------------------------------------------------------- - // Illegal redeclaration support. - - // Check if the scope has conflicting var - // declarations, i.e. a var declaration that has been hoisted from a nested - // scope over a let binding of the same name. - Declaration* CheckConflictingVarDeclarations(); - // Find variable with (variable->mode() <= |mode_limit|) that was declared in // |scope|. This is used to catch patterns like `try{}catch(e){let e;}` and // function([e]) { let e }, which are errors even though the two 'e's are each @@ -421,6 +381,33 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { return num_heap_slots() > 0; } + // Use Scope::ForEach for depth first traversal of scopes. 
+ // Before: + // void Scope::VisitRecursively() { + // DoSomething(); + // for (Scope* s = inner_scope_; s != nullptr; s = s->sibling_) { + // if (s->ShouldContinue()) continue; + // s->VisitRecursively(); + // } + // } + // + // After: + // void Scope::VisitIteratively() { + // this->ForEach([](Scope* s) { + // s->DoSomething(); + // return s->ShouldContinue() ? kContinue : kDescend; + // }); + // } + template + V8_INLINE void ForEach(FunctionType callback); + enum Iteration { + // Continue the iteration on the same level, do not recurse/descent into + // inner scopes. + kContinue, + // Recurse/descend into inner scopes. + kDescend + }; + // --------------------------------------------------------------------------- // Accessors. @@ -488,6 +475,8 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { // Find the innermost outer scope that needs a context. Scope* GetOuterScopeWithContext(); + bool HasThisReference() const; + // Analyze() must have been called once to create the ScopeInfo. Handle scope_info() const { DCHECK(!scope_info_.is_null()); @@ -513,6 +502,7 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { bool HasSimpleParameters(); void set_is_debug_evaluate_scope() { is_debug_evaluate_scope_ = true; } bool is_debug_evaluate_scope() const { return is_debug_evaluate_scope_; } + bool IsSkippableFunctionScope(); bool RemoveInnerScope(Scope* inner_scope) { DCHECK_NOT_NULL(inner_scope); @@ -599,6 +589,8 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { static Variable* LookupSloppyEval(VariableProxy* proxy, Scope* scope, Scope* outer_scope_end, Scope* entry_point, bool force_context_allocation); + static bool ResolvePreparsedVariable(VariableProxy* proxy, Scope* scope, + Scope* end); void ResolveTo(ParseInfo* info, VariableProxy* proxy, Variable* var); V8_WARN_UNUSED_RESULT bool ResolveVariable(ParseInfo* info, VariableProxy* proxy); @@ -618,14 +610,15 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) { // Variable allocation. void AllocateStackSlot(Variable* var); - void AllocateHeapSlot(Variable* var); + V8_INLINE void AllocateHeapSlot(Variable* var); void AllocateNonParameterLocal(Variable* var); void AllocateDeclaredGlobal(Variable* var); - void AllocateNonParameterLocalsAndDeclaredGlobals(); + V8_INLINE void AllocateNonParameterLocalsAndDeclaredGlobals(); void AllocateVariablesRecursively(); void AllocateScopeInfosRecursively(Isolate* isolate, MaybeHandle outer_scope); + void AllocateDebuggerScopeInfos(Isolate* isolate, MaybeHandle outer_scope); @@ -766,6 +759,8 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { return var; } + void DeserializeReceiver(AstValueFactory* ast_value_factory); + #ifdef DEBUG void set_is_being_lazily_parsed(bool is_being_lazily_parsed) { is_being_lazily_parsed_ = is_being_lazily_parsed; @@ -779,7 +774,23 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { zone_ = zone; } - bool ShouldEagerCompile() const; + // --------------------------------------------------------------------------- + // Illegal redeclaration support. + + // Check if the scope has conflicting var + // declarations, i.e. a var declaration that has been hoisted from a nested + // scope over a let binding of the same name. 
+ Declaration* CheckConflictingVarDeclarations(); + + void set_has_checked_syntax(bool has_checked_syntax) { + has_checked_syntax_ = has_checked_syntax; + } + bool has_checked_syntax() const { return has_checked_syntax_; } + + bool ShouldEagerCompile() const { + return force_eager_compilation_ || should_eager_compile_; + } + void set_should_eager_compile(); void SetScriptScopeInfo(Handle scope_info) { @@ -833,17 +844,12 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { // The variable corresponding to the 'this' value. Variable* receiver() { - DCHECK(has_this_declaration()); + DCHECK(has_this_declaration() || is_script_scope()); DCHECK_NOT_NULL(receiver_); return receiver_; } - // TODO(wingo): Add a GLOBAL_SCOPE scope type which will lexically allocate - // "this" (and no other variable) on the native context. Script scopes then - // will not have a "this" declaration. - bool has_this_declaration() const { - return (is_function_scope() && !is_arrow_scope()) || is_module_scope(); - } + bool has_this_declaration() const { return has_this_declaration_; } // The variable corresponding to the 'new.target' value. Variable* new_target_var() { return new_target_; } @@ -935,17 +941,12 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { void AddLocal(Variable* var); void DeclareSloppyBlockFunction( - const AstRawString* name, Scope* scope, - SloppyBlockFunctionStatement* statement = nullptr); + SloppyBlockFunctionStatement* sloppy_block_function); - // Go through sloppy_block_function_map_ and hoist those (into this scope) + // Go through sloppy_block_functions_ and hoist those (into this scope) // which should be hoisted. void HoistSloppyBlockFunctions(AstNodeFactory* factory); - SloppyBlockFunctionMap* sloppy_block_function_map() { - return sloppy_block_function_map_; - } - // Compute top scope and allocate variables. For lazy compilation the top // scope only contains the single lazily compiled function, so this // doesn't re-allocate variables repeatedly. @@ -988,9 +989,9 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { void PrintParameters(); #endif - void AllocateLocals(); - void AllocateParameterLocals(); - void AllocateReceiver(); + V8_INLINE void AllocateLocals(); + V8_INLINE void AllocateParameterLocals(); + V8_INLINE void AllocateReceiver(); void ResetAfterPreparsing(AstValueFactory* ast_value_factory, bool aborted); @@ -1020,8 +1021,15 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { return preparse_data_builder_; } + void set_has_this_reference() { has_this_reference_ = true; } + bool has_this_reference() const { return has_this_reference_; } + void UsesThis() { + set_has_this_reference(); + GetReceiverScope()->receiver()->ForceContextAllocation(); + } + private: - void AllocateParameter(Variable* var, int index); + V8_INLINE void AllocateParameter(Variable* var, int index); // Resolve and fill in the allocation information for all variables // in this scopes. Must be called *after* all scopes have been @@ -1055,16 +1063,19 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope { #endif bool is_skipped_function_ : 1; bool has_inferred_function_name_ : 1; - - int num_parameters_ = 0; + bool has_checked_syntax_ : 1; + bool has_this_reference_ : 1; + bool has_this_declaration_ : 1; // If the scope is a function scope, this is the function kind. const FunctionKind function_kind_; + int num_parameters_ = 0; + // Parameter list in source order. 
ZonePtrList params_; // Map of function names to lists of functions defined in sloppy blocks - SloppyBlockFunctionMap* sloppy_block_function_map_; + base::ThreadedList sloppy_block_functions_; // Convenience variable. Variable* receiver_; // Function variable, if any; function scopes only. @@ -1128,27 +1139,21 @@ Scope::Snapshot::Snapshot(Scope* scope) class ModuleScope final : public DeclarationScope { public: - ModuleScope(DeclarationScope* script_scope, - AstValueFactory* ast_value_factory); + ModuleScope(DeclarationScope* script_scope, AstValueFactory* avfactory); - // Deserialization. - // The generated ModuleDescriptor does not preserve all information. In - // particular, its module_requests map will be empty because we no longer need - // the map after parsing. + // Deserialization. Does not restore the module descriptor. ModuleScope(Isolate* isolate, Handle scope_info, - AstValueFactory* ast_value_factory); + AstValueFactory* avfactory); - ModuleDescriptor* module() const { - DCHECK_NOT_NULL(module_descriptor_); - return module_descriptor_; - } + // Returns nullptr in a deserialized scope. + ModuleDescriptor* module() const { return module_descriptor_; } // Set MODULE as VariableLocation for all variables that will live in a // module's export table. void AllocateModuleVariables(); private: - ModuleDescriptor* module_descriptor_; + ModuleDescriptor* const module_descriptor_; }; } // namespace internal diff --git a/deps/v8/src/ast/variables.h b/deps/v8/src/ast/variables.h index 13a444536dddd8..6dbb9dbac48da8 100644 --- a/deps/v8/src/ast/variables.h +++ b/deps/v8/src/ast/variables.h @@ -6,6 +6,7 @@ #define V8_AST_VARIABLES_H_ #include "src/ast/ast-value-factory.h" +#include "src/base/threaded-list.h" #include "src/globals.h" #include "src/zone/zone.h" @@ -59,7 +60,7 @@ class Variable final : public ZoneObject { return ForceContextAllocationField::decode(bit_field_); } void ForceContextAllocation() { - DCHECK(IsUnallocated() || IsContextSlot() || + DCHECK(IsUnallocated() || IsContextSlot() || IsLookupSlot() || location() == VariableLocation::MODULE); bit_field_ = ForceContextAllocationField::update(bit_field_, true); } @@ -137,6 +138,9 @@ class Variable final : public ZoneObject { } bool is_parameter() const { return kind() == PARAMETER_VARIABLE; } + bool is_sloppy_block_function() { + return kind() == SLOPPY_BLOCK_FUNCTION_VARIABLE; + } Variable* local_if_not_shadowed() const { DCHECK(mode() == VariableMode::kDynamicLocal && @@ -207,7 +211,7 @@ class Variable final : public ZoneObject { class VariableModeField : public BitField16 {}; class VariableKindField - : public BitField16 {}; + : public BitField16 {}; class LocationField : public BitField16 {}; class ForceContextAllocationField diff --git a/deps/v8/src/bailout-reason.h b/deps/v8/src/bailout-reason.h index a5f14c611e7393..139ee1493160e5 100644 --- a/deps/v8/src/bailout-reason.h +++ b/deps/v8/src/bailout-reason.h @@ -50,6 +50,7 @@ namespace internal { V(kOperandIsNotAFunction, "Operand is not a function") \ V(kOperandIsNotAGeneratorObject, "Operand is not a generator object") \ V(kOperandIsNotASmi, "Operand is not a smi") \ + V(kPromiseAlreadySettled, "Promise already settled") \ V(kReceivedInvalidReturnAddress, "Received invalid return address") \ V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \ V(kRegisterWasClobbered, "Register was clobbered") \ diff --git a/deps/v8/src/base/division-by-constant.cc b/deps/v8/src/base/division-by-constant.cc index 4e0900fa241e72..7aa3a6901493f6 100644 --- 
a/deps/v8/src/base/division-by-constant.cc +++ b/deps/v8/src/base/division-by-constant.cc @@ -93,16 +93,22 @@ MagicNumbersForDivision UnsignedDivisionByConstant(T d, // ----------------------------------------------------------------------------- // Instantiations. -template struct V8_BASE_EXPORT MagicNumbersForDivision; -template struct V8_BASE_EXPORT MagicNumbersForDivision; +template struct EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision; +template struct EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision; -template MagicNumbersForDivision SignedDivisionByConstant(uint32_t d); -template MagicNumbersForDivision SignedDivisionByConstant(uint64_t d); +template EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision SignedDivisionByConstant(uint32_t d); +template EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision SignedDivisionByConstant(uint64_t d); -template MagicNumbersForDivision UnsignedDivisionByConstant( - uint32_t d, unsigned leading_zeros); -template MagicNumbersForDivision UnsignedDivisionByConstant( - uint64_t d, unsigned leading_zeros); +template EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision UnsignedDivisionByConstant( + uint32_t d, unsigned leading_zeros); +template EXPORT_TEMPLATE_DEFINE(V8_BASE_EXPORT) + MagicNumbersForDivision UnsignedDivisionByConstant( + uint64_t d, unsigned leading_zeros); } // namespace base } // namespace v8 diff --git a/deps/v8/src/base/division-by-constant.h b/deps/v8/src/base/division-by-constant.h index 5d063f8bd5af53..744283981bc3de 100644 --- a/deps/v8/src/base/division-by-constant.h +++ b/deps/v8/src/base/division-by-constant.h @@ -8,6 +8,7 @@ #include #include "src/base/base-export.h" +#include "src/base/export-template.h" namespace v8 { namespace base { @@ -18,7 +19,7 @@ namespace base { // Delight", chapter 10. The template parameter must be one of the unsigned // integral types. template -struct V8_BASE_EXPORT MagicNumbersForDivision { +struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision { MagicNumbersForDivision(T m, unsigned s, bool a) : multiplier(m), shift(s), add(a) {} bool operator==(const MagicNumbersForDivision& rhs) const { @@ -34,25 +35,35 @@ struct V8_BASE_EXPORT MagicNumbersForDivision { // Calculate the multiplier and shift for signed division via multiplication. // The divisor must not be -1, 0 or 1 when interpreted as a signed value. template -V8_BASE_EXPORT MagicNumbersForDivision SignedDivisionByConstant(T d); +EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) +MagicNumbersForDivision SignedDivisionByConstant(T d); // Calculate the multiplier and shift for unsigned division via multiplication, // see Warren's "Hacker's Delight", chapter 10. The divisor must not be 0 and // leading_zeros can be used to speed up the calculation if the given number of // upper bits of the dividend value are known to be zero. template -V8_BASE_EXPORT MagicNumbersForDivision UnsignedDivisionByConstant( +EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) +MagicNumbersForDivision UnsignedDivisionByConstant( T d, unsigned leading_zeros = 0); -extern template V8_BASE_EXPORT MagicNumbersForDivision -SignedDivisionByConstant(uint32_t d); -extern template V8_BASE_EXPORT MagicNumbersForDivision -SignedDivisionByConstant(uint64_t d); +// Explicit instantiation declarations. 
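The division-by-constant changes above switch the explicit instantiations over to the EXPORT_TEMPLATE_DECLARE/DEFINE macros. Independent of those macros, the underlying C++ mechanism is the extern-template pair shown below; this is a generic, hedged example with made-up names (MagicDiv, ComputeMagicDiv) and a placeholder body, not the V8 code.

```cpp
#include <cstdint>

// --- would live in a header --------------------------------------------------
template <typename T>
struct MagicDiv {
  T multiplier;
  unsigned shift;
};

template <typename T>
MagicDiv<T> ComputeMagicDiv(T divisor) {
  // Placeholder body; the real algorithm (Hacker's Delight, ch. 10) is omitted.
  return MagicDiv<T>{divisor, 0u};
}

// Explicit instantiation *declarations*: users of the header link against the
// single instantiation emitted elsewhere instead of instantiating locally.
extern template struct MagicDiv<uint32_t>;
extern template MagicDiv<uint32_t> ComputeMagicDiv<uint32_t>(uint32_t);

// --- would live in exactly one .cc file --------------------------------------
// Explicit instantiation *definitions*: this is where the code is compiled
// (and, with the export macros, where the symbols are exported from).
template struct MagicDiv<uint32_t>;
template MagicDiv<uint32_t> ComputeMagicDiv<uint32_t>(uint32_t);
```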
+extern template struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision; +extern template struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision; -extern template V8_BASE_EXPORT MagicNumbersForDivision -UnsignedDivisionByConstant(uint32_t d, unsigned leading_zeros); -extern template V8_BASE_EXPORT MagicNumbersForDivision -UnsignedDivisionByConstant(uint64_t d, unsigned leading_zeros); +extern template EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision SignedDivisionByConstant(uint32_t d); +extern template EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision SignedDivisionByConstant(uint64_t d); + +extern template EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision UnsignedDivisionByConstant( + uint32_t d, unsigned leading_zeros); +extern template EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) + MagicNumbersForDivision UnsignedDivisionByConstant( + uint64_t d, unsigned leading_zeros); } // namespace base } // namespace v8 diff --git a/deps/v8/src/base/ieee754.cc b/deps/v8/src/base/ieee754.cc index d9846b7254b53e..4fcb4df00166d8 100644 --- a/deps/v8/src/base/ieee754.cc +++ b/deps/v8/src/base/ieee754.cc @@ -309,7 +309,7 @@ int32_t __ieee754_rem_pio2(double x, double *y) { GET_LOW_WORD(low, x); SET_LOW_WORD(z, low); e0 = (ix >> 20) - 1046; /* e0 = ilogb(z)-23; */ - SET_HIGH_WORD(z, ix - static_cast(e0 << 20)); + SET_HIGH_WORD(z, ix - static_cast(static_cast(e0) << 20)); for (i = 0; i < 2; i++) { tx[i] = static_cast(static_cast(z)); z = (z - tx[i]) * two24; @@ -1569,9 +1569,12 @@ double exp(double x) { /* x is now in primary range */ t = x * x; if (k >= -1021) { - INSERT_WORDS(twopk, 0x3FF00000 + (k << 20), 0); + INSERT_WORDS( + twopk, + 0x3FF00000 + static_cast(static_cast(k) << 20), 0); } else { - INSERT_WORDS(twopk, 0x3FF00000 + ((k + 1000) << 20), 0); + INSERT_WORDS(twopk, 0x3FF00000 + (static_cast(k + 1000) << 20), + 0); } c = x - t * (P1 + t * (P2 + t * (P3 + t * (P4 + t * P5)))); if (k == 0) { @@ -2341,7 +2344,10 @@ double expm1(double x) { if (k == 0) { return x - (x * e - hxs); /* c is 0 */ } else { - INSERT_WORDS(twopk, 0x3FF00000 + (k << 20), 0); /* 2^k */ + INSERT_WORDS( + twopk, + 0x3FF00000 + static_cast(static_cast(k) << 20), + 0); /* 2^k */ e = (x * (e - c) - c); e -= hxs; if (k == -1) return 0.5 * (x - e) - 0.5; @@ -2641,6 +2647,317 @@ double cosh(double x) { return huge * huge; } +/* + * ES2019 Draft 2019-01-02 12.6.4 + * Math.pow & Exponentiation Operator + * + * Return X raised to the Yth power + * + * Method: + * Let x = 2 * (1+f) + * 1. Compute and return log2(x) in two pieces: + * log2(x) = w1 + w2, + * where w1 has 53-24 = 29 bit trailing zeros. + * 2. Perform y*log2(x) = n+y' by simulating muti-precision + * arithmetic, where |y'|<=0.5. + * 3. Return x**y = 2**n*exp(y'*log2) + * + * Special cases: + * 1. (anything) ** 0 is 1 + * 2. (anything) ** 1 is itself + * 3. (anything) ** NAN is NAN + * 4. NAN ** (anything except 0) is NAN + * 5. +-(|x| > 1) ** +INF is +INF + * 6. +-(|x| > 1) ** -INF is +0 + * 7. +-(|x| < 1) ** +INF is +0 + * 8. +-(|x| < 1) ** -INF is +INF + * 9. +-1 ** +-INF is NAN + * 10. +0 ** (+anything except 0, NAN) is +0 + * 11. -0 ** (+anything except 0, NAN, odd integer) is +0 + * 12. +0 ** (-anything except 0, NAN) is +INF + * 13. -0 ** (-anything except 0, NAN, odd integer) is +INF + * 14. -0 ** (odd integer) = -( +0 ** (odd integer) ) + * 15. +INF ** (+anything except 0,NAN) is +INF + * 16. +INF ** (-anything except 0,NAN) is +0 + * 17. -INF ** (anything) = -0 ** (-anything) + * 18. 
(-anything) ** (integer) is (-1)**(integer)*(+anything**integer) + * 19. (-anything except 0 and inf) ** (non-integer) is NAN + * + * Accuracy: + * pow(x,y) returns x**y nearly rounded. In particular, + * pow(integer, integer) always returns the correct integer provided it is + * representable. + * + * Constants: + * The hexadecimal values are the intended ones for the following + * constants. The decimal values may be used, provided that the + * compiler will convert from decimal to binary accurately enough + * to produce the hexadecimal values shown. + */ + +double pow(double x, double y) { + static const double + bp[] = {1.0, 1.5}, + dp_h[] = {0.0, 5.84962487220764160156e-01}, // 0x3FE2B803, 0x40000000 + dp_l[] = {0.0, 1.35003920212974897128e-08}, // 0x3E4CFDEB, 0x43CFD006 + zero = 0.0, one = 1.0, two = 2.0, + two53 = 9007199254740992.0, // 0x43400000, 0x00000000 + huge = 1.0e300, tiny = 1.0e-300, + // poly coefs for (3/2)*(log(x)-2s-2/3*s**3 + L1 = 5.99999999999994648725e-01, // 0x3FE33333, 0x33333303 + L2 = 4.28571428578550184252e-01, // 0x3FDB6DB6, 0xDB6FABFF + L3 = 3.33333329818377432918e-01, // 0x3FD55555, 0x518F264D + L4 = 2.72728123808534006489e-01, // 0x3FD17460, 0xA91D4101 + L5 = 2.30660745775561754067e-01, // 0x3FCD864A, 0x93C9DB65 + L6 = 2.06975017800338417784e-01, // 0x3FCA7E28, 0x4A454EEF + P1 = 1.66666666666666019037e-01, // 0x3FC55555, 0x5555553E + P2 = -2.77777777770155933842e-03, // 0xBF66C16C, 0x16BEBD93 + P3 = 6.61375632143793436117e-05, // 0x3F11566A, 0xAF25DE2C + P4 = -1.65339022054652515390e-06, // 0xBEBBBD41, 0xC5D26BF1 + P5 = 4.13813679705723846039e-08, // 0x3E663769, 0x72BEA4D0 + lg2 = 6.93147180559945286227e-01, // 0x3FE62E42, 0xFEFA39EF + lg2_h = 6.93147182464599609375e-01, // 0x3FE62E43, 0x00000000 + lg2_l = -1.90465429995776804525e-09, // 0xBE205C61, 0x0CA86C39 + ovt = 8.0085662595372944372e-0017, // -(1024-log2(ovfl+.5ulp)) + cp = 9.61796693925975554329e-01, // 0x3FEEC709, 0xDC3A03FD =2/(3ln2) + cp_h = 9.61796700954437255859e-01, // 0x3FEEC709, 0xE0000000 =(float)cp + cp_l = -7.02846165095275826516e-09, // 0xBE3E2FE0, 0x145B01F5 =tail cp_h + ivln2 = 1.44269504088896338700e+00, // 0x3FF71547, 0x652B82FE =1/ln2 + ivln2_h = + 1.44269502162933349609e+00, // 0x3FF71547, 0x60000000 =24b 1/ln2 + ivln2_l = + 1.92596299112661746887e-08; // 0x3E54AE0B, 0xF85DDF44 =1/ln2 tail + + double z, ax, z_h, z_l, p_h, p_l; + double y1, t1, t2, r, s, t, u, v, w; + int i, j, k, yisint, n; + int hx, hy, ix, iy; + unsigned lx, ly; + + EXTRACT_WORDS(hx, lx, x); + EXTRACT_WORDS(hy, ly, y); + ix = hx & 0x7fffffff; + iy = hy & 0x7fffffff; + + /* y==zero: x**0 = 1 */ + if ((iy | ly) == 0) return one; + + /* +-NaN return x+y */ + if (ix > 0x7ff00000 || ((ix == 0x7ff00000) && (lx != 0)) || iy > 0x7ff00000 || + ((iy == 0x7ff00000) && (ly != 0))) { + return x + y; + } + + /* determine if y is an odd int when x < 0 + * yisint = 0 ... y is not an integer + * yisint = 1 ... y is an odd int + * yisint = 2 ... 
y is an even int + */ + yisint = 0; + if (hx < 0) { + if (iy >= 0x43400000) { + yisint = 2; /* even integer y */ + } else if (iy >= 0x3ff00000) { + k = (iy >> 20) - 0x3ff; /* exponent */ + if (k > 20) { + j = ly >> (52 - k); + if ((j << (52 - k)) == static_cast(ly)) yisint = 2 - (j & 1); + } else if (ly == 0) { + j = iy >> (20 - k); + if ((j << (20 - k)) == iy) yisint = 2 - (j & 1); + } + } + } + + /* special value of y */ + if (ly == 0) { + if (iy == 0x7ff00000) { /* y is +-inf */ + if (((ix - 0x3ff00000) | lx) == 0) { + return y - y; /* inf**+-1 is NaN */ + } else if (ix >= 0x3ff00000) { /* (|x|>1)**+-inf = inf,0 */ + return (hy >= 0) ? y : zero; + } else { /* (|x|<1)**-,+inf = inf,0 */ + return (hy < 0) ? -y : zero; + } + } + if (iy == 0x3ff00000) { /* y is +-1 */ + if (hy < 0) { + return base::Divide(one, x); + } else { + return x; + } + } + if (hy == 0x40000000) return x * x; /* y is 2 */ + if (hy == 0x3fe00000) { /* y is 0.5 */ + if (hx >= 0) { /* x >= +0 */ + return sqrt(x); + } + } + } + + ax = fabs(x); + /* special value of x */ + if (lx == 0) { + if (ix == 0x7ff00000 || ix == 0 || ix == 0x3ff00000) { + z = ax; /*x is +-0,+-inf,+-1*/ + if (hy < 0) z = base::Divide(one, z); /* z = (1/|x|) */ + if (hx < 0) { + if (((ix - 0x3ff00000) | yisint) == 0) { + /* (-1)**non-int is NaN */ + z = std::numeric_limits::signaling_NaN(); + } else if (yisint == 1) { + z = -z; /* (x<0)**odd = -(|x|**odd) */ + } + } + return z; + } + } + + n = (hx >> 31) + 1; + + /* (x<0)**(non-int) is NaN */ + if ((n | yisint) == 0) { + return std::numeric_limits::signaling_NaN(); + } + + s = one; /* s (sign of result -ve**odd) = -1 else = 1 */ + if ((n | (yisint - 1)) == 0) s = -one; /* (-ve)**(odd int) */ + + /* |y| is huge */ + if (iy > 0x41e00000) { /* if |y| > 2**31 */ + if (iy > 0x43f00000) { /* if |y| > 2**64, must o/uflow */ + if (ix <= 0x3fefffff) return (hy < 0) ? huge * huge : tiny * tiny; + if (ix >= 0x3ff00000) return (hy > 0) ? huge * huge : tiny * tiny; + } + /* over/underflow if x is not close to one */ + if (ix < 0x3fefffff) return (hy < 0) ? s * huge * huge : s * tiny * tiny; + if (ix > 0x3ff00000) return (hy > 0) ? s * huge * huge : s * tiny * tiny; + /* now |1-x| is tiny <= 2**-20, suffice to compute + log(x) by x-x^2/2+x^3/3-x^4/4 */ + t = ax - one; /* t has 20 trailing zeros */ + w = (t * t) * (0.5 - t * (0.3333333333333333333333 - t * 0.25)); + u = ivln2_h * t; /* ivln2_h has 21 sig. bits */ + v = t * ivln2_l - w * ivln2; + t1 = u + v; + SET_LOW_WORD(t1, 0); + t2 = v - (t1 - u); + } else { + double ss, s2, s_h, s_l, t_h, t_l; + n = 0; + /* take care subnormal number */ + if (ix < 0x00100000) { + ax *= two53; + n -= 53; + GET_HIGH_WORD(ix, ax); + } + n += ((ix) >> 20) - 0x3ff; + j = ix & 0x000fffff; + /* determine interval */ + ix = j | 0x3ff00000; /* normalize ix */ + if (j <= 0x3988E) { + k = 0; /* |x|> 1) | 0x20000000) + 0x00080000 + (k << 18)); + t_l = ax - (t_h - bp[k]); + s_l = v * ((u - s_h * t_h) - s_h * t_l); + /* compute log(ax) */ + s2 = ss * ss; + r = s2 * s2 * + (L1 + s2 * (L2 + s2 * (L3 + s2 * (L4 + s2 * (L5 + s2 * L6))))); + r += s_l * (s_h + ss); + s2 = s_h * s_h; + t_h = 3.0 + s2 + r; + SET_LOW_WORD(t_h, 0); + t_l = r - ((t_h - 3.0) - s2); + /* u+v = ss*(1+...) */ + u = s_h * t_h; + v = s_l * t_h + t_l * ss; + /* 2/(3log2)*(ss+...) 
*/ + p_h = u + v; + SET_LOW_WORD(p_h, 0); + p_l = v - (p_h - u); + z_h = cp_h * p_h; /* cp_h+cp_l = 2/(3*log2) */ + z_l = cp_l * p_h + p_l * cp + dp_l[k]; + /* log2(ax) = (ss+..)*2/(3*log2) = n + dp_h + z_h + z_l */ + t = static_cast(n); + t1 = (((z_h + z_l) + dp_h[k]) + t); + SET_LOW_WORD(t1, 0); + t2 = z_l - (((t1 - t) - dp_h[k]) - z_h); + } + + /* split up y into y1+y2 and compute (y1+y2)*(t1+t2) */ + y1 = y; + SET_LOW_WORD(y1, 0); + p_l = (y - y1) * t1 + y * t2; + p_h = y1 * t1; + z = p_l + p_h; + EXTRACT_WORDS(j, i, z); + if (j >= 0x40900000) { /* z >= 1024 */ + if (((j - 0x40900000) | i) != 0) { /* if z > 1024 */ + return s * huge * huge; /* overflow */ + } else { + if (p_l + ovt > z - p_h) return s * huge * huge; /* overflow */ + } + } else if ((j & 0x7fffffff) >= 0x4090cc00) { /* z <= -1075 */ + if (((j - 0xc090cc00) | i) != 0) { /* z < -1075 */ + return s * tiny * tiny; /* underflow */ + } else { + if (p_l <= z - p_h) return s * tiny * tiny; /* underflow */ + } + } + /* + * compute 2**(p_h+p_l) + */ + i = j & 0x7fffffff; + k = (i >> 20) - 0x3ff; + n = 0; + if (i > 0x3fe00000) { /* if |z| > 0.5, set n = [z+0.5] */ + n = j + (0x00100000 >> (k + 1)); + k = ((n & 0x7fffffff) >> 20) - 0x3ff; /* new k for n */ + t = zero; + SET_HIGH_WORD(t, n & ~(0x000fffff >> k)); + n = ((n & 0x000fffff) | 0x00100000) >> (20 - k); + if (j < 0) n = -n; + p_h -= t; + } + t = p_l + p_h; + SET_LOW_WORD(t, 0); + u = t * lg2_h; + v = (p_l - (t - p_h)) * lg2 + t * lg2_l; + z = u + v; + w = v - (z - u); + t = z * z; + t1 = z - t * (P1 + t * (P2 + t * (P3 + t * (P4 + t * P5)))); + r = base::Divide(z * t1, (t1 - two) - (w + z * w)); + z = one - (r - z); + GET_HIGH_WORD(j, z); + j += static_cast(static_cast(n) << 20); + if ((j >> 20) <= 0) { + z = scalbn(z, n); /* subnormal output */ + } else { + int tmp; + GET_HIGH_WORD(tmp, z); + SET_HIGH_WORD(z, tmp + static_cast(static_cast(n) << 20)); + } + return s * z; +} + /* * ES6 draft 09-27-13, section 20.2.2.30. * Math.sinh @@ -2752,6 +3069,16 @@ double tanh(double x) { return (jx >= 0) ? z : -z; } +#undef EXTRACT_WORDS +#undef EXTRACT_WORD64 +#undef GET_HIGH_WORD +#undef GET_LOW_WORD +#undef INSERT_WORDS +#undef INSERT_WORD64 +#undef SET_HIGH_WORD +#undef SET_LOW_WORD +#undef STRICT_ASSIGN + } // namespace ieee754 } // namespace base } // namespace v8 diff --git a/deps/v8/src/base/ieee754.h b/deps/v8/src/base/ieee754.h index 72f3db15ef22af..f2b3a3eb5808c5 100644 --- a/deps/v8/src/base/ieee754.h +++ b/deps/v8/src/base/ieee754.h @@ -60,6 +60,14 @@ V8_BASE_EXPORT double cbrt(double x); // Returns exp(x)-1, the exponential of |x| minus 1. V8_BASE_EXPORT double expm1(double x); +// Returns |x| to the power of |y|. +// The result of base ** exponent when base is 1 or -1 and exponent is +// +Infinity or -Infinity differs from IEEE 754-2008. The first edition +// of ECMAScript specified a result of NaN for this operation, whereas +// later versions of IEEE 754-2008 specified 1. The historical ECMAScript +// behaviour is preserved for compatibility reasons. +V8_BASE_EXPORT double pow(double x, double y); + // Returns the sine of |x|, where |x| is given in radians. V8_BASE_EXPORT double sin(double x); diff --git a/deps/v8/src/base/macros.h b/deps/v8/src/base/macros.h index 8a088ffc40eacb..1276805182084b 100644 --- a/deps/v8/src/base/macros.h +++ b/deps/v8/src/base/macros.h @@ -146,7 +146,7 @@ V8_INLINE Dest bit_cast(Source const& source) { // odr-used by the definition of the destructor of that class, [...] 
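The long comment introducing ieee754::pow above compresses the classic fdlibm derivation into a few lines. Written out as formulas, using the comment's own symbols and adding nothing new, the method is:

```latex
\begin{align*}
  \log_2 x &= w_1 + w_2, && w_1 \text{ chosen with } 53 - 24 = 29 \text{ trailing zero bits},\\
  y \cdot \log_2 x &= n + y', && n \in \mathbb{Z},\ |y'| \le \tfrac12 \text{ (simulated multi-precision)},\\
  x^{y} &= 2^{\,n + y'} = 2^{n}\, e^{\,y' \ln 2}.
\end{align*}
```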
#define DISALLOW_NEW_AND_DELETE() \ void* operator new(size_t) { base::OS::Abort(); } \ - void* operator new[](size_t) { base::OS::Abort(); }; \ + void* operator new[](size_t) { base::OS::Abort(); } \ void operator delete(void*, size_t) { base::OS::Abort(); } \ void operator delete[](void*, size_t) { base::OS::Abort(); } @@ -318,7 +318,7 @@ V8_INLINE A implicit_cast(A x) { #define V8PRIdPTR V8_PTR_PREFIX "d" #define V8PRIuPTR V8_PTR_PREFIX "u" -#ifdef V8_TARGET_ARCH_64_BIT +#if V8_TARGET_ARCH_64_BIT #define V8_PTR_HEX_DIGITS 12 #define V8PRIxPTR_FMT "0x%012" V8PRIxPTR #else diff --git a/deps/v8/src/base/platform/mutex.cc b/deps/v8/src/base/platform/mutex.cc index a044075c16884b..2e2f7f93209b22 100644 --- a/deps/v8/src/base/platform/mutex.cc +++ b/deps/v8/src/base/platform/mutex.cc @@ -155,6 +155,45 @@ bool RecursiveMutex::TryLock() { return true; } +SharedMutex::SharedMutex() { pthread_rwlock_init(&native_handle_, nullptr); } + +SharedMutex::~SharedMutex() { + int result = pthread_rwlock_destroy(&native_handle_); + DCHECK_EQ(0, result); + USE(result); +} + +void SharedMutex::LockShared() { + int result = pthread_rwlock_rdlock(&native_handle_); + DCHECK_EQ(0, result); + USE(result); +} + +void SharedMutex::LockExclusive() { + int result = pthread_rwlock_wrlock(&native_handle_); + DCHECK_EQ(0, result); + USE(result); +} + +void SharedMutex::UnlockShared() { + int result = pthread_rwlock_unlock(&native_handle_); + DCHECK_EQ(0, result); + USE(result); +} + +void SharedMutex::UnlockExclusive() { + // Same code as {UnlockShared} on POSIX. + UnlockShared(); +} + +bool SharedMutex::TryLockShared() { + return pthread_rwlock_tryrdlock(&native_handle_) == 0; +} + +bool SharedMutex::TryLockExclusive() { + return pthread_rwlock_trywrlock(&native_handle_) == 0; +} + #elif V8_OS_WIN Mutex::Mutex() : native_handle_(SRWLOCK_INIT) { @@ -233,6 +272,28 @@ bool RecursiveMutex::TryLock() { return true; } +SharedMutex::SharedMutex() : native_handle_(SRWLOCK_INIT) {} + +SharedMutex::~SharedMutex() {} + +void SharedMutex::LockShared() { AcquireSRWLockShared(&native_handle_); } + +void SharedMutex::LockExclusive() { AcquireSRWLockExclusive(&native_handle_); } + +void SharedMutex::UnlockShared() { ReleaseSRWLockShared(&native_handle_); } + +void SharedMutex::UnlockExclusive() { + ReleaseSRWLockExclusive(&native_handle_); +} + +bool SharedMutex::TryLockShared() { + return TryAcquireSRWLockShared(&native_handle_); +} + +bool SharedMutex::TryLockExclusive() { + return TryAcquireSRWLockExclusive(&native_handle_); +} + #endif // V8_OS_POSIX } // namespace base diff --git a/deps/v8/src/base/platform/mutex.h b/deps/v8/src/base/platform/mutex.h index a69eee0bc6e1ff..ea589d5b984eeb 100644 --- a/deps/v8/src/base/platform/mutex.h +++ b/deps/v8/src/base/platform/mutex.h @@ -20,7 +20,7 @@ namespace v8 { namespace base { // ---------------------------------------------------------------------------- -// Mutex +// Mutex - a replacement for std::mutex // // This class is a synchronization primitive that can be used to protect shared // data from being simultaneously accessed by multiple threads. A mutex offers @@ -106,9 +106,8 @@ typedef LazyStaticInstance, #define LAZY_MUTEX_INITIALIZER LAZY_STATIC_INSTANCE_INITIALIZER - // ----------------------------------------------------------------------------- -// RecursiveMutex +// RecursiveMutex - a replacement for std::recursive_mutex // // This class is a synchronization primitive that can be used to protect shared // data from being simultaneously accessed by multiple threads. 
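The new SharedMutex methods above are thin wrappers over a pthread_rwlock_t on POSIX (and an SRWLOCK on Windows). A hedged, POSIX-only usage sketch of the same primitive outside of V8, with names of my own choosing:

```cpp
#include <pthread.h>

// Minimal reader/writer example built directly on pthread_rwlock_t, mirroring
// what SharedMutex::LockShared/LockExclusive wrap on POSIX. Illustration only.
struct Counter {
  pthread_rwlock_t lock;
  long value;
};

void CounterInit(Counter* c) {
  pthread_rwlock_init(&c->lock, nullptr);
  c->value = 0;
}

long CounterRead(Counter* c) {       // many readers may hold the lock at once
  pthread_rwlock_rdlock(&c->lock);
  long v = c->value;
  pthread_rwlock_unlock(&c->lock);
  return v;
}

void CounterIncrement(Counter* c) {  // writers get exclusive access
  pthread_rwlock_wrlock(&c->lock);
  ++c->value;
  pthread_rwlock_unlock(&c->lock);
}

void CounterDestroy(Counter* c) { pthread_rwlock_destroy(&c->lock); }
```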
A recursive @@ -151,6 +150,7 @@ class V8_BASE_EXPORT RecursiveMutex final { // successfully locked. bool TryLock() V8_WARN_UNUSED_RESULT; + private: // The implementation-defined native handle type. #if V8_OS_POSIX typedef pthread_mutex_t NativeHandle; @@ -158,14 +158,6 @@ class V8_BASE_EXPORT RecursiveMutex final { typedef CRITICAL_SECTION NativeHandle; #endif - NativeHandle& native_handle() { - return native_handle_; - } - const NativeHandle& native_handle() const { - return native_handle_; - } - - private: NativeHandle native_handle_; #ifdef DEBUG int level_; @@ -191,6 +183,73 @@ typedef LazyStaticInstance class LockGuard final { public: explicit LockGuard(Mutex* mutex) : mutex_(mutex) { - if (Behavior == NullBehavior::kRequireNotNull || mutex_ != nullptr) { - mutex_->Lock(); - } + if (has_mutex()) mutex_->Lock(); } ~LockGuard() { - if (mutex_ != nullptr) mutex_->Unlock(); + if (has_mutex()) mutex_->Unlock(); } private: - Mutex* mutex_; + Mutex* const mutex_; + + bool V8_INLINE has_mutex() const { + DCHECK_IMPLIES(Behavior == NullBehavior::kRequireNotNull, + mutex_ != nullptr); + return Behavior == NullBehavior::kRequireNotNull || mutex_ != nullptr; + } DISALLOW_COPY_AND_ASSIGN(LockGuard); }; using MutexGuard = LockGuard; +enum MutexSharedType : bool { kShared = true, kExclusive = false }; + +template +class SharedMutexGuard final { + public: + explicit SharedMutexGuard(SharedMutex* mutex) : mutex_(mutex) { + if (!has_mutex()) return; + if (kIsShared) { + mutex_->LockShared(); + } else { + mutex_->LockExclusive(); + } + } + ~SharedMutexGuard() { + if (!has_mutex()) return; + if (kIsShared) { + mutex_->UnlockShared(); + } else { + mutex_->UnlockExclusive(); + } + } + + private: + SharedMutex* const mutex_; + + bool V8_INLINE has_mutex() const { + DCHECK_IMPLIES(Behavior == NullBehavior::kRequireNotNull, + mutex_ != nullptr); + return Behavior == NullBehavior::kRequireNotNull || mutex_ != nullptr; + } + + DISALLOW_COPY_AND_ASSIGN(SharedMutexGuard); +}; + } // namespace base } // namespace v8 diff --git a/deps/v8/src/base/platform/platform-fuchsia.cc b/deps/v8/src/base/platform/platform-fuchsia.cc index f6123437ec2d09..7dd29dc39ec4b5 100644 --- a/deps/v8/src/base/platform/platform-fuchsia.cc +++ b/deps/v8/src/base/platform/platform-fuchsia.cc @@ -20,14 +20,13 @@ uint32_t GetProtectionFromMemoryPermission(OS::MemoryPermission access) { case OS::MemoryPermission::kNoAccess: return 0; // no permissions case OS::MemoryPermission::kRead: - return ZX_VM_FLAG_PERM_READ; + return ZX_VM_PERM_READ; case OS::MemoryPermission::kReadWrite: - return ZX_VM_FLAG_PERM_READ | ZX_VM_FLAG_PERM_WRITE; + return ZX_VM_PERM_READ | ZX_VM_PERM_WRITE; case OS::MemoryPermission::kReadWriteExecute: - return ZX_VM_FLAG_PERM_READ | ZX_VM_FLAG_PERM_WRITE | - ZX_VM_FLAG_PERM_EXECUTE; + return ZX_VM_PERM_READ | ZX_VM_PERM_WRITE | ZX_VM_PERM_EXECUTE; case OS::MemoryPermission::kReadExecute: - return ZX_VM_FLAG_PERM_READ | ZX_VM_FLAG_PERM_EXECUTE; + return ZX_VM_PERM_READ | ZX_VM_PERM_EXECUTE; } UNREACHABLE(); } @@ -55,13 +54,17 @@ void* OS::Allocate(void* address, size_t size, size_t alignment, static const char kVirtualMemoryName[] = "v8-virtualmem"; zx_object_set_property(vmo, ZX_PROP_NAME, kVirtualMemoryName, strlen(kVirtualMemoryName)); + + // Always call zx_vmo_replace_as_executable() in case the memory will need + // to be marked as executable in the future. + // TOOD(https://crbug.com/v8/8899): Only call this when we know that the + // region will need to be marked as executable in the future. 
+ if (zx_vmo_replace_as_executable(vmo, ZX_HANDLE_INVALID, &vmo) != ZX_OK) { + return nullptr; + } + uintptr_t reservation; uint32_t prot = GetProtectionFromMemoryPermission(access); - if ((prot & ZX_VM_FLAG_PERM_EXECUTE) != 0) { - if (zx_vmo_replace_as_executable(vmo, ZX_HANDLE_INVALID, &vmo) != ZX_OK) { - return nullptr; - } - } zx_status_t status = zx_vmar_map(zx_vmar_root_self(), prot, 0, vmo, 0, request_size, &reservation); // Either the vmo is now referenced by the vmar, or we failed and are bailing, diff --git a/deps/v8/src/base/platform/platform-posix.cc b/deps/v8/src/base/platform/platform-posix.cc index e7edbf5f5841a4..33a9371a922633 100644 --- a/deps/v8/src/base/platform/platform-posix.cc +++ b/deps/v8/src/base/platform/platform-posix.cc @@ -93,7 +93,7 @@ bool g_hard_abort = false; const char* g_gc_fake_mmap = nullptr; DEFINE_LAZY_LEAKY_OBJECT_GETTER(RandomNumberGenerator, - GetPlatformRandomNumberGenerator); + GetPlatformRandomNumberGenerator) static LazyMutex rng_mutex = LAZY_MUTEX_INITIALIZER; #if !V8_OS_FUCHSIA @@ -269,7 +269,7 @@ void* OS::GetRandomMmapAddr() { return reinterpret_cast(raw_addr); } -// TODO(bbudge) Move Cygwin and Fuschia stuff into platform-specific files. +// TODO(bbudge) Move Cygwin and Fuchsia stuff into platform-specific files. #if !V8_OS_CYGWIN && !V8_OS_FUCHSIA // static void* OS::Allocate(void* address, size_t size, size_t alignment, diff --git a/deps/v8/src/base/platform/platform-posix.h b/deps/v8/src/base/platform/platform-posix.h index 8cf5e54604f897..7d732b4a8f1f56 100644 --- a/deps/v8/src/base/platform/platform-posix.h +++ b/deps/v8/src/base/platform/platform-posix.h @@ -14,7 +14,7 @@ namespace base { class PosixTimezoneCache : public TimezoneCache { public: double DaylightSavingsOffset(double time_ms) override; - void Clear() override {} + void Clear(TimeZoneDetection) override {} ~PosixTimezoneCache() override = default; protected: diff --git a/deps/v8/src/base/platform/platform-win32.cc b/deps/v8/src/base/platform/platform-win32.cc index 45aabf390f4333..c82ec5335a4190 100644 --- a/deps/v8/src/base/platform/platform-win32.cc +++ b/deps/v8/src/base/platform/platform-win32.cc @@ -113,7 +113,7 @@ class WindowsTimezoneCache : public TimezoneCache { ~WindowsTimezoneCache() override {} - void Clear() override { initialized_ = false; } + void Clear(TimeZoneDetection) override { initialized_ = false; } const char* LocalTimezone(double time) override; @@ -690,7 +690,7 @@ void OS::StrNCpy(char* dest, int length, const char* src, size_t n) { #undef STRUNCATE DEFINE_LAZY_LEAKY_OBJECT_GETTER(RandomNumberGenerator, - GetPlatformRandomNumberGenerator); + GetPlatformRandomNumberGenerator) static LazyMutex rng_mutex = LAZY_MUTEX_INITIALIZER; void OS::Initialize(bool hard_abort, const char* const gc_fake_mmap) { @@ -920,6 +920,11 @@ void OS::Sleep(TimeDelta interval) { void OS::Abort() { + // Give a chance to debug the failure. + if (IsDebuggerPresent()) { + DebugBreak(); + } + // Before aborting, make sure to flush output buffers. 
fflush(stdout); fflush(stderr); diff --git a/deps/v8/src/base/platform/time.cc b/deps/v8/src/base/platform/time.cc index 2e10f539f45aa6..5339e14adea199 100644 --- a/deps/v8/src/base/platform/time.cc +++ b/deps/v8/src/base/platform/time.cc @@ -314,8 +314,8 @@ class Clock final { }; namespace { -DEFINE_LAZY_LEAKY_OBJECT_GETTER(Clock, GetClock); -}; +DEFINE_LAZY_LEAKY_OBJECT_GETTER(Clock, GetClock) +} Time Time::Now() { return GetClock()->Now(); } diff --git a/deps/v8/src/base/small-vector.h b/deps/v8/src/base/small-vector.h index 5138e65ab53176..bbfe2e2e9ce0fc 100644 --- a/deps/v8/src/base/small-vector.h +++ b/deps/v8/src/base/small-vector.h @@ -15,7 +15,7 @@ namespace base { // Minimal SmallVector implementation. Uses inline storage first, switches to // malloc when it overflows. -template +template class SmallVector { // Currently only support trivially copyable and trivially destructible data // types, as it uses memcpy to copy elements and never calls destructors. @@ -23,6 +23,8 @@ class SmallVector { STATIC_ASSERT(std::is_trivially_destructible::value); public: + static constexpr size_t kInlineSize = kSize; + SmallVector() = default; SmallVector(const SmallVector& other) V8_NOEXCEPT { *this = other; } SmallVector(SmallVector&& other) V8_NOEXCEPT { *this = std::move(other); } @@ -62,9 +64,15 @@ class SmallVector { return *this; } - T* data() const { return begin_; } - T* begin() const { return begin_; } - T* end() const { return end_; } + T* data() { return begin_; } + const T* data() const { return begin_; } + + T* begin() { return begin_; } + const T* begin() const { return begin_; } + + T* end() { return end_; } + const T* end() const { return end_; } + size_t size() const { return end_ - begin_; } bool empty() const { return end_ == begin_; } size_t capacity() const { return end_of_storage_ - begin_; } diff --git a/deps/v8/src/base/timezone-cache.h b/deps/v8/src/base/timezone-cache.h index 3d97eee126936d..5b8e5a20dabc39 100644 --- a/deps/v8/src/base/timezone-cache.h +++ b/deps/v8/src/base/timezone-cache.h @@ -23,8 +23,22 @@ class TimezoneCache { // https://github.com/tc39/ecma262/pull/778 virtual double LocalTimeOffset(double time_ms, bool is_utc) = 0; + /** + * Time zone redetection indicator for Clear function. + * + * kSkip indicates host time zone doesn't have to be redetected. + * kRedetect indicates host time zone should be redetected, and used to set + * the default time zone. + * + * The host time zone detection may require file system access or similar + * operations unlikely to be available inside a sandbox. If v8 is run inside a + * sandbox, the host time zone has to be detected outside the sandbox + * separately. 
+ */ + enum class TimeZoneDetection { kSkip, kRedetect }; + // Called when the local timezone changes - virtual void Clear() = 0; + virtual void Clear(TimeZoneDetection time_zone_detection) = 0; // Called when tearing down the isolate virtual ~TimezoneCache() = default; diff --git a/deps/v8/src/basic-block-profiler.cc b/deps/v8/src/basic-block-profiler.cc index 47fd6330985912..444c2bb3971736 100644 --- a/deps/v8/src/basic-block-profiler.cc +++ b/deps/v8/src/basic-block-profiler.cc @@ -13,7 +13,7 @@ namespace v8 { namespace internal { -DEFINE_LAZY_LEAKY_OBJECT_GETTER(BasicBlockProfiler, BasicBlockProfiler::Get); +DEFINE_LAZY_LEAKY_OBJECT_GETTER(BasicBlockProfiler, BasicBlockProfiler::Get) BasicBlockProfiler::Data::Data(size_t n_blocks) : n_blocks_(n_blocks), diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index 23399546ee0b05..8dc879b7a00bf3 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -17,9 +17,11 @@ #include "src/extensions/ignition-statistics-extension.h" #include "src/extensions/statistics-extension.h" #include "src/extensions/trigger-failure-extension.h" -#include "src/heap/heap.h" +#include "src/function-kind.h" +#include "src/heap/heap-inl.h" #include "src/isolate-inl.h" #include "src/math-random.h" +#include "src/microtask-queue.h" #include "src/objects/api-callbacks.h" #include "src/objects/arguments.h" #include "src/objects/builtin-function-id.h" @@ -120,42 +122,13 @@ static const char* GCFunctionName() { return flag_given ? FLAG_expose_gc_as : "gc"; } -v8::Extension* Bootstrapper::free_buffer_extension_ = nullptr; -v8::Extension* Bootstrapper::gc_extension_ = nullptr; -v8::Extension* Bootstrapper::externalize_string_extension_ = nullptr; -v8::Extension* Bootstrapper::statistics_extension_ = nullptr; -v8::Extension* Bootstrapper::trigger_failure_extension_ = nullptr; -v8::Extension* Bootstrapper::ignition_statistics_extension_ = nullptr; - void Bootstrapper::InitializeOncePerProcess() { - free_buffer_extension_ = new FreeBufferExtension; - v8::RegisterExtension(free_buffer_extension_); - gc_extension_ = new GCExtension(GCFunctionName()); - v8::RegisterExtension(gc_extension_); - externalize_string_extension_ = new ExternalizeStringExtension; - v8::RegisterExtension(externalize_string_extension_); - statistics_extension_ = new StatisticsExtension; - v8::RegisterExtension(statistics_extension_); - trigger_failure_extension_ = new TriggerFailureExtension; - v8::RegisterExtension(trigger_failure_extension_); - ignition_statistics_extension_ = new IgnitionStatisticsExtension; - v8::RegisterExtension(ignition_statistics_extension_); -} - - -void Bootstrapper::TearDownExtensions() { - delete free_buffer_extension_; - free_buffer_extension_ = nullptr; - delete gc_extension_; - gc_extension_ = nullptr; - delete externalize_string_extension_; - externalize_string_extension_ = nullptr; - delete statistics_extension_; - statistics_extension_ = nullptr; - delete trigger_failure_extension_; - trigger_failure_extension_ = nullptr; - delete ignition_statistics_extension_; - ignition_statistics_extension_ = nullptr; + v8::RegisterExtension(v8::base::make_unique()); + v8::RegisterExtension(v8::base::make_unique(GCFunctionName())); + v8::RegisterExtension(v8::base::make_unique()); + v8::RegisterExtension(v8::base::make_unique()); + v8::RegisterExtension(v8::base::make_unique()); + v8::RegisterExtension(v8::base::make_unique()); } void Bootstrapper::TearDown() { @@ -167,7 +140,8 @@ class Genesis { Genesis(Isolate* isolate, MaybeHandle 
maybe_global_proxy, v8::Local global_proxy_template, size_t context_snapshot_index, - v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer); + v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue); Genesis(Isolate* isolate, MaybeHandle maybe_global_proxy, v8::Local global_proxy_template); ~Genesis() = default; @@ -330,12 +304,14 @@ Handle Bootstrapper::CreateEnvironment( MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) { + v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) { HandleScope scope(isolate_); Handle env; { Genesis genesis(isolate_, maybe_global_proxy, global_proxy_template, - context_snapshot_index, embedder_fields_deserializer); + context_snapshot_index, embedder_fields_deserializer, + microtask_queue); env = genesis.result(); if (env.is_null() || !InstallExtensions(env, extensions)) { return Handle(); @@ -381,6 +357,8 @@ void Bootstrapper::DetachGlobal(Handle env) { if (FLAG_track_detached_contexts) { isolate_->AddDetachedContext(env); } + + env->native_context()->set_microtask_queue(nullptr); } namespace { @@ -1761,6 +1739,10 @@ void Genesis::InitializeGlobal(Handle global_object, native_context()->set_array_for_each_iterator(*for_each_fun); SimpleInstallFunction(isolate_, proto, "filter", Builtins::kArrayFilter, 1, false); + SimpleInstallFunction(isolate_, proto, "flat", + Builtins::kArrayPrototypeFlat, 0, false); + SimpleInstallFunction(isolate_, proto, "flatMap", + Builtins::kArrayPrototypeFlatMap, 1, false); SimpleInstallFunction(isolate_, proto, "map", Builtins::kArrayMap, 1, false); SimpleInstallFunction(isolate_, proto, "every", Builtins::kArrayEvery, 1, @@ -2183,6 +2165,11 @@ void Genesis::InitializeGlobal(Handle global_object, Builtins::kSymbolPrototypeValueOf, 0, true, BuiltinFunctionId::kSymbolPrototypeValueOf); + // Install the Symbol.prototype.description getter. + SimpleInstallGetter(isolate_, prototype, + factory->InternalizeUtf8String("description"), + Builtins::kSymbolPrototypeDescriptionGetter, true); + // Install the @@toPrimitive function. InstallFunctionAtSymbol( isolate_, prototype, factory->to_primitive_symbol(), @@ -2628,19 +2615,9 @@ void Genesis::InitializeGlobal(Handle global_object, writable, Representation::Tagged()); initial_map->AppendDescriptor(isolate(), &d); - { // Internal: RegExpInternalMatch - Handle function = - SimpleCreateFunction(isolate_, isolate_->factory()->empty_string(), - Builtins::kRegExpInternalMatch, 2, true); - native_context()->set(Context::REGEXP_INTERNAL_MATCH, *function); - } - - // Create the last match info. One for external use, and one for internal - // use when we don't want to modify the externally visible match info. + // Create the last match info. 
Handle last_match_info = factory->NewRegExpMatchInfo(); native_context()->set_regexp_last_match_info(*last_match_info); - Handle internal_match_info = factory->NewRegExpMatchInfo(); - native_context()->set_regexp_internal_match_info(*internal_match_info); // Force the RegExp constructor to fast properties, so that we can use the // fast paths for various things like @@ -2874,7 +2851,7 @@ void Genesis::InitializeGlobal(Handle global_object, SimpleInstallFunction(isolate(), intl, "getCanonicalLocales", Builtins::kIntlGetCanonicalLocales, 1, false); - { + { // -- D a t e T i m e F o r m a t Handle date_time_format_constructor = InstallFunction( isolate_, intl, "DateTimeFormat", JS_INTL_DATE_TIME_FORMAT_TYPE, JSDateTimeFormat::kSize, 0, factory->the_hole_value(), @@ -2907,7 +2884,7 @@ void Genesis::InitializeGlobal(Handle global_object, Builtins::kDateTimeFormatPrototypeFormat, false); } - { + { // -- N u m b e r F o r m a t Handle number_format_constructor = InstallFunction( isolate_, intl, "NumberFormat", JS_INTL_NUMBER_FORMAT_TYPE, JSNumberFormat::kSize, 0, factory->the_hole_value(), @@ -2939,7 +2916,7 @@ void Genesis::InitializeGlobal(Handle global_object, Builtins::kNumberFormatPrototypeFormatNumber, false); } - { + { // -- C o l l a t o r Handle collator_constructor = InstallFunction( isolate_, intl, "Collator", JS_INTL_COLLATOR_TYPE, JSCollator::kSize, 0, factory->the_hole_value(), Builtins::kCollatorConstructor); @@ -2965,7 +2942,7 @@ void Genesis::InitializeGlobal(Handle global_object, Builtins::kCollatorPrototypeCompare, false); } - { + { // -- V 8 B r e a k I t e r a t o r Handle v8_break_iterator_constructor = InstallFunction( isolate_, intl, "v8BreakIterator", JS_INTL_V8_BREAK_ITERATOR_TYPE, JSV8BreakIterator::kSize, 0, factory->the_hole_value(), @@ -3006,7 +2983,7 @@ void Genesis::InitializeGlobal(Handle global_object, Builtins::kV8BreakIteratorPrototypeBreakType, false); } - { + { // -- P l u r a l R u l e s Handle plural_rules_constructor = InstallFunction( isolate_, intl, "PluralRules", JS_INTL_PLURAL_RULES_TYPE, JSPluralRules::kSize, 0, factory->the_hole_value(), @@ -3029,6 +3006,63 @@ void Genesis::InitializeGlobal(Handle global_object, SimpleInstallFunction(isolate_, prototype, "select", Builtins::kPluralRulesPrototypeSelect, 1, false); } + + { // -- R e l a t i v e T i m e F o r m a t e + Handle relative_time_format_fun = InstallFunction( + isolate(), intl, "RelativeTimeFormat", + JS_INTL_RELATIVE_TIME_FORMAT_TYPE, JSRelativeTimeFormat::kSize, 0, + factory->the_hole_value(), Builtins::kRelativeTimeFormatConstructor); + relative_time_format_fun->shared()->set_length(0); + relative_time_format_fun->shared()->DontAdaptArguments(); + + SimpleInstallFunction( + isolate(), relative_time_format_fun, "supportedLocalesOf", + Builtins::kRelativeTimeFormatSupportedLocalesOf, 1, false); + + // Setup %RelativeTimeFormatPrototype%. 
+ Handle prototype( + JSObject::cast(relative_time_format_fun->instance_prototype()), + isolate()); + + InstallToStringTag(isolate(), prototype, "Intl.RelativeTimeFormat"); + + SimpleInstallFunction( + isolate(), prototype, "resolvedOptions", + Builtins::kRelativeTimeFormatPrototypeResolvedOptions, 0, false); + SimpleInstallFunction(isolate(), prototype, "format", + Builtins::kRelativeTimeFormatPrototypeFormat, 2, + false); + SimpleInstallFunction(isolate(), prototype, "formatToParts", + Builtins::kRelativeTimeFormatPrototypeFormatToParts, + 2, false); + } + + { // -- L i s t F o r m a t + Handle list_format_fun = InstallFunction( + isolate(), intl, "ListFormat", JS_INTL_LIST_FORMAT_TYPE, + JSListFormat::kSize, 0, factory->the_hole_value(), + Builtins::kListFormatConstructor); + list_format_fun->shared()->set_length(0); + list_format_fun->shared()->DontAdaptArguments(); + + SimpleInstallFunction(isolate(), list_format_fun, "supportedLocalesOf", + Builtins::kListFormatSupportedLocalesOf, 1, false); + + // Setup %ListFormatPrototype%. + Handle prototype( + JSObject::cast(list_format_fun->instance_prototype()), isolate()); + + InstallToStringTag(isolate(), prototype, "Intl.ListFormat"); + + SimpleInstallFunction(isolate(), prototype, "resolvedOptions", + Builtins::kListFormatPrototypeResolvedOptions, 0, + false); + SimpleInstallFunction(isolate(), prototype, "format", + Builtins::kListFormatPrototypeFormat, 1, false); + SimpleInstallFunction(isolate(), prototype, "formatToParts", + Builtins::kListFormatPrototypeFormatToParts, 1, + false); + } } #endif // V8_INTL_SUPPORT @@ -4211,6 +4245,11 @@ EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexp_sequence) EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_await_optimization) EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_hashbang) +#ifdef V8_INTL_SUPPORT +EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_intl_bigint) +EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_intl_datetime_style) +#endif // V8_INTL_SUPPORT + #undef EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE void Genesis::InitializeGlobal_harmony_global() { @@ -4237,30 +4276,6 @@ void Genesis::InitializeGlobal_harmony_sharedarraybuffer() { InstallToStringTag(isolate_, isolate()->atomics_object(), "Atomics"); } -void Genesis::InitializeGlobal_harmony_array_flat() { - if (!FLAG_harmony_array_flat) return; - Handle array_constructor(native_context()->array_function(), - isolate()); - Handle array_prototype( - JSObject::cast(array_constructor->instance_prototype()), isolate()); - SimpleInstallFunction(isolate(), array_prototype, "flat", - Builtins::kArrayPrototypeFlat, 0, false); - SimpleInstallFunction(isolate(), array_prototype, "flatMap", - Builtins::kArrayPrototypeFlatMap, 1, false); -} - -void Genesis::InitializeGlobal_harmony_symbol_description() { - if (!FLAG_harmony_symbol_description) return; - - // Symbol.prototype.description - Handle symbol_fun(native_context()->symbol_function(), isolate()); - Handle symbol_prototype( - JSObject::cast(symbol_fun->instance_prototype()), isolate()); - SimpleInstallGetter(isolate(), symbol_prototype, - factory()->InternalizeUtf8String("description"), - Builtins::kSymbolPrototypeDescriptionGetter, true); -} - void Genesis::InitializeGlobal_harmony_string_matchall() { if (!FLAG_harmony_string_matchall) return; @@ -4329,55 +4344,43 @@ void Genesis::InitializeGlobal_harmony_weak_refs() { Handle global(native_context()->global_object(), isolate()); { - // Create %WeakFactoryPrototype% - Handle weak_factory_name = factory->WeakFactory_string(); - Handle weak_factory_prototype = + // 
Create %FinalizationGroupPrototype% + Handle finalization_group_name = + factory->NewStringFromStaticChars("FinalizationGroup"); + Handle finalization_group_prototype = factory->NewJSObject(isolate()->object_function(), TENURED); - // Create %WeakFactory% - Handle weak_factory_fun = - CreateFunction(isolate(), weak_factory_name, JS_WEAK_FACTORY_TYPE, - JSWeakFactory::kSize, 0, weak_factory_prototype, - Builtins::kWeakFactoryConstructor); + // Create %FinalizationGroup% + Handle finalization_group_fun = CreateFunction( + isolate(), finalization_group_name, JS_FINALIZATION_GROUP_TYPE, + JSFinalizationGroup::kSize, 0, finalization_group_prototype, + Builtins::kFinalizationGroupConstructor); - weak_factory_fun->shared()->DontAdaptArguments(); - weak_factory_fun->shared()->set_length(1); + finalization_group_fun->shared()->DontAdaptArguments(); + finalization_group_fun->shared()->set_length(1); // Install the "constructor" property on the prototype. - JSObject::AddProperty(isolate(), weak_factory_prototype, - factory->constructor_string(), weak_factory_fun, + JSObject::AddProperty(isolate(), finalization_group_prototype, + factory->constructor_string(), finalization_group_fun, DONT_ENUM); - InstallToStringTag(isolate(), weak_factory_prototype, weak_factory_name); + InstallToStringTag(isolate(), finalization_group_prototype, + finalization_group_name); + + JSObject::AddProperty(isolate(), global, finalization_group_name, + finalization_group_fun, DONT_ENUM); - JSObject::AddProperty(isolate(), global, weak_factory_name, - weak_factory_fun, DONT_ENUM); + SimpleInstallFunction(isolate(), finalization_group_prototype, "register", + Builtins::kFinalizationGroupRegister, 3, false); - SimpleInstallFunction(isolate(), weak_factory_prototype, "makeCell", - Builtins::kWeakFactoryMakeCell, 2, false); + SimpleInstallFunction(isolate(), finalization_group_prototype, "unregister", + Builtins::kFinalizationGroupUnregister, 1, false); - SimpleInstallFunction(isolate(), weak_factory_prototype, "cleanupSome", - Builtins::kWeakFactoryCleanupSome, 0, false); + SimpleInstallFunction(isolate(), finalization_group_prototype, + "cleanupSome", + Builtins::kFinalizationGroupCleanupSome, 0, false); } { - // Create %WeakCellPrototype% - Handle weak_cell_map = - factory->NewMap(JS_WEAK_CELL_TYPE, JSWeakCell::kSize); - native_context()->set_js_weak_cell_map(*weak_cell_map); - - Handle weak_cell_prototype = - factory->NewJSObject(isolate()->object_function(), TENURED); - Map::SetPrototype(isolate(), weak_cell_map, weak_cell_prototype); - - InstallToStringTag(isolate(), weak_cell_prototype, - factory->WeakCell_string()); - - SimpleInstallGetter(isolate(), weak_cell_prototype, - factory->InternalizeUtf8String("holdings"), - Builtins::kWeakCellHoldingsGetter, false); - SimpleInstallFunction(isolate(), weak_cell_prototype, "clear", - Builtins::kWeakCellClear, 0, false); - // Create %WeakRefPrototype% Handle weak_ref_map = factory->NewMap(JS_WEAK_REF_TYPE, JSWeakRef::kSize); @@ -4387,7 +4390,6 @@ void Genesis::InitializeGlobal_harmony_weak_refs() { Handle weak_ref_prototype = factory->NewJSObject(isolate()->object_function(), TENURED); Map::SetPrototype(isolate(), weak_ref_map, weak_ref_prototype); - JSObject::ForceSetPrototype(weak_ref_prototype, weak_cell_prototype); InstallToStringTag(isolate(), weak_ref_prototype, factory->WeakRef_string()); @@ -4414,7 +4416,7 @@ void Genesis::InitializeGlobal_harmony_weak_refs() { } { - // Create cleanup iterator for JSWeakFactory. + // Create cleanup iterator for JSFinalizationGroup. 
Handle iterator_prototype( native_context()->initial_iterator_prototype(), isolate()); @@ -4423,55 +4425,22 @@ void Genesis::InitializeGlobal_harmony_weak_refs() { JSObject::ForceSetPrototype(cleanup_iterator_prototype, iterator_prototype); InstallToStringTag(isolate(), cleanup_iterator_prototype, - "JSWeakFactoryCleanupIterator"); + "JSFinalizationGroupCleanupIterator"); SimpleInstallFunction(isolate(), cleanup_iterator_prototype, "next", - Builtins::kWeakFactoryCleanupIteratorNext, 0, true); + Builtins::kFinalizationGroupCleanupIteratorNext, 0, + true); Handle cleanup_iterator_map = - factory->NewMap(JS_WEAK_FACTORY_CLEANUP_ITERATOR_TYPE, - JSWeakFactoryCleanupIterator::kSize); + factory->NewMap(JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE, + JSFinalizationGroupCleanupIterator::kSize); Map::SetPrototype(isolate(), cleanup_iterator_map, cleanup_iterator_prototype); - native_context()->set_js_weak_factory_cleanup_iterator_map( + native_context()->set_js_finalization_group_cleanup_iterator_map( *cleanup_iterator_map); } } #ifdef V8_INTL_SUPPORT -void Genesis::InitializeGlobal_harmony_intl_list_format() { - if (!FLAG_harmony_intl_list_format) return; - Handle intl = Handle::cast( - JSReceiver::GetProperty( - isolate(), - Handle(native_context()->global_object(), isolate()), - factory()->InternalizeUtf8String("Intl")) - .ToHandleChecked()); - - Handle list_format_fun = - InstallFunction(isolate(), intl, "ListFormat", JS_INTL_LIST_FORMAT_TYPE, - JSListFormat::kSize, 0, factory()->the_hole_value(), - Builtins::kListFormatConstructor); - list_format_fun->shared()->set_length(0); - list_format_fun->shared()->DontAdaptArguments(); - - SimpleInstallFunction(isolate(), list_format_fun, "supportedLocalesOf", - Builtins::kListFormatSupportedLocalesOf, 1, false); - - // Setup %ListFormatPrototype%. - Handle prototype( - JSObject::cast(list_format_fun->instance_prototype()), isolate()); - - InstallToStringTag(isolate(), prototype, "Intl.ListFormat"); - - SimpleInstallFunction(isolate(), prototype, "resolvedOptions", - Builtins::kListFormatPrototypeResolvedOptions, 0, - false); - SimpleInstallFunction(isolate(), prototype, "format", - Builtins::kListFormatPrototypeFormat, 1, false); - SimpleInstallFunction(isolate(), prototype, "formatToParts", - Builtins::kListFormatPrototypeFormatToParts, 1, false); -} - void Genesis::InitializeGlobal_harmony_locale() { if (!FLAG_harmony_locale) return; @@ -4536,43 +4505,6 @@ void Genesis::InitializeGlobal_harmony_locale() { Builtins::kLocalePrototypeNumberingSystem, true); } -void Genesis::InitializeGlobal_harmony_intl_relative_time_format() { - if (!FLAG_harmony_intl_relative_time_format) return; - Handle intl = Handle::cast( - JSReceiver::GetProperty( - isolate(), - Handle(native_context()->global_object(), isolate()), - factory()->InternalizeUtf8String("Intl")) - .ToHandleChecked()); - - Handle relative_time_format_fun = InstallFunction( - isolate(), intl, "RelativeTimeFormat", JS_INTL_RELATIVE_TIME_FORMAT_TYPE, - JSRelativeTimeFormat::kSize, 0, factory()->the_hole_value(), - Builtins::kRelativeTimeFormatConstructor); - relative_time_format_fun->shared()->set_length(0); - relative_time_format_fun->shared()->DontAdaptArguments(); - - SimpleInstallFunction( - isolate(), relative_time_format_fun, "supportedLocalesOf", - Builtins::kRelativeTimeFormatSupportedLocalesOf, 1, false); - - // Setup %RelativeTimeFormatPrototype%. 
- Handle prototype( - JSObject::cast(relative_time_format_fun->instance_prototype()), - isolate()); - - InstallToStringTag(isolate(), prototype, "Intl.RelativeTimeFormat"); - - SimpleInstallFunction(isolate(), prototype, "resolvedOptions", - Builtins::kRelativeTimeFormatPrototypeResolvedOptions, - 0, false); - SimpleInstallFunction(isolate(), prototype, "format", - Builtins::kRelativeTimeFormatPrototypeFormat, 2, false); - SimpleInstallFunction(isolate(), prototype, "formatToParts", - Builtins::kRelativeTimeFormatPrototypeFormatToParts, 2, - false); -} - void Genesis::InitializeGlobal_harmony_intl_segmenter() { if (!FLAG_harmony_intl_segmenter) return; Handle intl = Handle::cast( @@ -4777,8 +4709,7 @@ bool Genesis::InstallNatives() { // Set up the extras utils object as a shared container between native // scripts and extras. (Extras consume things added there by native scripts.) - Handle extras_utils = - factory()->NewJSObject(isolate()->object_function()); + Handle extras_utils = factory()->NewJSObjectWithNullProto(); native_context()->set_extras_utils_object(*extras_utils); InstallInternalPackedArray(extras_utils, "InternalPackedArray"); @@ -5156,8 +5087,7 @@ bool Genesis::InstallNatives() { bool Genesis::InstallExtraNatives() { HandleScope scope(isolate()); - Handle extras_binding = - factory()->NewJSObject(isolate()->object_function()); + Handle extras_binding = factory()->NewJSObjectWithNullProto(); // binding.isTraceCategoryEnabled(category) SimpleInstallFunction(isolate(), extras_binding, "isTraceCategoryEnabled", @@ -5221,8 +5151,7 @@ bool Bootstrapper::InstallExtensions(Handle native_context, // Don't install extensions into the snapshot. if (isolate_->serializer_enabled()) return true; BootstrapperActive active(this); - SaveContext saved_context(isolate_); - isolate_->set_context(*native_context); + SaveAndSwitchContext saved_context(isolate_, *native_context); return Genesis::InstallExtensions(isolate_, native_context, extensions) && Genesis::InstallSpecialObjects(isolate_, native_context); } @@ -5579,7 +5508,8 @@ Genesis::Genesis( Isolate* isolate, MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, size_t context_snapshot_index, - v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) + v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue) : isolate_(isolate), active_(isolate->bootstrapper()) { RuntimeCallTimerScope rcs_timer(isolate, RuntimeCallCounterId::kGenesis); result_ = Handle::null(); @@ -5675,7 +5605,9 @@ Genesis::Genesis( } } - native_context()->set_microtask_queue(isolate->default_microtask_queue()); + native_context()->set_microtask_queue( + microtask_queue ? static_cast(microtask_queue) + : isolate->default_microtask_queue()); // Install experimental natives. Do not include them into the // snapshot as we should be able to turn them off at runtime. Re-installing diff --git a/deps/v8/src/bootstrapper.h b/deps/v8/src/bootstrapper.h index 6deff78097fde2..329bf57c509f04 100644 --- a/deps/v8/src/bootstrapper.h +++ b/deps/v8/src/bootstrapper.h @@ -8,7 +8,6 @@ #include "src/heap/factory.h" #include "src/objects/fixed-array.h" #include "src/objects/shared-function-info.h" -#include "src/objects/slots.h" #include "src/snapshot/natives.h" #include "src/visitors.h" @@ -46,7 +45,6 @@ class SourceCodeCache final { class Bootstrapper final { public: static void InitializeOncePerProcess(); - static void TearDownExtensions(); // Requires: Heap::SetUp has been called. 
void Initialize(bool create_heap_objects); @@ -58,7 +56,8 @@ class Bootstrapper final { MaybeHandle maybe_global_proxy, v8::Local global_object_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer); + v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, + v8::MicrotaskQueue* microtask_queue); Handle NewRemoteContext( MaybeHandle maybe_global_proxy, @@ -109,13 +108,6 @@ class Bootstrapper final { explicit Bootstrapper(Isolate* isolate); - static v8::Extension* free_buffer_extension_; - static v8::Extension* gc_extension_; - static v8::Extension* externalize_string_extension_; - static v8::Extension* statistics_extension_; - static v8::Extension* trigger_failure_extension_; - static v8::Extension* ignition_statistics_extension_; - DISALLOW_COPY_AND_ASSIGN(Bootstrapper); }; diff --git a/deps/v8/src/builtins/arguments.tq b/deps/v8/src/builtins/arguments.tq index b7581531550161..3a6c26c0008d49 100644 --- a/deps/v8/src/builtins/arguments.tq +++ b/deps/v8/src/builtins/arguments.tq @@ -28,7 +28,7 @@ namespace arguments { const shared: SharedFunctionInfo = f.shared_function_info; const formalParameterCount: bint = - Convert(shared.formal_parameter_count); + Convert(Convert(shared.formal_parameter_count)); let argumentCount: bint = formalParameterCount; const adaptor: ArgumentsAdaptorFrame = diff --git a/deps/v8/src/builtins/arm/builtins-arm.cc b/deps/v8/src/builtins/arm/builtins-arm.cc index 6fdd93821dfc9a..6c5a59ff8c4748 100644 --- a/deps/v8/src/builtins/arm/builtins-arm.cc +++ b/deps/v8/src/builtins/arm/builtins-arm.cc @@ -11,6 +11,8 @@ #include "src/deoptimizer.h" #include "src/frame-constants.h" #include "src/frames.h" +// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop. +#include "src/heap/heap-inl.h" #include "src/macro-assembler-inl.h" #include "src/objects-inl.h" #include "src/objects/cell.h" @@ -52,7 +54,6 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) { // -- lr : return address // -- sp[...]: constructor arguments // ----------------------------------- - Label generic_array_code, one_or_more_arguments, two_or_more_arguments; if (FLAG_debug_code) { // Initial map for the builtin InternalArray functions should be maps. @@ -218,8 +219,9 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset)); - __ tst(r4, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask)); - __ b(ne, ¬_create_implicit_receiver); + __ DecodeField(r4); + __ JumpIfIsInRange(r4, kDefaultDerivedConstructor, kDerivedConstructor, + ¬_create_implicit_receiver); // If not derived class constructor: Allocate the new receiver object. 
__ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1, @@ -2306,111 +2308,157 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // -- r3 : new target (passed through to callee) // ----------------------------------- - Label invoke, dont_adapt_arguments, stack_overflow; - - Label enough, too_few; + Label dont_adapt_arguments, stack_overflow, skip_adapt_arguments; __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); __ b(eq, &dont_adapt_arguments); - __ cmp(r0, r2); - __ b(lt, &too_few); - - Register scratch = r5; - - { // Enough parameters: actual >= expected - __ bind(&enough); - EnterArgumentsAdaptorFrame(masm); - Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow); - - // Calculate copy start address into r0 and copy end address into r4. - // r0: actual number of arguments as a smi - // r1: function - // r2: expected number of arguments - // r3: new target (passed through to callee) - __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); - // adjust for return address and receiver - __ add(r0, r0, Operand(2 * kPointerSize)); - __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2)); - - // Copy the arguments (including the receiver) to the new stack frame. - // r0: copy start address - // r1: function - // r2: expected number of arguments - // r3: new target (passed through to callee) - // r4: copy end address - - Label copy; - __ bind(©); - __ ldr(scratch, MemOperand(r0, 0)); - __ push(scratch); - __ cmp(r0, r4); // Compare before moving to next argument. - __ sub(r0, r0, Operand(kPointerSize)); - __ b(ne, ©); - - __ b(&invoke); - } + __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset)); + __ tst(r4, + Operand(SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask)); + __ b(ne, &skip_adapt_arguments); - { // Too few parameters: Actual < expected - __ bind(&too_few); - EnterArgumentsAdaptorFrame(masm); - Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow); + // ------------------------------------------- + // Adapt arguments. + // ------------------------------------------- + { + Label under_application, over_application, invoke; + __ cmp(r0, r2); + __ b(lt, &under_application); - // Calculate copy start address into r0 and copy end address is fp. - // r0: actual number of arguments as a smi - // r1: function - // r2: expected number of arguments - // r3: new target (passed through to callee) - __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); + // Enough parameters: actual >= expected + __ bind(&over_application); + { + EnterArgumentsAdaptorFrame(masm); + Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow); + + // Calculate copy start address into r0 and copy end address into r4. + // r0: actual number of arguments as a smi + // r1: function + // r2: expected number of arguments + // r3: new target (passed through to callee) + __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); + // adjust for return address and receiver + __ add(r0, r0, Operand(2 * kPointerSize)); + __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2)); + + // Copy the arguments (including the receiver) to the new stack frame. + // r0: copy start address + // r1: function + // r2: expected number of arguments + // r3: new target (passed through to callee) + // r4: copy end address + + Label copy; + __ bind(©); + __ ldr(r5, MemOperand(r0, 0)); + __ push(r5); + __ cmp(r0, r4); // Compare before moving to next argument. 
+ __ sub(r0, r0, Operand(kPointerSize)); + __ b(ne, ©); + + __ b(&invoke); + } - // Copy the arguments (including the receiver) to the new stack frame. - // r0: copy start address - // r1: function - // r2: expected number of arguments - // r3: new target (passed through to callee) - Label copy; - __ bind(©); + // Too few parameters: Actual < expected + __ bind(&under_application); + { + EnterArgumentsAdaptorFrame(masm); + Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow); + + // Calculate copy start address into r0 and copy end address is fp. + // r0: actual number of arguments as a smi + // r1: function + // r2: expected number of arguments + // r3: new target (passed through to callee) + __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); + + // Copy the arguments (including the receiver) to the new stack frame. + // r0: copy start address + // r1: function + // r2: expected number of arguments + // r3: new target (passed through to callee) + Label copy; + __ bind(©); + + // Adjust load for return address and receiver. + __ ldr(r5, MemOperand(r0, 2 * kPointerSize)); + __ push(r5); + + __ cmp(r0, fp); // Compare before moving to next argument. + __ sub(r0, r0, Operand(kPointerSize)); + __ b(ne, ©); + + // Fill the remaining expected arguments with undefined. + // r1: function + // r2: expected number of arguments + // r3: new target (passed through to callee) + __ LoadRoot(r5, RootIndex::kUndefinedValue); + __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2)); + // Adjust for frame. + __ sub(r4, r4, + Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp + + kPointerSize)); + + Label fill; + __ bind(&fill); + __ push(r5); + __ cmp(sp, r4); + __ b(ne, &fill); + } - // Adjust load for return address and receiver. - __ ldr(scratch, MemOperand(r0, 2 * kPointerSize)); - __ push(scratch); + // Call the entry point. + __ bind(&invoke); + __ mov(r0, r2); + // r0 : expected number of arguments + // r1 : function (passed through to callee) + // r3 : new target (passed through to callee) + static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch"); + __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset)); + __ CallCodeObject(r2); - __ cmp(r0, fp); // Compare before moving to next argument. - __ sub(r0, r0, Operand(kPointerSize)); - __ b(ne, ©); + // Store offset of return address for deoptimizer. + masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset( + masm->pc_offset()); - // Fill the remaining expected arguments with undefined. - // r1: function - // r2: expected number of arguments - // r3: new target (passed through to callee) - __ LoadRoot(scratch, RootIndex::kUndefinedValue); - __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2)); - // Adjust for frame. - __ sub(r4, r4, - Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp + - kPointerSize)); - - Label fill; - __ bind(&fill); - __ push(scratch); - __ cmp(sp, r4); - __ b(ne, &fill); + // Exit frame and return. + LeaveArgumentsAdaptorFrame(masm); + __ Jump(lr); } - // Call the entry point. - __ bind(&invoke); - __ mov(r0, r2); - // r0 : expected number of arguments - // r1 : function (passed through to callee) - // r3 : new target (passed through to callee) - static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch"); - __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset)); - __ CallCodeObject(r2); - - // Store offset of return address for deoptimizer. 
- masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); + // ------------------------------------------- + // Skip adapt arguments. + // ------------------------------------------- + __ bind(&skip_adapt_arguments); + { + // The callee cannot observe the actual arguments, so it's safe to just + // pass the expected arguments by massaging the stack appropriately. See + // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details. + Label under_application, over_application; + __ cmp(r0, r2); + __ b(lt, &under_application); + + __ bind(&over_application); + { + // Remove superfluous parameters from the stack. + __ sub(r4, r0, r2); + __ mov(r0, r2); + __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2)); + __ b(&dont_adapt_arguments); + } - // Exit frame and return. - LeaveArgumentsAdaptorFrame(masm); - __ Jump(lr); + __ bind(&under_application); + { + // Fill remaining expected arguments with undefined values. + Label fill; + __ LoadRoot(r4, RootIndex::kUndefinedValue); + __ bind(&fill); + __ add(r0, r0, Operand(1)); + __ push(r4); + __ cmp(r0, r2); + __ b(lt, &fill); + __ b(&dont_adapt_arguments); + } + } // ------------------------------------------- // Dont adapt arguments. @@ -2707,80 +2755,6 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) { __ Ret(); } -void Builtins::Generate_MathPowInternal(MacroAssembler* masm) { - const LowDwVfpRegister double_base = d0; - const LowDwVfpRegister double_exponent = d1; - const LowDwVfpRegister double_result = d2; - const LowDwVfpRegister double_scratch = d3; - const SwVfpRegister single_scratch = s6; - // Avoid using Registers r0-r3 as they may be needed when calling to C if the - // ABI is softfloat. - const Register integer_exponent = r4; - const Register scratch = r5; - - Label call_runtime, done, int_exponent; - - // Detect integer exponents stored as double. - __ TryDoubleToInt32Exact(integer_exponent, double_exponent, double_scratch); - __ b(eq, &int_exponent); - - __ push(lr); - { - AllowExternalCallThatCantCauseGC scope(masm); - __ PrepareCallCFunction(0, 2); - __ MovToFloatParameters(double_base, double_exponent); - __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2); - } - __ pop(lr); - __ MovFromFloatResult(double_result); - __ b(&done); - - // Calculate power with integer exponent. - __ bind(&int_exponent); - - __ vmov(double_scratch, double_base); // Back up base. - __ vmov(double_result, Double(1.0), scratch); - - // Get absolute value of exponent. - __ cmp(integer_exponent, Operand::Zero()); - __ mov(scratch, integer_exponent); - __ rsb(scratch, integer_exponent, Operand::Zero(), LeaveCC, mi); - - Label while_true; - __ bind(&while_true); - __ mov(scratch, Operand(scratch, LSR, 1), SetCC); - __ vmul(double_result, double_result, double_scratch, cs); - __ vmul(double_scratch, double_scratch, double_scratch, ne); - __ b(ne, &while_true); - - __ cmp(integer_exponent, Operand::Zero()); - __ b(ge, &done); - __ vmov(double_scratch, Double(1.0), scratch); - __ vdiv(double_result, double_scratch, double_result); - // Test whether result is zero. Bail out to check for subnormal result. - // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. - __ VFPCompareAndSetFlags(double_result, 0.0); - __ b(ne, &done); - // double_exponent may not containe the exponent value if the input was a - // smi. We set it with exponent value before bailing out. - __ vmov(single_scratch, integer_exponent); - __ vcvt_f64_s32(double_exponent, single_scratch); - - // Returning or bailing out. 
- __ push(lr); - { - AllowExternalCallThatCantCauseGC scope(masm); - __ PrepareCallCFunction(0, 2); - __ MovToFloatParameters(double_base, double_exponent); - __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2); - } - __ pop(lr); - __ MovFromFloatResult(double_result); - - __ bind(&done); - __ Ret(); -} - void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r0 : argc @@ -2953,32 +2927,24 @@ void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address, void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // ----------- S t a t e ------------- - // -- cp : kTargetContext - // -- r1 : kApiFunctionAddress - // -- r2 : kArgc - // -- + // -- cp : context + // -- r1 : api function address + // -- r2 : arguments count (not including the receiver) + // -- r3 : call data + // -- r0 : holder // -- sp[0] : last argument // -- ... // -- sp[(argc - 1) * 4] : first argument // -- sp[(argc + 0) * 4] : receiver - // -- sp[(argc + 1) * 4] : kHolder - // -- sp[(argc + 2) * 4] : kCallData // ----------------------------------- Register api_function_address = r1; Register argc = r2; + Register call_data = r3; + Register holder = r0; Register scratch = r4; - Register index = r5; // For indexing MemOperands. - - DCHECK(!AreAliased(api_function_address, argc, scratch, index)); - - // Stack offsets (without argc). - static constexpr int kReceiverOffset = 0; - static constexpr int kHolderOffset = kReceiverOffset + 1; - static constexpr int kCallDataOffset = kHolderOffset + 1; - // Extra stack arguments are: the receiver, kHolder, kCallData. - static constexpr int kExtraStackArgumentCount = 3; + DCHECK(!AreAliased(api_function_address, argc, call_data, holder, scratch)); typedef FunctionCallbackArguments FCA; @@ -3004,24 +2970,22 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { __ sub(sp, sp, Operand(FCA::kArgsLength * kPointerSize)); // kHolder. - __ add(index, argc, Operand(FCA::kArgsLength + kHolderOffset)); - __ ldr(scratch, MemOperand(sp, index, LSL, kPointerSizeLog2)); - __ str(scratch, MemOperand(sp, 0 * kPointerSize)); + __ str(holder, MemOperand(sp, 0 * kPointerSize)); // kIsolate. __ Move(scratch, ExternalReference::isolate_address(masm->isolate())); __ str(scratch, MemOperand(sp, 1 * kPointerSize)); - // kReturnValueDefaultValue, kReturnValue, and kNewTarget. + // kReturnValueDefaultValue and kReturnValue. __ LoadRoot(scratch, RootIndex::kUndefinedValue); __ str(scratch, MemOperand(sp, 2 * kPointerSize)); __ str(scratch, MemOperand(sp, 3 * kPointerSize)); - __ str(scratch, MemOperand(sp, 5 * kPointerSize)); // kData. - __ add(index, argc, Operand(FCA::kArgsLength + kCallDataOffset)); - __ ldr(scratch, MemOperand(sp, index, LSL, kPointerSizeLog2)); - __ str(scratch, MemOperand(sp, 4 * kPointerSize)); + __ str(call_data, MemOperand(sp, 4 * kPointerSize)); + + // kNewTarget. + __ str(scratch, MemOperand(sp, 5 * kPointerSize)); // Keep a pointer to kHolder (= implicit_args) in a scratch register. // We use it below to set up the FunctionCallbackInfo object. @@ -3050,7 +3014,7 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // We also store the number of bytes to drop from the stack after returning // from the API function here. 
__ mov(scratch, - Operand((FCA::kArgsLength + kExtraStackArgumentCount) * kPointerSize)); + Operand((FCA::kArgsLength + 1 /* receiver */) * kPointerSize)); __ add(scratch, scratch, Operand(argc, LSL, kPointerSizeLog2)); __ str(scratch, MemOperand(sp, 4 * kPointerSize)); diff --git a/deps/v8/src/builtins/arm64/builtins-arm64.cc b/deps/v8/src/builtins/arm64/builtins-arm64.cc index 8fadff4768d818..35ed82a83e035d 100644 --- a/deps/v8/src/builtins/arm64/builtins-arm64.cc +++ b/deps/v8/src/builtins/arm64/builtins-arm64.cc @@ -11,6 +11,8 @@ #include "src/deoptimizer.h" #include "src/frame-constants.h" #include "src/frames.h" +// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop. +#include "src/heap/heap-inl.h" #include "src/macro-assembler-inl.h" #include "src/objects-inl.h" #include "src/objects/cell.h" @@ -50,7 +52,8 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) { if (FLAG_debug_code) { // Initial map for the builtin InternalArray functions should be maps. - __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); + __ LoadTaggedPointerField( + x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); __ Tst(x10, kSmiTagMask); __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction); __ CompareObjectType(x10, x11, x12, MAP_TYPE); @@ -140,7 +143,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) { __ Tbnz(slot_count_without_rounding, 0, &already_aligned); // Store padding, if needed. - __ Str(padreg, MemOperand(x2, 1 * kPointerSize)); + __ Str(padreg, MemOperand(x2, 1 * kSystemPointerSize)); __ Bind(&already_aligned); // Copy arguments to the expression stack. @@ -155,26 +158,26 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) { } // ----------- S t a t e ------------- - // -- x0: number of arguments (untagged) - // -- x1: constructor function - // -- x3: new target + // -- x0: number of arguments (untagged) + // -- x1: constructor function + // -- x3: new target // If argc is odd: - // -- sp[0*kPointerSize]: argument n - 1 + // -- sp[0*kSystemPointerSize]: argument n - 1 // -- ... - // -- sp[(n-1)*kPointerSize]: argument 0 - // -- sp[(n+0)*kPointerSize]: the hole (receiver) - // -- sp[(n+1)*kPointerSize]: padding - // -- sp[(n+2)*kPointerSize]: padding - // -- sp[(n+3)*kPointerSize]: number of arguments (tagged) - // -- sp[(n+4)*kPointerSize]: context (pushed by FrameScope) + // -- sp[(n-1)*kSystemPointerSize]: argument 0 + // -- sp[(n+0)*kSystemPointerSize]: the hole (receiver) + // -- sp[(n+1)*kSystemPointerSize]: padding + // -- sp[(n+2)*kSystemPointerSize]: padding + // -- sp[(n+3)*kSystemPointerSize]: number of arguments (tagged) + // -- sp[(n+4)*kSystemPointerSize]: context (pushed by FrameScope) // If argc is even: - // -- sp[0*kPointerSize]: argument n - 1 + // -- sp[0*kSystemPointerSize]: argument n - 1 // -- ... - // -- sp[(n-1)*kPointerSize]: argument 0 - // -- sp[(n+0)*kPointerSize]: the hole (receiver) - // -- sp[(n+1)*kPointerSize]: padding - // -- sp[(n+2)*kPointerSize]: number of arguments (tagged) - // -- sp[(n+3)*kPointerSize]: context (pushed by FrameScope) + // -- sp[(n-1)*kSystemPointerSize]: argument 0 + // -- sp[(n+0)*kSystemPointerSize]: the hole (receiver) + // -- sp[(n+1)*kSystemPointerSize]: padding + // -- sp[(n+2)*kSystemPointerSize]: number of arguments (tagged) + // -- sp[(n+3)*kSystemPointerSize]: context (pushed by FrameScope) // ----------------------------------- // Call the function. 
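// ---------------------------------------------------------------------------
// Editorial aside (not part of the patch): the arm64 hunks in this file
// consistently separate stack-slot arithmetic (kSystemPointerSize /
// kSystemPointerSizeLog2) from on-heap tagged-field accesses, which now go
// through the tagged-field helpers (LoadTaggedPointerField, LoadAnyTaggedField,
// StoreTaggedField) and use kTaggedSizeLog2 for element indexing. The
// compile-time sketch below only illustrates the size relationship these
// changes rely on; it assumes a 64-bit target with pointer compression
// enabled, and the constant values here are illustrative rather than taken
// from any build configuration.
#include <cstddef>

constexpr size_t kSystemPointerSize = sizeof(void*);  // machine word: stack slots, saved registers
constexpr size_t kSystemPointerSizeLog2 = 3;          // shift used when scaling argc to stack bytes
constexpr size_t kTaggedSize = 4;                     // compressed tagged slot on the V8 heap (assumed)
constexpr size_t kTaggedSizeLog2 = 2;                 // shift used when indexing tagged heap arrays

static_assert(kSystemPointerSize == size_t{1} << kSystemPointerSizeLog2,
              "stack slots are addressed in machine words (64-bit target assumed)");
static_assert(kTaggedSize == size_t{1} << kTaggedSizeLog2,
              "heap fields are addressed in tagged, possibly compressed, slots");
// ---------------------------------------------------------------------------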
@@ -203,13 +206,13 @@ void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, // Check the stack for overflow. // We are not trying to catch interruptions (e.g. debug break and // preemption) here, so the "real stack limit" is checked. - Label enough_stack_space; + __ LoadRoot(scratch, RootIndex::kRealStackLimit); // Make scratch the space we have left. The stack might already be overflowed // here which will cause scratch to become negative. __ Sub(scratch, sp, scratch); // Check if the arguments will overflow the stack. - __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2)); + __ Cmp(scratch, Operand(num_args, LSL, kSystemPointerSizeLog2)); __ B(le, stack_overflow); #if defined(V8_OS_WIN) @@ -218,7 +221,7 @@ void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, Label chkstk, chkstk_done; Register probe = temps.AcquireX(); - __ Sub(scratch, sp, Operand(num_args, LSL, kPointerSizeLog2)); + __ Sub(scratch, sp, Operand(num_args, LSL, kSystemPointerSizeLog2)); __ Mov(probe, sp); // Loop start of stack probe. @@ -265,18 +268,19 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ Push(x0, x1, padreg, x3); // ----------- S t a t e ------------- - // -- sp[0*kPointerSize]: new target - // -- sp[1*kPointerSize]: padding - // -- x1 and sp[2*kPointerSize]: constructor function - // -- sp[3*kPointerSize]: number of arguments (tagged) - // -- sp[4*kPointerSize]: context (pushed by FrameScope) + // -- sp[0*kSystemPointerSize]: new target + // -- sp[1*kSystemPointerSize]: padding + // -- x1 and sp[2*kSystemPointerSize]: constructor function + // -- sp[3*kSystemPointerSize]: number of arguments (tagged) + // -- sp[4*kSystemPointerSize]: context (pushed by FrameScope) // ----------------------------------- - __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset)); - __ TestAndBranchIfAnySet(w4, - SharedFunctionInfo::IsDerivedConstructorBit::kMask, - ¬_create_implicit_receiver); + __ DecodeField(w4); + __ JumpIfIsInRange(w4, kDefaultDerivedConstructor, kDerivedConstructor, + ¬_create_implicit_receiver); // If not derived class constructor: Allocate the new receiver object. __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1, @@ -290,12 +294,12 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ LoadRoot(x0, RootIndex::kTheHoleValue); // ----------- S t a t e ------------- - // -- x0: receiver - // -- Slot 4 / sp[0*kPointerSize]: new target - // -- Slot 3 / sp[1*kPointerSize]: padding - // -- Slot 2 / sp[2*kPointerSize]: constructor function - // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged) - // -- Slot 0 / sp[4*kPointerSize]: context + // -- x0: receiver + // -- Slot 4 / sp[0*kSystemPointerSize]: new target + // -- Slot 3 / sp[1*kSystemPointerSize]: padding + // -- Slot 2 / sp[2*kSystemPointerSize]: constructor function + // -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments (tagged) + // -- Slot 0 / sp[4*kSystemPointerSize]: context // ----------------------------------- // Deoptimizer enters here. masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset( @@ -304,7 +308,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ Bind(&post_instantiation_deopt_entry); // Restore new target from the top of the stack. 
- __ Peek(x3, 0 * kPointerSize); + __ Peek(x3, 0 * kSystemPointerSize); // Restore constructor function and argument count. __ Ldr(x1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset)); @@ -326,15 +330,16 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ Push(x0, x0); // ----------- S t a t e ------------- - // -- x3: new target - // -- x12: number of arguments (untagged) - // -- sp[0*kPointerSize]: implicit receiver (overwrite if argc odd) - // -- sp[1*kPointerSize]: implicit receiver - // -- sp[2*kPointerSize]: implicit receiver - // -- sp[3*kPointerSize]: padding - // -- x1 and sp[4*kPointerSize]: constructor function - // -- sp[5*kPointerSize]: number of arguments (tagged) - // -- sp[6*kPointerSize]: context + // -- x3: new target + // -- x12: number of arguments (untagged) + // -- sp[0*kSystemPointerSize]: implicit receiver (overwrite if argc + // odd) + // -- sp[1*kSystemPointerSize]: implicit receiver + // -- sp[2*kSystemPointerSize]: implicit receiver + // -- sp[3*kSystemPointerSize]: padding + // -- x1 and sp[4*kSystemPointerSize]: constructor function + // -- sp[5*kSystemPointerSize]: number of arguments (tagged) + // -- sp[6*kSystemPointerSize]: context // ----------------------------------- // Round the number of arguments down to the next even number, and claim @@ -373,11 +378,11 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { __ InvokeFunction(x1, x3, actual, CALL_FUNCTION); // ----------- S t a t e ------------- - // -- sp[0*kPointerSize]: implicit receiver - // -- sp[1*kPointerSize]: padding - // -- sp[2*kPointerSize]: constructor function - // -- sp[3*kPointerSize]: number of arguments - // -- sp[4*kPointerSize]: context + // -- sp[0*kSystemPointerSize]: implicit receiver + // -- sp[1*kSystemPointerSize]: padding + // -- sp[2*kSystemPointerSize]: constructor function + // -- sp[3*kSystemPointerSize]: number of arguments + // -- sp[4*kSystemPointerSize]: context // ----------------------------------- // Store offset of return address for deoptimizer. @@ -414,7 +419,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { // Throw away the result of the constructor invocation and use the // on-stack receiver as the result. __ Bind(&use_receiver); - __ Peek(x0, 0 * kPointerSize); + __ Peek(x0, 0 * kSystemPointerSize); __ CompareRoot(x0, RootIndex::kTheHoleValue); __ B(eq, &do_throw); @@ -443,8 +448,9 @@ static void GetSharedFunctionInfoBytecode(MacroAssembler* masm, Label done; __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE); __ B(ne, &done); - __ Ldr(sfi_data, - FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset)); + __ LoadTaggedPointerField( + sfi_data, + FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset)); __ Bind(&done); } @@ -458,13 +464,16 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ AssertGeneratorObject(x1); // Store input value into generator object. - __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset)); + __ StoreTaggedField( + x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset)); __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3, kLRHasNotBeenSaved, kDontSaveFPRegs); // Load suspended function and context. 
- __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); - __ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); + __ LoadTaggedPointerField(cp, + FieldMemOperand(x4, JSFunction::kContextOffset)); // Flood function if we are stepping. Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator; @@ -491,7 +500,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ B(lo, &stack_overflow); // Get number of arguments for generator function. - __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); __ Ldrh(w10, FieldMemOperand( x10, SharedFunctionInfo::kFormalParameterCountOffset)); @@ -502,11 +512,12 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Store padding (which might be replaced by the receiver). __ Sub(x11, x11, 1); - __ Poke(padreg, Operand(x11, LSL, kPointerSizeLog2)); + __ Poke(padreg, Operand(x11, LSL, kSystemPointerSizeLog2)); // Poke receiver into highest claimed slot. - __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset)); - __ Poke(x5, Operand(x10, LSL, kPointerSizeLog2)); + __ LoadTaggedPointerField( + x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset)); + __ Poke(x5, Operand(x10, LSL, kSystemPointerSizeLog2)); // ----------- S t a t e ------------- // -- x1 : the JSGeneratorObject to resume @@ -520,8 +531,9 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Copy the function arguments from the generator object's register file. - __ Ldr(x5, - FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset)); + __ LoadTaggedPointerField( + x5, + FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset)); { Label loop, done; __ Cbz(x10, &done); @@ -529,9 +541,9 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ Bind(&loop); __ Sub(x10, x10, 1); - __ Add(x11, x5, Operand(x12, LSL, kPointerSizeLog2)); - __ Ldr(x11, FieldMemOperand(x11, FixedArray::kHeaderSize)); - __ Poke(x11, Operand(x10, LSL, kPointerSizeLog2)); + __ Add(x11, x5, Operand(x12, LSL, kTaggedSizeLog2)); + __ LoadAnyTaggedField(x11, FieldMemOperand(x11, FixedArray::kHeaderSize)); + __ Poke(x11, Operand(x10, LSL, kSystemPointerSizeLog2)); __ Add(x12, x12, 1); __ Cbnz(x10, &loop); __ Bind(&done); @@ -539,8 +551,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Underlying function needs to have bytecode available. if (FLAG_debug_code) { - __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); - __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset)); + __ LoadTaggedPointerField( + x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset)); GetSharedFunctionInfoBytecode(masm, x3, x0); __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE); __ Assert(eq, AbortReason::kMissingBytecodeArray); @@ -548,7 +562,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Resume (Ignition/TurboFan) generator object. 
{ - __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); __ Ldrh(w0, FieldMemOperand( x0, SharedFunctionInfo::kFormalParameterCountOffset)); // We abuse new.target both to indicate that this is a resume call and to @@ -557,7 +572,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ Mov(x3, x1); __ Mov(x1, x4); static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch"); - __ Ldr(x2, FieldMemOperand(x1, JSFunction::kCodeOffset)); + __ LoadTaggedPointerField(x2, FieldMemOperand(x1, JSFunction::kCodeOffset)); __ JumpCodeObject(x2); } @@ -569,7 +584,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ Push(x1, padreg, x4, x5); __ CallRuntime(Runtime::kDebugOnFunctionCall); __ Pop(padreg, x1); - __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); } __ B(&stepping_prepared); @@ -579,7 +595,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { __ Push(x1, padreg); __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); __ Pop(padreg, x1); - __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); } __ B(&stepping_prepared); @@ -650,7 +667,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, // Push the JS entry frame marker. Also set js_entry_sp if this is the // outermost JS call. - Label non_outermost_js, done; + Label done; ExternalReference js_entry_sp = ExternalReference::Create( IsolateAddressId::kJSEntrySPAddress, masm->isolate()); __ Mov(x10, js_entry_sp); @@ -709,9 +726,9 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, __ Bind(&invoke); // Push new stack handler. - static_assert(StackHandlerConstants::kSize == 2 * kPointerSize, + static_assert(StackHandlerConstants::kSize == 2 * kSystemPointerSize, "Unexpected offset for StackHandlerConstants::kSize"); - static_assert(StackHandlerConstants::kNextOffset == 0 * kPointerSize, + static_assert(StackHandlerConstants::kNextOffset == 0 * kSystemPointerSize, "Unexpected offset for StackHandlerConstants::kNextOffset"); // Link the current handler as the next handler. @@ -740,7 +757,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, __ Call(trampoline_code, RelocInfo::CODE_TARGET); // Pop the stack handler and unlink this frame from the handler chain. - static_assert(StackHandlerConstants::kNextOffset == 0 * kPointerSize, + static_assert(StackHandlerConstants::kNextOffset == 0 * kSystemPointerSize, "Unexpected offset for StackHandlerConstants::kNextOffset"); __ Pop(x10, padreg); __ Mov(x11, ExternalReference::Create(IsolateAddressId::kHandlerAddress, @@ -763,7 +780,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, Label non_outermost_js_2; { Register c_entry_fp = x11; - __ PeekPair(x10, c_entry_fp, 1 * kPointerSize); + __ PeekPair(x10, c_entry_fp, 1 * kSystemPointerSize); __ Cmp(x10, StackFrame::OUTERMOST_JSENTRY_FRAME); __ B(ne, &non_outermost_js_2); __ Mov(x12, js_entry_sp); @@ -777,9 +794,10 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, } // Reset the stack to the callee saved registers. 
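// Sketch of the handler chain implied by the static_asserts above: each
// handler occupies two system-pointer slots (next pointer at offset 0 plus
// padding), and the isolate keeps a pointer to the innermost one. The struct
// and helpers are illustrative, not V8's definitions.
struct StackHandler {
  StackHandler* next;  // StackHandlerConstants::kNextOffset == 0
  void* padding;       // second slot keeps sp 16-byte aligned
};

struct IsolateData {
  StackHandler* top_handler = nullptr;  // what kHandlerAddress points at
};

void PushHandler(IsolateData& isolate, StackHandler* handler) {
  handler->next = isolate.top_handler;  // link the current handler as the next handler
  isolate.top_handler = handler;
}

void PopHandler(IsolateData& isolate) {
  isolate.top_handler = isolate.top_handler->next;  // unlink this frame from the chain
}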
- static_assert(EntryFrameConstants::kFixedFrameSize % (2 * kPointerSize) == 0, - "Size of entry frame is not a multiple of 16 bytes"); - __ Drop(EntryFrameConstants::kFixedFrameSize / kPointerSize); + static_assert( + EntryFrameConstants::kFixedFrameSize % (2 * kSystemPointerSize) == 0, + "Size of entry frame is not a multiple of 16 bytes"); + __ Drop(EntryFrameConstants::kFixedFrameSize / kSystemPointerSize); // Restore the callee-saved registers and return. __ PopCalleeSavedRegisters(); __ Ret(); @@ -848,7 +866,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Store padding (which might be overwritten). __ SlotAddress(scratch, slots_to_claim); - __ Str(padreg, MemOperand(scratch, -kPointerSize)); + __ Str(padreg, MemOperand(scratch, -kSystemPointerSize)); // Store receiver and function on the stack. __ SlotAddress(scratch, argc); @@ -867,11 +885,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ Bind(&loop); // Load the handle. - __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex)); + __ Ldr(x11, MemOperand(argv, kSystemPointerSize, PostIndex)); // Dereference the handle. __ Ldr(x11, MemOperand(x11)); // Poke the result into the stack. - __ Str(x11, MemOperand(scratch, -kPointerSize, PreIndex)); + __ Str(x11, MemOperand(scratch, -kSystemPointerSize, PreIndex)); // Loop if we've not reached the end of copy marker. __ Cmp(sp, scratch); __ B(lt, &loop); @@ -936,7 +954,8 @@ static void ReplaceClosureCodeWithOptimizedCode( MacroAssembler* masm, Register optimized_code, Register closure, Register scratch1, Register scratch2, Register scratch3) { // Store code entry in the closure. - __ Str(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset)); + __ StoreTaggedField(optimized_code, + FieldMemOperand(closure, JSFunction::kCodeOffset)); __ Mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below. __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2, kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, @@ -957,10 +976,10 @@ static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) { // Drop receiver + arguments. if (__ emit_debug_code()) { - __ Tst(args_size, kPointerSize - 1); + __ Tst(args_size, kSystemPointerSize - 1); __ Check(eq, AbortReason::kUnexpectedValue); } - __ Lsr(args_size, args_size, kPointerSizeLog2); + __ Lsr(args_size, args_size, kSystemPointerSizeLog2); __ DropArguments(args_size); } @@ -993,7 +1012,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm, Register closure = x1; Register optimized_code_entry = scratch1; - __ Ldr( + __ LoadAnyTaggedField( optimized_code_entry, FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset)); @@ -1046,8 +1065,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm, // Check if the optimized code is marked for deopt. If it is, call the // runtime to clear it. Label found_deoptimized_code; - __ Ldr(scratch2, FieldMemOperand(optimized_code_entry, - Code::kCodeDataContainerOffset)); + __ LoadTaggedPointerField( + scratch2, + FieldMemOperand(optimized_code_entry, Code::kCodeDataContainerOffset)); __ Ldr( scratch2, FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset)); @@ -1149,9 +1169,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { // Get the bytecode array from the function object and load it into // kInterpreterBytecodeArrayRegister. 
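// Sketch of the two small arithmetic patterns used above and throughout the
// file: converting a byte count into a slot count (with the debug-only
// alignment check), and rounding a slot count up to an even number so sp
// stays 16-byte aligned. Constants are assumed arm64 values.
#include <cassert>
#include <cstdint>

constexpr uint64_t kSystemPointerSize = 8;
constexpr uint64_t kSystemPointerSizeLog2 = 3;

uint64_t BytesToSlots(uint64_t bytes) {
  assert((bytes & (kSystemPointerSize - 1)) == 0);  // the Tst/Check pair above
  return bytes >> kSystemPointerSizeLog2;           // the Lsr by kSystemPointerSizeLog2
}

// The Add 1 / Bic 1 sequence seen in the frame-claim code elsewhere in the file.
uint64_t RoundUpToSlotPair(uint64_t slots) { return (slots + 1) & ~uint64_t{1}; }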
- __ Ldr(x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); - __ Ldr(kInterpreterBytecodeArrayRegister, - FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); + __ LoadTaggedPointerField( + x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + kInterpreterBytecodeArrayRegister, + FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset)); GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, x11); // The bytecode array could have been flushed from the shared function info, @@ -1162,9 +1184,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { __ B(ne, &compile_lazy); // Load the feedback vector from the closure. - __ Ldr(feedback_vector, - FieldMemOperand(closure, JSFunction::kFeedbackCellOffset)); - __ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset)); + __ LoadTaggedPointerField( + feedback_vector, + FieldMemOperand(closure, JSFunction::kFeedbackCellOffset)); + __ LoadTaggedPointerField( + feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset)); Label push_stack_frame; // Check if feedback vector is valid. If valid, check for optimized code @@ -1224,7 +1248,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { // register in the register file. Label loop_header; __ LoadRoot(x10, RootIndex::kUndefinedValue); - __ Lsr(x11, x11, kPointerSizeLog2); + __ Lsr(x11, x11, kSystemPointerSizeLog2); // Round up the number of registers to a multiple of 2, to align the stack // to 16 bytes. __ Add(x11, x11, 1); @@ -1241,7 +1265,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { kInterpreterBytecodeArrayRegister, BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset)); __ Cbz(x10, &no_incoming_new_target_or_generator_register); - __ Str(x3, MemOperand(fp, x10, LSL, kPointerSizeLog2)); + __ Str(x3, MemOperand(fp, x10, LSL, kSystemPointerSizeLog2)); __ Bind(&no_incoming_new_target_or_generator_register); // Load accumulator with undefined. @@ -1257,11 +1281,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { #if defined(V8_OS_WIN) __ Ldrb(x23, MemOperand(kInterpreterBytecodeArrayRegister, kInterpreterBytecodeOffsetRegister)); - __ Mov(x1, Operand(x23, LSL, kPointerSizeLog2)); + __ Mov(x1, Operand(x23, LSL, kSystemPointerSizeLog2)); #else __ Ldrb(x18, MemOperand(kInterpreterBytecodeArrayRegister, kInterpreterBytecodeOffsetRegister)); - __ Mov(x1, Operand(x18, LSL, kPointerSizeLog2)); + __ Mov(x1, Operand(x18, LSL, kSystemPointerSizeLog2)); #endif __ Ldr(kJavaScriptCallCodeStartRegister, MemOperand(kInterpreterDispatchTableRegister, x1)); @@ -1342,7 +1366,7 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm, UseScratchRegisterScope temps(masm); Register scratch = temps.AcquireX(); __ Sub(scratch, slots_to_claim, 1); - __ Poke(padreg, Operand(scratch, LSL, kPointerSizeLog2)); + __ Poke(padreg, Operand(scratch, LSL, kSystemPointerSizeLog2)); } if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) { @@ -1359,12 +1383,12 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm, } __ Sub(last_arg_addr, first_arg_index, - Operand(slots_to_copy, LSL, kPointerSizeLog2)); - __ Add(last_arg_addr, last_arg_addr, kPointerSize); + Operand(slots_to_copy, LSL, kSystemPointerSizeLog2)); + __ Add(last_arg_addr, last_arg_addr, kSystemPointerSize); // Load the final spread argument into spread_arg_out, if necessary. 
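// Sketch of the dispatch step above: the interpreter reads one bytecode,
// scales it by the system pointer size, and loads the handler's entry point
// from the dispatch table at that offset. The table, handlers, and bytecode
// values below are hypothetical stand-ins.
#include <cstdint>
#include <cstdio>

using Handler = void (*)();
void HandleLdaZero() { std::puts("LdaZero handler"); }
void HandleReturn() { std::puts("Return handler"); }

int main() {
  const Handler dispatch_table[] = {HandleLdaZero, HandleReturn};
  const uint8_t bytecode_stream[] = {0, 0, 1};
  for (uint8_t bytecode : bytecode_stream) {
    // Equivalent of Ldr(code, MemOperand(table, bytecode << kSystemPointerSizeLog2)).
    dispatch_table[bytecode]();
  }
  return 0;
}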
if (mode == InterpreterPushArgsMode::kWithFinalSpread) { - __ Ldr(spread_arg_out, MemOperand(last_arg_addr, -kPointerSize)); + __ Ldr(spread_arg_out, MemOperand(last_arg_addr, -kSystemPointerSize)); } // Copy the rest of the arguments. @@ -1458,8 +1482,10 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { // get the custom trampoline, otherwise grab the entry address of the global // trampoline. __ Ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset)); - __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); - __ Ldr(x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset)); + __ LoadTaggedPointerField( + x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset)); __ CompareObjectType(x1, kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister, INTERPRETER_DATA_TYPE); @@ -1508,11 +1534,11 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { #if defined(V8_OS_WIN) __ Ldrb(x23, MemOperand(kInterpreterBytecodeArrayRegister, kInterpreterBytecodeOffsetRegister)); - __ Mov(x1, Operand(x23, LSL, kPointerSizeLog2)); + __ Mov(x1, Operand(x23, LSL, kSystemPointerSizeLog2)); #else __ Ldrb(x18, MemOperand(kInterpreterBytecodeArrayRegister, kInterpreterBytecodeOffsetRegister)); - __ Mov(x1, Operand(x18, LSL, kPointerSizeLog2)); + __ Mov(x1, Operand(x18, LSL, kSystemPointerSizeLog2)); #endif __ Ldr(kJavaScriptCallCodeStartRegister, MemOperand(kInterpreterDispatchTableRegister, x1)); @@ -1610,7 +1636,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { // Three arguments. __ Bind(&three_args); __ Ldr(scratch3, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + - 2 * kPointerSize)); + 2 * kSystemPointerSize)); __ Push(new_target, scratch3, scratch1, scratch2); __ Bind(&args_done); @@ -1622,7 +1648,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { __ JumpIfSmi(x0, &failed); // Peek the argument count from the stack, untagging at the same time. - __ SmiUntag(x4, MemOperand(sp, 3 * kPointerSize)); + __ SmiUntag(x4, MemOperand(sp, 3 * kSystemPointerSize)); __ Drop(4); scope.GenerateLeaveFrame(); @@ -1637,7 +1663,8 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { } // On failure, tail call back to regular js by re-calling the function // which has be reset to the compile lazy builtin. - __ Ldr(x4, FieldMemOperand(new_target, JSFunction::kCodeOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(new_target, JSFunction::kCodeOffset)); __ JumpCodeObject(x4); } @@ -1651,7 +1678,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm, (allocatable_register_count + BuiltinContinuationFrameConstants::PaddingSlotCount( allocatable_register_count)) * - kPointerSize; + kSystemPointerSize; // Set up frame pointer. __ Add(fp, sp, frame_size); @@ -1665,14 +1692,14 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm, // Restore registers in pairs. 
int offset = -BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - - allocatable_register_count * kPointerSize; + allocatable_register_count * kSystemPointerSize; for (int i = allocatable_register_count - 1; i > 0; i -= 2) { int code1 = config->GetAllocatableGeneralCode(i); int code2 = config->GetAllocatableGeneralCode(i - 1); Register reg1 = Register::from_code(code1); Register reg2 = Register::from_code(code2); __ Ldp(reg1, reg2, MemOperand(fp, offset)); - offset += 2 * kPointerSize; + offset += 2 * kSystemPointerSize; } // Restore first register separately, if number of registers is odd. @@ -1753,13 +1780,14 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { // Load deoptimization data from the code object. // = [#deoptimization_data_offset] - __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); + __ LoadTaggedPointerField( + x1, FieldMemOperand(x0, Code::kDeoptimizationDataOffset)); // Load the OSR entrypoint offset from the deoptimization data. // = [#header_size + #osr_pc_offset] - __ SmiUntag(x1, - FieldMemOperand(x1, FixedArray::OffsetOfElementAt( - DeoptimizationData::kOsrPcOffsetIndex))); + __ SmiUntagField( + x1, FieldMemOperand(x1, FixedArray::OffsetOfElementAt( + DeoptimizationData::kOsrPcOffsetIndex))); // Compute the target address = code_obj + header_size + osr_offset // = + #header_size + @@ -1817,7 +1845,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { __ Mov(saved_argc, argc); __ Ldp(arg_array, this_arg, MemOperand(scratch)); // Overwrites argc. - __ Ldr(receiver, MemOperand(scratch, 2 * kPointerSize)); + __ Ldr(receiver, MemOperand(scratch, 2 * kSystemPointerSize)); __ Drop(2); // Drop the undefined values we pushed above. __ DropArguments(saved_argc, TurboAssembler::kCountExcludesReceiver); @@ -1889,7 +1917,6 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { // Copy arguments two slots higher in memory, overwriting the original // receiver and padding. { - Label loop; Register copy_from = x10; Register copy_to = x11; Register count = x12; @@ -1897,7 +1924,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { __ Mov(count, argc); __ Sub(last_arg_slot, argc, 1); __ SlotAddress(copy_from, last_arg_slot); - __ Add(copy_to, copy_from, 2 * kPointerSize); + __ Add(copy_to, copy_from, 2 * kSystemPointerSize); __ CopyDoubleWords(copy_to, copy_from, count, TurboAssembler::kSrcLessThanDst); // Drop two slots. These are copies of the last two arguments. @@ -1964,8 +1991,8 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) { Register scratch = x10; __ SlotAddress(scratch, argc); __ Ldp(arguments_list, this_argument, - MemOperand(scratch, 1 * kPointerSize)); - __ Ldr(target, MemOperand(scratch, 3 * kPointerSize)); + MemOperand(scratch, 1 * kSystemPointerSize)); + __ Ldr(target, MemOperand(scratch, 3 * kSystemPointerSize)); __ Drop(4); // Drop the undefined values we pushed above. __ DropArguments(argc, TurboAssembler::kCountExcludesReceiver); @@ -2041,8 +2068,9 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { // slot (argc + 1). 
Register scratch = x10; __ SlotAddress(scratch, argc); - __ Ldp(new_target, arguments_list, MemOperand(scratch, 1 * kPointerSize)); - __ Ldr(target, MemOperand(scratch, 3 * kPointerSize)); + __ Ldp(new_target, arguments_list, + MemOperand(scratch, 1 * kSystemPointerSize)); + __ Ldr(target, MemOperand(scratch, 3 * kSystemPointerSize)); __ Cmp(argc, 2); __ CmovX(new_target, target, ls); // target if argc <= 2. @@ -2153,7 +2181,8 @@ void Generate_PrepareForCopyingVarargs(MacroAssembler* masm, Register argc, __ Mov(dst, len); // CopySlots will corrupt dst. __ CopySlots(dst, src, slots_to_copy); __ Add(scratch, argc, 1); - __ Poke(padreg, Operand(scratch, LSL, kPointerSizeLog2)); // Store padding. + __ Poke(padreg, + Operand(scratch, LSL, kSystemPointerSizeLog2)); // Store padding. } __ Bind(&exit); @@ -2175,7 +2204,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm, // Allow x2 to be a FixedArray, or a FixedDoubleArray if x4 == 0. Label ok, fail; __ AssertNotSmi(x2, AbortReason::kOperandIsNotAFixedArray); - __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset)); + __ LoadTaggedPointerField(x10, FieldMemOperand(x2, HeapObject::kMapOffset)); __ Ldrh(x13, FieldMemOperand(x10, Map::kInstanceTypeOffset)); __ Cmp(x13, FIXED_ARRAY_TYPE); __ B(eq, &ok); @@ -2218,10 +2247,10 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm, // TODO(all): Consider using Ldp and Stp. __ Bind(&loop); __ Sub(len, len, 1); - __ Ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); + __ LoadAnyTaggedField(scratch, MemOperand(src, kTaggedSize, PostIndex)); __ Cmp(scratch, the_hole_value); __ Csel(scratch, scratch, undefined_value, ne); - __ Poke(scratch, Operand(len, LSL, kPointerSizeLog2)); + __ Poke(scratch, Operand(len, LSL, kSystemPointerSizeLog2)); __ Cbnz(len, &loop); } __ Bind(&done); @@ -2251,7 +2280,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm, if (mode == CallOrConstructMode::kConstruct) { Label new_target_constructor, new_target_not_constructor; __ JumpIfSmi(x3, &new_target_not_constructor); - __ Ldr(x5, FieldMemOperand(x3, HeapObject::kMapOffset)); + __ LoadTaggedPointerField(x5, FieldMemOperand(x3, HeapObject::kMapOffset)); __ Ldrb(x5, FieldMemOperand(x5, Map::kBitFieldOffset)); __ TestAndBranchIfAnySet(x5, Map::IsConstructorBit::kMask, &new_target_constructor); @@ -2282,8 +2311,9 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm, { __ Ldr(scratch, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); - __ Ldr(scratch, - FieldMemOperand(scratch, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + scratch, + FieldMemOperand(scratch, JSFunction::kSharedFunctionInfoOffset)); __ Ldrh(len, FieldMemOperand(scratch, SharedFunctionInfo::kFormalParameterCountOffset)); @@ -2311,7 +2341,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm, // Push varargs. { Register dst = x13; - __ Add(args_fp, args_fp, 2 * kPointerSize); + __ Add(args_fp, args_fp, 2 * kSystemPointerSize); __ SlotAddress(dst, 0); __ CopyDoubleWords(dst, args_fp, len); } @@ -2337,7 +2367,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm, // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) // Check that function is not a "classConstructor". 
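// Sketch of the hole-to-undefined conversion in the varargs copy loop above
// (the Cmp against the hole value followed by Csel to undefined): elements
// read from the backing FixedArray may be "the hole", but every stack slot
// pushed for the call must hold a real value. The enum is an illustrative
// stand-in for tagged values.
#include <vector>

enum class Value { kUndefined, kTheHole, kSomeObject };

std::vector<Value> PushVarargs(const std::vector<Value>& elements) {
  std::vector<Value> stack_args;
  stack_args.reserve(elements.size());
  for (Value v : elements) {
    stack_args.push_back(v == Value::kTheHole ? Value::kUndefined : v);
  }
  return stack_args;
}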
Label class_constructor; - __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kFlagsOffset)); __ TestAndBranchIfAnySet(w3, SharedFunctionInfo::IsClassConstructorBit::kMask, &class_constructor); @@ -2345,7 +2376,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm, // Enter the context of the function; ToObject has to run in the function // context, and we also need to take the global proxy from the function // context in case of conversion. - __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); + __ LoadTaggedPointerField(cp, + FieldMemOperand(x1, JSFunction::kContextOffset)); // We need to convert the receiver for non-native sloppy mode functions. Label done_convert; __ TestAndBranchIfAnySet(w3, @@ -2396,7 +2428,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm, __ Pop(cp, x1, x0, padreg); __ SmiUntag(x0); } - __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Bind(&convert_receiver); } __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2)); @@ -2439,10 +2472,10 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { // Load [[BoundArguments]] into x2 and length of that into x4. Label no_bound_arguments; - __ Ldr(bound_argv, - FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset)); - __ SmiUntag(bound_argc, - FieldMemOperand(bound_argv, FixedArray::kLengthOffset)); + __ LoadTaggedPointerField( + bound_argv, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset)); + __ SmiUntagField(bound_argc, + FieldMemOperand(bound_argv, FixedArray::kLengthOffset)); __ Cbz(bound_argc, &no_bound_arguments); { // ----------- S t a t e ------------- @@ -2466,7 +2499,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { // here which will cause x10 to become negative. __ Sub(x10, sp, x10); // Check if the arguments will overflow the stack. - __ Cmp(x10, Operand(bound_argc, LSL, kPointerSizeLog2)); + __ Cmp(x10, Operand(bound_argc, LSL, kSystemPointerSizeLog2)); __ B(hs, &done); __ TailCallRuntime(Runtime::kThrowStackOverflow); __ Bind(&done); @@ -2483,7 +2516,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { // Load receiver before we start moving the arguments. We will only // need this in this path because the bound arguments are odd. Register receiver = x14; - __ Peek(receiver, Operand(argc, LSL, kPointerSizeLog2)); + __ Peek(receiver, Operand(argc, LSL, kSystemPointerSizeLog2)); // Claim space we need. If argc is even, slots_to_claim = bound_argc + 1, // as we need one extra padding slot. If argc is odd, we know that the @@ -2522,12 +2555,12 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { __ SlotAddress(copy_to, argc); __ Add(argc, argc, bound_argc); // Update argc to include bound arguments. - __ Lsl(counter, bound_argc, kPointerSizeLog2); + __ Lsl(counter, bound_argc, kTaggedSizeLog2); __ Bind(&loop); - __ Sub(counter, counter, kPointerSize); - __ Ldr(scratch, MemOperand(bound_argv, counter)); + __ Sub(counter, counter, kTaggedSize); + __ LoadAnyTaggedField(scratch, MemOperand(bound_argv, counter)); // Poke into claimed area of stack. 
- __ Str(scratch, MemOperand(copy_to, kPointerSize, PostIndex)); + __ Str(scratch, MemOperand(copy_to, kSystemPointerSize, PostIndex)); __ Cbnz(counter, &loop); } @@ -2536,8 +2569,8 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { Register scratch = x10; __ Tbz(bound_argc, 0, &done); // Store receiver. - __ Add(scratch, sp, Operand(total_argc, LSL, kPointerSizeLog2)); - __ Str(receiver, MemOperand(scratch, kPointerSize, PostIndex)); + __ Add(scratch, sp, Operand(total_argc, LSL, kSystemPointerSizeLog2)); + __ Str(receiver, MemOperand(scratch, kSystemPointerSize, PostIndex)); __ Tbnz(total_argc, 0, &done); // Store padding. __ Str(padreg, MemOperand(scratch)); @@ -2559,14 +2592,16 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) { __ AssertBoundFunction(x1); // Patch the receiver to [[BoundThis]]. - __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset)); - __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2)); + __ LoadAnyTaggedField(x10, + FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset)); + __ Poke(x10, Operand(x0, LSL, kSystemPointerSizeLog2)); // Push the [[BoundArguments]] onto the stack. Generate_PushBoundArguments(masm); // Call the [[BoundTargetFunction]] via the Call builtin. - __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); + __ LoadTaggedPointerField( + x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny), RelocInfo::CODE_TARGET); } @@ -2634,7 +2669,8 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { Label call_generic_stub; // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric. - __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset)); __ TestAndBranchIfAllClear( w4, SharedFunctionInfo::ConstructAsBuiltinBit::kMask, &call_generic_stub); @@ -2665,13 +2701,14 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { Label done; __ Cmp(x1, x3); __ B(ne, &done); - __ Ldr(x3, - FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); + __ LoadTaggedPointerField( + x3, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); __ Bind(&done); } // Construct the [[BoundTargetFunction]] via the Construct builtin. - __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); + __ LoadTaggedPointerField( + x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET); } @@ -2689,7 +2726,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) { __ JumpIfSmi(x1, &non_constructor); // Check if target has a [[Construct]] internal method. - __ Ldr(x4, FieldMemOperand(x1, HeapObject::kMapOffset)); + __ LoadTaggedPointerField(x4, FieldMemOperand(x1, HeapObject::kMapOffset)); __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset)); __ TestAndBranchIfAllClear(x2, Map::IsConstructorBit::kMask, &non_constructor); @@ -2792,123 +2829,198 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { Register argc_actual = x0; // Excluding the receiver. Register argc_expected = x2; // Excluding the receiver. 
Register function = x1; + Register argc_actual_minus_expected = x5; - Label dont_adapt_arguments, stack_overflow; + Label create_adaptor_frame, dont_adapt_arguments, stack_overflow, + adapt_arguments_in_place; - Label enough_arguments; __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel); __ B(eq, &dont_adapt_arguments); - EnterArgumentsAdaptorFrame(masm); - - Register copy_from = x10; - Register copy_end = x11; - Register copy_to = x12; - Register argc_to_copy = x13; - Register argc_unused_actual = x14; - Register scratch1 = x15, scratch2 = x16; - - // We need slots for the expected arguments, with one extra slot for the - // receiver. - __ RecordComment("-- Stack check --"); - __ Add(scratch1, argc_expected, 1); - Generate_StackOverflowCheck(masm, scratch1, &stack_overflow); - - // Round up number of slots to be even, to maintain stack alignment. - __ RecordComment("-- Allocate callee frame slots --"); - __ Add(scratch1, scratch1, 1); - __ Bic(scratch1, scratch1, 1); - __ Claim(scratch1, kPointerSize); - - __ Mov(copy_to, sp); - - // Preparing the expected arguments is done in four steps, the order of - // which is chosen so we can use LDP/STP and avoid conditional branches as - // much as possible. - - // (1) If we don't have enough arguments, fill the remaining expected - // arguments with undefined, otherwise skip this step. - __ Subs(scratch1, argc_actual, argc_expected); - __ Csel(argc_unused_actual, xzr, scratch1, lt); - __ Csel(argc_to_copy, argc_expected, argc_actual, ge); - __ B(ge, &enough_arguments); - - // Fill the remaining expected arguments with undefined. - __ RecordComment("-- Fill slots with undefined --"); - __ Sub(copy_end, copy_to, Operand(scratch1, LSL, kPointerSizeLog2)); - __ LoadRoot(scratch1, RootIndex::kUndefinedValue); - - Label fill; - __ Bind(&fill); - __ Stp(scratch1, scratch1, MemOperand(copy_to, 2 * kPointerSize, PostIndex)); - // We might write one slot extra, but that is ok because we'll overwrite it - // below. - __ Cmp(copy_end, copy_to); - __ B(hi, &fill); - - // Correct copy_to, for the case where we wrote one additional slot. - __ Mov(copy_to, copy_end); - - __ Bind(&enough_arguments); - // (2) Copy all of the actual arguments, or as many as we need. - Label skip_copy; - __ RecordComment("-- Copy actual arguments --"); - __ Cbz(argc_to_copy, &skip_copy); - __ Add(copy_end, copy_to, Operand(argc_to_copy, LSL, kPointerSizeLog2)); - __ Add(copy_from, fp, 2 * kPointerSize); - // Adjust for difference between actual and expected arguments. - __ Add(copy_from, copy_from, - Operand(argc_unused_actual, LSL, kPointerSizeLog2)); - - // Copy arguments. We use load/store pair instructions, so we might overshoot - // by one slot, but since we copy the arguments starting from the last one, if - // we do overshoot, the extra slot will be overwritten later by the receiver. - Label copy_2_by_2; - __ Bind(©_2_by_2); - __ Ldp(scratch1, scratch2, - MemOperand(copy_from, 2 * kPointerSize, PostIndex)); - __ Stp(scratch1, scratch2, MemOperand(copy_to, 2 * kPointerSize, PostIndex)); - __ Cmp(copy_end, copy_to); - __ B(hi, ©_2_by_2); - __ Bind(&skip_copy); - - // (3) Store padding, which might be overwritten by the receiver, if it is not - // necessary. - __ RecordComment("-- Store padding --"); - __ Str(padreg, MemOperand(fp, -5 * kPointerSize)); - - // (4) Store receiver. Calculate target address from the sp to avoid checking - // for padding. 
Storing the receiver will overwrite either the extra slot - // we copied with the actual arguments, if we did copy one, or the padding we - // stored above. - __ RecordComment("-- Store receiver --"); - __ Add(copy_from, fp, 2 * kPointerSize); - __ Ldr(scratch1, MemOperand(copy_from, argc_actual, LSL, kPointerSizeLog2)); - __ Str(scratch1, MemOperand(sp, argc_expected, LSL, kPointerSizeLog2)); - - // Arguments have been adapted. Now call the entry point. - __ RecordComment("-- Call entry point --"); - __ Mov(argc_actual, argc_expected); - // x0 : expected number of arguments - // x1 : function (passed through to callee) - // x3 : new target (passed through to callee) - static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch"); - __ Ldr(x2, FieldMemOperand(function, JSFunction::kCodeOffset)); - __ CallCodeObject(x2); + // When the difference between argc_actual and argc_expected is odd, we + // create an arguments adaptor frame. + __ Sub(argc_actual_minus_expected, argc_actual, argc_expected); + __ Tbnz(argc_actual_minus_expected, 0, &create_adaptor_frame); - // Store offset of return address for deoptimizer. - masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); + // When the difference is even, check if we are allowed to adjust the + // existing frame instead. + __ LoadTaggedPointerField( + x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset)); + __ TestAndBranchIfAnySet( + w4, SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit::kMask, + &adapt_arguments_in_place); + + // ------------------------------------------- + // Create an arguments adaptor frame. + // ------------------------------------------- + __ Bind(&create_adaptor_frame); + { + __ RecordComment("-- Adapt arguments --"); + EnterArgumentsAdaptorFrame(masm); - // Exit frame and return. - LeaveArgumentsAdaptorFrame(masm); - __ Ret(); + Register copy_from = x10; + Register copy_end = x11; + Register copy_to = x12; + Register argc_to_copy = x13; + Register argc_unused_actual = x14; + Register scratch1 = x15, scratch2 = x16; + + // We need slots for the expected arguments, with one extra slot for the + // receiver. + __ RecordComment("-- Stack check --"); + __ Add(scratch1, argc_expected, 1); + Generate_StackOverflowCheck(masm, scratch1, &stack_overflow); + + // Round up number of slots to be even, to maintain stack alignment. + __ RecordComment("-- Allocate callee frame slots --"); + __ Add(scratch1, scratch1, 1); + __ Bic(scratch1, scratch1, 1); + __ Claim(scratch1, kSystemPointerSize); + + __ Mov(copy_to, sp); + + // Preparing the expected arguments is done in four steps, the order of + // which is chosen so we can use LDP/STP and avoid conditional branches as + // much as possible. + + // (1) If we don't have enough arguments, fill the remaining expected + // arguments with undefined, otherwise skip this step. + Label enough_arguments; + __ Subs(scratch1, argc_actual, argc_expected); + __ Csel(argc_unused_actual, xzr, scratch1, lt); + __ Csel(argc_to_copy, argc_expected, argc_actual, ge); + __ B(ge, &enough_arguments); + + // Fill the remaining expected arguments with undefined. 
+ __ RecordComment("-- Fill slots with undefined --"); + __ Sub(copy_end, copy_to, Operand(scratch1, LSL, kSystemPointerSizeLog2)); + __ LoadRoot(scratch1, RootIndex::kUndefinedValue); + + Label fill; + __ Bind(&fill); + __ Stp(scratch1, scratch1, + MemOperand(copy_to, 2 * kSystemPointerSize, PostIndex)); + // We might write one slot extra, but that is ok because we'll overwrite it + // below. + __ Cmp(copy_end, copy_to); + __ B(hi, &fill); + + // Correct copy_to, for the case where we wrote one additional slot. + __ Mov(copy_to, copy_end); + + __ Bind(&enough_arguments); + // (2) Copy all of the actual arguments, or as many as we need. + Label skip_copy; + __ RecordComment("-- Copy actual arguments --"); + __ Cbz(argc_to_copy, &skip_copy); + __ Add(copy_end, copy_to, + Operand(argc_to_copy, LSL, kSystemPointerSizeLog2)); + __ Add(copy_from, fp, 2 * kSystemPointerSize); + // Adjust for difference between actual and expected arguments. + __ Add(copy_from, copy_from, + Operand(argc_unused_actual, LSL, kSystemPointerSizeLog2)); + + // Copy arguments. We use load/store pair instructions, so we might + // overshoot by one slot, but since we copy the arguments starting from the + // last one, if we do overshoot, the extra slot will be overwritten later by + // the receiver. + Label copy_2_by_2; + __ Bind(©_2_by_2); + __ Ldp(scratch1, scratch2, + MemOperand(copy_from, 2 * kSystemPointerSize, PostIndex)); + __ Stp(scratch1, scratch2, + MemOperand(copy_to, 2 * kSystemPointerSize, PostIndex)); + __ Cmp(copy_end, copy_to); + __ B(hi, ©_2_by_2); + __ Bind(&skip_copy); + + // (3) Store padding, which might be overwritten by the receiver, if it is + // not necessary. + __ RecordComment("-- Store padding --"); + __ Str(padreg, MemOperand(fp, -5 * kSystemPointerSize)); + + // (4) Store receiver. Calculate target address from the sp to avoid + // checking for padding. Storing the receiver will overwrite either the + // extra slot we copied with the actual arguments, if we did copy one, or + // the padding we stored above. + __ RecordComment("-- Store receiver --"); + __ Add(copy_from, fp, 2 * kSystemPointerSize); + __ Ldr(scratch1, + MemOperand(copy_from, argc_actual, LSL, kSystemPointerSizeLog2)); + __ Str(scratch1, + MemOperand(sp, argc_expected, LSL, kSystemPointerSizeLog2)); + + // Arguments have been adapted. Now call the entry point. + __ RecordComment("-- Call entry point --"); + __ Mov(argc_actual, argc_expected); + // x0 : expected number of arguments + // x1 : function (passed through to callee) + // x3 : new target (passed through to callee) + static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch"); + __ LoadTaggedPointerField( + x2, FieldMemOperand(function, JSFunction::kCodeOffset)); + __ CallCodeObject(x2); - // Call the entry point without adapting the arguments. - __ RecordComment("-- Call without adapting args --"); + // Store offset of return address for deoptimizer. + masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset( + masm->pc_offset()); + + // Exit frame and return. + LeaveArgumentsAdaptorFrame(masm); + __ Ret(); + } + + // ----------------------------------------- + // Adapt arguments in the existing frame. + // ----------------------------------------- + __ Bind(&adapt_arguments_in_place); + { + __ RecordComment("-- Update arguments in place --"); + // The callee cannot observe the actual arguments, so it's safe to just + // pass the expected arguments by massaging the stack appropriately. 
See + // http://bit.ly/v8-faster-calls-with-arguments-mismatch for details. + Label under_application, over_application; + __ Tbnz(argc_actual_minus_expected, kXSignBit, &under_application); + + __ Bind(&over_application); + { + // Remove superfluous arguments from the stack. The number of superflous + // arguments is even. + __ RecordComment("-- Over-application --"); + __ Mov(argc_actual, argc_expected); + __ Drop(argc_actual_minus_expected); + __ B(&dont_adapt_arguments); + } + + __ Bind(&under_application); + { + // Fill remaining expected arguments with undefined values. + __ RecordComment("-- Under-application --"); + Label fill; + Register undef_value = x16; + __ LoadRoot(undef_value, RootIndex::kUndefinedValue); + __ Bind(&fill); + __ Add(argc_actual, argc_actual, 2); + __ Push(undef_value, undef_value); + __ Cmp(argc_actual, argc_expected); + __ B(lt, &fill); + __ B(&dont_adapt_arguments); + } + } + + // ------------------------------------------- + // Dont adapt arguments. + // ------------------------------------------- __ Bind(&dont_adapt_arguments); - static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch"); - __ Ldr(x2, FieldMemOperand(function, JSFunction::kCodeOffset)); - __ JumpCodeObject(x2); + { + // Call the entry point without adapting the arguments. + __ RecordComment("-- Call without adapting args --"); + static_assert(kJavaScriptCallCodeStartRegister == x2, "ABI mismatch"); + __ LoadTaggedPointerField( + x2, FieldMemOperand(function, JSFunction::kCodeOffset)); + __ JumpCodeObject(x2); + } __ Bind(&stack_overflow); __ RecordComment("-- Stack overflow --"); @@ -2944,8 +3056,9 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) { // function. __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister); // Load the correct CEntry builtin from the instance object. - __ Ldr(x2, FieldMemOperand(kWasmInstanceRegister, - WasmInstanceObject::kCEntryStubOffset)); + __ LoadTaggedPointerField( + x2, FieldMemOperand(kWasmInstanceRegister, + WasmInstanceObject::kCEntryStubOffset)); // Initialize the JavaScript context with 0. CEntry will use it to // set the current context on the isolate. __ Mov(cp, Smi::zero()); @@ -3011,7 +3124,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, if (argv_mode == kArgvOnStack) { __ SlotAddress(temp_argv, x0); // - Adjust for the receiver. - __ Sub(temp_argv, temp_argv, 1 * kPointerSize); + __ Sub(temp_argv, temp_argv, 1 * kSystemPointerSize); } // Reserve three slots to preserve x21-x23 callee-saved registers. @@ -3023,9 +3136,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT); // Poke callee-saved registers into reserved space. - __ Poke(argv, 1 * kPointerSize); - __ Poke(argc, 2 * kPointerSize); - __ Poke(target, 3 * kPointerSize); + __ Poke(argv, 1 * kSystemPointerSize); + __ Poke(argc, 2 * kSystemPointerSize); + __ Poke(target, 3 * kSystemPointerSize); // We normally only keep tagged values in callee-saved registers, as they // could be pushed onto the stack by called stubs and functions, and on the @@ -3096,9 +3209,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, // Restore callee-saved registers x21-x23. 
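// Sketch of the in-place adaptation added above for callees flagged with
// IsSafeToSkipArgumentsAdaptorBit and an even actual/expected difference:
// over-application simply drops the superfluous arguments, under-application
// fills with undefined until the counts match. This models only the
// argument-count bookkeeping; stack-slot ordering and padding are elided, and
// "undefined" stands in for RootIndex::kUndefinedValue.
#include <string>
#include <vector>

void AdaptArgumentsInPlace(std::vector<std::string>& args, size_t expected) {
  if (args.size() >= expected) {
    args.resize(expected);  // over-application: drop the extra (even count) slots
  } else {
    while (args.size() < expected) {
      // Under-application; the assembly pushes these two at a time to keep
      // sp 16-byte aligned.
      args.push_back("undefined");
    }
  }
}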
__ Mov(x11, argc); - __ Peek(argv, 1 * kPointerSize); - __ Peek(argc, 2 * kPointerSize); - __ Peek(target, 3 * kPointerSize); + __ Peek(argv, 1 * kSystemPointerSize); + __ Peek(argc, 2 * kSystemPointerSize); + __ Peek(target, 3 * kSystemPointerSize); __ LeaveExitFrame(save_doubles == kSaveFPRegs, x10, x9); if (argv_mode == kArgvOnStack) { @@ -3178,7 +3291,7 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) { DoubleRegister double_scratch = temps.AcquireD(); // Account for saved regs. - const int kArgumentOffset = 2 * kPointerSize; + const int kArgumentOffset = 2 * kSystemPointerSize; __ Push(result, scratch1); // scratch1 is also pushed to preserve alignment. __ Peek(double_scratch, kArgumentOffset); @@ -3232,98 +3345,6 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) { __ Ret(); } -void Builtins::Generate_MathPowInternal(MacroAssembler* masm) { - Register exponent_integer = x12; - Register saved_lr = x19; - VRegister result_double = d0; - VRegister base_double = d0; - VRegister exponent_double = d1; - VRegister base_double_copy = d2; - VRegister scratch1_double = d6; - VRegister scratch0_double = d7; - - // A fast-path for integer exponents. - Label exponent_is_integer; - // Allocate a heap number for the result, and return it. - Label done; - - // Unpack the inputs. - - // Handle double (heap number) exponents. - // Detect integer exponents stored as doubles and handle those in the - // integer fast-path. - __ TryRepresentDoubleAsInt64(exponent_integer, exponent_double, - scratch0_double, &exponent_is_integer); - - { - AllowExternalCallThatCantCauseGC scope(masm); - __ Mov(saved_lr, lr); - __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2); - __ Mov(lr, saved_lr); - __ B(&done); - } - - __ Bind(&exponent_is_integer); - - // Find abs(exponent). For negative exponents, we can find the inverse later. - Register exponent_abs = x13; - __ Cmp(exponent_integer, 0); - __ Cneg(exponent_abs, exponent_integer, mi); - - // Repeatedly multiply to calculate the power. - // result = 1.0; - // For each bit n (exponent_integer{n}) { - // if (exponent_integer{n}) { - // result *= base; - // } - // base *= base; - // if (remaining bits in exponent_integer are all zero) { - // break; - // } - // } - Label power_loop, power_loop_entry, power_loop_exit; - __ Fmov(scratch1_double, base_double); - __ Fmov(base_double_copy, base_double); - __ Fmov(result_double, 1.0); - __ B(&power_loop_entry); - - __ Bind(&power_loop); - __ Fmul(scratch1_double, scratch1_double, scratch1_double); - __ Lsr(exponent_abs, exponent_abs, 1); - __ Cbz(exponent_abs, &power_loop_exit); - - __ Bind(&power_loop_entry); - __ Tbz(exponent_abs, 0, &power_loop); - __ Fmul(result_double, result_double, scratch1_double); - __ B(&power_loop); - - __ Bind(&power_loop_exit); - - // If the exponent was positive, result_double holds the result. - __ Tbz(exponent_integer, kXSignBit, &done); - - // The exponent was negative, so find the inverse. - __ Fmov(scratch0_double, 1.0); - __ Fdiv(result_double, scratch0_double, result_double); - // ECMA-262 only requires Math.pow to return an 'implementation-dependent - // approximation' of base^exponent. However, mjsunit/math-pow uses Math.pow - // to calculate the subnormal value 2^-1074. This method of calculating - // negative powers doesn't work because 2^1074 overflows to infinity. To - // catch this corner-case, we bail out if the result was 0. (This can only - // occur if the divisor is infinity or the base is zero.) 
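// Sketch of the integer fast path implemented by the Generate_MathPowInternal
// code removed above: square-and-multiply over the bits of the exponent, with
// a final inversion for negative exponents. (The removed assembly also fell
// back to the runtime's double-double power function when that inversion
// produced zero, to handle the subnormal corner case its comments describe.)
#include <cstdint>
#include <cstdio>

double PowIntegerExponent(double base, int64_t exponent) {
  uint64_t bits = exponent < 0 ? -static_cast<uint64_t>(exponent)
                               : static_cast<uint64_t>(exponent);
  double result = 1.0;
  double running_square = base;
  while (bits != 0) {
    if (bits & 1) result *= running_square;  // this exponent bit is set
    running_square *= running_square;        // square for the next bit
    bits >>= 1;
  }
  return exponent < 0 ? 1.0 / result : result;
}

int main() { std::printf("%g\n", PowIntegerExponent(2.0, 10)); }  // prints 1024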
- __ Fcmp(result_double, 0.0); - __ B(&done, ne); - - AllowExternalCallThatCantCauseGC scope(masm); - __ Mov(saved_lr, lr); - __ Fmov(base_double, base_double_copy); - __ Scvtf(exponent_double, exponent_integer); - __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2); - __ Mov(lr, saved_lr); - __ Bind(&done); - __ Ret(); -} - void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- x0 : argc @@ -3340,8 +3361,9 @@ void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) { Label unexpected_map, map_ok; // Initial map for the builtin Array function should be a map. - __ Ldr(x10, FieldMemOperand(constructor, - JSFunction::kPrototypeOrInitialMapOffset)); + __ LoadTaggedPointerField( + x10, + FieldMemOperand(constructor, JSFunction::kPrototypeOrInitialMapOffset)); // Will both indicate a nullptr and a Smi. __ JumpIfSmi(x10, &unexpected_map); __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok); @@ -3351,8 +3373,9 @@ void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) { Register kind = w3; // Figure out the right elements kind - __ Ldr(x10, FieldMemOperand(constructor, - JSFunction::kPrototypeOrInitialMapOffset)); + __ LoadTaggedPointerField( + x10, + FieldMemOperand(constructor, JSFunction::kPrototypeOrInitialMapOffset)); // Retrieve elements_kind from map. __ LoadElementsKindFromMap(kind, x10); @@ -3531,32 +3554,24 @@ void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address, void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // ----------- S t a t e ------------- - // -- cp : kTargetContext - // -- r1 : kApiFunctionAddress - // -- r2 : kArgc - // -- + // -- cp : context + // -- x1 : api function address + // -- x2 : arguments count (not including the receiver) + // -- x3 : call data + // -- x0 : holder // -- sp[0] : last argument // -- ... // -- sp[(argc - 1) * 8] : first argument // -- sp[(argc + 0) * 8] : receiver - // -- sp[(argc + 1) * 8] : kHolder - // -- sp[(argc + 2) * 8] : kCallData // ----------------------------------- Register api_function_address = x1; Register argc = x2; + Register call_data = x3; + Register holder = x0; Register scratch = x4; - Register index = x5; // For indexing MemOperands. - - DCHECK(!AreAliased(api_function_address, argc, scratch, index)); - // Stack offsets (without argc). - static constexpr int kReceiverOffset = 0; - static constexpr int kHolderOffset = kReceiverOffset + 1; - static constexpr int kCallDataOffset = kHolderOffset + 1; - - // Extra stack arguments are: the receiver, kHolder, kCallData. 
- static constexpr int kExtraStackArgumentCount = 3; + DCHECK(!AreAliased(api_function_address, argc, call_data, holder, scratch)); typedef FunctionCallbackArguments FCA; @@ -3571,35 +3586,33 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // Set up FunctionCallbackInfo's implicit_args on the stack as follows: // // Target state: - // sp[0 * kPointerSize]: kHolder - // sp[1 * kPointerSize]: kIsolate - // sp[2 * kPointerSize]: undefined (kReturnValueDefaultValue) - // sp[3 * kPointerSize]: undefined (kReturnValue) - // sp[4 * kPointerSize]: kData - // sp[5 * kPointerSize]: undefined (kNewTarget) + // sp[0 * kSystemPointerSize]: kHolder + // sp[1 * kSystemPointerSize]: kIsolate + // sp[2 * kSystemPointerSize]: undefined (kReturnValueDefaultValue) + // sp[3 * kSystemPointerSize]: undefined (kReturnValue) + // sp[4 * kSystemPointerSize]: kData + // sp[5 * kSystemPointerSize]: undefined (kNewTarget) // Reserve space on the stack. - __ Sub(sp, sp, Operand(FCA::kArgsLength * kPointerSize)); + __ Sub(sp, sp, Operand(FCA::kArgsLength * kSystemPointerSize)); // kHolder. - __ Add(index, argc, Operand(FCA::kArgsLength + kHolderOffset)); - __ Ldr(scratch, MemOperand(sp, index, LSL, kPointerSizeLog2)); - __ Str(scratch, MemOperand(sp, 0 * kPointerSize)); + __ Str(holder, MemOperand(sp, 0 * kSystemPointerSize)); // kIsolate. __ Mov(scratch, ExternalReference::isolate_address(masm->isolate())); - __ Str(scratch, MemOperand(sp, 1 * kPointerSize)); + __ Str(scratch, MemOperand(sp, 1 * kSystemPointerSize)); - // kReturnValueDefaultValue, kReturnValue, and kNewTarget. + // kReturnValueDefaultValue and kReturnValue. __ LoadRoot(scratch, RootIndex::kUndefinedValue); - __ Str(scratch, MemOperand(sp, 2 * kPointerSize)); - __ Str(scratch, MemOperand(sp, 3 * kPointerSize)); - __ Str(scratch, MemOperand(sp, 5 * kPointerSize)); + __ Str(scratch, MemOperand(sp, 2 * kSystemPointerSize)); + __ Str(scratch, MemOperand(sp, 3 * kSystemPointerSize)); // kData. - __ Add(index, argc, Operand(FCA::kArgsLength + kCallDataOffset)); - __ Ldr(scratch, MemOperand(sp, index, LSL, kPointerSizeLog2)); - __ Str(scratch, MemOperand(sp, 4 * kPointerSize)); + __ Str(call_data, MemOperand(sp, 4 * kSystemPointerSize)); + + // kNewTarget. + __ Str(scratch, MemOperand(sp, 5 * kSystemPointerSize)); // Keep a pointer to kHolder (= implicit_args) in a scratch register. // We use it below to set up the FunctionCallbackInfo object. @@ -3616,16 +3629,17 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above). // Arguments are after the return address (pushed by EnterExitFrame()). - __ Str(scratch, MemOperand(sp, 1 * kPointerSize)); + __ Str(scratch, MemOperand(sp, 1 * kSystemPointerSize)); // FunctionCallbackInfo::values_ (points at the first varargs argument passed // on the stack). - __ Add(scratch, scratch, Operand((FCA::kArgsLength - 1) * kPointerSize)); - __ Add(scratch, scratch, Operand(argc, LSL, kPointerSizeLog2)); - __ Str(scratch, MemOperand(sp, 2 * kPointerSize)); + __ Add(scratch, scratch, + Operand((FCA::kArgsLength - 1) * kSystemPointerSize)); + __ Add(scratch, scratch, Operand(argc, LSL, kSystemPointerSizeLog2)); + __ Str(scratch, MemOperand(sp, 2 * kSystemPointerSize)); // FunctionCallbackInfo::length_. - __ Str(argc, MemOperand(sp, 3 * kPointerSize)); + __ Str(argc, MemOperand(sp, 3 * kSystemPointerSize)); // We also store the number of slots to drop from the stack after returning // from the API function here. 
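// Sketch of the implicit_args block the code above assembles on the stack,
// matching the "Target state" comment: six system-pointer-sized slots that
// FunctionCallbackInfo points at. Slot order follows that comment; the struct
// itself is illustrative, with FCA::kArgsLength assumed to be 6.
struct ImplicitArgs {
  void* holder;                // sp[0 * kSystemPointerSize], now arrives in x0
  void* isolate;               // sp[1 * kSystemPointerSize]
  void* return_value_default;  // sp[2 * kSystemPointerSize], undefined
  void* return_value;          // sp[3 * kSystemPointerSize], undefined
  void* data;                  // sp[4 * kSystemPointerSize], now arrives in x3
  void* new_target;            // sp[5 * kSystemPointerSize], undefined
};
static_assert(sizeof(ImplicitArgs) == 6 * sizeof(void*),
              "one slot per implicit argument");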
@@ -3633,12 +3647,12 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // drop, not the number of bytes. arm64 must always drop a slot count that is // a multiple of two, and related helper functions (DropArguments) expect a // register containing the slot count. - __ Add(scratch, argc, Operand(FCA::kArgsLength + kExtraStackArgumentCount)); - __ Str(scratch, MemOperand(sp, 4 * kPointerSize)); + __ Add(scratch, argc, Operand(FCA::kArgsLength + 1 /*receiver*/)); + __ Str(scratch, MemOperand(sp, 4 * kSystemPointerSize)); // v8::InvocationCallback's argument. DCHECK(!AreAliased(x0, api_function_address)); - __ add(x0, sp, Operand(1 * kPointerSize)); + __ add(x0, sp, Operand(1 * kSystemPointerSize)); ExternalReference thunk_ref = ExternalReference::invoke_function_callback(); @@ -3649,11 +3663,11 @@ void Builtins::Generate_CallApiCallback(MacroAssembler* masm) { // TODO(jgruber): Document what these arguments are. static constexpr int kStackSlotsAboveFCA = 2; MemOperand return_value_operand( - fp, (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kPointerSize); + fp, (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize); static constexpr int kSpillOffset = 1 + kApiStackSpace; static constexpr int kUseStackSpaceOperand = 0; - MemOperand stack_space_operand(sp, 4 * kPointerSize); + MemOperand stack_space_operand(sp, 4 * kSystemPointerSize); AllowExternalCallThatCantCauseGC scope(masm); CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, @@ -3681,10 +3695,12 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { DCHECK(!AreAliased(receiver, holder, callback, data, undef, isolate_address, name)); - __ Ldr(data, FieldMemOperand(callback, AccessorInfo::kDataOffset)); + __ LoadAnyTaggedField(data, + FieldMemOperand(callback, AccessorInfo::kDataOffset)); __ LoadRoot(undef, RootIndex::kUndefinedValue); __ Mov(isolate_address, ExternalReference::isolate_address(masm->isolate())); - __ Ldr(name, FieldMemOperand(callback, AccessorInfo::kNameOffset)); + __ LoadTaggedPointerField( + name, FieldMemOperand(callback, AccessorInfo::kNameOffset)); // PropertyCallbackArguments: // receiver, data, return value, return value default, isolate, holder, @@ -3700,8 +3716,8 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { "slots must be a multiple of 2 for stack pointer alignment"); // Load address of v8::PropertyAccessorInfo::args_ array and name handle. - __ Mov(x0, sp); // x0 = Handle - __ Add(x1, x0, 1 * kPointerSize); // x1 = v8::PCI::args_ + __ Mov(x0, sp); // x0 = Handle + __ Add(x1, x0, 1 * kSystemPointerSize); // x1 = v8::PCI::args_ const int kApiStackSpace = 1; @@ -3710,7 +3726,7 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { // Create v8::PropertyCallbackInfo object on the stack and initialize // it's args_ field. - __ Poke(x1, 1 * kPointerSize); + __ Poke(x1, 1 * kSystemPointerSize); __ SlotAddress(x1, 1); // x1 = v8::PropertyCallbackInfo& @@ -3719,14 +3735,16 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { Register api_function_address = x2; Register js_getter = x4; - __ Ldr(js_getter, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); + __ LoadTaggedPointerField( + js_getter, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); __ Ldr(api_function_address, FieldMemOperand(js_getter, Foreign::kForeignAddressOffset)); const int spill_offset = 1 + kApiStackSpace; // +3 is to skip prolog, return address and name handle. 
MemOperand return_value_operand( - fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); + fp, + (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize); MemOperand* const kUseStackSpaceConstant = nullptr; CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, kStackUnwindSpace, kUseStackSpaceConstant, diff --git a/deps/v8/src/builtins/array-copywithin.tq b/deps/v8/src/builtins/array-copywithin.tq index d4929922329cd9..bfc95a28bf46df 100644 --- a/deps/v8/src/builtins/array-copywithin.tq +++ b/deps/v8/src/builtins/array-copywithin.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_copywithin { macro ConvertToRelativeIndex(index: Number, length: Number): Number { return index < 0 ? Max(index + length, 0) : Min(index, length); } diff --git a/deps/v8/src/builtins/array-every.tq b/deps/v8/src/builtins/array-every.tq new file mode 100644 index 00000000000000..245b07556cba3a --- /dev/null +++ b/deps/v8/src/builtins/array-every.tq @@ -0,0 +1,151 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array { + transitioning javascript builtin + ArrayEveryLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayEveryLoopContinuation( + jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, + numberLength, Undefined); + } + + transitioning javascript builtin + ArrayEveryLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, result: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // This custom lazy deopt point is right after the callback. every() needs + // to pick up at the next step, which is either continuing to the next + // array element or returning false if {result} is false. + if (!ToBoolean(result)) { + return False; + } + + numberK = numberK + 1; + + return ArrayEveryLoopContinuation( + jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, + numberLength, Undefined); + } + + transitioning builtin ArrayEveryLoopContinuation(implicit context: Context)( + receiver: JSReceiver, callbackfn: Callable, thisArg: Object, + array: Object, o: JSReceiver, initialK: Number, length: Number, + initialTo: Object): Object { + // 5. Let k be 0. + // 6. 
Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 6a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 6b. Let kPresent be ? HasProperty(O, Pk). + const kPresent: Boolean = HasProperty_Inline(o, k); + + // 6c. If kPresent is true, then + if (kPresent == True) { + // 6c. i. Let kValue be ? Get(O, Pk). + const kValue: Object = GetProperty(o, k); + + // 6c. ii. Perform ? Call(callbackfn, T, ). + const result: Object = Call(context, callbackfn, thisArg, kValue, k, o); + + // iii. If selected is true, then... + if (!ToBoolean(result)) { + return False; + } + } + + // 6d. Increase k by 1. (done by the loop). + } + return True; + } + + transitioning macro FastArrayEvery(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Object + labels Bailout(Smi) { + let k: Smi = 0; + const smiLen = Cast(len) otherwise goto Bailout(k); + let fastO: FastJSArray = Cast(o) otherwise goto Bailout(k); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the smi array. + for (; k < smiLen; k++) { + fastOW.Recheck() otherwise goto Bailout(k); + + // Ensure that we haven't walked beyond a possibly updated length. + if (k >= fastOW.Get().length) goto Bailout(k); + const value: Object = fastOW.LoadElementNoHole(k) otherwise continue; + const result: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + if (!ToBoolean(result)) { + return False; + } + } + return True; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.every + transitioning javascript builtin + ArrayEvery(implicit context: Context)(receiver: Object, ...arguments): + Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto TypeError; + } + const callbackfn = Cast(arguments[0]) otherwise TypeError; + + // 4. If thisArg is present, let T be thisArg; else let T be undefined. + const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; + + // Special cases. + try { + return FastArrayEvery(o, len, callbackfn, thisArg) + otherwise Bailout; + } + label Bailout(kValue: Smi) deferred { + return ArrayEveryLoopContinuation( + o, callbackfn, thisArg, Undefined, o, kValue, len, Undefined); + } + } + label TypeError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.every'); + } + } +} diff --git a/deps/v8/src/builtins/array-filter.tq b/deps/v8/src/builtins/array-filter.tq index 222e4e291b44d7..4bf175a787aefe 100644 --- a/deps/v8/src/builtins/array-filter.tq +++ b/deps/v8/src/builtins/array-filter.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-namespace array { +namespace array_filter { transitioning javascript builtin ArrayFilterLoopEagerDeoptContinuation(implicit context: Context)( receiver: Object, callback: Object, thisArg: Object, array: Object, @@ -14,14 +14,12 @@ namespace array { // Also, this great mass of casts is necessary because the signature // of Torque javascript builtins requires Object type for all parameters // other than {context}. - const jsreceiver: JSReceiver = - Cast(receiver) otherwise unreachable; - const callbackfn: Callable = Cast(callback) otherwise unreachable; - const outputArray: JSReceiver = - Cast(array) otherwise unreachable; - const numberK: Number = Cast(initialK) otherwise unreachable; - const numberTo: Number = Cast(initialTo) otherwise unreachable; - const numberLength: Number = Cast(length) otherwise unreachable; + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const outputArray = Cast(array) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberTo = Cast(initialTo) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; return ArrayFilterLoopContinuation( jsreceiver, callbackfn, thisArg, outputArray, jsreceiver, numberK, @@ -36,21 +34,19 @@ namespace array { // All continuation points in the optimized filter implementation are // after the ToObject(O) call that ensures we are dealing with a // JSReceiver. - const jsreceiver: JSReceiver = - Cast(receiver) otherwise unreachable; - const callbackfn: Callable = Cast(callback) otherwise unreachable; - const outputArray: JSReceiver = - Cast(array) otherwise unreachable; - let numberK: Number = Cast(initialK) otherwise unreachable; - let numberTo: Number = Cast(initialTo) otherwise unreachable; - const numberLength: Number = Cast(length) otherwise unreachable; + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const outputArray = Cast(array) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + let numberTo = Cast(initialTo) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; // This custom lazy deopt point is right after the callback. filter() needs // to pick up at the next step, which is setting the callback result in // the output array. After incrementing k and to, we can glide into the loop // continuation builtin. if (ToBoolean(result)) { - CreateDataProperty(outputArray, numberTo, valueK); + FastCreateDataProperty(outputArray, numberTo, valueK); numberTo = numberTo + 1; } @@ -87,7 +83,7 @@ namespace array { // iii. If selected is true, then... if (ToBoolean(result)) { // 1. Perform ? CreateDataPropertyOrThrow(A, ToString(to), kValue). - CreateDataProperty(array, to, kValue); + FastCreateDataProperty(array, to, kValue); // 2. Increase to by 1. 
to = to + 1; } @@ -98,80 +94,42 @@ namespace array { return array; } - transitioning macro - FilterVisitAllElements(implicit context: Context)( - kind: constexpr ElementsKind, o: JSArray, len: Smi, callbackfn: Callable, - thisArg: Object, a: JSArray) labels Bailout(Smi, Smi) { + transitioning macro FastArrayFilter(implicit context: Context)( + fastO: FastJSArray, len: Smi, callbackfn: Callable, thisArg: Object, + output: FastJSArray) labels Bailout(Number, Number) { let k: Smi = 0; let to: Smi = 0; - const fastOWitness: FastJSArrayWitness = - MakeWitness(Cast(o) otherwise goto Bailout(k, to)); - const fastAWitness: FastJSArrayWitness = - MakeWitness(Cast(a) otherwise goto Bailout(k, to)); + let fastOW = NewFastJSArrayWitness(fastO); + let fastOutputW = NewFastJSArrayWitness(output); + + fastOutputW.EnsureArrayPushable() otherwise goto Bailout(k, to); - // Build a fast loop over the smi array. + // Build a fast loop over the array. for (; k < len; k++) { - let fastO: FastJSArray = - Testify(fastOWitness) otherwise goto Bailout(k, to); + fastOW.Recheck() otherwise goto Bailout(k, to); // Ensure that we haven't walked beyond a possibly updated length. - if (k >= fastO.length) goto Bailout(k, to); - - try { - const value: Object = - LoadElementNoHole(fastO, k) otherwise FoundHole; - const result: Object = - Call(context, callbackfn, thisArg, value, k, fastO); - if (ToBoolean(result)) { - try { - // Since the call to {callbackfn} is observable, we can't - // use the Bailout label until we've successfully stored. - // Hence the {SlowStore} label. - const fastA: FastJSArray = - Testify(fastAWitness) otherwise SlowStore; - if (fastA.length != to) goto SlowStore; - BuildAppendJSArray(kind, fastA, value) - otherwise SlowStore; - } - label SlowStore { - CreateDataProperty(a, to, value); - } - to = to + 1; + if (k >= fastOW.Get().length) goto Bailout(k, to); + const value: Object = fastOW.LoadElementNoHole(k) otherwise continue; + const result: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + if (ToBoolean(result)) { + try { + // Since the call to {callbackfn} is observable, we can't + // use the Bailout label until we've successfully stored. + // Hence the {SlowStore} label. 
+ fastOutputW.Recheck() otherwise SlowStore; + if (fastOutputW.Get().length != to) goto SlowStore; + fastOutputW.Push(value) otherwise SlowStore; + } + label SlowStore { + FastCreateDataProperty(fastOutputW.stable, to, value); } + to = to + 1; } - label FoundHole {} } } - transitioning macro FastArrayFilter(implicit context: Context)( - o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object, - array: JSReceiver): Object - labels Bailout(Smi, Smi) { - let k: Smi = 0; - let to: Smi = 0; - const smiLen: Smi = Cast(len) otherwise goto Bailout(k, to); - const fastArray: FastJSArray = - Cast(array) otherwise goto Bailout(k, to); - let fastO: FastJSArray = Cast(o) otherwise goto Bailout(k, to); - EnsureArrayPushable(fastArray.map) otherwise goto Bailout(k, to); - const elementsKind: ElementsKind = fastO.map.elements_kind; - if (IsElementsKindLessThanOrEqual(elementsKind, HOLEY_SMI_ELEMENTS)) { - FilterVisitAllElements( - HOLEY_SMI_ELEMENTS, fastO, smiLen, callbackfn, thisArg, fastArray) - otherwise Bailout; - } else if (IsElementsKindLessThanOrEqual(elementsKind, HOLEY_ELEMENTS)) { - FilterVisitAllElements( - HOLEY_ELEMENTS, fastO, smiLen, callbackfn, thisArg, fastArray) - otherwise Bailout; - } else { - assert(IsDoubleElementsKind(elementsKind)); - FilterVisitAllElements( - HOLEY_DOUBLE_ELEMENTS, fastO, smiLen, callbackfn, thisArg, fastArray) - otherwise Bailout; - } - return array; - } - // This method creates a 0-length array with the ElementsKind of the // receiver if possible, otherwise, bails out. It makes sense for the // caller to know that the slow case needs to be invoked. @@ -179,7 +137,7 @@ namespace array { receiver: JSReceiver): JSReceiver labels Slow { const len: Smi = 0; if (IsArraySpeciesProtectorCellInvalid()) goto Slow; - const o: FastJSArray = Cast(receiver) otherwise Slow; + const o = Cast(receiver) otherwise Slow; const newMap: Map = LoadJSArrayElementsMap(o.map.elements_kind, LoadNativeContext(context)); return AllocateJSArray(PACKED_SMI_ELEMENTS, newMap, len, len); @@ -204,41 +162,45 @@ namespace array { if (arguments.length == 0) { goto TypeError; } - const callbackfn: Callable = - Cast(arguments[0]) otherwise TypeError; + const callbackfn = Cast(arguments[0]) otherwise TypeError; // 4. If thisArg is present, let T be thisArg; else let T be undefined. const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; - let array: JSReceiver; + let output: JSReceiver; // Special cases. 
let k: Number = 0; let to: Number = 0; try { - array = FastFilterSpeciesCreate(o) otherwise SlowSpeciesCreate; + output = FastFilterSpeciesCreate(o) otherwise SlowSpeciesCreate; try { - return FastArrayFilter(o, len, callbackfn, thisArg, array) + const smiLen: Smi = Cast(len) otherwise goto Bailout(k, to); + const fastOutput = + Cast(output) otherwise goto Bailout(k, to); + const fastO = Cast(o) otherwise goto Bailout(k, to); + + FastArrayFilter(fastO, smiLen, callbackfn, thisArg, fastOutput) otherwise Bailout; + return output; } - label Bailout(kValue: Smi, toValue: Smi) deferred { + label Bailout(kValue: Number, toValue: Number) deferred { k = kValue; to = toValue; } } label SlowSpeciesCreate { - array = ArraySpeciesCreate(context, receiver, 0); + output = ArraySpeciesCreate(context, receiver, 0); } return ArrayFilterLoopContinuation( - o, callbackfn, thisArg, array, o, k, len, to); + o, callbackfn, thisArg, output, o, k, len, to); } label TypeError deferred { - ThrowTypeError(context, kCalledNonCallable, arguments[0]); + ThrowTypeError(kCalledNonCallable, arguments[0]); } label NullOrUndefinedError deferred { - ThrowTypeError( - context, kCalledOnNullOrUndefined, 'Array.prototype.filter'); + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.filter'); } } } diff --git a/deps/v8/src/builtins/array-find.tq b/deps/v8/src/builtins/array-find.tq new file mode 100644 index 00000000000000..28223e4c492bdc --- /dev/null +++ b/deps/v8/src/builtins/array-find.tq @@ -0,0 +1,158 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array_find { + transitioning javascript builtin + ArrayFindLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object): Object { + // All continuation points in the optimized find implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayFindLoopContinuation( + jsreceiver, callbackfn, thisArg, jsreceiver, numberK, numberLength); + } + + transitioning javascript builtin + ArrayFindLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, result: Object): Object { + // This deopt continuation point is never actually called, it just + // exists to make stack traces correct from a ThrowTypeError if the + // callback was found to be non-callable. + unreachable; + } + + // Continuation that is called after a lazy deoptimization from TF that + // happens right after the callback and its returned value must be handled + // before iteration continues.
+ transitioning javascript builtin + ArrayFindLoopAfterCallbackLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, foundValue: Object, isFound: Object): Object { + // All continuation points in the optimized find implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // This custom lazy deopt point is right after the callback. find() needs + // to pick up at the next step, which is returning the element if the + // callback value is truthy. Otherwise, continue the search by calling the + // continuation. + + if (ToBoolean(isFound)) { + return foundValue; + } + + return ArrayFindLoopContinuation( + jsreceiver, callbackfn, thisArg, jsreceiver, numberK, numberLength); + } + + transitioning builtin ArrayFindLoopContinuation(implicit context: Context)( + receiver: JSReceiver, callbackfn: Callable, thisArg: Object, + o: JSReceiver, initialK: Number, length: Number): Object { + // 5. Let k be 0. + // 6. Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 6a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 6b. i. Let kValue be ? Get(O, Pk). + const value: Object = GetProperty(o, k); + + // 6c. Let testResult be ToBoolean(? Call(predicate, T, <>)). + const testResult: Object = + Call(context, callbackfn, thisArg, value, k, o); + + // 6d. If testResult is true, return kValue. + if (ToBoolean(testResult)) { + return value; + } + + // 6e. Increase k by 1. (done by the loop). + } + return Undefined; + } + + transitioning macro FastArrayFind(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Object + labels Bailout(Smi) { + let k: Smi = 0; + const smiLen = Cast(len) otherwise goto Bailout(k); + const fastO = Cast(o) otherwise goto Bailout(k); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the smi array. + for (; k < smiLen; k++) { + fastOW.Recheck() otherwise goto Bailout(k); + + // Ensure that we haven't walked beyond a possibly updated length. + if (k >= fastOW.Get().length) goto Bailout(k); + + const value: Object = fastOW.LoadElementOrUndefined(k); + const testResult: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + if (ToBoolean(testResult)) { + return value; + } + } + return Undefined; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.find + transitioning javascript builtin + ArrayPrototypeFind(implicit context: Context)(receiver: Object, ...arguments): + Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto NotCallableError; + } + const callbackfn = + Cast(arguments[0]) otherwise NotCallableError; + + // 4. If thisArg is present, let T be thisArg; else let T be undefined. + const thisArg: Object = arguments.length > 1 ? 
arguments[1] : Undefined; + + // Special cases. + try { + return FastArrayFind(o, len, callbackfn, thisArg) + otherwise Bailout; + } + label Bailout(k: Smi) deferred { + return ArrayFindLoopContinuation(o, callbackfn, thisArg, o, k, len); + } + } + label NotCallableError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.find'); + } + } +} diff --git a/deps/v8/src/builtins/array-findindex.tq b/deps/v8/src/builtins/array-findindex.tq new file mode 100644 index 00000000000000..00d8378dfa6979 --- /dev/null +++ b/deps/v8/src/builtins/array-findindex.tq @@ -0,0 +1,161 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array_findindex { + transitioning javascript builtin + ArrayFindIndexLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object): Object { + // All continuation points in the optimized findIndex implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayFindIndexLoopContinuation( + jsreceiver, callbackfn, thisArg, jsreceiver, numberK, numberLength); + } + + transitioning javascript builtin + ArrayFindIndexLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, result: Object): Object { + // This deopt continuation point is never actually called, it just + // exists to make stack traces correct from a ThrowTypeError if the + // callback was found to be non-callable. + unreachable; + } + + // Continuation that is called after a lazy deoptimization from TF that + // happens right after the callback and its returned value must be handled + // before iteration continues. + transitioning javascript builtin + ArrayFindIndexLoopAfterCallbackLazyDeoptContinuation(implicit context: + Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, foundValue: Object, isFound: Object): Object { + // All continuation points in the optimized findIndex implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // This custom lazy deopt point is right after the callback. find() needs + // to pick up at the next step, which is returning the element if the + // callback value is truthy. Otherwise, continue the search by calling the + // continuation.
+ + if (ToBoolean(isFound)) { + return foundValue; + } + + return ArrayFindIndexLoopContinuation( + jsreceiver, callbackfn, thisArg, jsreceiver, numberK, numberLength); + } + + transitioning builtin ArrayFindIndexLoopContinuation(implicit context: + Context)( + receiver: JSReceiver, callbackfn: Callable, thisArg: Object, + o: JSReceiver, initialK: Number, length: Number): Number { + // 5. Let k be 0. + // 6. Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 6a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 6b. i. Let kValue be ? Get(O, Pk). + const value: Object = GetProperty(o, k); + + // 6c. Let testResult be ToBoolean(? Call(predicate, T, <>)). + const testResult: Object = + Call(context, callbackfn, thisArg, value, k, o); + + // 6d. If testResult is true, return k. + if (ToBoolean(testResult)) { + return k; + } + + // 6e. Increase k by 1. (done by the loop). + } + return Convert(-1); + } + + transitioning macro FastArrayFindIndex(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Number + labels Bailout(Smi) { + let k: Smi = 0; + const smiLen = Cast(len) otherwise goto Bailout(k); + const fastO = Cast(o) otherwise goto Bailout(k); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the smi array. + for (; k < smiLen; k++) { + fastOW.Recheck() otherwise goto Bailout(k); + + // Ensure that we haven't walked beyond a possibly updated length. + if (k >= fastOW.Get().length) goto Bailout(k); + + const value: Object = fastOW.LoadElementOrUndefined(k); + const testResult: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + if (ToBoolean(testResult)) { + return k; + } + } + return -1; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.findIndex + transitioning javascript builtin + ArrayPrototypeFindIndex(implicit context: + Context)(receiver: Object, ...arguments): Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto NotCallableError; + } + const callbackfn = + Cast(arguments[0]) otherwise NotCallableError; + + // 4. If thisArg is present, let T be thisArg; else let T be undefined. + const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; + + // Special cases. + try { + return FastArrayFindIndex(o, len, callbackfn, thisArg) + otherwise Bailout; + } + label Bailout(k: Smi) deferred { + return ArrayFindIndexLoopContinuation( + o, callbackfn, thisArg, o, k, len); + } + } + label NotCallableError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.findIndex'); + } + } +} diff --git a/deps/v8/src/builtins/array-foreach.tq b/deps/v8/src/builtins/array-foreach.tq index 7967058e6b4960..d362e95950dc62 100644 --- a/deps/v8/src/builtins/array-foreach.tq +++ b/deps/v8/src/builtins/array-foreach.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-namespace array { +namespace array_foreach { transitioning javascript builtin ArrayForEachLoopEagerDeoptContinuation(implicit context: Context)( receiver: Object, callback: Object, thisArg: Object, initialK: Object, @@ -10,11 +10,10 @@ namespace array { // All continuation points in the optimized forEach implementation are // after the ToObject(O) call that ensures we are dealing with a // JSReceiver. - const jsreceiver: JSReceiver = - Cast(receiver) otherwise unreachable; - const callbackfn: Callable = Cast(callback) otherwise unreachable; - const numberK: Number = Cast(initialK) otherwise unreachable; - const numberLength: Number = Cast(length) otherwise unreachable; + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; return ArrayForEachLoopContinuation( jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, @@ -28,11 +27,10 @@ namespace array { // All continuation points in the optimized forEach implementation are // after the ToObject(O) call that ensures we are dealing with a // JSReceiver. - const jsreceiver: JSReceiver = - Cast(receiver) otherwise unreachable; - const callbackfn: Callable = Cast(callback) otherwise unreachable; - const numberK: Number = Cast(initialK) otherwise unreachable; - const numberLength: Number = Cast(length) otherwise unreachable; + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; return ArrayForEachLoopContinuation( jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, @@ -69,43 +67,23 @@ namespace array { return Undefined; } - transitioning macro VisitAllElements(implicit context: - Context)( - o: JSArray, len: Smi, callbackfn: Callable, thisArg: Object) labels - Bailout(Smi) { + transitioning macro FastArrayForEach(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Object + labels Bailout(Smi) { let k: Smi = 0; - const fastOWitness: FastJSArrayWitness = - MakeWitness(Cast(o) otherwise goto Bailout(k)); + const smiLen = Cast(len) otherwise goto Bailout(k); + let fastO = Cast(o) otherwise goto Bailout(k); + let fastOW = NewFastJSArrayWitness(fastO); // Build a fast loop over the smi array. - for (; k < len; k++) { - let fastO: FastJSArray = Testify(fastOWitness) otherwise goto Bailout(k); + for (; k < smiLen; k++) { + fastOW.Recheck() otherwise goto Bailout(k); // Ensure that we haven't walked beyond a possibly updated length.
- if (k >= fastO.length) goto Bailout(k); - - try { - const value: Object = - LoadElementNoHole(fastO, k) otherwise FoundHole; - Call(context, callbackfn, thisArg, value, k, fastO); - } - label FoundHole {} - } - } - - transitioning macro FastArrayForEach(implicit context: Context)( - o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Object - labels Bailout(Smi) { - let k: Smi = 0; - const smiLen: Smi = Cast(len) otherwise goto Bailout(k); - let fastO: FastJSArray = Cast(o) otherwise goto Bailout(k); - const elementsKind: ElementsKind = fastO.map.elements_kind; - if (IsElementsKindGreaterThan(elementsKind, HOLEY_ELEMENTS)) { - VisitAllElements(fastO, smiLen, callbackfn, thisArg) - otherwise Bailout; - } else { - VisitAllElements(fastO, smiLen, callbackfn, thisArg) - otherwise Bailout; + if (k >= fastOW.Get().length) goto Bailout(k); + const value: Object = fastOW.LoadElementNoHole(k) + otherwise continue; + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); } return Undefined; } @@ -128,8 +106,7 @@ namespace array { if (arguments.length == 0) { goto TypeError; } - const callbackfn: Callable = - Cast(arguments[0]) otherwise TypeError; + const callbackfn = Cast(arguments[0]) otherwise TypeError; // 4. If thisArg is present, let T be thisArg; else let T be undefined. const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; @@ -148,11 +125,10 @@ namespace array { o, callbackfn, thisArg, Undefined, o, k, len, Undefined); } label TypeError deferred { - ThrowTypeError(context, kCalledNonCallable, arguments[0]); + ThrowTypeError(kCalledNonCallable, arguments[0]); } label NullOrUndefinedError deferred { - ThrowTypeError( - context, kCalledOnNullOrUndefined, 'Array.prototype.forEach'); + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.forEach'); } } } diff --git a/deps/v8/src/builtins/array-join.tq b/deps/v8/src/builtins/array-join.tq index 16ac7a710435dc..f29f6694d4d8a1 100644 --- a/deps/v8/src/builtins/array-join.tq +++ b/deps/v8/src/builtins/array-join.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_join { type LoadJoinElementFn = builtin(Context, JSReceiver, Number) => Object; // Fast C call to write a fixed array (see Buffer.fixedArray) to a single @@ -16,7 +16,7 @@ namespace array { return GetProperty(receiver, k); } - LoadJoinElement( + LoadJoinElement( context: Context, receiver: JSReceiver, k: Number): Object { const array: JSArray = UnsafeCast(receiver); const dict: NumberDictionary = UnsafeCast(array.elements); @@ -32,27 +32,22 @@ namespace array { } } - LoadJoinElement( + LoadJoinElement( context: Context, receiver: JSReceiver, k: Number): Object { const array: JSArray = UnsafeCast(receiver); const fixedArray: FixedArray = UnsafeCast(array.elements); - const element: Object = fixedArray[UnsafeCast(k)]; + const element: Object = fixedArray.objects[UnsafeCast(k)]; return element == Hole ? 
kEmptyString : element; } - LoadJoinElement( + LoadJoinElement( context: Context, receiver: JSReceiver, k: Number): Object { const array: JSArray = UnsafeCast(receiver); const fixedDoubleArray: FixedDoubleArray = UnsafeCast(array.elements); - try { - const element: float64 = LoadDoubleWithHoleCheck( - fixedDoubleArray, UnsafeCast(k)) otherwise IfHole; - return AllocateHeapNumberWithValue(element); - } - label IfHole { - return kEmptyString; - } + const element: float64 = LoadDoubleWithHoleCheck( + fixedDoubleArray, UnsafeCast(k)) otherwise return kEmptyString; + return AllocateHeapNumberWithValue(element); } builtin LoadJoinTypedElement( @@ -83,7 +78,7 @@ namespace array { return ToString_Inline(context, result); } label TypeError { - ThrowTypeError(context, kCalledNonCallable, prop); + ThrowTypeError(kCalledNonCallable, prop); } } @@ -98,7 +93,7 @@ namespace array { loadFn: LoadJoinElementFn, receiver: JSReceiver, originalMap: Map, originalLen: Number): never labels Cannot, Can { - if (loadFn == LoadJoinElement) goto Can; + if (loadFn == LoadJoinElement) goto Can; const array: JSArray = UnsafeCast(receiver); if (originalMap != array.map) goto Cannot; @@ -139,7 +134,7 @@ namespace array { const length: intptr = fixedArray.length_intptr; assert(index <= length); if (index < length) { - fixedArray[index] = element; + fixedArray.objects[index] = element; return fixedArray; } else deferred { @@ -147,16 +142,49 @@ namespace array { assert(index < newLength); const newfixedArray: FixedArray = ExtractFixedArray(fixedArray, 0, length, newLength, kFixedArrays); - newfixedArray[index] = element; + newfixedArray.objects[index] = element; return newfixedArray; } } // Contains the information necessary to create a single, separator delimited, // flattened one or two byte string. - // The buffer is maintained and updated by BufferInit(), BufferAdd(), - // BufferAddSeparators(). + // The buffer is maintained and updated by Buffer.constructor, Buffer.Add(), + // Buffer.AddSeparators(). struct Buffer { + Add(implicit context: Context)( + str: String, nofSeparators: intptr, separatorLength: intptr) { + // Add separators if necessary (at the beginning or more than one) + const writeSeparators: bool = this.index == 0 | nofSeparators > 1; + this.AddSeparators(nofSeparators, separatorLength, writeSeparators); + + this.totalStringLength = + AddStringLength(this.totalStringLength, str.length); + this.fixedArray = + StoreAndGrowFixedArray(this.fixedArray, this.index++, str); + this.isOneByte = + IsOneByteStringInstanceType(str.instanceType) & this.isOneByte; + } + + AddSeparators(implicit context: Context)( + nofSeparators: intptr, separatorLength: intptr, write: bool) { + if (nofSeparators == 0 || separatorLength == 0) return; + + const nofSeparatorsInt: intptr = nofSeparators; + const sepsLen: intptr = separatorLength * nofSeparatorsInt; + // Detect integer overflow + // TODO(tebbi): Replace with overflow-checked multiplication. + if (sepsLen / separatorLength != nofSeparatorsInt) deferred { + ThrowInvalidStringLength(context); + } + + this.totalStringLength = AddStringLength(this.totalStringLength, sepsLen); + if (write) deferred { + this.fixedArray = StoreAndGrowFixedArray( + this.fixedArray, this.index++, Convert(nofSeparatorsInt)); + } + } + // Fixed array holding elements that are either: // 1) String result of `ToString(next)`. // 2) Smi representing the number of consecutive separators. 
@@ -185,57 +213,17 @@ namespace array { isOneByte: bool; } - macro BufferInit(len: uintptr, sep: String): Buffer { + macro NewBuffer(len: uintptr, sep: String): Buffer { const cappedBufferSize: intptr = len > kMaxNewSpaceFixedArrayElements ? kMaxNewSpaceFixedArrayElements : Signed(len); assert(cappedBufferSize > 0); - const fixedArray: FixedArray = AllocateZeroedFixedArray(cappedBufferSize); - const isOneByte: bool = HasOnlyOneByteChars(sep.instanceType); - return Buffer{fixedArray, 0, 0, isOneByte}; - } - - macro BufferAdd(implicit context: Context)( - initialBuffer: Buffer, str: String, nofSeparators: intptr, - separatorLength: intptr): Buffer { - let buffer: Buffer = initialBuffer; - // Add separators if necessary (at the beginning or more than one) - const writeSeparators: bool = buffer.index == 0 | nofSeparators > 1; - buffer = BufferAddSeparators( - buffer, nofSeparators, separatorLength, writeSeparators); - - const totalStringLength: intptr = - AddStringLength(buffer.totalStringLength, str.length); - let index: intptr = buffer.index; - const fixedArray: FixedArray = - StoreAndGrowFixedArray(buffer.fixedArray, index++, str); - const isOneByte: bool = - HasOnlyOneByteChars(str.instanceType) & buffer.isOneByte; - return Buffer{fixedArray, index, totalStringLength, isOneByte}; - } - - macro BufferAddSeparators(implicit context: Context)( - buffer: Buffer, nofSeparators: intptr, separatorLength: intptr, - write: bool): Buffer { - if (nofSeparators == 0 || separatorLength == 0) return buffer; - - const nofSeparatorsInt: intptr = nofSeparators; - const sepsLen: intptr = separatorLength * nofSeparatorsInt; - // Detect integer overflow - // TODO(tebbi): Replace with overflow-checked multiplication. - if (sepsLen / separatorLength != nofSeparatorsInt) deferred { - ThrowInvalidStringLength(context); - } - - const totalStringLength: intptr = - AddStringLength(buffer.totalStringLength, sepsLen); - let index: intptr = buffer.index; - let fixedArray: FixedArray = buffer.fixedArray; - if (write) deferred { - fixedArray = StoreAndGrowFixedArray( - buffer.fixedArray, index++, Convert(nofSeparatorsInt)); - } - return Buffer{fixedArray, index, totalStringLength, buffer.isOneByte}; + return Buffer{ + AllocateZeroedFixedArray(cappedBufferSize), + 0, + 0, + IsOneByteStringInstanceType(sep.instanceType) + }; } macro BufferJoin(implicit context: Context)(buffer: Buffer, sep: String): @@ -246,7 +234,7 @@ namespace array { // Fast path when there's only one buffer element. if (buffer.index == 1) { const fixedArray: FixedArray = buffer.fixedArray; - typeswitch (fixedArray[0]) { + typeswitch (fixedArray.objects[0]) { // When the element is a string, just return it and completely avoid // allocating another string. case (str: String): { @@ -280,7 +268,7 @@ namespace array { const separatorLength: intptr = sep.length; let nofSeparators: intptr = 0; let loadFn: LoadJoinElementFn = initialLoadFn; - let buffer: Buffer = BufferInit(len, sep); + let buffer: Buffer = NewBuffer(len, sep); // 6. Let k be 0. let k: uintptr = 0; @@ -290,7 +278,7 @@ namespace array { if (CannotUseSameArrayAccessor( loadFn, receiver, initialMap, lengthNumber)) deferred { - loadFn = LoadJoinElement; + loadFn = LoadJoinElement; } if (k > 0) { @@ -324,12 +312,12 @@ namespace array { } // d. Set R to the string-concatenation of R and next. - buffer = BufferAdd(buffer, next, nofSeparators, separatorLength); + buffer.Add(next, nofSeparators, separatorLength); nofSeparators = 0; } // Add any separators at the end. 
- buffer = BufferAddSeparators(buffer, nofSeparators, separatorLength, true); + buffer.AddSeparators(nofSeparators, separatorLength, true); // 8. Return R. return BufferJoin(buffer, sep); @@ -353,9 +341,9 @@ namespace array { if (IsNoElementsProtectorCellInvalid()) goto IfSlowPath; if (IsElementsKindLessThanOrEqual(kind, HOLEY_ELEMENTS)) { - loadFn = LoadJoinElement; + loadFn = LoadJoinElement; } else if (IsElementsKindLessThanOrEqual(kind, HOLEY_DOUBLE_ELEMENTS)) { - loadFn = LoadJoinElement; + loadFn = LoadJoinElement; } else if (kind == DICTIONARY_ELEMENTS) deferred { const dict: NumberDictionary = @@ -372,7 +360,7 @@ namespace array { ThrowInvalidStringLength(context); } } else { - loadFn = LoadJoinElement; + loadFn = LoadJoinElement; } } else { @@ -380,7 +368,7 @@ namespace array { } } label IfSlowPath { - loadFn = LoadJoinElement; + loadFn = LoadJoinElement; } return ArrayJoinImpl( receiver, sep, lenNumber, useToLocaleString, locales, options, loadFn); @@ -457,11 +445,11 @@ namespace array { stack: FixedArray, receiver: JSReceiver): Boolean { const capacity: intptr = stack.length_intptr; for (let i: intptr = 0; i < capacity; i++) { - const previouslyVisited: Object = stack[i]; + const previouslyVisited: Object = stack.objects[i]; // Add `receiver` to the first open slot if (previouslyVisited == Hole) { - stack[i] = receiver; + stack.objects[i] = receiver; return True; } @@ -485,8 +473,8 @@ namespace array { try { const stack: FixedArray = LoadJoinStack() otherwise IfUninitialized; - if (stack[0] == Hole) { - stack[0] = receiver; + if (stack.objects[0] == Hole) { + stack.objects[0] = receiver; } else if (JoinStackPush(stack, receiver) == False) deferred { goto ReceiverNotAdded; @@ -495,7 +483,7 @@ namespace array { label IfUninitialized { const stack: FixedArray = AllocateFixedArrayWithHoles(kMinJoinStackSize, kNone); - stack[0] = receiver; + stack.objects[0] = receiver; SetJoinStack(stack); } goto ReceiverAdded; @@ -507,7 +495,7 @@ namespace array { stack: FixedArray, receiver: JSReceiver): Object { const len: intptr = stack.length_intptr; for (let i: intptr = 0; i < len; i++) { - if (stack[i] == receiver) { + if (stack.objects[i] == receiver) { // Shrink the Join Stack if the stack will be empty and is larger than // the minimum size. if (i == 0 && len > kMinJoinStackSize) deferred { @@ -516,7 +504,7 @@ namespace array { SetJoinStack(newStack); } else { - stack[i] = Hole; + stack.objects[i] = Hole; } return Undefined; } @@ -532,7 +520,7 @@ namespace array { // Builtin call was not nested (receiver is the first entry) and // did not contain other nested arrays that expanded the stack. - if (stack[0] == receiver && len == kMinJoinStackSize) { + if (stack.objects[0] == receiver && len == kMinJoinStackSize) { StoreFixedArrayElement(stack, 0, Hole, SKIP_WRITE_BARRIER); } else deferred { @@ -580,7 +568,7 @@ namespace array { // Only handle valid array lengths. Although the spec allows larger values, // this matches historical V8 behavior. - if (len > kMaxArrayIndex + 1) ThrowTypeError(context, kInvalidArrayLength); + if (len > kMaxArrayIndex + 1) ThrowTypeError(kInvalidArrayLength); return CycleProtectedArrayJoin( false, o, len, separator, Undefined, Undefined); @@ -600,7 +588,7 @@ namespace array { // Only handle valid array lengths. Although the spec allows larger values, // this matches historical V8 behavior. 
- if (len > kMaxArrayIndex + 1) ThrowTypeError(context, kInvalidArrayLength); + if (len > kMaxArrayIndex + 1) ThrowTypeError(kInvalidArrayLength); return CycleProtectedArrayJoin( true, o, len, ',', locales, options); diff --git a/deps/v8/src/builtins/array-lastindexof.tq b/deps/v8/src/builtins/array-lastindexof.tq index 967d640e8f3ddc..2ade54156ce8d2 100644 --- a/deps/v8/src/builtins/array-lastindexof.tq +++ b/deps/v8/src/builtins/array-lastindexof.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_lastindexof { macro LoadWithHoleCheck( elements: FixedArrayBase, index: Smi): Object labels IfHole; @@ -11,7 +11,7 @@ namespace array { elements: FixedArrayBase, index: Smi): Object labels IfHole { const elements: FixedArray = UnsafeCast(elements); - const element: Object = elements[index]; + const element: Object = elements.objects[index]; if (element == Hole) goto IfHole; return element; } diff --git a/deps/v8/src/builtins/array-map.tq b/deps/v8/src/builtins/array-map.tq new file mode 100644 index 00000000000000..d3bba562205ddf --- /dev/null +++ b/deps/v8/src/builtins/array-map.tq @@ -0,0 +1,281 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array_map { + transitioning javascript builtin + ArrayMapLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, array: Object, + initialK: Object, length: Object): Object { + // All continuation points in the optimized filter implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const outputArray = Cast(array) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayMapLoopContinuation( + jsreceiver, callbackfn, thisArg, outputArray, jsreceiver, numberK, + numberLength); + } + + transitioning javascript builtin + ArrayMapLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, array: Object, + initialK: Object, length: Object, result: Object): Object { + // All continuation points in the optimized filter implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const outputArray = Cast(array) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // This custom lazy deopt point is right after the callback. map() needs + // to pick up at the next step, which is setting the callback result in + // the output array. After incrementing k, we can glide into the loop + // continuation builtin. + + // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mappedValue). + FastCreateDataProperty(outputArray, numberK, result); + + // 7d. Increase k by 1. 
+ numberK = numberK + 1; + + return ArrayMapLoopContinuation( + jsreceiver, callbackfn, thisArg, outputArray, jsreceiver, numberK, + numberLength); + } + + transitioning builtin ArrayMapLoopContinuation(implicit context: Context)( + receiver: JSReceiver, callbackfn: Callable, thisArg: Object, + array: JSReceiver, o: JSReceiver, initialK: Number, + length: Number): Object { + // 6. Let k be 0. + // 7. Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 7a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 7b. Let kPresent be ? HasProperty(O, Pk). + const kPresent: Boolean = HasProperty_Inline(o, k); + + // 7c. If kPresent is true, then: + if (kPresent == True) { + // i. Let kValue be ? Get(O, Pk). + const kValue: Object = GetProperty(o, k); + + // ii. Let mapped_value be ? Call(callbackfn, T, kValue, k, O). + const mappedValue: Object = + Call(context, callbackfn, thisArg, kValue, k, o); + + // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mapped_value). + FastCreateDataProperty(array, k, mappedValue); + } + + // 7d. Increase k by 1. (done by the loop). + } + + // 8. Return A. + return array; + } + + struct Vector { + ReportSkippedElement() { + this.skippedElements = true; + } + + CreateJSArray(implicit context: Context)(validLength: Smi): JSArray { + let length: Smi = this.fixedArray.length; + assert(validLength <= length); + let kind: ElementsKind = PACKED_SMI_ELEMENTS; + if (!this.onlySmis) { + if (this.onlyNumbers) { + kind = PACKED_DOUBLE_ELEMENTS; + } else { + kind = PACKED_ELEMENTS; + } + } + + if (this.skippedElements || validLength < length) { + // We also need to create a holey output array if we are + // bailing out of the fast path partway through the array. + // This is indicated by {validLength} < {length}. + // Who knows if the bailout condition will continue to fill in + // every element? + kind = FastHoleyElementsKind(kind); + } + + let map: Map = LoadJSArrayElementsMap(kind, LoadNativeContext(context)); + let a: JSArray; + + if (IsDoubleElementsKind(kind)) { + // We need to allocate and copy. + // First, initialize the elements field before allocation to prevent + // heap corruption. + const elements: FixedDoubleArray = AllocateFixedDoubleArrayWithHoles( + SmiUntag(length), kAllowLargeObjectAllocation); + a = NewJSArray(map, this.fixedArray); + for (let i: Smi = 0; i < validLength; i++) { + typeswitch (this.fixedArray.objects[i]) { + case (n: Number): { + elements.floats[i] = Float64SilenceNaN(Convert(n)); + } + case (h: HeapObject): { + assert(h == Hole); + } + } + } + a.elements = elements; + } else { + // Simply install the given fixedArray in {vector}. + a = NewJSArray(map, this.fixedArray); + } + + // Paranoia. the FixedArray now "belongs" to JSArray {a}. + this.fixedArray = kEmptyFixedArray; + return a; + } + + StoreResult(implicit context: Context)(index: Smi, result: Object) { + typeswitch (result) { + case (s: Smi): { + this.fixedArray.objects[index] = s; + } + case (s: HeapNumber): { + this.onlySmis = false; + this.fixedArray.objects[index] = s; + } + case (s: HeapObject): { + this.onlySmis = false; + this.onlyNumbers = false; + this.fixedArray.objects[index] = s; + } + } + } + + fixedArray: FixedArray; + onlySmis: bool; // initially true. + onlyNumbers: bool; // initially true. + skippedElements: bool; // initially false. 
+ } + + macro NewVector(implicit context: Context)(length: Smi): Vector { + const fixedArray = length > 0 ? + AllocateFixedArrayWithHoles( + SmiUntag(length), kAllowLargeObjectAllocation) : + kEmptyFixedArray; + return Vector{fixedArray, true, true, false}; + } + + transitioning macro FastArrayMap(implicit context: Context)( + fastO: FastJSArray, len: Smi, callbackfn: Callable, + thisArg: Object): JSArray + labels Bailout(JSArray, Smi) { + let k: Smi = 0; + let fastOW = NewFastJSArrayWitness(fastO); + let vector = NewVector(len); + + // Build a fast loop over the smi array. + // 7. Repeat, while k < len. + try { + for (; k < len; k++) { + fastOW.Recheck() otherwise goto PrepareBailout(k); + + // Ensure that we haven't walked beyond a possibly updated length. + if (k >= fastOW.Get().length) goto PrepareBailout(k); + + try { + const value: Object = fastOW.LoadElementNoHole(k) + otherwise FoundHole; + const result: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + vector.StoreResult(k, result); + } + label FoundHole { + // Our output array must necessarily be holey because of holes in + // the input array. + vector.ReportSkippedElement(); + } + } + } + label PrepareBailout(k: Smi) deferred { + // Transform {vector} into a JSArray and bail out. + goto Bailout(vector.CreateJSArray(k), k); + } + + return vector.CreateJSArray(len); + } + + // Bails out if the slow path needs to be taken. + // It's useful to structure it this way, because the consequences of + // using the slow path on species creation are interesting to the caller. + macro FastMapSpeciesCreate(implicit context: Context)( + receiver: JSReceiver, length: Number): JSArray labels Bailout { + if (IsArraySpeciesProtectorCellInvalid()) goto Bailout; + const o = Cast(receiver) otherwise Bailout; + const smiLength = Cast(length) otherwise Bailout; + const newMap: Map = + LoadJSArrayElementsMap(PACKED_SMI_ELEMENTS, LoadNativeContext(context)); + return AllocateJSArray(PACKED_SMI_ELEMENTS, newMap, smiLength, smiLength); + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.map + transitioning javascript builtin + ArrayMap(implicit context: Context)(receiver: Object, ...arguments): Object { + try { + if (IsNullOrUndefined(receiver)) goto NullOrUndefinedError; + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) goto TypeError; + + const callbackfn = Cast(arguments[0]) otherwise TypeError; + + // 4. If thisArg is present, let T be thisArg; else let T be undefined. + const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; + + let array: JSReceiver; + let k: Number = 0; + try { + // 5. Let A be ? ArraySpeciesCreate(O, len). 
+ if (IsArraySpeciesProtectorCellInvalid()) goto SlowSpeciesCreate; + const o: FastJSArray = Cast(receiver) + otherwise SlowSpeciesCreate; + const smiLength: Smi = Cast(len) + otherwise SlowSpeciesCreate; + + return FastArrayMap(o, smiLength, callbackfn, thisArg) + otherwise Bailout; + } + label SlowSpeciesCreate { + array = ArraySpeciesCreate(context, receiver, len); + } + label Bailout(output: JSArray, kValue: Smi) deferred { + array = output; + k = kValue; + } + + return ArrayMapLoopContinuation(o, callbackfn, thisArg, array, o, k, len); + } + label TypeError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.map'); + } + } +} diff --git a/deps/v8/src/builtins/array-of.tq b/deps/v8/src/builtins/array-of.tq index 6434dbc8c8676d..70fda8d2eba5f8 100644 --- a/deps/v8/src/builtins/array-of.tq +++ b/deps/v8/src/builtins/array-of.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_of { // https://tc39.github.io/ecma262/#sec-array.of transitioning javascript builtin ArrayOf(implicit context: Context)(receiver: Object, ...arguments): Object { @@ -39,14 +39,14 @@ namespace array { // b. Let Pk be ! ToString(k). // c. Perform ? CreateDataPropertyOrThrow(A, Pk, kValue). - CreateDataProperty(a, k, kValue); + FastCreateDataProperty(a, k, kValue); // d. Increase k by 1. k++; } // 8. Perform ? Set(A, "length", len, true). - SetPropertyLength(a, len); + array::SetPropertyLength(a, len); // 9. Return A. return a; diff --git a/deps/v8/src/builtins/array-reduce-right.tq b/deps/v8/src/builtins/array-reduce-right.tq new file mode 100644 index 00000000000000..33661c38d106c1 --- /dev/null +++ b/deps/v8/src/builtins/array-reduce-right.tq @@ -0,0 +1,183 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array { + transitioning javascript builtin + ArrayReduceRightPreLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, length: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // Simulate starting the loop at 0, but ensuring that the accumulator is + // the hole. The continuation stub will search for the initial non-hole + // element, rightly throwing an exception if not found. + return ArrayReduceRightLoopContinuation( + jsreceiver, callbackfn, Hole, jsreceiver, 0, numberLength); + } + + transitioning javascript builtin + ArrayReduceRightLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, initialK: Object, length: Object, + accumulator: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. 
+ // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayReduceRightLoopContinuation( + jsreceiver, callbackfn, accumulator, jsreceiver, numberK, numberLength); + } + + transitioning javascript builtin + ArrayReduceRightLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, initialK: Object, length: Object, + result: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // The accumulator is the result from the callback call which just occurred. + let r = ArrayReduceRightLoopContinuation( + jsreceiver, callbackfn, result, jsreceiver, numberK, numberLength); + return r; + } + + transitioning builtin ArrayReduceRightLoopContinuation(implicit context: + Context)( + receiver: JSReceiver, callbackfn: Callable, initialAccumulator: Object, + o: JSReceiver, initialK: Number, length: Number): Object { + let accumulator = initialAccumulator; + + // 8b and 9. Repeat, while k >= 0 + for (let k: Number = initialK; k >= 0; k--) { + // 8b i and 9a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 8b ii and 9b. Set kPresent to ? HasProperty(O, Pk). + const present: Boolean = HasProperty_Inline(o, k); + + // 8b iii and 9c. If kPresent is true, then + if (present == True) { + // 8b iii and 9c i. Let kValue be ? Get(O, Pk). + const value: Object = GetProperty(o, k); + + if (accumulator == Hole) { + // 8b iii 1. + accumulator = value; + } else { + // 9c. ii. Set accumulator to ? Call(callbackfn, undefined, + // ). + accumulator = + Call(context, callbackfn, Undefined, accumulator, value, k, o); + } + } + + // 8b iv and 9d. Decrease k by 1. (done by the loop). + } + + // 8c. if kPresent is false, throw a TypeError exception. + // If the accumulator is discovered with the sentinel hole value, + // this means kPresent is false. + if (accumulator == Hole) { + ThrowTypeError(kReduceNoInitial, 'Array.prototype.reduceRight'); + } + return accumulator; + } + + transitioning macro FastArrayReduceRight(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, + initialAccumulator: Object): Object + labels Bailout(Number, Object) { + let accumulator = initialAccumulator; + const smiLen = Cast(len) otherwise goto Bailout(len - 1, accumulator); + let fastO = + Cast(o) otherwise goto Bailout(len - 1, accumulator); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the array. + for (let k: Smi = smiLen - 1; k >= 0; k--) { + fastOW.Recheck() otherwise goto Bailout(k, accumulator); + + // Ensure that we haven't walked beyond a possibly updated length.
+ if (k >= fastOW.Get().length) goto Bailout(k, accumulator); + + const value: Object = fastOW.LoadElementNoHole(k) otherwise continue; + if (accumulator == Hole) { + accumulator = value; + } else { + accumulator = Call( + context, callbackfn, Undefined, accumulator, value, k, + fastOW.Get()); + } + } + if (accumulator == Hole) { + ThrowTypeError(kReduceNoInitial, 'Array.prototype.reduceRight'); + } + return accumulator; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.reduceRight + transitioning javascript builtin + ArrayReduceRight(implicit context: Context)(receiver: Object, ...arguments): + Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto NoCallableError; + } + const callbackfn = Cast(arguments[0]) otherwise NoCallableError; + + // 4. If len is 0 and initialValue is not present, throw a TypeError + // exception. (This case is handled at the end of + // ArrayReduceRightLoopContinuation). + + const initialValue: Object = arguments.length > 1 ? arguments[1] : Hole; + + try { + return FastArrayReduceRight(o, len, callbackfn, initialValue) + otherwise Bailout; + } + label Bailout(value: Number, accumulator: Object) { + return ArrayReduceRightLoopContinuation( + o, callbackfn, accumulator, o, value, len); + } + } + label NoCallableError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.reduceRight'); + } + } +} diff --git a/deps/v8/src/builtins/array-reduce.tq b/deps/v8/src/builtins/array-reduce.tq new file mode 100644 index 00000000000000..67a112fd418878 --- /dev/null +++ b/deps/v8/src/builtins/array-reduce.tq @@ -0,0 +1,182 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array { + transitioning javascript builtin + ArrayReducePreLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, length: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // Simulate starting the loop at 0, but ensuring that the accumulator is + // the hole. The continuation stub will search for the initial non-hole + // element, rightly throwing an exception if not found. 
+ return ArrayReduceLoopContinuation( + jsreceiver, callbackfn, Hole, jsreceiver, 0, numberLength); + } + + transitioning javascript builtin + ArrayReduceLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, initialK: Object, length: Object, + accumulator: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArrayReduceLoopContinuation( + jsreceiver, callbackfn, accumulator, jsreceiver, numberK, numberLength); + } + + transitioning javascript builtin + ArrayReduceLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, initialK: Object, length: Object, + result: Object): Object { + // All continuation points in the optimized every implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // The accumulator is the result from the callback call which just occured. + let r = ArrayReduceLoopContinuation( + jsreceiver, callbackfn, result, jsreceiver, numberK, numberLength); + return r; + } + + transitioning builtin ArrayReduceLoopContinuation(implicit context: Context)( + receiver: JSReceiver, callbackfn: Callable, initialAccumulator: Object, + o: JSReceiver, initialK: Number, length: Number): Object { + let accumulator = initialAccumulator; + + // 8b and 9. Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 8b i and 9a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 8b ii and 9b. Set kPresent to ? HasProperty(O, Pk). + const present: Boolean = HasProperty_Inline(o, k); + + // 6c. If kPresent is true, then + if (present == True) { + // 6c. i. Let kValue be ? Get(O, Pk). + const value: Object = GetProperty(o, k); + + if (accumulator == Hole) { + // 8b. + accumulator = value; + } else { + // 9c. ii. Set accumulator to ? Call(callbackfn, undefined, + // ). + accumulator = + Call(context, callbackfn, Undefined, accumulator, value, k, o); + } + } + + // 8b iv and 9d. Increase k by 1. (done by the loop). + } + + // 8c. if kPresent is false, throw a TypeError exception. + // If the accumulator is discovered with the sentinel hole value, + // this means kPresent is false. 
+ if (accumulator == Hole) { + ThrowTypeError(kReduceNoInitial, 'Array.prototype.reduce'); + } + return accumulator; + } + + transitioning macro FastArrayReduce(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, + initialAccumulator: Object): Object + labels Bailout(Number, Object) { + const k = 0; + let accumulator = initialAccumulator; + const smiLen = Cast(len) otherwise goto Bailout(k, accumulator); + let fastO = Cast(o) otherwise goto Bailout(k, accumulator); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the array. + for (let k: Smi = 0; k < len; k++) { + fastOW.Recheck() otherwise goto Bailout(k, accumulator); + + // Ensure that we haven't walked beyond a possibly updated length. + if (k >= fastOW.Get().length) goto Bailout(k, accumulator); + + const value: Object = fastOW.LoadElementNoHole(k) otherwise continue; + if (accumulator == Hole) { + accumulator = value; + } else { + accumulator = Call( + context, callbackfn, Undefined, accumulator, value, k, + fastOW.Get()); + } + } + if (accumulator == Hole) { + ThrowTypeError(kReduceNoInitial, 'Array.prototype.reduce'); + } + return accumulator; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.reduce + transitioning javascript builtin + ArrayReduce(implicit context: Context)(receiver: Object, ...arguments): + Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto NoCallableError; + } + const callbackfn = Cast(arguments[0]) otherwise NoCallableError; + + // 4. If len is 0 and initialValue is not present, throw a TypeError + // exception. (This case is handled at the end of + // ArrayReduceLoopContinuation). + + const initialValue: Object = arguments.length > 1 ? arguments[1] : Hole; + + try { + return FastArrayReduce(o, len, callbackfn, initialValue) + otherwise Bailout; + } + label Bailout(value: Number, accumulator: Object) { + return ArrayReduceLoopContinuation( + o, callbackfn, accumulator, o, value, len); + } + } + label NoCallableError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.reduce'); + } + } +} diff --git a/deps/v8/src/builtins/array-reverse.tq b/deps/v8/src/builtins/array-reverse.tq index dddad7b42c7f00..80e9efe2f027e9 100644 --- a/deps/v8/src/builtins/array-reverse.tq +++ b/deps/v8/src/builtins/array-reverse.tq @@ -2,55 +2,54 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
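The ArrayReduce* builtins above follow the same pattern as reduceRight: ArrayReducePreLoopEagerDeoptContinuation restarts the loop at k = 0 with the accumulator forced to the hole, ArrayReduceLoopLazyDeoptContinuation feeds the just-computed callback result back in as the accumulator, and ArrayReduceLoopContinuation either adopts the first present element as the initial accumulator or throws kReduceNoInitial. A minimal plain-JavaScript model of that loop, where HOLE and reduceLoopContinuation are illustrative stand-ins for the internal sentinel and builtin (a sketch, not the actual implementation):

    const HOLE = Symbol('hole');  // stand-in for V8's internal hole sentinel

    function reduceLoopContinuation(o, callbackfn, accumulator, k, length) {
      for (; k < length; k++) {
        if (!(k in o)) continue;                      // HasProperty(O, Pk) is false
        const value = o[k];                           // Get(O, Pk)
        accumulator = (accumulator === HOLE)
            ? value                                   // adopt the first present element
            : callbackfn(accumulator, value, k, o);   // Call(callbackfn, undefined, ...)
      }
      if (accumulator === HOLE) {                     // kPresent never became true
        throw new TypeError('Reduce of empty array with no initial value');
      }
      return accumulator;
    }

    // The pre-loop eager-deopt continuation corresponds to starting at
    // k = 0 with the accumulator still set to the sentinel:
    reduceLoopContinuation([1, 2, 3], (a, b) => a + b, HOLE, 0, 3);  // 6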
-namespace array { +namespace array_reverse { macro LoadElement( elements: FixedArrayBase, index: Smi): T; - LoadElement(implicit context: Context)( + LoadElement(implicit context: Context)( elements: FixedArrayBase, index: Smi): Smi { - const elems: FixedArray = UnsafeCast(elements); - return UnsafeCast(elems[index]); + const elements: FixedArray = UnsafeCast(elements); + return UnsafeCast(elements.objects[index]); } - LoadElement(implicit context: Context)( - elements: FixedArrayBase, index: Smi): Object { - const elems: FixedArray = UnsafeCast(elements); - return elems[index]; + LoadElement( + implicit context: Context)(elements: FixedArrayBase, index: Smi): Object { + const elements: FixedArray = UnsafeCast(elements); + return elements.objects[index]; } - LoadElement(implicit context: Context)( - elements: FixedArrayBase, index: Smi): float64 { - try { - const elems: FixedDoubleArray = UnsafeCast(elements); - return LoadDoubleWithHoleCheck(elems, index) otherwise Hole; - } - label Hole { - // This macro is only used for PACKED_DOUBLE, loading the hole should - // be impossible. - unreachable; - } + LoadElement( + implicit context: Context)(elements: FixedArrayBase, index: Smi): + float64 { + const elements: FixedDoubleArray = UnsafeCast(elements); + // This macro is only used for PACKED_DOUBLE, loading the hole should + // be impossible. + return LoadDoubleWithHoleCheck(elements, index) + otherwise unreachable; } macro StoreElement( implicit context: Context)(elements: FixedArrayBase, index: Smi, value: T); - StoreElement(implicit context: Context)( + StoreElement(implicit context: Context)( elements: FixedArrayBase, index: Smi, value: Smi) { const elems: FixedArray = UnsafeCast(elements); StoreFixedArrayElementSmi(elems, index, value, SKIP_WRITE_BARRIER); } - StoreElement(implicit context: Context)( - elements: FixedArrayBase, index: Smi, value: Object) { - const elems: FixedArray = UnsafeCast(elements); - elems[index] = value; + StoreElement( + implicit context: + Context)(elements: FixedArrayBase, index: Smi, value: Object) { + const elements: FixedArray = UnsafeCast(elements); + elements.objects[index] = value; } - StoreElement(implicit context: Context)( - elements: FixedArrayBase, index: Smi, value: float64) { + StoreElement( + implicit context: + Context)(elements: FixedArrayBase, index: Smi, value: float64) { const elems: FixedDoubleArray = UnsafeCast(elements); - StoreFixedDoubleArrayElementWithSmiIndex(elems, index, value); + StoreFixedDoubleArrayElementSmi(elems, index, value); } // Fast-path for all PACKED_* elements kinds. 
These do not need to check @@ -149,15 +148,15 @@ namespace array { const kind: ElementsKind = array.map.elements_kind; if (kind == PACKED_SMI_ELEMENTS) { - EnsureWriteableFastElements(array); - FastPackedArrayReverse( + array::EnsureWriteableFastElements(array); + FastPackedArrayReverse( array.elements, array.length); } else if (kind == PACKED_ELEMENTS) { - EnsureWriteableFastElements(array); - FastPackedArrayReverse( + array::EnsureWriteableFastElements(array); + FastPackedArrayReverse( array.elements, array.length); } else if (kind == PACKED_DOUBLE_ELEMENTS) { - FastPackedArrayReverse( + FastPackedArrayReverse( array.elements, array.length); } else { goto Slow; diff --git a/deps/v8/src/builtins/array-slice.tq b/deps/v8/src/builtins/array-slice.tq index 615b4b70730fd4..847729b60710b5 100644 --- a/deps/v8/src/builtins/array-slice.tq +++ b/deps/v8/src/builtins/array-slice.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_slice { macro HandleSimpleArgumentsSlice( context: Context, args: JSArgumentsObjectWithLength, start: Smi, count: Smi): JSArray @@ -43,13 +43,13 @@ namespace array { // defined arguments const end: Smi = start + count; const unmappedElements: FixedArray = - Cast(sloppyElements[kSloppyArgumentsArgumentsIndex]) + Cast(sloppyElements.objects[kSloppyArgumentsArgumentsIndex]) otherwise Bailout; const unmappedElementsLength: Smi = unmappedElements.length; if (SmiAbove(end, unmappedElementsLength)) goto Bailout; - const argumentsContext: Context = - UnsafeCast(sloppyElements[kSloppyArgumentsContextIndex]); + const argumentsContext: Context = UnsafeCast( + sloppyElements.objects[kSloppyArgumentsContextIndex]); const arrayMap: Map = LoadJSArrayElementsMap(HOLEY_ELEMENTS, context); const result: JSArray = @@ -62,10 +62,10 @@ namespace array { // Fill in the part of the result that map to context-mapped parameters. for (let current: Smi = start; current < to; ++current) { const e: Object = - sloppyElements[current + kSloppyArgumentsParameterMapStart]; + sloppyElements.objects[current + kSloppyArgumentsParameterMapStart]; const newElement: Object = e != Hole ? argumentsContext[UnsafeCast(e)] : - unmappedElements[current]; + unmappedElements.objects[current]; StoreFixedArrayElementSmi( resultElements, indexOut++, newElement, SKIP_WRITE_BARRIER); } @@ -193,7 +193,7 @@ namespace array { const kValue: Object = GetProperty(o, pK); // ii. Perform ? CreateDataPropertyOrThrow(A, ! ToString(n), kValue). - CreateDataProperty(a, n, kValue); + FastCreateDataProperty(a, n, kValue); } // d. Increase k by 1. diff --git a/deps/v8/src/builtins/array-some.tq b/deps/v8/src/builtins/array-some.tq new file mode 100644 index 00000000000000..f68ea4ac30be66 --- /dev/null +++ b/deps/v8/src/builtins/array-some.tq @@ -0,0 +1,150 @@ +// Copyright 2018 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +namespace array { + transitioning javascript builtin + ArraySomeLoopEagerDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object): Object { + // All continuation points in the optimized some implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. 
+ // + // Also, this great mass of casts is necessary because the signature + // of Torque javascript builtins requires Object type for all parameters + // other than {context}. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + const numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + return ArraySomeLoopContinuation( + jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, + numberLength, Undefined); + } + + transitioning javascript builtin + ArraySomeLoopLazyDeoptContinuation(implicit context: Context)( + receiver: Object, callback: Object, thisArg: Object, initialK: Object, + length: Object, result: Object): Object { + // All continuation points in the optimized some implementation are + // after the ToObject(O) call that ensures we are dealing with a + // JSReceiver. + const jsreceiver = Cast(receiver) otherwise unreachable; + const callbackfn = Cast(callback) otherwise unreachable; + let numberK = Cast(initialK) otherwise unreachable; + const numberLength = Cast(length) otherwise unreachable; + + // This custom lazy deopt point is right after the callback. some() needs + // to pick up at the next step: if the result is true, then return, + // otherwise, keep going through the array starting from k + 1. + if (ToBoolean(result)) { + return True; + } + + numberK = numberK + 1; + + return ArraySomeLoopContinuation( + jsreceiver, callbackfn, thisArg, Undefined, jsreceiver, numberK, + numberLength, Undefined); + } + + transitioning builtin ArraySomeLoopContinuation(implicit context: Context)( + receiver: JSReceiver, callbackfn: Callable, thisArg: Object, + array: Object, o: JSReceiver, initialK: Number, length: Number, + initialTo: Object): Object { + // 5. Let k be 0. + // 6. Repeat, while k < len + for (let k: Number = initialK; k < length; k++) { + // 6a. Let Pk be ! ToString(k). + // k is guaranteed to be a positive integer, hence ToString is + // side-effect free and HasProperty/GetProperty do the conversion inline. + + // 6b. Let kPresent be ? HasProperty(O, Pk). + const kPresent: Boolean = HasProperty_Inline(o, k); + + // 6c. If kPresent is true, then + if (kPresent == True) { + // 6c. i. Let kValue be ? Get(O, Pk). + const kValue: Object = GetProperty(o, k); + + // 6c. ii. Perform ? Call(callbackfn, T, ). + const result: Object = Call(context, callbackfn, thisArg, kValue, k, o); + + // iii. If selected is true, then... + if (ToBoolean(result)) { + return True; + } + } + + // 6d. Increase k by 1. (done by the loop). + } + return False; + } + + transitioning macro FastArraySome(implicit context: Context)( + o: JSReceiver, len: Number, callbackfn: Callable, thisArg: Object): Object + labels Bailout(Smi) { + let k: Smi = 0; + const smiLen = Cast(len) otherwise goto Bailout(k); + let fastO = Cast(o) otherwise goto Bailout(k); + let fastOW = NewFastJSArrayWitness(fastO); + + // Build a fast loop over the smi array. + for (; k < smiLen; k++) { + fastOW.Recheck() otherwise goto Bailout(k); + + // Ensure that we haven't walked beyond a possibly updated length. 
+ if (k >= fastOW.Get().length) goto Bailout(k); + const value: Object = fastOW.LoadElementNoHole(k) otherwise continue; + const result: Object = + Call(context, callbackfn, thisArg, value, k, fastOW.Get()); + if (ToBoolean(result)) { + return True; + } + } + return False; + } + + // https://tc39.github.io/ecma262/#sec-array.prototype.some + transitioning javascript builtin + ArraySome(implicit context: Context)(receiver: Object, ...arguments): Object { + try { + if (IsNullOrUndefined(receiver)) { + goto NullOrUndefinedError; + } + + // 1. Let O be ? ToObject(this value). + const o: JSReceiver = ToObject_Inline(context, receiver); + + // 2. Let len be ? ToLength(? Get(O, "length")). + const len: Number = GetLengthProperty(o); + + // 3. If IsCallable(callbackfn) is false, throw a TypeError exception. + if (arguments.length == 0) { + goto TypeError; + } + const callbackfn = Cast(arguments[0]) otherwise TypeError; + + // 4. If thisArg is present, let T be thisArg; else let T be undefined. + const thisArg: Object = arguments.length > 1 ? arguments[1] : Undefined; + + // Special cases. + try { + return FastArraySome(o, len, callbackfn, thisArg) + otherwise Bailout; + } + label Bailout(kValue: Smi) deferred { + return ArraySomeLoopContinuation( + o, callbackfn, thisArg, Undefined, o, kValue, len, Undefined); + } + } + label TypeError deferred { + ThrowTypeError(kCalledNonCallable, arguments[0]); + } + label NullOrUndefinedError deferred { + ThrowTypeError(kCalledOnNullOrUndefined, 'Array.prototype.some'); + } + } +} diff --git a/deps/v8/src/builtins/array-splice.tq b/deps/v8/src/builtins/array-splice.tq index 7307f45f3463de..586630cd3934e4 100644 --- a/deps/v8/src/builtins/array-splice.tq +++ b/deps/v8/src/builtins/array-splice.tq @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_splice { // Given {elements}, we want to create a non-zero length array of type // FixedArrayType. Most of this behavior is outsourced to ExtractFixedArray(), // but the special case of wanting to have a FixedDoubleArray when given a @@ -39,7 +39,7 @@ namespace array { macro StoreHoles( elements: FixedArrayType, holeStartIndex: Smi, holeEndIndex: Smi): void { for (let i: Smi = holeStartIndex; i < holeEndIndex; i++) { - StoreArrayHole(elements, i); + array::StoreArrayHole(elements, i); } } @@ -57,7 +57,7 @@ namespace array { lengthDelta: Smi, actualStart: Smi, insertCount: Smi, actualDeleteCount: Smi): void labels Bailout { // Make sure elements are writable. - EnsureWriteableFastElements(a); + array::EnsureWriteableFastElements(a); if (insertCount != actualDeleteCount) { const elements: FixedArrayBase = a.elements; @@ -197,7 +197,7 @@ namespace array { const fromValue: Object = GetProperty(o, from); // ii. Perform ? CreateDataPropertyOrThrow(A, ! ToString(k), fromValue). - CreateDataProperty(a, k, fromValue); + FastCreateDataProperty(a, k, fromValue); } // d. Increment k by 1. @@ -398,7 +398,7 @@ namespace array { // Bailout exception. 
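The ArraySome builtins above use the same fast/slow split: FastArraySome walks a FastJSArrayWitness, re-validating the map with Recheck() and re-reading the length on every iteration, and bails out to ArraySomeLoopContinuation at the current index as soon as either check fails; the lazy-deopt continuation returns True immediately if the last callback result was truthy and otherwise resumes the generic loop at k + 1. The user-visible consequences, in plain JavaScript (again only illustrative, not part of the patch):

    // some() stops at the first truthy callback result; later elements
    // are never visited.
    let calls = 0;
    [1, 2, 3, 4].some((v) => { calls++; return v > 2; });
    console.log(calls);      // 3

    // A callback that shrinks the array mid-iteration: the fast loop's
    // "possibly updated length" check above is what keeps the builtin from
    // reading past the shortened array, and the now-missing indices are
    // simply skipped by the generic continuation's HasProperty test.
    const arr = [1, 2, 3, 4, 5];
    const hit = arr.some((v) => { arr.length = 2; return v === 5; });
    console.log(hit);        // false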
const newLength: Number = len + insertCount - actualDeleteCount; if (newLength > kMaxSafeInteger) { - ThrowTypeError(context, kInvalidArrayLength, start); + ThrowTypeError(kInvalidArrayLength, start); } try { diff --git a/deps/v8/src/builtins/array-unshift.tq b/deps/v8/src/builtins/array-unshift.tq index 7d7647427aa2e5..b193e751fd69da 100644 --- a/deps/v8/src/builtins/array-unshift.tq +++ b/deps/v8/src/builtins/array-unshift.tq @@ -2,14 +2,14 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -namespace array { +namespace array_unshift { extern builtin ArrayUnshift(Context, JSFunction, Object, int32); macro TryFastArrayUnshift( context: Context, receiver: Object, arguments: constexpr Arguments): never labels Slow { const array: FastJSArray = Cast(receiver) otherwise Slow; - EnsureWriteableFastElements(array); + array::EnsureWriteableFastElements(array); const map: Map = array.map; if (!IsExtensibleMap(map)) goto Slow; @@ -36,7 +36,7 @@ namespace array { if (argCount > 0) { // a. If len + argCount > 2**53 - 1, throw a TypeError exception. if (length + argCount > kMaxSafeInteger) { - ThrowTypeError(context, kInvalidArrayLength); + ThrowTypeError(kInvalidArrayLength); } // b. Let k be len. diff --git a/deps/v8/src/builtins/array.tq b/deps/v8/src/builtins/array.tq index 8e3b3ea704aed9..9807db19c6d774 100644 --- a/deps/v8/src/builtins/array.tq +++ b/deps/v8/src/builtins/array.tq @@ -43,19 +43,10 @@ namespace array { } macro LoadElementOrUndefined(a: FixedArray, i: Smi): Object { - const e: Object = a[i]; + const e: Object = a.objects[i]; return e == Hole ? Undefined : e; } - macro LoadElementOrUndefined(a: FixedArray, i: intptr): Object { - const e: Object = a[i]; - return e == Hole ? Undefined : e; - } - - macro LoadElementOrUndefined(a: FixedArray, i: constexpr int31): Object { - return LoadElementOrUndefined(a, Convert(i)); - } - macro LoadElementOrUndefined(a: FixedDoubleArray, i: Smi): NumberOrUndefined { try { const f: float64 = LoadDoubleWithHoleCheck(a, i) otherwise IfHole; @@ -66,34 +57,18 @@ namespace array { } } - macro LoadElementOrUndefined(a: FixedDoubleArray, i: intptr): - NumberOrUndefined { - try { - const f: float64 = LoadDoubleWithHoleCheck(a, i) otherwise IfHole; - return AllocateHeapNumberWithValue(f); - } - label IfHole { - return Undefined; - } - } - - macro LoadElementOrUndefined(a: FixedDoubleArray, i: constexpr int31): - NumberOrUndefined { - return LoadElementOrUndefined(a, Convert(i)); - } - macro StoreArrayHole(elements: FixedDoubleArray, k: Smi): void { StoreFixedDoubleArrayHoleSmi(elements, k); } macro StoreArrayHole(elements: FixedArray, k: Smi): void { - elements[k] = Hole; + elements.objects[k] = Hole; } macro CopyArrayElement( elements: FixedArray, newElements: FixedArray, from: Smi, to: Smi): void { - const e: Object = elements[from]; - newElements[to] = e; + const e: Object = elements.objects[from]; + newElements.objects[to] = e; } macro CopyArrayElement( @@ -102,7 +77,7 @@ namespace array { try { const floatValue: float64 = LoadDoubleWithHoleCheck(elements, from) otherwise FoundHole; - newElements[to] = floatValue; + newElements.floats[to] = floatValue; } label FoundHole { StoreArrayHole(newElements, to); diff --git a/deps/v8/src/builtins/base.tq b/deps/v8/src/builtins/base.tq index 7887fa1383a552..eca9e4f66712ae 100644 --- a/deps/v8/src/builtins/base.tq +++ b/deps/v8/src/builtins/base.tq @@ -10,6 +10,10 @@ #include 'src/objects.h' #include 'src/objects/arguments.h' #include 'src/objects/bigint.h' 
+#include 'src/objects/js-generator.h' +#include 'src/objects/js-promise.h' +#include 'src/objects/module.h' +#include 'src/objects/stack-frame-info.h' type Arguments constexpr 'CodeStubArguments*'; type void; @@ -19,15 +23,27 @@ type Tagged generates 'TNode' constexpr 'ObjectPtr'; type Smi extends Tagged generates 'TNode' constexpr 'Smi'; // A Smi that is greater than or equal to 0. See TaggedIsPositiveSmi. -type PositiveSmi extends Smi generates 'TNode'; +type PositiveSmi extends Smi; -class HeapObject extends Tagged { - map_untyped: Tagged; -} +// The Smi value zero, which is often used as null for HeapObject types. +type Zero extends PositiveSmi; + +extern class HeapObject extends Tagged { map: Map; } type Object = Smi | HeapObject; type int32 generates 'TNode' constexpr 'int32_t'; type uint32 generates 'TNode' constexpr 'uint32_t'; +type int31 extends int32 + generates 'TNode' constexpr 'int31_t'; +type uint31 extends uint32 + generates 'TNode' constexpr 'uint31_t'; +type int16 extends int31 + generates 'TNode' constexpr 'int16_t'; +type uint16 extends uint31 + generates 'TNode' constexpr 'uint16_t'; +type int8 extends int16 generates 'TNode' constexpr 'int8_t'; +type uint8 extends uint16 + generates 'TNode' constexpr 'uint8_t'; type int64 generates 'TNode' constexpr 'int64_t'; type intptr generates 'TNode' constexpr 'intptr_t'; type uintptr generates 'TNode' constexpr 'uintptr_t'; @@ -37,14 +53,12 @@ type bool generates 'TNode' constexpr 'bool'; type bint generates 'TNode' constexpr 'BInt'; type string constexpr 'const char*'; -type int31 extends int32 - generates 'TNode' constexpr 'int31_t'; type RawPtr generates 'TNode' constexpr 'void*'; type AbstractCode extends HeapObject generates 'TNode'; type Code extends AbstractCode generates 'TNode'; type BuiltinPtr extends Smi generates 'TNode'; type Context extends HeapObject generates 'TNode'; -type NativeContext extends Context generates 'TNode'; +type NativeContext extends Context; type String extends HeapObject generates 'TNode'; type Oddball extends HeapObject generates 'TNode'; type HeapNumber extends HeapObject generates 'TNode'; @@ -52,70 +66,129 @@ type Number = Smi | HeapNumber; type BigInt extends HeapObject generates 'TNode'; type Numeric = Number | BigInt; +extern class Struct extends HeapObject {} + +extern class Tuple2 extends Struct { + value_1: Object; + value_2: Object; +} + +extern class Tuple3 extends Tuple2 { value_3: Object; } + +// A direct string can be accessed directly through CSA without going into the +// C++ runtime. See also: ToDirectStringAssembler. +type DirectString extends String; + +type RootIndex generates 'TNode' constexpr 'RootIndex'; + type Map extends HeapObject generates 'TNode'; -// The accessors for HeapObject's map cannot be declared before Map -// is declared because forward declarations are not (yet) supported. -// TODO(danno): Make circular references in classes possible. One way to do that -// would be to pre-process all class declarations and create bindings for them -// with an uninitialized class type, and then process them later properly -extern operator '.map' macro LoadMap(HeapObject): Map; -extern transitioning operator '.map=' macro StoreMap(HeapObject, Map); - -// This intrinsic should never be called from Torque code. It's used internally -// by the 'new' operator and only declared here because it's simpler than -// building the definition from C++. 
-intrinsic %Allocate(size: intptr): Class; -type FixedArrayBase extends HeapObject generates 'TNode'; -type FixedArray extends FixedArrayBase generates 'TNode'; -type FixedDoubleArray extends FixedArrayBase - generates 'TNode'; +extern class FixedArrayBase extends HeapObject { length: Smi; } -class JSReceiver extends HeapObject { - properties_or_hash: Object; +extern class FixedArray extends FixedArrayBase { objects[length]: Object; } + +extern class FixedDoubleArray extends FixedArrayBase { + floats[length]: float64; } -type Constructor extends JSReceiver generates 'TNode'; -type JSProxy extends JSReceiver generates 'TNode'; +// These intrinsics should never be called from Torque code. They're used +// internally by the 'new' operator and only declared here because it's simpler +// than building the definition from C++. +intrinsic %GetAllocationBaseSize(map: Map): intptr; +intrinsic %Allocate(size: intptr): Class; +intrinsic %AllocateInternalClass(slotCount: constexpr intptr): Class; -class JSObject extends JSReceiver { - elements: FixedArrayBase; +extern class JSReceiver extends HeapObject { + properties_or_hash: FixedArrayBase | Smi; } -class JSArgumentsObjectWithLength extends JSObject { - length: Object; +type Constructor extends JSReceiver; + +extern class JSObject extends JSReceiver { elements: FixedArrayBase; } + +macro NewJSObject( + map: Map, properties: FixedArrayBase | Smi, + elements: FixedArrayBase): JSObject { + return new JSObject{map, properties, elements}; +} +macro NewJSObject(implicit context: Context)(): JSObject { + const objectFunction: JSFunction = GetObjectFunction(); + const map: Map = Cast(objectFunction.prototype_or_initial_map) + otherwise unreachable; + return new JSObject{map, kEmptyFixedArray, kEmptyFixedArray}; } -class JSArray extends JSObject { - constructor(implicit context: Context)() { - super( - GetFastPackedSmiElementsJSArrayMap(), kEmptyFixedArray, - kEmptyFixedArray); - this.length = 0; - } +extern class JSProxy extends JSReceiver { + target: Object; + handler: Object; +} + +extern class JSProxyRevocableResult extends JSObject { + proxy: Object; + revoke: Object; +} + +extern class JSGlobalProxy extends JSObject { native_context: Object; } + +extern class JSValue extends JSObject { value: Object; } + +extern class JSArgumentsObjectWithLength extends JSObject { length: Object; } + +extern class JSArray extends JSObject { IsEmpty(): bool { return this.length == 0; } length: Number; } +macro NewJSArray(implicit context: Context)( + map: Map, elements: FixedArrayBase): JSArray { + return new JSArray{map, kEmptyFixedArray, elements, elements.length}; +} + +macro NewJSArray(implicit context: Context)(): JSArray { + return new JSArray{ + GetFastPackedSmiElementsJSArrayMap(), + kEmptyFixedArray, + kEmptyFixedArray, + 0 + }; +} + // A HeapObject with a JSArray map, and either fast packed elements, or fast // holey elements when the global NoElementsProtector is not invalidated. -transient type FastJSArray extends JSArray - generates 'TNode'; +transient type FastJSArray extends JSArray; // A FastJSArray when the global ArraySpeciesProtector is not invalidated. -transient type FastJSArrayForCopy extends FastJSArray - generates 'TNode'; +transient type FastJSArrayForCopy extends FastJSArray; // A FastJSArray when the global ArrayIteratorProtector is not invalidated. 
-transient type FastJSArrayWithNoCustomIteration extends FastJSArray - generates 'TNode'; +transient type FastJSArrayWithNoCustomIteration extends FastJSArray; + +type NoSharedNameSentinel extends Smi; +type Script extends HeapObject; +type DebugInfo extends HeapObject; + +type ScopeInfo extends Object generates 'TNode'; + +extern class SharedFunctionInfo extends HeapObject { + weak function_data: Object; + name_or_scope_info: String | NoSharedNameSentinel | ScopeInfo; + outer_scope_info_or_feedback_metadata: HeapObject; + script_or_debug_info: Script | DebugInfo; + length: int16; + formal_parameter_count: uint16; + expected_nof_properties: int8; + builtin_function_id: int8; + function_token_offset: int16; + flags: int32; +} -type SharedFunctionInfo extends HeapObject - generates 'TNode'; +extern class SharedFunctionInfoWithID extends SharedFunctionInfo { + unique_id: int32; +} -class JSFunction extends JSObject { +extern class JSFunction extends JSObject { shared_function_info: SharedFunctionInfo; context: Context; feedback_cell: Smi; @@ -123,35 +196,41 @@ class JSFunction extends JSObject { weak prototype_or_initial_map: JSReceiver | Map; } -extern operator '.formal_parameter_count' - macro LoadSharedFunctionInfoFormalParameterCount(SharedFunctionInfo): int32; - -class JSBoundFunction extends JSObject { +extern class JSBoundFunction extends JSObject { bound_target_function: JSReceiver; bound_this: Object; bound_arguments: FixedArray; } type Callable = JSFunction | JSBoundFunction | JSProxy; -type FixedTypedArrayBase extends FixedArrayBase - generates 'TNode'; + +extern class FixedTypedArrayBase extends FixedArrayBase { + base_pointer: Smi; + external_pointer: RawPtr; +} +extern operator '.length_intptr' macro LoadAndUntagFixedArrayBaseLength( + FixedArrayBase): intptr; + type FixedTypedArray extends FixedTypedArrayBase generates 'TNode'; -type SloppyArgumentsElements extends FixedArray - generates 'TNode'; + +extern class SloppyArgumentsElements extends FixedArray {} type NumberDictionary extends HeapObject generates 'TNode'; -// RawObjectCasts should *never* be used anywhere in Torque code except for +// %RawDownCast should *never* be used anywhere in Torque code except for // in Torque-based UnsafeCast operators preceeded by an appropriate // type assert() -intrinsic %RawObjectCast(o: Object): A; -intrinsic %RawPointerCast(p: RawPtr): A; +intrinsic %RawDownCast(x: From): To; intrinsic %RawConstexprCast(f: From): To; type NativeContextSlot generates 'TNode' constexpr 'int32_t'; const ARRAY_BUFFER_FUN_INDEX: constexpr NativeContextSlot generates 'Context::ARRAY_BUFFER_FUN_INDEX'; +const ARRAY_BUFFER_NOINIT_FUN_INDEX: constexpr NativeContextSlot + generates 'Context::ARRAY_BUFFER_NOINIT_FUN_INDEX'; +const ARRAY_BUFFER_MAP_INDEX: constexpr NativeContextSlot + generates 'Context::ARRAY_BUFFER_MAP_INDEX'; const ARRAY_JOIN_STACK_INDEX: constexpr NativeContextSlot generates 'Context::ARRAY_JOIN_STACK_INDEX'; const OBJECT_FUNCTION_INDEX: constexpr NativeContextSlot @@ -168,23 +247,65 @@ extern operator '[]=' macro StoreContextElement( extern operator '[]' macro LoadContextElement(Context, intptr): Object; extern operator '[]' macro LoadContextElement(Context, Smi): Object; -type JSArrayBuffer extends JSObject generates 'TNode'; -type JSArrayBufferView extends JSObject - generates 'TNode'; -type JSTypedArray extends JSArrayBufferView - generates 'TNode'; +extern class JSArrayBuffer extends JSObject { + byte_length: uintptr; + backing_store: RawPtr; +} + +extern class JSArrayBufferView extends 
JSObject { + buffer: JSArrayBuffer; + byte_offset: uintptr; + byte_length: uintptr; +} + +extern class JSTypedArray extends JSArrayBufferView { + AttachOffHeapBuffer( + buffer: JSArrayBuffer, map: Map, length: PositiveSmi, + byteOffset: uintptr): void { + const basePointer: Smi = 0; + + // The max byteOffset is 8 * MaxSmi on the particular platform. 32 bit + // platforms are self-limiting, because we can't allocate an array bigger + // than our 32-bit arithmetic range anyway. 64 bit platforms could + // theoretically have an offset up to 2^35 - 1. + const backingStore = buffer.backing_store; + const externalPointer = backingStore + Convert(byteOffset); + + // Assert no overflow has occurred. Only assert if the mock array buffer + // allocator is NOT used. When the mock array buffer is used, impossibly + // large allocations are allowed that would erroneously cause an overflow + // and this assertion to fail. + assert( + IsMockArrayBufferAllocatorFlag() || + Convert(externalPointer) >= Convert(backingStore)); + + this.buffer = buffer; + this.elements = new + FixedTypedArrayBase{map, length, basePointer, externalPointer}; + } + + length: Smi; +} + +extern class JSAccessorPropertyDescriptor extends JSObject { + get: Object; + set: Object; + enumerable: Object; + configurable: Object; +} + +extern class JSCollection extends JSObject { table: Object; } + type JSDataView extends JSArrayBufferView generates 'TNode'; type InstanceType generates 'TNode' constexpr 'InstanceType'; type ElementsKind generates 'TNode' constexpr 'ElementsKind'; -type LanguageMode extends Tagged - generates 'TNode' constexpr 'LanguageMode'; +type LanguageMode extends Smi constexpr 'LanguageMode'; type ExtractFixedArrayFlags generates 'TNode' constexpr 'CodeStubAssembler::ExtractFixedArrayFlags'; type ParameterMode generates 'TNode' constexpr 'ParameterMode'; -type RootIndex generates 'TNode' constexpr 'RootIndex'; type WriteBarrierMode generates 'TNode' constexpr 'WriteBarrierMode'; @@ -193,6 +314,79 @@ type ToIntegerTruncationMode constexpr 'CodeStubAssembler::ToIntegerTruncationMode'; type AllocationFlags constexpr 'AllocationFlags'; +extern class Foreign extends HeapObject { foreign_address: RawPtr; } + +extern class InterceptorInfo extends Struct { + getter: Foreign | Zero; + setter: Foreign | Zero; + query: Foreign | Zero; + descriptor: Foreign | Zero; + deleter: Foreign | Zero; + enumerator: Foreign | Zero; + definer: Foreign | Zero; + data: Object; + flags: Smi; +} + +extern class AccessCheckInfo extends Struct { + callback: Foreign | Zero; + named_interceptor: InterceptorInfo | Zero; + indexed_interceptor: InterceptorInfo | Zero; + data: Object; +} + +extern class ArrayBoilerplateDescription extends Struct { + flags: Smi; + constant_elements: FixedArrayBase; +} + +extern class AliasedArgumentsEntry extends Struct { aliased_context_slot: Smi; } + +extern class Cell extends HeapObject { value: Object; } + +extern class DataHandler extends Struct { + smi_handler: Smi | Code; + validity_cell: Smi | Cell; + weak data_1: Object; + weak data_2: Object; + weak data_3: Object; +} + +extern class JSPromise extends JSObject { + reactions_or_result: Object; + flags: Smi; +} + +extern class Microtask extends Struct {} + +extern class CallbackTask extends Microtask { + callback: Foreign; + data: Foreign; +} + +extern class CallableTask extends Microtask { + callable: JSReceiver; + context: Context; +} + +extern class StackFrameInfo extends Struct { + line_number: Smi; + column_number: Smi; + script_id: Smi; + script_name: 
Object; + script_name_or_source_url: Object; + function_name: Object; + flag: Smi; + id: Smi; +} + +extern class ClassPositions extends Struct { + start: Smi; + end: Smi; +} + +extern class WasmExceptionTag extends Struct { index: Smi; } + const kSmiTagSize: constexpr int31 generates 'kSmiTagSize'; const NO_ELEMENTS: constexpr ElementsKind generates 'NO_ELEMENTS'; @@ -232,11 +426,12 @@ const BIGINT64_ELEMENTS: const kNone: constexpr AllocationFlags generates 'CodeStubAssembler::kNone'; -const kDoubleAlignment: - constexpr AllocationFlags generates 'kDoubleAlignment'; -const kPretenured: constexpr AllocationFlags generates 'kPretenured'; -const kAllowLargeObjectAllocation: - constexpr AllocationFlags generates 'kAllowLargeObjectAllocation'; +const kDoubleAlignment: constexpr AllocationFlags + generates 'CodeStubAssembler::kDoubleAlignment'; +const kPretenured: + constexpr AllocationFlags generates 'CodeStubAssembler::kPretenured'; +const kAllowLargeObjectAllocation: constexpr AllocationFlags + generates 'CodeStubAssembler::kAllowLargeObjectAllocation'; type FixedUint8Array extends FixedTypedArray; type FixedInt8Array extends FixedTypedArray; @@ -264,27 +459,48 @@ const kEmptyFixedArrayRootIndex: const kTheHoleValueRootIndex: constexpr RootIndex generates 'RootIndex::kTheHoleValue'; +const kInvalidArrayBufferLength: constexpr MessageTemplate + generates 'MessageTemplate::kInvalidArrayBufferLength'; const kInvalidArrayLength: constexpr MessageTemplate generates 'MessageTemplate::kInvalidArrayLength'; const kCalledNonCallable: constexpr MessageTemplate generates 'MessageTemplate::kCalledNonCallable'; const kCalledOnNullOrUndefined: constexpr MessageTemplate generates 'MessageTemplate::kCalledOnNullOrUndefined'; +const kInvalidOffset: constexpr MessageTemplate + generates 'MessageTemplate::kInvalidOffset'; const kInvalidTypedArrayLength: constexpr MessageTemplate generates 'MessageTemplate::kInvalidTypedArrayLength'; +const kIteratorSymbolNonCallable: constexpr MessageTemplate + generates 'MessageTemplate::kIteratorSymbolNonCallable'; const kIteratorValueNotAnObject: constexpr MessageTemplate generates 'MessageTemplate::kIteratorValueNotAnObject'; const kNotIterable: constexpr MessageTemplate generates 'MessageTemplate::kNotIterable'; +const kReduceNoInitial: constexpr MessageTemplate + generates 'MessageTemplate::kReduceNoInitial'; +const kFirstArgumentNotRegExp: constexpr MessageTemplate + generates 'MessageTemplate::kFirstArgumentNotRegExp'; +const kBigIntMixedTypes: constexpr MessageTemplate + generates 'MessageTemplate::kBigIntMixedTypes'; +const kTypedArrayTooShort: constexpr MessageTemplate + generates 'MessageTemplate::kTypedArrayTooShort'; const kMaxArrayIndex: constexpr uint32 generates 'JSArray::kMaxArrayIndex'; const kTypedArrayMaxByteLength: constexpr uintptr generates 'FixedTypedArrayBase::kMaxByteLength'; +const V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP: + constexpr int31 generates 'V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP'; const kMaxSafeInteger: constexpr float64 generates 'kMaxSafeInteger'; +const kSmiMaxValue: constexpr uintptr generates 'kSmiMaxValue'; const kStringMaxLength: constexpr int31 generates 'String::kMaxLength'; const kFixedArrayMaxLength: constexpr int31 generates 'FixedArray::kMaxLength'; +const kFixedTypedArrayBaseHeaderSize: constexpr intptr + generates 'FixedTypedArrayBase::kHeaderSize'; +const kObjectAlignmentMask: constexpr intptr + generates 'kObjectAlignmentMask'; const kMaxRegularHeapObjectSize: constexpr int31 generates 'kMaxRegularHeapObjectSize'; @@ -315,11 +531,12 @@ 
const kInvalidDataViewAccessorOffset: constexpr MessageTemplate const kStrictReadOnlyProperty: constexpr MessageTemplate generates 'MessageTemplate::kStrictReadOnlyProperty'; -type Hole extends Oddball generates 'TNode'; -type Null extends Oddball generates 'TNode'; -type Undefined extends Oddball generates 'TNode'; -type True extends Oddball generates 'TNode'; -type False extends Oddball generates 'TNode'; +type Hole extends Oddball; +type Null extends Oddball; +type Undefined extends Oddball; +type True extends Oddball; +type False extends Oddball; +type EmptyString extends String; type Boolean = True | False; type NumberOrUndefined = Number | Undefined; @@ -331,7 +548,7 @@ extern macro TrueConstant(): True; extern macro FalseConstant(): False; extern macro Int32TrueConstant(): bool; extern macro Int32FalseConstant(): bool; -extern macro EmptyStringConstant(): String; +extern macro EmptyStringConstant(): EmptyString; extern macro LengthStringConstant(): String; const Hole: Hole = TheHoleConstant(); @@ -339,7 +556,7 @@ const Null: Null = NullConstant(); const Undefined: Undefined = UndefinedConstant(); const True: True = TrueConstant(); const False: False = FalseConstant(); -const kEmptyString: String = EmptyStringConstant(); +const kEmptyString: EmptyString = EmptyStringConstant(); const kLengthString: String = LengthStringConstant(); const true: constexpr bool generates 'true'; @@ -356,6 +573,36 @@ const INTPTR_PARAMETERS: constexpr ParameterMode const SKIP_WRITE_BARRIER: constexpr WriteBarrierMode generates 'SKIP_WRITE_BARRIER'; +extern class AsyncGeneratorRequest extends Struct { + next: AsyncGeneratorRequest | Undefined; + resume_mode: Smi; + value: Object; + promise: JSPromise; +} + +extern class ModuleInfoEntry extends Struct { + export_name: String | Undefined; + local_name: String | Undefined; + import_name: String | Undefined; + module_request: Smi; + cell_index: Smi; + beg_pos: Smi; + end_pos: Smi; +} + +extern class PromiseCapability extends Struct { + promise: JSReceiver | Undefined; + resolve: Object; + reject: Object; +} + +extern class PromiseReaction extends Struct { + next: PromiseReaction | Zero; + reject_handler: Callable | Undefined; + fulfill_handler: Callable | Undefined; + promise_or_capability: JSPromise | PromiseCapability | Undefined; +} + extern macro Is64(): constexpr bool; extern macro SelectBooleanConstant(bool): Boolean; @@ -388,14 +635,18 @@ extern transitioning builtin HasProperty(implicit context: Context)( extern transitioning macro HasProperty_Inline(implicit context: Context)( JSReceiver, Object): Boolean; -extern macro ThrowRangeError(Context, constexpr MessageTemplate): never; -extern macro ThrowRangeError(Context, constexpr MessageTemplate, Object): never; -extern macro ThrowTypeError(Context, constexpr MessageTemplate): never; -extern macro ThrowTypeError( - Context, constexpr MessageTemplate, constexpr string): never; -extern macro ThrowTypeError(Context, constexpr MessageTemplate, Object): never; -extern macro ThrowTypeError( - Context, constexpr MessageTemplate, Object, Object, Object): never; +extern macro ThrowRangeError(implicit context: Context)( + constexpr MessageTemplate): never; +extern macro ThrowRangeError(implicit context: Context)( + constexpr MessageTemplate, Object): never; +extern macro ThrowTypeError(implicit context: Context)( + constexpr MessageTemplate): never; +extern macro ThrowTypeError(implicit context: Context)( + constexpr MessageTemplate, constexpr string): never; +extern macro ThrowTypeError(implicit context: 
Context)( + constexpr MessageTemplate, Object): never; +extern macro ThrowTypeError(implicit context: Context)( + constexpr MessageTemplate, Object, Object, Object): never; extern macro ArraySpeciesCreate(Context, Object, Number): JSReceiver; extern macro ArrayCreate(implicit context: Context)(Number): JSArray; extern macro BuildAppendJSArray( @@ -411,6 +662,12 @@ extern macro Construct(implicit context: Context)( Constructor, Object, Object): JSReceiver; extern macro Construct(implicit context: Context)( Constructor, Object, Object, Object): JSReceiver; +extern macro ConstructWithTarget(implicit context: Context)( + Constructor, JSReceiver): JSReceiver; +extern macro ConstructWithTarget(implicit context: Context)( + Constructor, JSReceiver, Object): JSReceiver; +extern macro SpeciesConstructor(implicit context: Context)( + Object, JSReceiver): JSReceiver; extern builtin ToObject(Context, Object): JSReceiver; extern macro ToObject_Inline(Context, Object): JSReceiver; @@ -422,19 +679,19 @@ extern builtin ToString(Context, Object): String; extern transitioning runtime NormalizeElements(Context, JSObject); extern transitioning runtime TransitionElementsKindWithKind( Context, JSObject, Smi); -extern transitioning runtime CreateDataProperty(implicit context: Context)( - JSReceiver, Object, Object); extern macro LoadBufferObject(RawPtr, constexpr int32): Object; extern macro LoadBufferPointer(RawPtr, constexpr int32): RawPtr; extern macro LoadBufferSmi(RawPtr, constexpr int32): Smi; +extern macro LoadFixedTypedArrayOnHeapBackingStore(FixedTypedArrayBase): RawPtr; extern macro LoadRoot(constexpr RootIndex): Object; extern macro StoreRoot(constexpr RootIndex, Object): Object; -extern macro LoadAndUntagToWord32Root(constexpr RootIndex): int32; extern runtime StringEqual(Context, String, String): Oddball; extern builtin StringLessThan(Context, String, String): Boolean; +extern macro StringCharCodeAt(String, intptr): int32; +extern runtime StringCompareSequence(Context, String, String, Number): Boolean; extern macro StrictEqual(Object, Object): Boolean; extern macro SmiLexicographicCompare(Smi, Smi): Smi; @@ -443,6 +700,8 @@ extern runtime ThrowInvalidStringLength(Context): never; extern operator '==' macro WordEqual(RawPtr, RawPtr): bool; extern operator '!=' macro WordNotEqual(RawPtr, RawPtr): bool; +extern operator '+' macro RawPtrAdd(RawPtr, intptr): RawPtr; +extern operator '+' macro RawPtrAdd(intptr, RawPtr): RawPtr; extern operator '<' macro Int32LessThan(int32, int32): bool; extern operator '<' macro Uint32LessThan(uint32, uint32): bool; @@ -493,6 +752,11 @@ extern operator '<=' macro IntPtrLessThanOrEqual(intptr, intptr): bool; extern operator '<=' macro UintPtrLessThanOrEqual(uintptr, uintptr): bool; extern operator '>=' macro IntPtrGreaterThanOrEqual(intptr, intptr): bool; extern operator '>=' macro UintPtrGreaterThanOrEqual(uintptr, uintptr): bool; +extern operator '~' macro WordNot(intptr): intptr; +extern operator '~' macro WordNot(uintptr): uintptr; +extern operator '~' macro ConstexprWordNot(constexpr intptr): constexpr intptr; +extern operator '~' macro ConstexprWordNot(constexpr uintptr): + constexpr uintptr; extern operator '==' macro Float64Equal(float64, float64): bool; extern operator '!=' macro Float64NotEqual(float64, float64): bool; @@ -503,8 +767,9 @@ extern macro BranchIfNumberEqual(Number, Number): never operator '==' macro IsNumberEqual(a: Number, b: Number): bool { return (BranchIfNumberEqual(a, b)) ? 
true : false; } -extern operator '!=' macro BranchIfNumberNotEqual(Number, Number): never - labels Taken, NotTaken; +operator '!=' macro IsNumberNotEqual(a: Number, b: Number): bool { + return (BranchIfNumberEqual(a, b)) ? false : true; +} extern operator '<' macro BranchIfNumberLessThan(Number, Number): never labels Taken, NotTaken; extern operator '<=' macro BranchIfNumberLessThanOrEqual(Number, Number): never @@ -516,8 +781,22 @@ extern operator '>=' macro BranchIfNumberGreaterThanOrEqual( Number, Number): never labels Taken, NotTaken; -extern operator '==' macro WordEqual(Object, Object): bool; -extern operator '!=' macro WordNotEqual(Object, Object): bool; +// The type of all tagged values that can safely be compared with WordEqual. +type TaggedWithIdentity = + JSReceiver | FixedArrayBase | Oddball | Map | EmptyString; + +extern operator '==' macro WordEqual(TaggedWithIdentity, Object): bool; +extern operator '==' macro WordEqual(Object, TaggedWithIdentity): bool; +extern operator '==' macro WordEqual( + TaggedWithIdentity, TaggedWithIdentity): bool; +extern operator '!=' macro WordNotEqual(TaggedWithIdentity, Object): bool; +extern operator '!=' macro WordNotEqual(Object, TaggedWithIdentity): bool; +extern operator '!=' macro WordNotEqual( + TaggedWithIdentity, TaggedWithIdentity): bool; +// Do not overload == and != if it is unclear if object identity is the right +// equality. +extern macro WordEqual(Object, Object): bool; +extern macro WordNotEqual(Object, Object): bool; extern operator '+' macro SmiAdd(Smi, Smi): Smi; extern operator '-' macro SmiSub(Smi, Smi): Smi; @@ -527,6 +806,10 @@ extern operator '<<' macro SmiShl(Smi, constexpr int31): Smi; extern operator '>>' macro SmiSar(Smi, constexpr int31): Smi; extern operator '+' macro IntPtrAdd(intptr, intptr): intptr; +extern operator '+' macro ConstexprIntPtrAdd( + constexpr intptr, constexpr intptr): constexpr intptr; +extern operator '+' macro ConstexprUintPtrAdd( + constexpr uintptr, constexpr uintptr): constexpr intptr; extern operator '-' macro IntPtrSub(intptr, intptr): intptr; extern operator '*' macro IntPtrMul(intptr, intptr): intptr; extern operator '/' macro IntPtrDiv(intptr, intptr): intptr; @@ -537,6 +820,7 @@ extern operator '|' macro WordOr(intptr, intptr): intptr; extern operator '+' macro UintPtrAdd(uintptr, uintptr): uintptr; extern operator '-' macro UintPtrSub(uintptr, uintptr): uintptr; +extern operator '<<' macro WordShl(uintptr, uintptr): uintptr; extern operator '>>>' macro WordShr(uintptr, uintptr): uintptr; extern operator '&' macro WordAnd(uintptr, uintptr): uintptr; extern operator '|' macro WordOr(uintptr, uintptr): uintptr; @@ -551,6 +835,9 @@ extern operator '&' macro Word32And(int32, int32): int32; extern operator '&' macro Word32And(uint32, uint32): uint32; extern operator '==' macro ConstexprInt31Equal(constexpr int31, constexpr int31): constexpr bool; +extern operator '>=' macro +ConstexprInt31GreaterThanEqual( + constexpr int31, constexpr int31): constexpr bool; extern operator '==' macro Word32Equal(int32, int32): bool; extern operator '==' macro Word32Equal(uint32, uint32): bool; @@ -563,6 +850,8 @@ extern operator '|' macro Word32Or(int32, int32): int32; extern operator '|' macro Word32Or(uint32, uint32): uint32; extern operator '&' macro Word32And(bool, bool): bool; extern operator '|' macro Word32Or(bool, bool): bool; +extern operator '==' macro Word32Equal(bool, bool): bool; +extern operator '!=' macro Word32NotEqual(bool, bool): bool; extern operator '+' macro Float64Add(float64, 
float64): float64; @@ -585,6 +874,10 @@ extern operator '>>>' macro ConstexprUintPtrShr( extern macro SmiMax(Smi, Smi): Smi; extern macro SmiMin(Smi, Smi): Smi; extern macro SmiMul(Smi, Smi): Number; +extern macro SmiMod(Smi, Smi): Number; + +extern macro IntPtrMax(intptr, intptr): intptr; +extern macro IntPtrMin(intptr, intptr): intptr; extern operator '!' macro ConstexprBoolNot(constexpr bool): constexpr bool; extern operator '!' macro Word32BinaryNot(bool): bool; @@ -594,6 +887,7 @@ extern operator '.instanceType' macro LoadInstanceType(HeapObject): InstanceType; extern operator '.length' macro LoadStringLengthAsWord(String): intptr; +extern operator '.length_smi' macro LoadStringLengthAsSmi(String): Smi; extern operator '.length' macro GetArgumentsLength(constexpr Arguments): intptr; extern operator '[]' macro GetArgumentValue( @@ -606,7 +900,7 @@ extern macro IsValidPositiveSmi(intptr): bool; extern macro HeapObjectToJSDataView(HeapObject): JSDataView labels CastError; -extern macro HeapObjectToJSTypedArray(HeapObject): JSTypedArray +extern macro HeapObjectToJSArrayBuffer(HeapObject): JSArrayBuffer labels CastError; extern macro TaggedToHeapObject(Object): HeapObject labels CastError; @@ -614,6 +908,8 @@ extern macro TaggedToSmi(Object): Smi labels CastError; extern macro TaggedToPositiveSmi(Object): PositiveSmi labels CastError; +extern macro TaggedToDirectString(Object): DirectString + labels CastError; extern macro HeapObjectToJSArray(HeapObject): JSArray labels CastError; extern macro HeapObjectToCallable(HeapObject): Callable @@ -634,98 +930,141 @@ extern macro HeapObjectToSloppyArgumentsElements(HeapObject): extern macro TaggedToNumber(Object): Number labels CastError; -macro CastHeapObject(o: HeapObject): A +macro Cast(implicit context: Context)(o: Object): A + labels CastError { + return Cast(TaggedToHeapObject(o) otherwise CastError) + otherwise CastError; +} + +Cast(o: Object): Smi + labels CastError { + return TaggedToSmi(o) otherwise CastError; +} + +Cast(o: Object): PositiveSmi + labels CastError { + return TaggedToPositiveSmi(o) otherwise CastError; +} + +Cast(o: Object): Number + labels CastError { + return TaggedToNumber(o) otherwise CastError; +} + +macro Cast(o: HeapObject): A labels CastError; -CastHeapObject(o: HeapObject): HeapObject +Cast(o: HeapObject): HeapObject labels CastError { return o; } -CastHeapObject(o: HeapObject): FixedArray +Cast(o: HeapObject): FixedArray labels CastError { return HeapObjectToFixedArray(o) otherwise CastError; } -CastHeapObject(o: HeapObject): FixedDoubleArray +Cast(o: HeapObject): FixedDoubleArray labels CastError { return HeapObjectToFixedDoubleArray(o) otherwise CastError; } -CastHeapObject(o: HeapObject): SloppyArgumentsElements +Cast(o: HeapObject): SloppyArgumentsElements labels CastError { return HeapObjectToSloppyArgumentsElements(o) otherwise CastError; } -CastHeapObject(o: HeapObject): JSDataView +Cast(o: HeapObject): JSDataView labels CastError { return HeapObjectToJSDataView(o) otherwise CastError; } -CastHeapObject(o: HeapObject): JSTypedArray +Cast(o: HeapObject): JSTypedArray labels CastError { - if (IsJSTypedArray(o)) return %RawObjectCast(o); + if (IsJSTypedArray(o)) return %RawDownCast(o); goto CastError; } -CastHeapObject(o: HeapObject): Callable +Cast(implicit context: Context)(o: Object): JSTypedArray + labels CastError { + const heapObject = Cast(o) otherwise CastError; + return Cast(heapObject) otherwise CastError; +} + +Cast(o: HeapObject): Callable + labels CastError { + return HeapObjectToCallable(o) 
otherwise CastError;
+}
+
+Cast(o: HeapObject): Undefined | Callable
+    labels CastError {
+  if (o == Undefined) return Undefined;
   return HeapObjectToCallable(o) otherwise CastError;
 }
-CastHeapObject(o: HeapObject): JSArray
+Cast(o: HeapObject): JSArray
     labels CastError {
   return HeapObjectToJSArray(o) otherwise CastError;
 }
-CastHeapObject(o: HeapObject): Context
+Cast(o: HeapObject): JSArrayBuffer
     labels CastError {
-  if (IsContext(o)) return %RawObjectCast(o);
+  return HeapObjectToJSArrayBuffer(o) otherwise CastError;
+}
+
+Cast(o: HeapObject): Context
+    labels CastError {
+  if (IsContext(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(o: HeapObject): JSObject
+Cast(o: HeapObject): JSObject
     labels CastError {
-  if (IsJSObject(o)) return %RawObjectCast(o);
+  if (IsJSObject(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(o: HeapObject): NumberDictionary
+Cast(o: HeapObject): NumberDictionary
     labels CastError {
-  if (IsNumberDictionary(o)) return %RawObjectCast(o);
+  if (IsNumberDictionary(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(o: HeapObject): FixedTypedArrayBase
+Cast(o: HeapObject): FixedTypedArrayBase
     labels CastError {
-  if (IsFixedTypedArray(o)) return %RawObjectCast(o);
+  if (IsFixedTypedArray(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(o: HeapObject): String
+Cast(o: HeapObject): String
     labels CastError {
   return HeapObjectToString(o) otherwise CastError;
 }
-CastHeapObject(o: HeapObject): Constructor
+Cast(o: HeapObject): DirectString
+    labels CastError {
+  return TaggedToDirectString(o) otherwise CastError;
+}
+
+Cast(o: HeapObject): Constructor
     labels CastError {
   return HeapObjectToConstructor(o) otherwise CastError;
 }
-CastHeapObject(o: HeapObject): HeapNumber
+Cast(o: HeapObject): HeapNumber
     labels CastError {
-  if (IsHeapNumber(o)) return %RawObjectCast(o);
+  if (IsHeapNumber(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(implicit context: Context)(o: HeapObject): Map
+Cast(implicit context: Context)(o: HeapObject): Map
     labels CastError {
-  if (IsMap(o)) return %RawObjectCast(o);
+  if (IsMap(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(implicit context: Context)(
-    o: HeapObject): JSArgumentsObjectWithLength
+Cast(implicit context: Context)(o: HeapObject):
+    JSArgumentsObjectWithLength
     labels CastError {
   const map: Map = o.map;
   try {
@@ -736,12 +1075,11 @@ CastHeapObject(implicit context: Context)(
     goto CastError;
   }
   label True {
-    return %RawObjectCast(o);
+    return %RawDownCast(o);
   }
 }
-CastHeapObject(implicit context: Context)(o: HeapObject):
-    FastJSArray
+Cast(implicit context: Context)(o: HeapObject): FastJSArray
     labels CastError {
   const map: Map = o.map;
   if (!IsJSArrayMap(map)) goto CastError;
@@ -754,95 +1092,50 @@ CastHeapObject(implicit context: Context)(o: HeapObject):
   if (!IsPrototypeInitialArrayPrototype(map)) goto CastError;
   if (IsNoElementsProtectorCellInvalid()) goto CastError;
-  return %RawObjectCast(o);
-}
-
-struct FastJSArrayWitness {
-  array: HeapObject;
-  map: Map;
-}
-
-macro MakeWitness(array: FastJSArray): FastJSArrayWitness {
-  return FastJSArrayWitness{array, array.map};
-}
-
-macro Testify(witness: FastJSArrayWitness): FastJSArray labels CastError {
-  if (witness.array.map != witness.map) goto CastError;
-  // We don't need to check elements kind or whether the prototype
-  // has changed away from the default JSArray prototype, because
-  // if the map remains the same then those properties hold.
-  //
-  // However, we have to make sure there are no elements in the
-  // prototype chain.
-  if (IsNoElementsProtectorCellInvalid()) goto CastError;
-  return %RawObjectCast(witness.array);
+  return %RawDownCast(o);
 }
-CastHeapObject(implicit context: Context)(o: HeapObject):
+Cast(implicit context: Context)(o: HeapObject):
     FastJSArrayForCopy
     labels CastError {
   if (IsArraySpeciesProtectorCellInvalid()) goto CastError;
   const a: FastJSArray = Cast(o) otherwise CastError;
-  return %RawObjectCast(o);
+  return %RawDownCast(o);
 }
-CastHeapObject(implicit context: Context)(
+Cast(implicit context: Context)(
     o: HeapObject): FastJSArrayWithNoCustomIteration
     labels CastError {
   if (IsArrayIteratorProtectorCellInvalid()) goto CastError;
   const a: FastJSArray = Cast(o) otherwise CastError;
-  return %RawObjectCast(o);
+  return %RawDownCast(o);
 }
-CastHeapObject(implicit context: Context)(o: HeapObject): JSReceiver
+Cast(implicit context: Context)(o: HeapObject): JSReceiver
     labels CastError {
-  if (IsJSReceiver(o)) return %RawObjectCast(o);
+  if (IsJSReceiver(o)) return %RawDownCast(o);
   goto CastError;
 }
-CastHeapObject(implicit context: Context)(o: HeapObject): JSFunction
+Cast(implicit context: Context)(o: HeapObject): JSFunction
     labels CastError {
-  if (IsJSFunction(o)) return %RawObjectCast(o);
+  if (IsJSFunction(o)) return %RawDownCast(o);
   goto CastError;
 }
-macro Cast(implicit context: Context)(o: HeapObject): A
-    labels CastError {
-  return CastHeapObject(o) otherwise CastError;
-}
-
-// CastHeapObject allows this default-implementation to be non-recursive.
-// Otherwise the generated CSA code might run into infinite recursion.
-macro Cast(implicit context: Context)(o: Object): A
-    labels CastError {
-  return CastHeapObject(TaggedToHeapObject(o) otherwise CastError)
-      otherwise CastError;
-}
-
-Cast(o: Object): Smi
-    labels CastError {
-  return TaggedToSmi(o) otherwise CastError;
-}
-
-Cast(o: Object): PositiveSmi
-    labels CastError {
-  return TaggedToPositiveSmi(o) otherwise CastError;
-}
-
-Cast(o: Object): Number
-    labels CastError {
-  return TaggedToNumber(o) otherwise CastError;
-}
-
 extern macro AllocateHeapNumberWithValue(float64): HeapNumber;
 extern macro ChangeInt32ToTagged(int32): Number;
 extern macro ChangeUint32ToTagged(uint32): Number;
 extern macro ChangeUintPtrToFloat64(uintptr): float64;
 extern macro ChangeUintPtrToTagged(uintptr): Number;
 extern macro Unsigned(int32): uint32;
+extern macro Unsigned(int16): uint16;
+extern macro Unsigned(int8): uint8;
 extern macro Unsigned(intptr): uintptr;
 extern macro Unsigned(RawPtr): uintptr;
 extern macro Signed(uint32): int32;
+extern macro Signed(uint16): int16;
+extern macro Signed(uint8): int8;
 extern macro Signed(uintptr): intptr;
 extern macro Signed(RawPtr): intptr;
 extern macro TruncateIntPtrToInt32(intptr): int32;
@@ -861,6 +1154,11 @@ extern macro LoadNativeContext(Context): NativeContext;
 extern macro LoadJSArrayElementsMap(constexpr ElementsKind, Context): Map;
 extern macro LoadJSArrayElementsMap(ElementsKind, Context): Map;
 extern macro ChangeNonnegativeNumberToUintPtr(Number): uintptr;
+extern macro TryNumberToUintPtr(Number): uintptr labels IfNegative;
+macro TryUintPtrToPositiveSmi(ui: uintptr): PositiveSmi labels IfOverflow {
+  if (ui > kSmiMaxValue) goto IfOverflow;
+  return %RawDownCast(SmiTag(Signed(ui)));
+}
 extern macro NumberConstant(constexpr float64): Number;
 extern macro NumberConstant(constexpr int32): Number;
@@ -870,6 +1168,7 @@ extern macro IntPtrConstant(constexpr int32): intptr;
 extern macro Int32Constant(constexpr int31): int31;
 extern macro Int32Constant(constexpr int32): int32;
 extern macro Float64Constant(constexpr int31): float64;
+extern macro Float64Constant(constexpr float64): float64;
 extern macro SmiConstant(constexpr int31): Smi;
 extern macro SmiConstant(constexpr Smi): Smi;
 extern macro BoolConstant(constexpr bool): bool;
@@ -911,6 +1210,10 @@ FromConstexpr(i: constexpr uintptr): uintptr {
 FromConstexpr(i: constexpr int31): Smi {
   return %FromConstexpr(i);
 }
+FromConstexpr(i: constexpr int31): PositiveSmi {
+  assert(i >= 0);
+  return %FromConstexpr(i);
+}
 FromConstexpr(s: constexpr string): String {
   return %FromConstexpr(s);
 }
@@ -941,12 +1244,15 @@ FromConstexpr(i: constexpr int31): uintptr {
 FromConstexpr(i: constexpr int31): float64 {
   return Float64Constant(i);
 }
+FromConstexpr(i: constexpr float64): float64 {
+  return Float64Constant(i);
+}
 FromConstexpr(b: constexpr bool): bool {
   return BoolConstant(b);
 }
 FromConstexpr(m: constexpr LanguageMode): LanguageMode {
-  return %RawObjectCast(%FromConstexpr(m));
+  return %RawDownCast(%FromConstexpr(m));
 }
 FromConstexpr(e: constexpr ElementsKind): ElementsKind {
@@ -986,6 +1292,15 @@ Convert(ui: uint32): Smi {
 Convert(ui: uint32): uintptr {
   return ChangeUint32ToWord(ui);
 }
+Convert(i: uint8): int32 {
+  return Signed(Convert(i));
+}
+Convert(i: uint16): int32 {
+  return Signed(Convert(i));
+}
+Convert(i: uint31): int32 {
+  return Signed(Convert(i));
+}
 Convert(i: intptr): int32 {
   return TruncateIntPtrToInt32(i);
 }
@@ -998,6 +1313,18 @@ Convert(ui: uintptr): uint32 {
 Convert(s: Smi): intptr {
   return SmiUntag(s);
 }
+Convert(ps: PositiveSmi): uintptr {
+  return Unsigned(SmiUntag(ps));
+}
+Convert(ui: uintptr): intptr {
+  const i = Signed(ui);
+  assert(i >= 0);
+  return i;
+}
+Convert(i: intptr): PositiveSmi {
+  assert(IsValidPositiveSmi(i));
+  return %RawDownCast(SmiTag(i));
+}
 Convert(s: Smi): int32 {
   return SmiToInt32(s);
 }
@@ -1064,16 +1391,16 @@ macro Is(implicit context: Context)(o: B): bool {
 macro UnsafeCast(implicit context: Context)(o: Object): A {
   assert(Is(o));
-  return %RawObjectCast(o);
+  return %RawDownCast(o);
 }
 UnsafeCast(o: Object): Object {
   return o;
 }
-const kCOWMap: Map = %RawObjectCast(LoadRoot(kFixedCOWArrayMapRootIndex));
-const kEmptyFixedArray: FixedArrayBase =
-    %RawObjectCast(LoadRoot(kEmptyFixedArrayRootIndex));
+const kCOWMap: Map = %RawDownCast(LoadRoot(kFixedCOWArrayMapRootIndex));
+const kEmptyFixedArray: FixedArray =
+    %RawDownCast(LoadRoot(kEmptyFixedArrayRootIndex));
 extern macro IsPrototypeInitialArrayPrototype(implicit context: Context)(Map):
     bool;
@@ -1082,9 +1409,9 @@ extern macro IsArrayIteratorProtectorCellInvalid(): bool;
 extern macro IsArraySpeciesProtectorCellInvalid(): bool;
 extern macro IsTypedArraySpeciesProtectorCellInvalid(): bool;
 extern macro IsPromiseSpeciesProtectorCellInvalid(): bool;
-
-extern operator '.buffer' macro
-TypedArrayBuiltinsAssembler::LoadTypedArrayBuffer(JSTypedArray): JSArrayBuffer;
+extern macro IsMockArrayBufferAllocatorFlag(): bool;
+extern macro IsPrototypeTypedArrayPrototype(implicit context: Context)(Map):
+    bool;
 extern operator '.data_ptr' macro TypedArrayBuiltinsAssembler::LoadDataPtr(
     JSTypedArray): RawPtr;
@@ -1093,49 +1420,55 @@ extern operator '.elements_kind' macro LoadMapElementsKind(Map): ElementsKind;
 extern operator '.elements_kind' macro LoadElementsKind(JSTypedArray):
     ElementsKind;
-extern operator '.length' macro LoadJSTypedArrayLength(JSTypedArray): Smi;
 extern operator '.length' macro LoadFastJSArrayLength(FastJSArray): Smi;
-extern operator '.length' macro LoadFixedArrayBaseLength(FixedArrayBase): Smi;
-extern operator '.length_intptr' macro LoadAndUntagFixedArrayBaseLength(
-    FixedArrayBase): intptr;
-extern operator '[]' macro LoadFixedArrayElement(FixedArray, intptr): Object;
-extern operator '[]' macro LoadFixedArrayElement(FixedArray, Smi): Object;
-extern operator '[]' macro LoadFixedArrayElement(
+extern operator '.objects[]' macro LoadFixedArrayElement(
+    FixedArray, intptr): Object;
+extern operator '.objects[]' macro LoadFixedArrayElement(
+    FixedArray, Smi): Object;
+extern operator '.objects[]' macro LoadFixedArrayElement(
     FixedArray, constexpr int31): Object;
-extern operator '[]=' macro StoreFixedArrayElement(
+extern operator '.objects[]=' macro StoreFixedArrayElement(
     FixedArray, intptr, Smi): void;
-extern operator '[]=' macro StoreFixedArrayElement(
+extern operator '.objects[]=' macro StoreFixedArrayElement(
+    FixedArray, Smi, Smi): void;
+extern operator '.objects[]=' macro StoreFixedArrayElement(
     FixedArray, intptr, HeapObject): void;
-extern operator '[]=' macro StoreFixedArrayElement(
+extern operator '.objects[]=' macro StoreFixedArrayElement(
+    FixedArray, intptr, Object): void;
+extern operator '.objects[]=' macro StoreFixedArrayElement(
     FixedArray, constexpr int31, Smi): void;
-extern operator '[]=' macro StoreFixedArrayElement(
+extern operator '.objects[]=' macro StoreFixedArrayElement(
     FixedArray, constexpr int31, HeapObject): void;
-extern operator '[]=' macro StoreFixedArrayElementSmi(
+extern operator '.objects[]=' macro StoreFixedArrayElementSmi(
     FixedArray, Smi, Object): void;
-operator '[]=' macro StoreFixedDoubleArrayNumber(
-    a: FixedDoubleArray, index: Smi, value: Number): void {
-  a[index] = Convert(value);
-}
-
-extern macro StoreFixedArrayElementSmi(
+extern operator '.objects[]=' macro StoreFixedArrayElementSmi(
     FixedArray, Smi, Object, constexpr WriteBarrierMode): void;
+extern macro StoreFixedArrayElement(
+    FixedArray, intptr, Object, constexpr WriteBarrierMode): void;
+extern operator '.floats[]=' macro StoreFixedDoubleArrayElement(
+    FixedDoubleArray, intptr, float64): void;
+extern operator '.floats[]=' macro StoreFixedDoubleArrayElementSmi(
+    FixedDoubleArray, Smi, float64): void;
+operator '.floats[]=' macro StoreFixedDoubleArrayElementSmi(
+    a: FixedDoubleArray, i: Smi, n: Number): void {
+  StoreFixedDoubleArrayElementSmi(a, i, Convert(n));
+}
+operator '[]=' macro StoreFixedDoubleArrayDirect(
+    a: FixedDoubleArray, i: Smi, v: Number) {
+  a.floats[i] = Convert(v);
+}
+operator '[]=' macro StoreFixedArrayDirect(a: FixedArray, i: Smi, v: Object) {
+  a.objects[i] = v;
+}
 extern operator '.instance_type' macro LoadMapInstanceType(Map): int32;
-extern macro LoadFixedDoubleArrayElement(FixedDoubleArray, Smi): float64;
 extern macro Float64SilenceNaN(float64): float64;
-extern macro StoreFixedDoubleArrayElement(
-    FixedDoubleArray, Object, float64, constexpr ParameterMode);
-extern macro StoreFixedArrayElement(
-    FixedArray, intptr, Object, constexpr WriteBarrierMode): void;
-
-macro StoreFixedDoubleArrayElementWithSmiIndex(
-    array: FixedDoubleArray, index: Smi, value: float64) {
-  StoreFixedDoubleArrayElement(array, index, value, SMI_PARAMETERS);
-}
 extern macro GetNumberDictionaryNumberOfElements(NumberDictionary): Smi;
+extern macro GetIteratorMethod(implicit context: Context)(HeapObject): Object
+    labels IfIteratorUndefined;
 extern macro BasicLoadNumberDictionaryElement(NumberDictionary, intptr): Object
     labels NotData, IfHole;
@@ -1148,6 +1481,16 @@ extern macro IsFastSmiOrTaggedElementsKind(ElementsKind): bool;
 extern macro IsFastSmiElementsKind(ElementsKind): bool;
 extern macro IsHoleyFastElementsKind(ElementsKind): bool;
+macro FastHoleyElementsKind(kind: ElementsKind): ElementsKind {
+  if (kind == PACKED_SMI_ELEMENTS) {
+    return HOLEY_SMI_ELEMENTS;
+  } else if (kind == PACKED_DOUBLE_ELEMENTS) {
+    return HOLEY_DOUBLE_ELEMENTS;
+  }
+  assert(kind == PACKED_ELEMENTS);
+  return HOLEY_ELEMENTS;
+}
+
 macro AllowDoubleElements(kind: ElementsKind): ElementsKind {
   if (kind == PACKED_SMI_ELEMENTS) {
     return PACKED_DOUBLE_ELEMENTS;
@@ -1177,6 +1520,8 @@ extern macro CalculateNewElementsCapacity(intptr): intptr;
 extern macro AllocateFixedArrayWithHoles(
     intptr, constexpr AllocationFlags): FixedArray;
+extern macro AllocateFixedDoubleArrayWithHoles(
+    intptr, constexpr AllocationFlags): FixedDoubleArray;
 extern macro CopyFixedArrayElements(
     constexpr ElementsKind, FixedArray, constexpr ElementsKind, FixedArray,
     intptr, intptr, intptr): void;
@@ -1186,12 +1531,9 @@ extern macro CopyFixedArrayElements(
 extern macro AllocateJSArray(constexpr ElementsKind, Map, intptr, Smi): JSArray;
 extern macro AllocateJSArray(constexpr ElementsKind, Map, Smi, Smi): JSArray;
-
+extern macro AllocateJSArray(Map, FixedArrayBase, Smi): JSArray;
 extern macro AllocateJSObjectFromMap(Map): JSObject;
-extern operator '[]=' macro StoreFixedDoubleArrayElementSmi(
-    FixedDoubleArray, Smi, float64): void;
-
 extern macro LoadDoubleWithHoleCheck(FixedDoubleArray, Smi): float64
     labels IfHole;
 extern macro LoadDoubleWithHoleCheck(FixedDoubleArray, intptr): float64
@@ -1202,10 +1544,14 @@ macro GetObjectFunction(implicit context: Context)(): JSFunction {
   return UnsafeCast(
       LoadNativeContext(context)[OBJECT_FUNCTION_INDEX]);
 }
-macro GetArrayBufferFunction(implicit context: Context)(): JSFunction {
-  return UnsafeCast(
+macro GetArrayBufferFunction(implicit context: Context)(): Constructor {
+  return UnsafeCast(
      LoadNativeContext(context)[ARRAY_BUFFER_FUN_INDEX]);
 }
+macro GetArrayBufferNoInitFunction(implicit context: Context)(): JSFunction {
+  return UnsafeCast(
+      LoadNativeContext(context)[ARRAY_BUFFER_NOINIT_FUN_INDEX]);
+}
 macro GetFastPackedSmiElementsJSArrayMap(implicit context: Context)(): Map {
   return UnsafeCast(
@@ -1274,7 +1620,7 @@ LoadElementNoHole(implicit context: Context)(
   try {
     let elements: FixedArray =
        Cast(a.elements) otherwise Unexpected;
-    let e: Object = elements[index];
+    let e: Object = elements.objects[index];
     if (e == Hole) {
       goto IfHole;
     }
@@ -1299,10 +1645,91 @@ LoadElementNoHole(implicit context: Context)(
   }
 }
+struct FastJSArrayWitness {
+  Get(): FastJSArray {
+    return this.unstable;
+  }
+
+  Recheck() labels CastError {
+    if (this.stable.map != this.map) goto CastError;
+    // We don't need to check elements kind or whether the prototype
+    // has changed away from the default JSArray prototype, because
+    // if the map remains the same then those properties hold.
+    //
+    // However, we have to make sure there are no elements in the
+    // prototype chain.
+    if (IsNoElementsProtectorCellInvalid()) goto CastError;
+    this.unstable = %RawDownCast(this.stable);
+  }
+
+  LoadElementNoHole(implicit context: Context)(k: Smi): Object
+      labels FoundHole {
+    if (this.hasDoubles) {
+      return LoadElementNoHole(this.unstable, k)
+          otherwise FoundHole;
+    } else {
+      return LoadElementNoHole(this.unstable, k)
+          otherwise FoundHole;
+    }
+  }
+
+  LoadElementOrUndefined(implicit context: Context)(k: Smi): Object {
+    try {
+      return this.LoadElementNoHole(k) otherwise FoundHole;
+    }
+    label FoundHole {
+      return Undefined;
+    }
+  }
+
+  EnsureArrayPushable() labels Failed {
+    EnsureArrayPushable(this.map) otherwise Failed;
+    this.arrayIsPushable = true;
+  }
+
+  Push(value: Object) labels Failed {
+    assert(this.arrayIsPushable);
+    if (this.hasDoubles) {
+      BuildAppendJSArray(HOLEY_DOUBLE_ELEMENTS, this.unstable, value)
+          otherwise Failed;
+    } else if (this.hasSmis) {
+      BuildAppendJSArray(HOLEY_SMI_ELEMENTS, this.unstable, value)
+          otherwise Failed;
+    } else {
+      assert(
+          this.map.elements_kind == HOLEY_ELEMENTS ||
+          this.map.elements_kind == PACKED_ELEMENTS);
+      BuildAppendJSArray(HOLEY_ELEMENTS, this.unstable, value)
+          otherwise Failed;
+    }
+  }
+
+  stable: JSArray;
+  unstable: FastJSArray;
+  map: Map;
+  hasDoubles: bool;
+  hasSmis: bool;
+  arrayIsPushable: bool;
+}
+
+macro NewFastJSArrayWitness(array: FastJSArray): FastJSArrayWitness {
+  let kind = array.map.elements_kind;
+  return FastJSArrayWitness{
+    array,
+    array,
+    array.map,
+    !IsElementsKindLessThanOrEqual(kind, HOLEY_ELEMENTS),
+    IsElementsKindLessThanOrEqual(kind, HOLEY_SMI_ELEMENTS),
+    false
+  };
+}
+
 extern macro TransitionElementsKind(
-    JSObject, Map, ElementsKind, ElementsKind): void labels Bailout;
+    JSObject, Map, constexpr ElementsKind,
+    constexpr ElementsKind): void labels Bailout;
 extern macro IsCallable(HeapObject): bool;
+extern macro IsConstructor(HeapObject): bool;
 extern macro IsJSArray(HeapObject): bool;
 extern macro IsMap(HeapObject): bool;
 extern macro IsJSFunction(HeapObject): bool;
@@ -1384,13 +1811,15 @@ transitioning macro GetLengthProperty(implicit context: Context)(o: Object):
 }
 extern macro NumberToString(Number): String;
-extern macro HasOnlyOneByteChars(InstanceType): bool;
+extern macro IsOneByteStringInstanceType(InstanceType): bool;
 extern macro AllocateSeqOneByteString(implicit context: Context)(uint32):
     String;
 extern macro AllocateSeqTwoByteString(implicit context: Context)(uint32):
     String;
 extern macro TryIntPtrAdd(intptr, intptr): intptr labels IfOverflow;
+extern macro ConvertToRelativeIndex(implicit context: Context)(
+    Object, intptr): intptr;
 extern builtin ObjectToString(Context, Object): Object;
 extern builtin StringRepeat(Context, String, Number): String;
@@ -1439,3 +1868,51 @@ macro BranchIfFastJSArrayForCopy(o: Object, context: Context): never
 macro IsFastJSArrayWithNoCustomIteration(context: Context, o: Object): bool {
   return Is(o);
 }
+
+extern transitioning runtime
+CreateDataProperty(implicit context: Context)(JSReceiver, Object, Object);
+
+transitioning builtin FastCreateDataProperty(implicit context: Context)(
+    receiver: JSReceiver, key: Object, value: Object): Object {
+  try {
+    let array = Cast(receiver) otherwise Slow;
+    const index: Smi = Cast(key) otherwise goto Slow;
+    if (index < 0 || index > array.length) goto Slow;
+    array::EnsureWriteableFastElements(array);
+    const isAppend = index == array.length;
+    const kind = array.map.elements_kind;
+    // We may have to transition a.
+    // For now, if transition is required, jump away to slow.
+    if (IsFastSmiElementsKind(kind)) {
+      const smiValue = Cast(value) otherwise Slow;
+      if (isAppend) {
+        BuildAppendJSArray(HOLEY_SMI_ELEMENTS, array, value) otherwise Slow;
+      } else {
+        const elements = Cast(array.elements) otherwise unreachable;
+        elements[index] = smiValue;
+      }
+    } else if (IsDoubleElementsKind(kind)) {
+      const numberValue = Cast(value) otherwise Slow;
+      if (isAppend) {
+        BuildAppendJSArray(HOLEY_DOUBLE_ELEMENTS, array, value)
+            otherwise Slow;
+      } else {
+        const doubleElements = Cast(array.elements)
+            otherwise unreachable;
+        doubleElements[index] = numberValue;
+      }
+    } else {
+      assert(IsFastSmiOrTaggedElementsKind(kind));
+      if (isAppend) {
+        BuildAppendJSArray(HOLEY_ELEMENTS, array, value) otherwise Slow;
+      } else {
+        const elements = Cast(array.elements) otherwise unreachable;
+        elements[index] = value;
+      }
+    }
+  }
+  label Slow {
+    CreateDataProperty(receiver, key, value);
+  }
+  return Undefined;
+}
diff --git a/deps/v8/src/builtins/builtins-arguments-gen.cc b/deps/v8/src/builtins/builtins-arguments-gen.cc
index 21831e9f46296a..2d25cdc32aa5da 100644
--- a/deps/v8/src/builtins/builtins-arguments-gen.cc
+++ b/deps/v8/src/builtins/builtins-arguments-gen.cc
@@ -101,7 +101,7 @@ Node* ArgumentsBuiltinsAssembler::ConstructParametersObjectFromArgs(
       [this, elements, &offset](Node* arg) {
         StoreNoWriteBarrier(MachineRepresentation::kTagged, elements,
                             offset.value(), arg);
-        Increment(&offset, kSystemPointerSize);
+        Increment(&offset, kTaggedSize);
       },
       first_arg, nullptr, param_mode);
   return result;
diff --git a/deps/v8/src/builtins/builtins-array-gen.cc b/deps/v8/src/builtins/builtins-array-gen.cc
index db58ecf152729b..7dfe705811e217 100644
--- a/deps/v8/src/builtins/builtins-array-gen.cc
+++ b/deps/v8/src/builtins/builtins-array-gen.cc
@@ -12,8 +12,10 @@
 #include "src/code-stub-assembler.h"
 #include "src/frame-constants.h"
 #include "src/heap/factory-inl.h"
+#include "src/objects/allocation-site-inl.h"
 #include "src/objects/arguments-inl.h"
 #include "src/objects/property-cell.h"
+#include "torque-generated/builtins-typed-array-createtypedarray-from-dsl-gen.h"
 namespace v8 {
 namespace internal {
@@ -59,16 +61,6 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
     return a();
   }
-  void ArrayBuiltinsAssembler::ForEachResultGenerator() {
-    a_.Bind(UndefinedConstant());
-  }
-
-  Node* ArrayBuiltinsAssembler::ForEachProcessor(Node* k_value, Node* k) {
-    CallJS(CodeFactory::Call(isolate()), context(), callbackfn(), this_arg(),
-           k_value, k, o());
-    return a();
-  }
-
   void ArrayBuiltinsAssembler::SomeResultGenerator() {
     a_.Bind(FalseConstant());
   }
@@ -99,46 +91,15 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
     return a();
   }
-  void ArrayBuiltinsAssembler::ReduceResultGenerator() {
-    return a_.Bind(this_arg());
-  }
-
-  Node* ArrayBuiltinsAssembler::ReduceProcessor(Node* k_value, Node* k) {
-    VARIABLE(result, MachineRepresentation::kTagged);
-    Label done(this, {&result}), initial(this);
-    GotoIf(WordEqual(a(), TheHoleConstant()), &initial);
-    result.Bind(CallJS(CodeFactory::Call(isolate()), context(), callbackfn(),
-                       UndefinedConstant(), a(), k_value, k, o()));
-    Goto(&done);
-
-    BIND(&initial);
-    result.Bind(k_value);
-    Goto(&done);
-
-    BIND(&done);
-    return result.value();
-  }
-
-  void ArrayBuiltinsAssembler::ReducePostLoopAction() {
-    Label ok(this);
-    GotoIf(WordNotEqual(a(), TheHoleConstant()), &ok);
-    ThrowTypeError(context(), MessageTemplate::kReduceNoInitial);
-    BIND(&ok);
-  }
-
-  void ArrayBuiltinsAssembler::MapResultGenerator() {
-    GenerateArraySpeciesCreate(len_);
-  }
-
   void ArrayBuiltinsAssembler::TypedArrayMapResultGenerator() {
     // 6. Let A be ? TypedArraySpeciesCreate(O, len).
     TNode original_array = CAST(o());
     TNode length = CAST(len_);
     const char* method_name = "%TypedArray%.prototype.map";
-    TypedArrayBuiltinsAssembler typedarray_asm(state());
+    TypedArrayCreatetypedarrayBuiltinsFromDSLAssembler typedarray_asm(state());
     TNode a = typedarray_asm.TypedArraySpeciesCreateByLength(
-        context(), original_array, length, method_name);
+        context(), method_name, original_array, length);
     // In the Spec and our current implementation, the length check is already
     // performed in TypedArraySpeciesCreate.
     CSA_ASSERT(this, SmiLessThanOrEqual(CAST(len_), LoadJSTypedArrayLength(a)));
@@ -148,120 +109,6 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
     a_.Bind(a);
   }
-  Node* ArrayBuiltinsAssembler::SpecCompliantMapProcessor(Node* k_value,
-                                                          Node* k) {
-    // i. Let kValue be ? Get(O, Pk). Performed by the caller of
-    // SpecCompliantMapProcessor.
-    // ii. Let mapped_value be ? Call(callbackfn, T, kValue, k, O).
-    Node* mapped_value = CallJS(CodeFactory::Call(isolate()), context(),
-                                callbackfn(), this_arg(), k_value, k, o());
-
-    // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mapped_value).
-    CallRuntime(Runtime::kCreateDataProperty, context(), a(), k, mapped_value);
-    return a();
-  }
-
-  Node* ArrayBuiltinsAssembler::FastMapProcessor(Node* k_value, Node* k) {
-    // i. Let kValue be ? Get(O, Pk). Performed by the caller of
-    // FastMapProcessor.
-    // ii. Let mapped_value be ? Call(callbackfn, T, kValue, k, O).
-    Node* mapped_value = CallJS(CodeFactory::Call(isolate()), context(),
-                                callbackfn(), this_arg(), k_value, k, o());
-
-    // mode is SMI_PARAMETERS because k has tagged representation.
-    ParameterMode mode = SMI_PARAMETERS;
-    Label runtime(this), finished(this);
-    Label transition_pre(this), transition_smi_fast(this),
-        transition_smi_double(this);
-    Label array_not_smi(this), array_fast(this), array_double(this);
-
-    TNode kind = LoadElementsKind(a());
-    Node* elements = LoadElements(a());
-    GotoIf(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS), &array_not_smi);
-    TryStoreArrayElement(HOLEY_SMI_ELEMENTS, mode, &transition_pre, elements, k,
-                         mapped_value);
-    Goto(&finished);
-
-    BIND(&transition_pre);
-    {
-      // array is smi. Value is either tagged or a heap number.
-      CSA_ASSERT(this, TaggedIsNotSmi(mapped_value));
-      GotoIf(IsHeapNumberMap(LoadMap(mapped_value)), &transition_smi_double);
-      Goto(&transition_smi_fast);
-    }
-
-    BIND(&array_not_smi);
-    {
-      Branch(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS), &array_double,
-             &array_fast);
-    }
-
-    BIND(&transition_smi_fast);
-    {
-      // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mapped_value).
-      Node* const native_context = LoadNativeContext(context());
-      Node* const fast_map = LoadContextElement(
-          native_context, Context::JS_ARRAY_HOLEY_ELEMENTS_MAP_INDEX);
-
-      // Since this transition is only a map change, just do it right here.
-      // Since a() doesn't have an allocation site, it's safe to do the
-      // map store directly, otherwise I'd call TransitionElementsKind().
-      StoreMap(a(), fast_map);
-      Goto(&array_fast);
-    }
-
-    BIND(&array_fast);
-    {
-      TryStoreArrayElement(HOLEY_ELEMENTS, mode, &runtime, elements, k,
-                           mapped_value);
-      Goto(&finished);
-    }
-
-    BIND(&transition_smi_double);
-    {
-      // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mapped_value).
-      Node* const native_context = LoadNativeContext(context());
-      Node* const double_map = LoadContextElement(
-          native_context, Context::JS_ARRAY_HOLEY_DOUBLE_ELEMENTS_MAP_INDEX);
-
-      const ElementsKind kFromKind = HOLEY_SMI_ELEMENTS;
-      const ElementsKind kToKind = HOLEY_DOUBLE_ELEMENTS;
-
-      Label transition_in_runtime(this, Label::kDeferred);
-      TransitionElementsKind(a(), double_map, kFromKind, kToKind,
-                             &transition_in_runtime);
-      Goto(&array_double);
-
-      BIND(&transition_in_runtime);
-      CallRuntime(Runtime::kTransitionElementsKind, context(), a(), double_map);
-      Goto(&array_double);
-    }
-
-    BIND(&array_double);
-    {
-      // TODO(mvstanton): If we use a variable for elements and bind it
-      // appropriately, we can avoid an extra load of elements by binding the
-      // value only after a transition from smi to double.
-      elements = LoadElements(a());
-      // If the mapped_value isn't a number, this will bail out to the runtime
-      // to make the transition.
-      TryStoreArrayElement(HOLEY_DOUBLE_ELEMENTS, mode, &runtime, elements, k,
-                           mapped_value);
-      Goto(&finished);
-    }
-
-    BIND(&runtime);
-    {
-      // iii. Perform ? CreateDataPropertyOrThrow(A, Pk, mapped_value).
-      CallRuntime(Runtime::kCreateDataProperty, context(), a(), k,
-                  mapped_value);
-      Goto(&finished);
-    }
-
-    BIND(&finished);
-    return a();
-  }
-
   // See tc39.github.io/ecma262/#sec-%typedarray%.prototype.map.
   Node* ArrayBuiltinsAssembler::TypedArrayMapProcessor(Node* k_value, Node* k) {
     // 8. c. Let mapped_value be ? Call(callbackfn, T, « kValue, k, O »).
@@ -350,97 +197,6 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
     argc_ = argc;
   }
-  void ArrayBuiltinsAssembler::GenerateIteratingArrayBuiltinBody(
-      const char* name, const BuiltinResultGenerator& generator,
-      const CallResultProcessor& processor, const PostLoopAction& action,
-      const Callable& slow_case_continuation,
-      MissingPropertyMode missing_property_mode, ForEachDirection direction) {
-    Label non_array(this), array_changes(this, {&k_, &a_, &to_});
-
-    // TODO(danno): Seriously? Do we really need to throw the exact error
-    // message on null and undefined so that the webkit tests pass?
-    Label throw_null_undefined_exception(this, Label::kDeferred);
-    GotoIf(IsNullOrUndefined(receiver()), &throw_null_undefined_exception);
-
-    // By the book: taken directly from the ECMAScript 2015 specification
-
-    // 1. Let O be ToObject(this value).
-    // 2. ReturnIfAbrupt(O)
-    o_ = ToObject_Inline(context(), receiver());
-
-    // 3. Let len be ToLength(Get(O, "length")).
-    // 4. ReturnIfAbrupt(len).
-    TVARIABLE(Number, merged_length);
-    Label has_length(this, &merged_length), not_js_array(this);
-    GotoIf(DoesntHaveInstanceType(o(), JS_ARRAY_TYPE), &not_js_array);
-    merged_length = LoadJSArrayLength(CAST(o()));
-    Goto(&has_length);
-
-    BIND(&not_js_array);
-    {
-      Node* len_property =
-          GetProperty(context(), o(), isolate()->factory()->length_string());
-      merged_length = ToLength_Inline(context(), len_property);
-      Goto(&has_length);
-    }
-    BIND(&has_length);
-    {
-      len_ = merged_length.value();
-
-      // 5. If IsCallable(callbackfn) is false, throw a TypeError exception.
-      Label type_exception(this, Label::kDeferred);
-      Label done(this);
-      GotoIf(TaggedIsSmi(callbackfn()), &type_exception);
-      Branch(IsCallableMap(LoadMap(callbackfn())), &done, &type_exception);
-
-      BIND(&throw_null_undefined_exception);
-      ThrowTypeError(context(), MessageTemplate::kCalledOnNullOrUndefined,
-                     name);
-
-      BIND(&type_exception);
-      ThrowTypeError(context(), MessageTemplate::kCalledNonCallable,
-                     callbackfn());
-
-      BIND(&done);
-    }
-
-    // 6. If thisArg was supplied, let T be thisArg; else let T be undefined.
-    // [Already done by the arguments adapter]
-
-    if (direction == ForEachDirection::kForward) {
-      // 7. Let k be 0.
-      k_.Bind(SmiConstant(0));
-    } else {
-      k_.Bind(NumberDec(len()));
-    }
-
-    generator(this);
-
-    HandleFastElements(processor, action, &fully_spec_compliant_, direction,
-                       missing_property_mode);
-
-    BIND(&fully_spec_compliant_);
-
-    Node* result =
-        CallStub(slow_case_continuation, context(), receiver(), callbackfn(),
-                 this_arg(), a_.value(), o(), k_.value(), len(), to_.value());
-    ReturnFromBuiltin(result);
-  }
-
-  void ArrayBuiltinsAssembler::InitIteratingArrayBuiltinLoopContinuation(
-      TNode context, TNode receiver, Node* callbackfn,
-      Node* this_arg, Node* a, TNode o, Node* initial_k,
-      TNode len, Node* to) {
-    context_ = context;
-    this_arg_ = this_arg;
-    callbackfn_ = callbackfn;
-    a_.Bind(a);
-    k_.Bind(initial_k);
-    o_ = o;
-    len_ = len;
-    to_.Bind(to);
-  }
-
   void ArrayBuiltinsAssembler::GenerateIteratingTypedArrayBuiltinBody(
       const char* name, const BuiltinResultGenerator& generator,
       const CallResultProcessor& processor, const PostLoopAction& action,
@@ -485,9 +241,9 @@ Node* ArrayBuiltinsAssembler::FindProcessor(Node* k_value, Node* k) {
       TYPED_ARRAYS(INSTANCE_TYPE)
 #undef INSTANCE_TYPE
     };
-    std::vector