Merge v11.x. Last Aurora commit: f86bf4d785
commit 87a8ef4a52

.gitignore (vendored): 1 line changed
@@ -68,6 +68,7 @@
 /third_party/jsoncpp/source
 !/third_party/colorama
 /third_party/colorama/src
+!/third_party/glibc
 !/third_party/googletest
 /third_party/googletest/src
 !/third_party/test262-harness
@@ -47,7 +47,7 @@ wheel: <

 wheel: <
 name: "infra/python/wheels/coverage/${vpython_platform}"
-version: "version:5.5.chromium.2"
+version: "version:5.5.chromium.3"
 >

 wheel: <
@@ -74,3 +74,8 @@ wheel: <
 name: "infra/python/wheels/protobuf-py3"
 version: "version:3.19.3"
 >
+
+wheel: <
+name: "infra/python/wheels/requests-py2_py3"
+version: "version:2.13.0"
+>
@@ -42,7 +42,6 @@ import sys
 # Flags from YCM's default config.
 flags = [
 '-DUSE_CLANG_COMPLETER',
-'-std=gnu++14',
 '-x',
 'c++',
 ]
@@ -143,23 +142,25 @@ def GetClangCommandFromNinjaForFilename(v8_root, filename):
 # Parse flags that are important for YCM's purposes.
 for flag in clang_line.split(' '):
 if flag.startswith('-I'):
+v8_flags.append(MakeIncludePathAbsolute(flag, "-I", out_dir))
+elif flag.startswith('-isystem'):
+v8_flags.append(MakeIncludePathAbsolute(flag, "-isystem", out_dir))
+elif flag.startswith('-std') or flag.startswith(
+'-pthread') or flag.startswith('-no'):
+v8_flags.append(flag)
+elif flag.startswith('-') and flag[1] in 'DWFfmgOX':
+v8_flags.append(flag)
+return v8_flags
+
+
+def MakeIncludePathAbsolute(flag, prefix, out_dir):
 # Relative paths need to be resolved, because they're relative to the
 # output dir, not the source.
-if flag[2] == '/':
+if flag[len(prefix)] == '/':
-v8_flags.append(flag)
+return flag
 else:
-abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
+abs_path = os.path.normpath(os.path.join(out_dir, flag[len(prefix):]))
-v8_flags.append('-I' + abs_path)
+return prefix + abs_path
-elif flag.startswith('-std'):
-v8_flags.append(flag)
-elif flag.startswith('-') and flag[1] in 'DWFfmO':
-if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
-# These flags causes libclang (3.3) to crash. Remove it until things
-# are fixed.
-continue
-v8_flags.append(flag)
-
-return v8_flags


 def FlagsForFile(filename):
@@ -180,3 +181,9 @@ def FlagsForFile(filename):
 'flags': final_flags,
 'do_cache': True
 }
+
+
+def Settings(**kwargs):
+if kwargs['language'] == 'cfamily':
+return FlagsForFile(kwargs['filename'])
+return {}
AUTHORS: 8 lines changed

@@ -44,6 +44,7 @@ CodeWeavers, Inc. <*@codeweavers.com>
 Alibaba, Inc. <*@alibaba-inc.com>
 SiFive, Inc. <*@sifive.com>

+Aapo Alasuutari <aapo.alasuutari@gmail.com>
 Aaron Bieber <deftly@gmail.com>
 Aaron O'Mullan <aaron.omullan@gmail.com>
 Abdulla Kamar <abdulla.kamar@gmail.com>
@@ -60,6 +61,7 @@ Allan Sandfeld Jensen <allan.jensen@qt.io>
 Amos Lim <eui-sang.lim@samsung.com>
 Andreas Anyuru <andreas.anyuru@gmail.com>
 Andrei Kashcha <anvaka@gmail.com>
+Andreu Botella <andreu@andreubotella.com>
 Andrew Paprocki <andrew@ishiboo.com>
 Anna Henningsen <anna@addaleax.net>
 Antoine du Hamel <duhamelantoine1995@gmail.com>
@@ -113,6 +115,7 @@ Fedor Indutny <fedor@indutny.com>
 Felix Geisendörfer <haimuiba@gmail.com>
 Feng Yu <f3n67u@gmail.com>
 Filipe David Manana <fdmanana@gmail.com>
+Frank Lemanschik <frank@dspeed.eu>
 Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
 Gao Sheng <gaosheng08@meituan.com>
 Geoffrey Garside <ggarside@gmail.com>
@@ -136,6 +139,7 @@ Ingvar Stepanyan <me@rreverser.com>
 Ioseb Dzmanashvili <ioseb.dzmanashvili@gmail.com>
 Isiah Meadows <impinball@gmail.com>
 Jaime Bernardo <jaime@janeasystems.com>
+Jake Hughes <jh@jakehughes.uk>
 James M Snell <jasnell@gmail.com>
 James Pike <g00gle@chilon.net>
 Jan Krems <jan.krems@gmail.com>
@@ -150,6 +154,7 @@ Jiaxun Yang <jiaxun.yang@flygoat.com>
 Joel Stanley <joel@jms.id.au>
 Johan Bergström <johan@bergstroem.nu>
 Jonathan Liu <net147@gmail.com>
+Juan Arboleda <soyjuanarbol@gmail.com>
 Julien Brianceau <jbriance@cisco.com>
 JunHo Seo <sejunho@gmail.com>
 Junha Park <jpark3@scu.edu>
@@ -208,6 +213,7 @@ Peter Rybin <peter.rybin@gmail.com>
 Peter Varga <pvarga@inf.u-szeged.hu>
 Peter Wong <peter.wm.wong@gmail.com>
 PhistucK <phistuck@gmail.com>
+Pierrick Bouvier <pierrick.bouvier@linaro.org>
 Rafal Krypa <rafal@krypa.net>
 Raul Tambre <raul@tambre.ee>
 Ray Glover <ray@rayglover.net>
@@ -254,6 +260,7 @@ Vlad Burlik <vladbph@gmail.com>
 Vladimir Krivosheev <develar@gmail.com>
 Vladimir Shutoff <vovan@shutoff.ru>
 Wael Almattar <waelsy123@gmail.com>
+WANG Xuerui <git@xen0n.name>
 Wei Wu <lazyparser@gmail.com>
 Wenlu Wang <kingwenlu@gmail.com>
 Wenming Yang <yangwenming@bytedance.com>
@@ -279,3 +286,4 @@ Zheng Liu <i6122f@gmail.com>
 Zhongping Wang <kewpie.w.zp@gmail.com>
 柳荣一 <admin@web-tinker.com>
 Yang Xiang <xiangyangemail@gmail.com>
+Kotaro Ohsugi <dec4m4rk@gmail.com>
BUILD.bazel: 115 lines changed

@@ -39,6 +39,7 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
 # v8_enable_trace_baseline_exec
 # v8_enable_trace_feedback_updates
 # v8_enable_atomic_object_field_writes
+# v8_enable_conservative_stack_scanning
 # v8_enable_concurrent_marking
 # v8_enable_ignition_dispatch_counting
 # v8_enable_builtins_profiling
@@ -131,6 +132,8 @@ v8_flag(name = "v8_enable_snapshot_code_comments")

 v8_flag(name = "v8_enable_snapshot_native_code_counters")

+v8_flag(name = "v8_enable_static_roots")
+
 v8_flag(name = "v8_enable_trace_maps")

 v8_flag(name = "v8_enable_v8_checks")
@@ -315,6 +318,7 @@ v8_config(
 "v8_enable_slow_dchecks": "ENABLE_SLOW_DCHECKS",
 "v8_enable_runtime_call_stats": "V8_RUNTIME_CALL_STATS",
 "v8_enable_snapshot_native_code_counters": "V8_SNAPSHOT_NATIVE_CODE_COUNTERS",
+"v8_enable_static_roots": "V8_STATIC_ROOTS",
 "v8_enable_trace_maps": "V8_TRACE_MAPS",
 "v8_enable_v8_checks": "V8_ENABLE_CHECKS",
 "v8_enable_verify_csa": "ENABLE_VERIFY_CSA",
@@ -324,7 +328,6 @@ v8_config(
 },
 defines = [
 "GOOGLE3",
-"ENABLE_DEBUGGER_SUPPORT",
 "V8_ADVANCED_BIGINT_ALGORITHMS",
 "V8_CONCURRENT_MARKING",
 ] + select({
@@ -891,6 +894,7 @@ filegroup(
 "src/builtins/typed-array-sort.tq",
 "src/builtins/typed-array-subarray.tq",
 "src/builtins/typed-array-to-reversed.tq",
+"src/builtins/typed-array-to-sorted.tq",
 "src/builtins/typed-array-values.tq",
 "src/builtins/typed-array-with.tq",
 "src/builtins/typed-array.tq",
@@ -925,6 +929,7 @@ filegroup(
 "src/objects/js-objects.tq",
 "src/objects/js-promise.tq",
 "src/objects/js-proxy.tq",
+"src/objects/js-raw-json.tq",
 "src/objects/js-regexp-string-iterator.tq",
 "src/objects/js-regexp.tq",
 "src/objects/js-shadow-realm.tq",
@@ -960,6 +965,7 @@ filegroup(
 "src/objects/templates.tq",
 "src/objects/torque-defined-classes.tq",
 "src/objects/turbofan-types.tq",
+"src/objects/turboshaft-types.tq",
 "test/torque/test-torque.tq",
 "third_party/v8/builtins/array-sort.tq",
 ] + select({
@@ -980,6 +986,7 @@ filegroup(
 "src/objects/js-collator.tq",
 "src/objects/js-date-time-format.tq",
 "src/objects/js-display-names.tq",
+"src/objects/js-duration-format.tq",
 "src/objects/js-list-format.tq",
 "src/objects/js-locale.tq",
 "src/objects/js-number-format.tq",
@@ -1241,6 +1248,7 @@ filegroup(
 "src/common/message-template.h",
 "src/common/operation.h",
 "src/common/ptr-compr-inl.h",
+"src/common/ptr-compr.cc",
 "src/common/ptr-compr.h",
 "src/compiler-dispatcher/lazy-compile-dispatcher.cc",
 "src/compiler-dispatcher/lazy-compile-dispatcher.h",
@@ -1267,8 +1275,6 @@ filegroup(
 "src/debug/debug-scopes.h",
 "src/debug/debug-stack-trace-iterator.cc",
 "src/debug/debug-stack-trace-iterator.h",
-"src/debug/debug-type-profile.cc",
-"src/debug/debug-type-profile.h",
 "src/debug/debug.cc",
 "src/debug/debug.h",
 "src/debug/interface-types.h",
@@ -1380,6 +1386,8 @@ filegroup(
 "src/handles/global-handles-inl.h",
 "src/handles/global-handles.cc",
 "src/handles/global-handles.h",
+"src/handles/traced-handles.cc",
+"src/handles/traced-handles.h",
 "src/handles/handles-inl.h",
 "src/handles/handles.cc",
 "src/handles/handles.h",
@@ -1425,6 +1433,8 @@ filegroup(
 "src/heap/cppgc-js/cpp-marking-state-inl.h",
 "src/heap/cppgc-js/cpp-snapshot.cc",
 "src/heap/cppgc-js/cpp-snapshot.h",
+"src/heap/cppgc-js/cross-heap-remembered-set.cc",
+"src/heap/cppgc-js/cross-heap-remembered-set.h",
 "src/heap/cppgc-js/unified-heap-marking-state.cc",
 "src/heap/cppgc-js/unified-heap-marking-state.h",
 "src/heap/cppgc-js/unified-heap-marking-state-inl.h",
@@ -1435,6 +1445,9 @@ filegroup(
 "src/heap/embedder-tracing.cc",
 "src/heap/embedder-tracing.h",
 "src/heap/embedder-tracing-inl.h",
+"src/heap/evacuation-verifier.cc",
+"src/heap/evacuation-verifier.h",
+"src/heap/evacuation-verifier-inl.h",
 "src/heap/factory-base.cc",
 "src/heap/factory-base.h",
 "src/heap/factory-base-inl.h",
@@ -1452,8 +1465,6 @@ filegroup(
 "src/heap/gc-tracer.cc",
 "src/heap/gc-tracer-inl.h",
 "src/heap/gc-tracer.h",
-"src/heap/global-handle-marking-visitor.cc",
-"src/heap/global-handle-marking-visitor.h",
 "src/heap/heap-allocator-inl.h",
 "src/heap/heap-allocator.cc",
 "src/heap/heap-allocator.h",
@@ -1497,6 +1508,8 @@ filegroup(
 "src/heap/marking-barrier.cc",
 "src/heap/marking-barrier.h",
 "src/heap/marking-barrier-inl.h",
+"src/heap/marking-state.h",
+"src/heap/marking-state-inl.h",
 "src/heap/marking-visitor-inl.h",
 "src/heap/marking-visitor.h",
 "src/heap/marking-worklist-inl.h",
@@ -1529,6 +1542,9 @@ filegroup(
 "src/heap/paged-spaces.h",
 "src/heap/parallel-work-item.h",
 "src/heap/parked-scope.h",
+"src/heap/pretenuring-handler-inl.h",
+"src/heap/pretenuring-handler.cc",
+"src/heap/pretenuring-handler.h",
 "src/heap/progress-bar.h",
 "src/heap/read-only-heap-inl.h",
 "src/heap/read-only-heap.cc",
@@ -1555,6 +1571,8 @@ filegroup(
 "src/heap/stress-scavenge-observer.h",
 "src/heap/sweeper.cc",
 "src/heap/sweeper.h",
+"src/heap/traced-handles-marking-visitor.cc",
+"src/heap/traced-handles-marking-visitor.h",
 "src/heap/weak-object-worklists.cc",
 "src/heap/weak-object-worklists.h",
 "src/ic/call-optimization.cc",
@@ -1753,6 +1771,9 @@ filegroup(
 "src/objects/js-promise.h",
 "src/objects/js-proxy-inl.h",
 "src/objects/js-proxy.h",
+"src/objects/js-raw-json-inl.h",
+"src/objects/js-raw-json.h",
+"src/objects/js-raw-json.cc",
 "src/objects/js-regexp-inl.h",
 "src/objects/js-regexp-string-iterator-inl.h",
 "src/objects/js-regexp-string-iterator.h",
@@ -1900,6 +1921,8 @@ filegroup(
 "src/objects/transitions.h",
 "src/objects/turbofan-types-inl.h",
 "src/objects/turbofan-types.h",
+"src/objects/turboshaft-types-inl.h",
+"src/objects/turboshaft-types.h",
 "src/objects/type-hints.cc",
 "src/objects/type-hints.h",
 "src/objects/value-serializer.cc",
@@ -1951,6 +1974,7 @@ filegroup(
 "src/profiler/heap-snapshot-generator-inl.h",
 "src/profiler/heap-snapshot-generator.cc",
 "src/profiler/heap-snapshot-generator.h",
+"src/profiler/output-stream-writer.h",
 "src/profiler/profile-generator-inl.h",
 "src/profiler/profile-generator.cc",
 "src/profiler/profile-generator.h",
@@ -1978,8 +2002,6 @@ filegroup(
 "src/regexp/experimental/experimental-interpreter.h",
 "src/regexp/experimental/experimental.cc",
 "src/regexp/experimental/experimental.h",
-"src/regexp/property-sequences.cc",
-"src/regexp/property-sequences.h",
 "src/regexp/regexp-ast.cc",
 "src/regexp/regexp-ast.h",
 "src/regexp/regexp-bytecode-generator-inl.h",
@@ -2017,6 +2039,7 @@ filegroup(
 "src/roots/roots-inl.h",
 "src/roots/roots.cc",
 "src/roots/roots.h",
+"src/roots/static-roots.h",
 "src/runtime/runtime-array.cc",
 "src/runtime/runtime-atomics.cc",
 "src/runtime/runtime-bigint.cc",
@@ -2061,6 +2084,8 @@ filegroup(
 "src/sandbox/sandbox.h",
 "src/sandbox/sandboxed-pointer-inl.h",
 "src/sandbox/sandboxed-pointer.h",
+"src/sandbox/bounded-size-inl.h",
+"src/sandbox/bounded-size.h",
 "src/base/sanitizer/asan.h",
 "src/base/sanitizer/lsan-page-allocator.cc",
 "src/base/sanitizer/lsan-page-allocator.h",
@@ -2527,8 +2552,8 @@ filegroup(
 "src/wasm/names-provider.cc",
 "src/wasm/names-provider.h",
 "src/wasm/object-access.h",
-"src/wasm/signature-map.cc",
+"src/wasm/pgo.cc",
-"src/wasm/signature-map.h",
+"src/wasm/pgo.h",
 "src/wasm/simd-shuffle.cc",
 "src/wasm/simd-shuffle.h",
 "src/wasm/stacks.cc",
@@ -2606,6 +2631,9 @@ filegroup(
 "src/objects/js-display-names.cc",
 "src/objects/js-display-names.h",
 "src/objects/js-display-names-inl.h",
+"src/objects/js-duration-format.cc",
+"src/objects/js-duration-format.h",
+"src/objects/js-duration-format-inl.h",
 "src/objects/js-list-format.cc",
 "src/objects/js-list-format.h",
 "src/objects/js-list-format-inl.h",
@@ -2649,6 +2677,8 @@ filegroup(
 "src/compiler/all-nodes.h",
 "src/compiler/allocation-builder.h",
 "src/compiler/allocation-builder-inl.h",
+"src/compiler/backend/bitcast-elider.cc",
+"src/compiler/backend/bitcast-elider.h",
 "src/compiler/backend/code-generator.cc",
 "src/compiler/backend/code-generator.h",
 "src/compiler/backend/code-generator-impl.h",
@@ -2861,7 +2891,10 @@ filegroup(
 "src/compiler/state-values-utils.h",
 "src/compiler/store-store-elimination.cc",
 "src/compiler/store-store-elimination.h",
+"src/compiler/turboshaft/assembler.cc",
 "src/compiler/turboshaft/assembler.h",
+"src/compiler/turboshaft/assert-types-reducer.h",
+"src/compiler/turboshaft/branch-elimination-reducer.h",
 "src/compiler/turboshaft/decompression-optimization.cc",
 "src/compiler/turboshaft/decompression-optimization.h",
 "src/compiler/turboshaft/deopt-data.h",
@@ -2870,17 +2903,36 @@ filegroup(
 "src/compiler/turboshaft/graph-builder.h",
 "src/compiler/turboshaft/graph.cc",
 "src/compiler/turboshaft/graph.h",
+"src/compiler/turboshaft/index.h",
 "src/compiler/turboshaft/graph-visualizer.cc",
 "src/compiler/turboshaft/graph-visualizer.h",
+"src/compiler/turboshaft/late-escape-analysis-reducer.h",
+"src/compiler/turboshaft/late-escape-analysis-reducer.cc",
+"src/compiler/turboshaft/layered-hash-map.h",
+"src/compiler/turboshaft/machine-optimization-reducer.h",
+"src/compiler/turboshaft/memory-optimization.cc",
+"src/compiler/turboshaft/memory-optimization.h",
 "src/compiler/turboshaft/operations.cc",
 "src/compiler/turboshaft/operations.h",
+"src/compiler/turboshaft/operation-matching.h",
 "src/compiler/turboshaft/optimization-phase.cc",
 "src/compiler/turboshaft/optimization-phase.h",
 "src/compiler/turboshaft/recreate-schedule.cc",
 "src/compiler/turboshaft/recreate-schedule.h",
+"src/compiler/turboshaft/representations.cc",
+"src/compiler/turboshaft/representations.h",
+"src/compiler/turboshaft/select-lowering-reducer.h",
 "src/compiler/turboshaft/sidetable.h",
+"src/compiler/turboshaft/simplify-tf-loops.cc",
+"src/compiler/turboshaft/simplify-tf-loops.h",
+"src/compiler/turboshaft/snapshot-table.h",
+"src/compiler/turboshaft/type-inference-reducer.h",
+"src/compiler/turboshaft/types.cc",
+"src/compiler/turboshaft/types.h",
+"src/compiler/turboshaft/utils.cc",
 "src/compiler/turboshaft/utils.h",
-"src/compiler/turboshaft/value-numbering-assembler.h",
+"src/compiler/turboshaft/value-numbering-reducer.h",
+"src/compiler/turboshaft/variable-reducer.h",
 "src/compiler/type-cache.cc",
 "src/compiler/type-cache.h",
 "src/compiler/type-narrowing-reducer.cc",
@@ -2891,6 +2943,7 @@ filegroup(
 "src/compiler/typer.h",
 "src/compiler/types.cc",
 "src/compiler/types.h",
+"src/compiler/use-info.h",
 "src/compiler/value-numbering-reducer.cc",
 "src/compiler/value-numbering-reducer.h",
 "src/compiler/verifier.cc",
@@ -2959,6 +3012,7 @@ filegroup(
 "src/builtins/builtins-microtask-queue-gen.cc",
 "src/builtins/builtins-number-gen.cc",
 "src/builtins/builtins-object-gen.cc",
+"src/builtins/builtins-object-gen.h",
 "src/builtins/builtins-promise-gen.cc",
 "src/builtins/builtins-promise-gen.h",
 "src/builtins/builtins-proxy-gen.cc",
@@ -3101,6 +3155,7 @@ filegroup(
 "src/heap/cppgc/stats-collector.h",
 "src/heap/cppgc/sweeper.cc",
 "src/heap/cppgc/sweeper.h",
+"src/heap/cppgc/heap-config.h",
 "src/heap/cppgc/task-handle.h",
 "src/heap/cppgc/trace-event.h",
 "src/heap/cppgc/trace-trait.cc",
@@ -3119,6 +3174,7 @@ filegroup(
 srcs = [
 "src/heap/base/active-system-pages.cc",
 "src/heap/base/active-system-pages.h",
+"src/heap/base/basic-slot-set.h",
 "src/heap/base/stack.cc",
 "src/heap/base/stack.h",
 "src/heap/base/worklist.cc",
@@ -3127,16 +3183,16 @@ filegroup(
 # Note these cannot be v8_target_is_* selects because these contain
 # inline assembly that runs inside the executable. Since these are
 # linked directly into mksnapshot, they must use the actual target cpu.
-"@v8//bazel/config:is_inline_asm_ia32": ["src/heap/base/asm/ia32/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_ia32": ["src/heap/base/asm/ia32/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_x64": ["src/heap/base/asm/x64/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_x64": ["src/heap/base/asm/x64/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_arm": ["src/heap/base/asm/arm/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_arm": ["src/heap/base/asm/arm/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_arm64": ["src/heap/base/asm/arm64/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_arm64": ["src/heap/base/asm/arm64/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_s390x": ["src/heap/base/asm/s390/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_s390x": ["src/heap/base/asm/s390/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_riscv64": ["src/heap/base/asm/riscv64/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_riscv64": ["src/heap/base/asm/riscv64/save_registers_asm.cc"],
-"@v8//bazel/config:is_inline_asm_ppc64le": ["src/heap/base/asm/ppc/push_registers_asm.cc"],
+"@v8//bazel/config:is_inline_asm_ppc64le": ["src/heap/base/asm/ppc/save_registers_asm.cc"],
-"@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/push_registers_masm.asm"],
+"@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/save_registers_masm.asm"],
-"@v8//bazel/config:is_msvc_asm_x64": ["src/heap/base/asm/x64/push_registers_masm.asm"],
+"@v8//bazel/config:is_msvc_asm_x64": ["src/heap/base/asm/x64/save_registers_masm.asm"],
-"@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/push_registers_masm.S"],
+"@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/save_registers_masm.S"],
 }),
 )

@@ -3182,6 +3238,8 @@ filegroup(
 "src/snapshot/embedded/platform-embedded-file-writer-mac.h",
 "src/snapshot/embedded/platform-embedded-file-writer-win.cc",
 "src/snapshot/embedded/platform-embedded-file-writer-win.h",
+"src/snapshot/static-roots-gen.cc",
+"src/snapshot/static-roots-gen.h",
 "src/snapshot/mksnapshot.cc",
 "src/snapshot/snapshot-empty.cc",
 ],
@@ -3216,6 +3274,8 @@ filegroup(
 "src/inspector/v8-debugger.h",
 "src/inspector/v8-debugger-agent-impl.cc",
 "src/inspector/v8-debugger-agent-impl.h",
+"src/inspector/v8-debugger-barrier.cc",
+"src/inspector/v8-debugger-barrier.h",
 "src/inspector/v8-debugger-id.cc",
 "src/inspector/v8-debugger-id.h",
 "src/inspector/v8-debugger-script.cc",
@@ -3516,12 +3576,14 @@ v8_mksnapshot(
 cc_library(
 name = "icu/generated_torque_headers",
 hdrs = [":icu/generated_torque_files"],
+copts = ["-Wno-implicit-fallthrough"],
 strip_include_prefix = "icu",
 )

 cc_library(
 name = "noicu/generated_torque_headers",
 hdrs = [":noicu/generated_torque_files"],
+copts = ["-Wno-implicit-fallthrough"],
 strip_include_prefix = "noicu",
 )

@@ -3531,6 +3593,7 @@ v8_library(
 ":v8_libbase_files",
 ":v8_shared_internal_headers",
 ],
+copts = ["-Wno-implicit-fallthrough"],
 )

 cc_library(
@@ -3539,6 +3602,7 @@ cc_library(
 "src/torque/kythe-data.h",
 "src/torque/torque-compiler.h",
 ],
+copts = ["-Wno-implicit-fallthrough"],
 include_prefix = "third_party/v8",
 includes = ["."],
 )
@@ -3548,7 +3612,7 @@ cc_library(
 srcs = [
 ":torque_base_files",
 ],
-copts = select({
+copts = ["-Wno-implicit-fallthrough"] + select({
 "@v8//bazel/config:is_posix": ["-fexceptions"],
 "//conditions:default": [],
 }),
@@ -3565,6 +3629,7 @@ v8_library(
 ":v8_base_without_compiler_files",
 ":v8_common_libshared_files",
 ],
+copts = ["-Wno-implicit-fallthrough"],
 icu_deps = [
 ":icu/generated_torque_headers",
 "//external:icu",
@@ -3590,6 +3655,7 @@ v8_library(
 name = "v8",
 srcs = [":v8_inspector_files"],
 hdrs = [":public_header_files"],
+copts = ["-Wno-implicit-fallthrough"],
 icu_deps = [":icu/v8_libshared"],
 icu_srcs = [":icu/snapshot_files"],
 noicu_deps = [":noicu/v8_libshared"],
@@ -3602,6 +3668,7 @@ v8_library(
 name = "wee8",
 srcs = [":wee8_files"],
 hdrs = [":public_wasm_c_api_header_files"],
+copts = ["-Wno-implicit-fallthrough"],
 strip_include_prefix = "third_party",
 visibility = ["//visibility:public"],
 deps = [":noicu/v8"],
@@ -3631,6 +3698,7 @@ v8_binary(
 "src/interpreter/bytecodes.cc",
 "src/interpreter/bytecodes.h",
 ],
+copts = ["-Wno-implicit-fallthrough"],
 deps = ["v8_libbase"],
 )

@@ -3642,6 +3710,7 @@ v8_binary(
 ":v8_libbase_files",
 ":v8_shared_internal_headers",
 ],
+copts = ["-Wno-implicit-fallthrough"],
 defines = [
 "V8_INTL_SUPPORT",
 "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
@@ -3660,7 +3729,7 @@ v8_binary(
 "src/torque/torque.cc",
 ":torque_base_files",
 ],
-copts = select({
+copts = ["-Wno-implicit-fallthrough"] + select({
 "@v8//bazel/config:is_posix": ["-fexceptions"],
 "//conditions:default": [],
 }),
@@ -3675,6 +3744,7 @@ v8_binary(
 v8_binary(
 name = "mksnapshot",
 srcs = [":mksnapshot_files"],
+copts = ["-Wno-implicit-fallthrough"],
 icu_deps = [":icu/v8_libshared"],
 linkopts = select({
 "@v8//bazel/config:is_android": ["-llog"],
@@ -3686,6 +3756,7 @@ v8_binary(
 v8_binary(
 name = "d8",
 srcs = [":d8_files"],
+copts = ["-Wno-implicit-fallthrough"],
 icu_deps = [":icu/v8"],
 noicu_deps = [":noicu/v8"],
 )
@@ -18,6 +18,7 @@ machenbach@chromium.org
 manoskouk@chromium.org
 mathias@chromium.org
 marja@chromium.org
+mliedtke@chromium.org
 mlippautz@chromium.org
 mslekova@chromium.org
 nicohartmann@chromium.org
DEPS: 99 lines changed

@@ -26,35 +26,46 @@ vars = {
 # most commonly useful for developers. Bots and developers that need to use
 # other images (e.g., qemu.arm64) can override this with additional images.
 'checkout_fuchsia_boot_images': "qemu.x64",
+'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""',
+
 'checkout_instrumented_libraries': False,
 'checkout_ittapi': False,
+
+# Fetch the prebuilt binaries for llvm-cov and llvm-profdata. Needed to
+# process the raw profiles produced by instrumented targets (built with
+# the gn arg 'use_clang_coverage').
+'checkout_clang_coverage_tools': False,
+
 # Fetch clang-tidy into the same bin/ directory as our clang binary.
 'checkout_clang_tidy': False,

 'chromium_url': 'https://chromium.googlesource.com',
 'android_url': 'https://android.googlesource.com',
 'download_gcmole': False,
 'download_jsfunfuzz': False,
 'download_prebuilt_bazel': False,
 'check_v8_header_includes': False,
-'checkout_reclient': False,

 # By default, download the fuchsia sdk from the public sdk directory.
 'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/gn/',

 # reclient CIPD package version
-'reclient_version': 're_client_version:0.69.0.458df98-gomaip',
+'reclient_version': 're_client_version:0.87.0.b6908b3-gomaip',

 # GN CIPD package version.
-'gn_version': 'git_revision:00b741b1568d56cf4e117dcb9f70cd42653b4c78',
+'gn_version': 'git_revision:70d6c60823c0233a0f35eccc25b2b640d2980bdc',

+# ninja CIPD package version
+# https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja
+'ninja_version': 'version:2@1.8.2.chromium.3',
+
 # luci-go CIPD package version.
-'luci_go': 'git_revision:3226112a79a7c2de84c3186191e24dd61680a77d',
+'luci_go': 'git_revision:bac571b5399502fa16ac48a1d3820e1117505085',

 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling Fuchsia sdk
 # and whatever else without interference from each other.
-'fuchsia_version': 'version:9.20220902.1.1',
+'fuchsia_version': 'version:11.20221209.0.1',

 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_build-tools_version
@@ -87,16 +98,16 @@ vars = {
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_tools-lint_version
 # and whatever else without interference from each other.
-'android_sdk_cmdline-tools_version': 'IPzAG-uU5zVMxohpg9-7-N0tQC1TCSW1VbrBFw7Ld04C',
+'android_sdk_cmdline-tools_version': 'oWlET2yQhaPKQ66tYNuSPaueU78Z9VlxpyxOoUjwRuIC',
 }

 deps = {
 'base/trace_event/common':
-Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '640fc6dc86d5e75e6c7e8006cb45fb46c91014e0',
+Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '521ac34ebd795939c7e16b37d9d3ddb40e8ed556',
 'build':
-Var('chromium_url') + '/chromium/src/build.git' + '@' + '24bb6108a40a44bda3a7fa88d906c0774034df08',
+Var('chromium_url') + '/chromium/src/build.git' + '@' + '3d4b0c1e773d659da18710fc4984b8195f6d5aea',
 'buildtools':
-Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '46ab4c32d461f34456161fac6cd58d203c5083e9',
+Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '202b660eb577da482fdec18173379df77147a394',
 'buildtools/clang_format/script':
 Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + '8b525d2747f2584fc35d8c7e612e66f377858df7',
 'buildtools/linux64': {
@@ -120,11 +131,11 @@ deps = {
 'condition': 'host_os == "mac"',
 },
 'buildtools/third_party/libc++/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '85a3363f04e1e0e7b85d62d5d9a419e039755262',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '52399655fdafdd14ade17ab12ddc9e955423aa5a',
 'buildtools/third_party/libc++abi/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '6285577a9df73170c1496b78542a2c18fa2352fd',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '25a3d07096374aeeffa3dab8b582143dde5a9ca9',
 'buildtools/third_party/libunwind/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '42aa6de5544ec1ccc27da640a044bd3f474ee75a',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '09a1f53060bc601b5ee821d7ab52071eed096fda',
 'buildtools/win': {
 'packages': [
 {
@@ -143,20 +154,20 @@ deps = {
 }
 ],
 'dep_type': 'cipd',
-'condition': '(host_os == "linux" or host_os == "mac" or host_os == "win") and checkout_reclient',
+'condition': '(host_os == "linux" or host_os == "mac" or host_os == "win") and host_cpu != "s390" and host_cpu != "ppc"',
 },
 'test/benchmarks/data':
 Var('chromium_url') + '/v8/deps/third_party/benchmarks.git' + '@' + '05d7188267b4560491ff9155c5ee13e207ecd65f',
 'test/mozilla/data':
 Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be',
 'test/test262/data':
-Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '8dcc0e1955b1753271ed0812d1a2a15a23de069b',
+Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'e6c6460a5b94e32e01ce9a9d236f3148d4648ce5',
 'third_party/android_ndk': {
 'url': Var('chromium_url') + '/android_ndk.git' + '@' + '8388a2be5421311dc75c5f937aae13d821a27f3d',
 'condition': 'checkout_android',
 },
 'third_party/android_platform': {
-'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '04b33506bfd9d0e866bd8bd62f4cbf323d84dc79',
+'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '1bf9b932433ebb78828bf3c8cd0ccc86b9ef4787',
 'condition': 'checkout_android',
 },
 'third_party/android_sdk/public': {
@@ -198,7 +209,7 @@ deps = {
 'dep_type': 'cipd',
 },
 'third_party/catapult': {
-'url': Var('chromium_url') + '/catapult.git' + '@' + '7ee071132a536a6616589cc2411674d1b459b4da',
+'url': Var('chromium_url') + '/catapult.git' + '@' + '2f1cf6121c17b31d4607afbaec37f33b0526cfc4',
 'condition': 'checkout_android',
 },
 'third_party/colorama/src': {
@@ -206,7 +217,7 @@ deps = {
 'condition': 'checkout_android',
 },
 'third_party/depot_tools':
-Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'a089281a82f9481e246dcc1292145d0e8635f51c',
+Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'a964ca1296b9238d0797aa5f25597efa7b897515',
 'third_party/fuchsia-sdk/sdk': {
 'packages': [
 {
@@ -223,9 +234,9 @@ deps = {
 'third_party/googletest/src':
 Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 'af29db7ec28d6df1c7f0f745186884091e602e07',
 'third_party/icu':
-Var('chromium_url') + '/chromium/deps/icu.git' + '@' + 'bbdc7d8936bd9b896ff9c9822b697554b73c1c9d',
+Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '1b7d391f0528fb3a4976b7541b387ee04f915f83',
 'third_party/instrumented_libraries':
-Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + 'e09c4b66b6e87116eb190651421f1a6e2f3b9c52',
+Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + '09ba70cfb2c0d01c60684660e357ae200caf2968',
 'third_party/ittapi': {
 # Force checkout ittapi libraries to pass v8 header includes check on
 # bots that has check_v8_header_includes enabled.
@@ -233,13 +244,23 @@ deps = {
 'condition': "checkout_ittapi or check_v8_header_includes",
 },
 'third_party/jinja2':
-Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + 'ee69aa00ee8536f61db6a451f3858745cf587de6',
+Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + '4633bf431193690c3491244f5a0acbe9ac776233',
 'third_party/jsoncpp/source':
 Var('chromium_url') + '/external/github.com/open-source-parsers/jsoncpp.git'+ '@' + '42e892d96e47b1f6e29844cc705e148ec4856448',
 'third_party/logdog/logdog':
 Var('chromium_url') + '/infra/luci/luci-py/client/libs/logdog' + '@' + '0b2078a90f7a638d576b3a7c407d136f2fb62399',
 'third_party/markupsafe':
-Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '1b882ef6372b58bfd55a3285f37ed801be9137cd',
+Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '13f4e8c9e206567eeb13bf585406ddc574005748',
+'third_party/ninja': {
+'packages': [
+{
+'package': 'infra/3pp/tools/ninja/${{platform}}',
+'version': Var('ninja_version'),
+}
+],
+'dep_type': 'cipd',
+'condition': 'host_cpu != "s390" and host_cpu != "ppc"'
+},
 'third_party/perfetto':
 Var('android_url') + '/platform/external/perfetto.git' + '@' + '0eba417b2c72264fa825dc21067b9adc9b8adf70',
 'third_party/protobuf':
@@ -249,9 +270,9 @@ deps = {
 'condition': 'checkout_android',
 },
 'third_party/zlib':
-Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + '9f4113d3bae3285a4511fd7c827baf64b4f9eb4b',
+Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + '18d27fa10b237fdfcbd8f0c65c19fe009981a3bc',
 'tools/clang':
-Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '0a2285903bf27182c56d8a1cc8b0e0d8a1ce8c31',
+Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '3344dd8997f422862a1c5477b490b3611be31351',
 'tools/luci-go': {
 'packages': [
 {
@@ -485,7 +506,7 @@ hooks = [
 '--arch=x64'],
 },
 {
-'name': 'msan_chained_origins',
+'name': 'msan_chained_origins_xenial',
 'pattern': '.',
 'condition': 'checkout_instrumented_libraries',
 'action': [ 'python3',
@@ -493,11 +514,11 @@ hooks = [
 '--no_resume',
 '--no_auth',
 '--bucket', 'chromium-instrumented-libraries',
-'-s', 'third_party/instrumented_libraries/binaries/msan-chained-origins.tgz.sha1',
+'-s', 'third_party/instrumented_libraries/binaries/msan-chained-origins-xenial.tgz.sha1',
 ],
 },
 {
-'name': 'msan_no_origins',
+'name': 'msan_no_origins_xenial',
 'pattern': '.',
 'condition': 'checkout_instrumented_libraries',
 'action': [ 'python3',
@@ -505,7 +526,7 @@ hooks = [
 '--no_resume',
 '--no_auth',
 '--bucket', 'chromium-instrumented-libraries',
-'-s', 'third_party/instrumented_libraries/binaries/msan-no-origins.tgz.sha1',
+'-s', 'third_party/instrumented_libraries/binaries/msan-no-origins-xenial.tgz.sha1',
 ],
 },
 {
@@ -543,6 +564,14 @@ hooks = [
 'condition': 'host_os != "aix"',
 'action': ['python3', 'tools/clang/scripts/update.py'],
 },
+{
+# This is supposed to support the same set of platforms as 'clang' above.
+'name': 'clang_coverage',
+'pattern': '.',
+'condition': 'checkout_clang_coverage_tools',
+'action': ['python3', 'tools/clang/scripts/update.py',
+'--package=coverage_tools'],
+},
 {
 'name': 'clang_tidy',
 'pattern': '.',
@@ -560,11 +589,11 @@ hooks = [
 {
 'name': 'Download Fuchsia system images',
 'pattern': '.',
-'condition': 'checkout_fuchsia',
+'condition': 'checkout_fuchsia and checkout_fuchsia_product_bundles',
 'action': [
 'python3',
-'build/fuchsia/update_images.py',
+'build/fuchsia/update_product_bundles.py',
-'--boot-images={checkout_fuchsia_boot_images}',
+'{checkout_fuchsia_boot_images}',
 ],
 },
 {
@@ -575,16 +604,6 @@ hooks = [
 'action': ['python3', 'tools/clang/scripts/update.py',
 '--package=objdump'],
 },
-# Download and initialize "vpython" VirtualEnv environment packages.
-{
-'name': 'vpython_common',
-'pattern': '.',
-'condition': 'checkout_android',
-'action': [ 'vpython',
-'-vpython-spec', '.vpython',
-'-vpython-tool', 'install',
-],
-},
 {
 'name': 'vpython3_common',
 'pattern': '.',
@@ -5,5 +5,6 @@
 adamk@chromium.org
 danno@chromium.org
 hpayer@chromium.org
+mlippautz@chromium.org
 verwaest@chromium.org
 vahl@chromium.org
|
LICENSE
@@ -15,8 +15,7 @@ are:
 - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
 assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
 assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
-assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
-assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
+assembler-x64.cc, assembler-x64.h, assembler.cc and assembler.h.
 This code is copyrighted by Sun Microsystems Inc. and released
 under a 3-clause BSD license.
 
|
OWNERS
@@ -5,6 +5,7 @@ file:ENG_REVIEW_OWNERS
 per-file .*=file:INFRA_OWNERS
 per-file .bazelrc=file:COMMON_OWNERS
 per-file .mailmap=file:COMMON_OWNERS
+per-file .ycm_extra_conf.py=file:COMMON_OWNERS
 per-file codereview.settings=file:INFRA_OWNERS
 per-file AUTHORS=file:COMMON_OWNERS
 per-file BUILD.bazel=file:COMMON_OWNERS
|
@@ -85,6 +85,7 @@ def _V8PresubmitChecks(input_api, output_api):
 sys.path.append(input_api.os_path.join(
 input_api.PresubmitLocalPath(), 'tools'))
 from v8_presubmit import CppLintProcessor
+from v8_presubmit import GCMoleProcessor
 from v8_presubmit import JSLintProcessor
 from v8_presubmit import TorqueLintProcessor
 from v8_presubmit import SourceProcessor
@@ -126,6 +127,9 @@ def _V8PresubmitChecks(input_api, output_api):
 if not StatusFilesProcessor().RunOnFiles(
 input_api.AffectedFiles(include_deletes=True)):
 results.append(output_api.PresubmitError("Status file check failed"))
+if not GCMoleProcessor().RunOnFiles(
+input_api.AffectedFiles(include_deletes=False)):
+results.append(output_api.PresubmitError("GCMole pattern check failed"))
 results.extend(input_api.canned_checks.CheckAuthorizedAuthor(
 input_api, output_api, bot_allowlist=[
 'v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com'
@@ -257,8 +261,9 @@ def _CheckHeadersHaveIncludeGuards(input_api, output_api):
 files_to_check=(file_inclusion_pattern, ),
 files_to_skip=files_to_skip)
 
-leading_src_pattern = input_api.re.compile(r'^src/')
-dash_dot_slash_pattern = input_api.re.compile(r'[-./]')
+leading_src_pattern = input_api.re.compile(r'^src[\\\/]')
+dash_dot_slash_pattern = input_api.re.compile(r'[-.\\\/]')
 
 def PathToGuardMacro(path):
 """Guards should be of the form V8_PATH_TO_FILE_WITHOUT_SRC_H_."""
 x = input_api.re.sub(leading_src_pattern, 'v8_', path)
|
@@ -22,6 +22,13 @@ config_setting(
 },
 )
 
+config_setting(
+name = "is_opt",
+values = {
+"compilation_mode": "opt",
+},
+)
+
 config_setting(
 name = "is_debug",
 values = {
|
@@ -152,6 +152,14 @@ def _default_args():
 ],
 "//conditions:default": [],
 }),
+cxxopts = select({
+"//third_party/v8/HEAD/google3/config:is_opt": [
+"-fvisibility=hidden",
+"-fvisibility-inlines-hidden",
+],
+"//conditions:default": [
+],
+}),
 includes = ["include"],
 linkopts = select({
 "@v8//bazel/config:is_windows": [
@@ -407,15 +415,19 @@ v8_target_cpu_transition = transition(
 )
 
 def _mksnapshot(ctx):
+prefix = ctx.attr.prefix
+suffix = ctx.attr.suffix
 outs = [
-ctx.actions.declare_file(ctx.attr.prefix + "/snapshot.cc"),
-ctx.actions.declare_file(ctx.attr.prefix + "/embedded.S"),
+ctx.actions.declare_file(prefix + "/snapshot" + suffix + ".cc"),
+ctx.actions.declare_file(prefix + "/embedded" + suffix + ".S"),
 ]
 ctx.actions.run(
 outputs = outs,
 inputs = [],
 arguments = [
 "--embedded_variant=Default",
+"--target_os",
+ctx.attr.target_os,
 "--startup_src",
 outs[0].path,
 "--embedded_src",
@@ -436,26 +448,38 @@ _v8_mksnapshot = rule(
 executable = True,
 cfg = "exec",
 ),
+"target_os": attr.string(mandatory = True),
 "_allowlist_function_transition": attr.label(
 default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
 ),
 "prefix": attr.string(mandatory = True),
+"suffix": attr.string(mandatory = True),
 },
 cfg = v8_target_cpu_transition,
 )
 
-def v8_mksnapshot(name, args):
+def v8_mksnapshot(name, args, suffix = ""):
 _v8_mksnapshot(
 name = "noicu/" + name,
 args = args,
 prefix = "noicu",
-tool = ":noicu/mksnapshot",
+tool = ":noicu/mksnapshot" + suffix,
+suffix = suffix,
+target_os = select({
+"@v8//bazel/config:is_macos": "mac",
+"//conditions:default": "",
+}),
 )
 _v8_mksnapshot(
 name = "icu/" + name,
 args = args,
 prefix = "icu",
-tool = ":icu/mksnapshot",
+tool = ":icu/mksnapshot" + suffix,
+suffix = suffix,
+target_os = select({
+"@v8//bazel/config:is_macos": "mac",
+"//conditions:default": "",
+}),
 )
 
 def _quote(val):
@ -494,6 +518,7 @@ def build_config_content(cpu, icu):
|
|||||||
("v8_current_cpu", cpu),
|
("v8_current_cpu", cpu),
|
||||||
("v8_dict_property_const_tracking", "false"),
|
("v8_dict_property_const_tracking", "false"),
|
||||||
("v8_enable_atomic_object_field_writes", "false"),
|
("v8_enable_atomic_object_field_writes", "false"),
|
||||||
|
("v8_enable_conservative_stack_scanning", "false"),
|
||||||
("v8_enable_concurrent_marking", "false"),
|
("v8_enable_concurrent_marking", "false"),
|
||||||
("v8_enable_i18n_support", icu),
|
("v8_enable_i18n_support", icu),
|
||||||
("v8_enable_verify_predictable", "false"),
|
("v8_enable_verify_predictable", "false"),
|
||||||
|
@@ -44,10 +44,6 @@ declare_args() {
 #
 # There are test cases for this code posted as an attachment to
 # https://crbug.com/625353.
-#
-# TODO(GYP): Currently only regular (non-cross) compiles, and cross-compiles
-# from x64 hosts to Intel, ARM, or MIPS targets, are implemented. Add support
-# for the other supported configurations.
 
 if (v8_snapshot_toolchain == "") {
 if (current_os == host_os && current_cpu == host_cpu) {
@@ -64,28 +60,26 @@ if (v8_snapshot_toolchain == "") {
 current_cpu == "arm") {
 # Trying to compile 32-bit arm on arm64. Good luck!
 v8_snapshot_toolchain = current_toolchain
-} else if (host_cpu == "x64" &&
-(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
+} else if (host_cpu == "x64" && v8_current_cpu == "mips64") {
 # We don't support snapshot generation for big-endian targets,
 # therefore snapshots will need to be built using native mksnapshot
 # in combination with qemu
 v8_snapshot_toolchain = current_toolchain
+} else if (host_cpu == current_cpu) {
+# Cross-build from same ISA on one OS to another. For example:
+# * targeting win/x64 on a linux/x64 host
+# * targeting win/arm64 on a mac/arm64 host
+v8_snapshot_toolchain = host_toolchain
 } else if (host_cpu == "arm64" && current_cpu == "x64") {
 # Cross-build from arm64 to intel (likely on an Apple Silicon mac).
 v8_snapshot_toolchain =
 "//build/toolchain/${host_os}:clang_arm64_v8_$v8_current_cpu"
 } else if (host_cpu == "x64") {
 # This is a cross-compile from an x64 host to either a non-Intel target
-# cpu or a different target OS. Clang will always be used by default on the
-# host, unless this is a ChromeOS build, in which case the same toolchain
-# (Clang or GCC) will be used for target and host by default.
-if (is_chromeos && !is_clang) {
-_clang = ""
-} else {
-_clang = "clang_"
-}
-
-if (v8_current_cpu == "x64" || v8_current_cpu == "x86") {
+# cpu or to 32-bit x86 on a different target OS.
+assert(v8_current_cpu != "x64", "handled by host_cpu == current_cpu branch")
+if (v8_current_cpu == "x86") {
 _cpus = v8_current_cpu
 } else if (v8_current_cpu == "arm64" || v8_current_cpu == "mips64el" ||
 v8_current_cpu == "riscv64" || v8_current_cpu == "loong64") {
@@ -96,8 +90,7 @@ if (v8_snapshot_toolchain == "") {
 } else {
 _cpus = "x64_v8_${v8_current_cpu}"
 }
-} else if (v8_current_cpu == "arm" || v8_current_cpu == "mipsel" ||
-v8_current_cpu == "riscv32") {
+} else if (v8_current_cpu == "arm" || v8_current_cpu == "riscv32") {
 _cpus = "x86_v8_${v8_current_cpu}"
 } else {
 # This branch should not be reached; leave _cpus blank so the assert
@@ -106,7 +99,7 @@ if (v8_snapshot_toolchain == "") {
 }
 
 if (_cpus != "") {
-v8_snapshot_toolchain = "//build/toolchain/${host_os}:${_clang}${_cpus}"
+v8_snapshot_toolchain = "//build/toolchain/${host_os}:clang_${_cpus}"
 } else if (is_win && v8_current_cpu == "arm64") {
 # cross compile Windows arm64 with host toolchain.
 v8_snapshot_toolchain = host_toolchain
@@ -122,7 +115,6 @@ assert(v8_snapshot_toolchain != "",
 # avoid building v8_libbase on the host more than once. On mips with big endian,
 # the snapshot toolchain is the target toolchain and, hence, can't be used.
 v8_generator_toolchain = v8_snapshot_toolchain
-if (host_cpu == "x64" &&
-(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
+if (host_cpu == "x64" && v8_current_cpu == "mips64") {
 v8_generator_toolchain = "//build/toolchain/linux:clang_x64"
 }
|
@@ -107,6 +107,9 @@ declare_args() {
 # Enable advanced BigInt algorithms, costing about 10-30 KB binary size
 # depending on platform. Disabled on Android to save binary size.
 v8_advanced_bigint_algorithms = !is_android
+
+# TODO: macros for determining endian type are clang specific.
+v8_use_libm_trig_functions = is_clang
 }
 
 if (v8_use_external_startup_data == "") {
@@ -199,8 +202,7 @@ if ((is_posix || is_fuchsia) &&
 }
 
 # On MIPS gcc_target_rpath and ldso_path might be needed for all builds.
-if (target_cpu == "mipsel" || target_cpu == "mips64el" ||
-target_cpu == "mips" || target_cpu == "mips64") {
+if (target_cpu == "mips64el" || target_cpu == "mips64") {
 v8_add_configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
 }
 
|
@@ -26,6 +26,8 @@ This allows Oilpan to run garbage collection in parallel with mutators running i
 References to objects belonging to another thread's heap are modeled using cross-thread roots.
 This is even true for on-heap to on-heap references.
 
+Oilpan heaps may generally not be accessed from different threads unless otherwise noted.
+
 ## Heap partitioning
 
 Oilpan's heaps are partitioned into spaces.
|
@@ -12,12 +12,18 @@ namespace cppgc {
 namespace internal {
 class HeapBase;
 class WriteBarrierTypeForCagedHeapPolicy;
+class WriteBarrierTypeForNonCagedHeapPolicy;
 } // namespace internal
 
 /**
 * Opaque handle used for additional heap APIs.
 */
 class HeapHandle {
+public:
+// Deleted copy ctor to avoid treating the type by value.
+HeapHandle(const HeapHandle&) = delete;
+HeapHandle& operator=(const HeapHandle&) = delete;
+
 private:
 HeapHandle() = default;
 
@@ -34,6 +40,7 @@ class HeapHandle {
 
 friend class internal::HeapBase;
 friend class internal::WriteBarrierTypeForCagedHeapPolicy;
+friend class internal::WriteBarrierTypeForNonCagedHeapPolicy;
 };
 
 } // namespace cppgc
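Because the copy constructor and copy assignment above are now deleted, embedder code has to hold a cppgc::HeapHandle by reference or pointer rather than by value. A minimal sketch under that assumption (the MyComponent type is hypothetical, for illustration only):

#include "cppgc/heap-handle.h"

// Hypothetical embedder-side component: cppgc::HeapHandle can no longer be
// copied, so it is stored and passed by reference.
class MyComponent {
 public:
  explicit MyComponent(cppgc::HeapHandle& handle) : heap_handle_(handle) {}

  cppgc::HeapHandle& heap_handle() { return heap_handle_; }

 private:
  cppgc::HeapHandle& heap_handle_;  // a reference, never a by-value copy
};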
|
@@ -47,10 +47,7 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(2) * kGB;
 constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
 #endif // !defined(CPPGC_2GB_CAGE)
 constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
-constexpr size_t kCagedHeapNormalPageReservationSize =
-kCagedHeapReservationSize / 2;
-#endif
+#endif // defined(CPPGC_CAGED_HEAP)
 
 static constexpr size_t kDefaultAlignment = sizeof(void*);
 
|
@@ -25,6 +25,7 @@ namespace cppgc {
 namespace internal {
 
 class HeapBase;
+class HeapBaseHandle;
 
 #if defined(CPPGC_YOUNG_GENERATION)
 
@@ -92,8 +93,6 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
 
 #endif // CPPGC_YOUNG_GENERATION
 
-// TODO(v8:12231): Remove this class entirely so that it doesn't occupy space is
-// when CPPGC_YOUNG_GENERATION is off.
 struct CagedHeapLocalData final {
 V8_INLINE static CagedHeapLocalData& Get() {
 return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
|
@@ -45,33 +45,11 @@ class V8_EXPORT CagedHeapBase {
 kHalfWordShift);
 }
 
-V8_INLINE static bool IsWithinNormalPageReservation(void* address) {
-return (reinterpret_cast<uintptr_t>(address) - g_heap_base_) <
-api_constants::kCagedHeapNormalPageReservationSize;
-}
-
-V8_INLINE static bool IsWithinLargePageReservation(const void* ptr) {
-CPPGC_DCHECK(g_heap_base_);
-auto uptr = reinterpret_cast<uintptr_t>(ptr);
-return (uptr >= g_heap_base_ +
-api_constants::kCagedHeapNormalPageReservationSize) &&
-(uptr < g_heap_base_ + api_constants::kCagedHeapReservationSize);
-}
-
 V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
 
-V8_INLINE static BasePageHandle& LookupPageFromInnerPointer(void* ptr) {
-if (V8_LIKELY(IsWithinNormalPageReservation(ptr)))
-return *BasePageHandle::FromPayload(ptr);
-else
-return LookupLargePageFromInnerPointer(ptr);
-}
-
 private:
 friend class CagedHeap;
 
-static BasePageHandle& LookupLargePageFromInnerPointer(void* address);
-
 static uintptr_t g_heap_base_;
 };
 
|
@@ -61,7 +61,7 @@ class CageBaseGlobal final {
 #undef CPPGC_REQUIRE_CONSTANT_INIT
 #undef CPPGC_CONST
 
-class CompressedPointer final {
+class V8_TRIVIAL_ABI CompressedPointer final {
 public:
 using IntegralType = uint32_t;
 
@@ -170,7 +170,7 @@ class CompressedPointer final {
 
 #endif // defined(CPPGC_POINTER_COMPRESSION)
 
-class RawPointer final {
+class V8_TRIVIAL_ABI RawPointer final {
 public:
 using IntegralType = uintptr_t;
 
|
@@ -12,6 +12,7 @@
 #include "cppgc/heap-state.h"
 #include "cppgc/internal/api-constants.h"
 #include "cppgc/internal/atomic-entry-flag.h"
+#include "cppgc/internal/base-page-handle.h"
 #include "cppgc/internal/member-storage.h"
 #include "cppgc/platform.h"
 #include "cppgc/sentinel-pointer.h"
@@ -268,8 +269,8 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
 if (V8_LIKELY(!WriteBarrier::IsEnabled()))
 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 
-#if defined(CPPGC_YOUNG_GENERATION)
 HeapHandle& handle = callback();
+#if defined(CPPGC_YOUNG_GENERATION)
 if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
 if (!handle.is_young_generation_enabled()) {
 return WriteBarrier::Type::kNone;
@@ -283,8 +284,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
 return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
 }
 #else   // !defined(CPPGC_YOUNG_GENERATION)
-HeapHandle& handle = callback();
-if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
+if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 }
 #endif // !defined(CPPGC_YOUNG_GENERATION)
@@ -327,11 +327,6 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 template <WriteBarrier::ValueMode value_mode>
 struct ValueModeDispatch;
 
-// TODO(chromium:1056170): Create fast path on API.
-static bool IsMarking(const void*, HeapHandle**);
-// TODO(chromium:1056170): Create fast path on API.
-static bool IsMarking(HeapHandle&);
-
 WriteBarrierTypeForNonCagedHeapPolicy() = delete;
 };
 
@@ -349,7 +344,13 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
 if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
 }
-if (IsMarking(object, &params.heap)) {
+// We know that |object| is within the normal page or in the beginning of a
+// large page, so extract the page header by bitmasking.
+BasePageHandle* page =
+BasePageHandle::FromPayload(const_cast<void*>(object));
+
+HeapHandle& heap_handle = page->heap_handle();
+if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
 }
 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
@@ -365,7 +366,7 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
 HeapHandleCallback callback) {
 if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
 HeapHandle& handle = callback();
-if (IsMarking(handle)) {
+if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
 params.heap = &handle;
 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
 }
|
@@ -28,7 +28,7 @@ namespace internal {
 
 // MemberBase always refers to the object as const object and defers to
 // BasicMember on casting to the right type as needed.
-class MemberBase {
+class V8_TRIVIAL_ABI MemberBase {
 public:
 #if defined(CPPGC_POINTER_COMPRESSION)
 using RawStorage = CompressedPointer;
@@ -68,13 +68,16 @@ class MemberBase {
 V8_INLINE void ClearFromGC() const { raw_.Clear(); }
 
 private:
+friend class MemberDebugHelper;
+
 mutable RawStorage raw_;
 };
 
 // The basic class from which all Member classes are 'generated'.
 template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
 typename CheckingPolicy>
-class BasicMember final : private MemberBase, private CheckingPolicy {
+class V8_TRIVIAL_ABI BasicMember final : private MemberBase,
+private CheckingPolicy {
 public:
 using PointeeType = T;
 
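For context, the MemberBase/BasicMember machinery above backs the public cppgc::Member<T> type. A minimal sketch of how it is used from embedder code (the Parent/Child classes are made-up examples, not part of the change):

#include "cppgc/allocation.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

// Hypothetical example types.
class Child : public cppgc::GarbageCollected<Child> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

class Parent : public cppgc::GarbageCollected<Parent> {
 public:
  explicit Parent(Child* child) : child_(child) {}

  // Member<T> fields are traced, never manually deleted.
  void Trace(cppgc::Visitor* visitor) const { visitor->Trace(child_); }

 private:
  cppgc::Member<Child> child_;
};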
|
@@ -33,8 +33,9 @@ class V8_EXPORT Platform {
 virtual ~Platform() = default;
 
 /**
-* Returns the allocator used by cppgc to allocate its heap and various
-* support structures.
+* \returns the allocator used by cppgc to allocate its heap and various
+* support structures. Returning nullptr results in using the `PageAllocator`
+* provided by `cppgc::InitializeProcess()` instead.
 */
 virtual PageAllocator* GetPageAllocator() = 0;
 
@@ -133,9 +134,10 @@ class V8_EXPORT Platform {
 * Can be called multiple times when paired with `ShutdownProcess()`.
 *
 * \param page_allocator The allocator used for maintaining meta data. Must stay
-* always alive and not change between multiple calls to InitializeProcess.
+* always alive and not change between multiple calls to InitializeProcess. If
+* no allocator is provided, a default internal version will be used.
 */
-V8_EXPORT void InitializeProcess(PageAllocator* page_allocator);
+V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr);
 
 /**
 * Must be called after destroying the last used heap. Some process-global
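With the default argument introduced above, process initialization no longer needs an explicit PageAllocator. A minimal start-up/shutdown sketch under that assumption:

#include "cppgc/platform.h"

void StartCppgc() {
  // Passing no allocator now works: an internal default PageAllocator is used.
  cppgc::InitializeProcess();
}

void StopCppgc() {
  // Must be called after the last cppgc heap has been destroyed.
  cppgc::ShutdownProcess();
}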
|
@@ -946,34 +946,6 @@
 { "name": "url", "type": "string", "description": "JavaScript script name or url." },
 { "name": "functions", "type": "array", "items": { "$ref": "FunctionCoverage" }, "description": "Functions contained in the script that has coverage data." }
 ]
-},
-{ "id": "TypeObject",
-"type": "object",
-"description": "Describes a type collected during runtime.",
-"properties": [
-{ "name": "name", "type": "string", "description": "Name of a type collected with type profiling." }
-],
-"experimental": true
-},
-{ "id": "TypeProfileEntry",
-"type": "object",
-"description": "Source offset and types for a parameter or return value.",
-"properties": [
-{ "name": "offset", "type": "integer", "description": "Source offset of the parameter or end of function for return values." },
-{ "name": "types", "type": "array", "items": {"$ref": "TypeObject"}, "description": "The types for this parameter or return value."}
-],
-"experimental": true
-},
-{
-"id": "ScriptTypeProfile",
-"type": "object",
-"description": "Type profile data collected during runtime for a JavaScript script.",
-"properties": [
-{ "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "JavaScript script id." },
-{ "name": "url", "type": "string", "description": "JavaScript script name or url." },
-{ "name": "entries", "type": "array", "items": { "$ref": "TypeProfileEntry" }, "description": "Type profile entries for parameters and return values of the functions in the script." }
-],
-"experimental": true
 }
 ],
 "commands": [
@@ -1024,24 +996,6 @@
 { "name": "result", "type": "array", "items": { "$ref": "ScriptCoverage" }, "description": "Coverage data for the current isolate." }
 ],
 "description": "Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection."
-},
-{
-"name": "startTypeProfile",
-"description": "Enable type profile.",
-"experimental": true
-},
-{
-"name": "stopTypeProfile",
-"description": "Disable type profile. Disabling releases type profile data collected so far.",
-"experimental": true
-},
-{
-"name": "takeTypeProfile",
-"returns": [
-{ "name": "result", "type": "array", "items": { "$ref": "ScriptTypeProfile" }, "description": "Type profile for all scripts since startTypeProfile() was turned on." }
-],
-"description": "Collect type profile.",
-"experimental": true
 }
 ],
 "events": [
|
@@ -458,13 +458,14 @@ domain Debugger
 # New value for breakpoints active state.
 boolean active
 
-# Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or
-# no exceptions. Initial pause on exceptions state is `none`.
+# Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions,
+# or caught exceptions, no exceptions. Initial pause on exceptions state is `none`.
 command setPauseOnExceptions
 parameters
 # Pause on exceptions mode.
 enum state
 none
+caught
 uncaught
 all
 
@@ -766,6 +767,22 @@ experimental domain HeapProfiler
 # Average sample interval in bytes. Poisson distribution is used for the intervals. The
 # default value is 32768 bytes.
 optional number samplingInterval
+# By default, the sampling heap profiler reports only objects which are
+# still alive when the profile is returned via getSamplingProfile or
+# stopSampling, which is useful for determining what functions contribute
+# the most to steady-state memory usage. This flag instructs the sampling
+# heap profiler to also include information about objects discarded by
+# major GC, which will show which functions cause large temporary memory
+# usage or long GC pauses.
+optional boolean includeObjectsCollectedByMajorGC
+# By default, the sampling heap profiler reports only objects which are
+# still alive when the profile is returned via getSamplingProfile or
+# stopSampling, which is useful for determining what functions contribute
+# the most to steady-state memory usage. This flag instructs the sampling
+# heap profiler to also include information about objects discarded by
+# minor GC, which is useful when tuning a latency-sensitive application
+# for minimal GC activity.
+optional boolean includeObjectsCollectedByMinorGC
 
 command startTrackingHeapObjects
 parameters
@@ -902,30 +919,6 @@ domain Profiler
 # Functions contained in the script that has coverage data.
 array of FunctionCoverage functions
 
-# Describes a type collected during runtime.
-experimental type TypeObject extends object
-properties
-# Name of a type collected with type profiling.
-string name
-
-# Source offset and types for a parameter or return value.
-experimental type TypeProfileEntry extends object
-properties
-# Source offset of the parameter or end of function for return values.
-integer offset
-# The types for this parameter or return value.
-array of TypeObject types
-
-# Type profile data collected during runtime for a JavaScript script.
-experimental type ScriptTypeProfile extends object
-properties
-# JavaScript script id.
-Runtime.ScriptId scriptId
-# JavaScript script name or url.
-string url
-# Type profile entries for parameters and return values of the functions in the script.
-array of TypeProfileEntry entries
-
 command disable
 
 command enable
@@ -960,9 +953,6 @@ domain Profiler
 # Monotonically increasing time (in seconds) when the coverage update was taken in the backend.
 number timestamp
 
-# Enable type profile.
-experimental command startTypeProfile
-
 command stop
 returns
 # Recorded profile.
@@ -972,9 +962,6 @@ domain Profiler
 # executing optimized code.
 command stopPreciseCoverage
 
-# Disable type profile. Disabling releases type profile data collected so far.
-experimental command stopTypeProfile
-
 # Collect coverage data for the current isolate, and resets execution counters. Precise code
 # coverage needs to have started.
 command takePreciseCoverage
@@ -984,12 +971,6 @@ domain Profiler
 # Monotonically increasing time (in seconds) when the coverage update was taken in the backend.
 number timestamp
 
-# Collect type profile.
-experimental command takeTypeProfile
-returns
-# Type profile for all scripts since startTypeProfile() was turned on.
-array of ScriptTypeProfile result
-
 event consoleProfileFinished
 parameters
 string id
|
@@ -282,12 +282,12 @@ class V8_PLATFORM_EXPORT TracingController
 const char* name, uint64_t handle) override;
 
 static const char* GetCategoryGroupName(const uint8_t* category_enabled_flag);
-#endif // !defined(V8_USE_PERFETTO)
 
 void AddTraceStateObserver(
 v8::TracingController::TraceStateObserver* observer) override;
 void RemoveTraceStateObserver(
 v8::TracingController::TraceStateObserver* observer) override;
+#endif // !defined(V8_USE_PERFETTO)
 
 void StartTracing(TraceConfig* trace_config);
 void StopTracing();
@@ -307,7 +307,6 @@ class V8_PLATFORM_EXPORT TracingController
 std::unique_ptr<base::Mutex> mutex_;
 std::unique_ptr<TraceConfig> trace_config_;
 std::atomic_bool recording_{false};
-std::unordered_set<v8::TracingController::TraceStateObserver*> observers_;
 
 #if defined(V8_USE_PERFETTO)
 std::ostream* output_stream_ = nullptr;
@@ -316,6 +315,7 @@ class V8_PLATFORM_EXPORT TracingController
 TraceEventListener* listener_for_testing_ = nullptr;
 std::unique_ptr<perfetto::TracingSession> tracing_session_;
 #else // !defined(V8_USE_PERFETTO)
+std::unordered_set<v8::TracingController::TraceStateObserver*> observers_;
 std::unique_ptr<TraceBuffer> trace_buffer_;
 #endif // !defined(V8_USE_PERFETTO)
 
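The observer set moved above only exists in the non-Perfetto build after this change; embedders keep interacting with it through the public TracingController interface. A hedged sketch of a trace state observer (the LoggingObserver type is made up):

#include "v8-platform.h"
#include <cstdio>

// Minimal observer; OnTraceEnabled/OnTraceDisabled are the two callbacks
// declared on v8::TracingController::TraceStateObserver.
class LoggingObserver : public v8::TracingController::TraceStateObserver {
 public:
  void OnTraceEnabled() override { std::puts("tracing enabled"); }
  void OnTraceDisabled() override { std::puts("tracing disabled"); }
};

void RegisterObserver(v8::TracingController* controller) {
  static LoggingObserver observer;
  controller->AddTraceStateObserver(&observer);
}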
|
@@ -53,12 +53,28 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
 */
 size_t ByteLength() const;
 
+/**
+* The maximum length (in bytes) that this backing store may grow to.
+*
+* If this backing store was created for a resizable ArrayBuffer or a growable
+* SharedArrayBuffer, it is >= ByteLength(). Otherwise it is ==
+* ByteLength().
+*/
+size_t MaxByteLength() const;
+
 /**
 * Indicates whether the backing store was created for an ArrayBuffer or
 * a SharedArrayBuffer.
 */
 bool IsShared() const;
 
+/**
+* Indicates whether the backing store was created for a resizable ArrayBuffer
+* or a growable SharedArrayBuffer, and thus may be resized by user JavaScript
+* code.
+*/
+bool IsResizableByUserJavaScript() const;
+
 /**
 * Prevent implicit instantiation of operator delete with size_t argument.
 * The size_t argument would be incorrect because ptr points to the
@@ -189,6 +205,11 @@ class V8_EXPORT ArrayBuffer : public Object {
 */
 size_t ByteLength() const;
 
+/**
+* Maximum length in bytes.
+*/
+size_t MaxByteLength() const;
+
 /**
 * Create a new ArrayBuffer. Allocate |byte_length| bytes.
 * Allocated memory will be owned by a created ArrayBuffer and
@@ -235,24 +256,65 @@ class V8_EXPORT ArrayBuffer : public Object {
 void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
 void* deleter_data);
 
+/**
+* Returns a new resizable standalone BackingStore that is allocated using the
+* array buffer allocator of the isolate. The result can be later passed to
+* ArrayBuffer::New.
+*
+* |byte_length| must be <= |max_byte_length|.
+*
+* This function is usable without an isolate. Unlike |NewBackingStore| calls
+* with an isolate, GCs cannot be triggered, and there are no
+* retries. Allocation failure will cause the function to crash with an
+* out-of-memory error.
+*/
+static std::unique_ptr<BackingStore> NewResizableBackingStore(
+size_t byte_length, size_t max_byte_length);
+
 /**
 * Returns true if this ArrayBuffer may be detached.
 */
 bool IsDetachable() const;
 
+/**
+* Returns true if this ArrayBuffer has been detached.
+*/
+bool WasDetached() const;
+
 /**
 * Detaches this ArrayBuffer and all its views (typed arrays).
 * Detaching sets the byte length of the buffer and all typed arrays to zero,
 * preventing JavaScript from ever accessing underlying backing store.
 * ArrayBuffer should have been externalized and must be detachable.
 */
+V8_DEPRECATE_SOON(
+"Use the version which takes a key parameter (passing a null handle is "
+"ok).")
 void Detach();
 
+/**
+* Detaches this ArrayBuffer and all its views (typed arrays).
+* Detaching sets the byte length of the buffer and all typed arrays to zero,
+* preventing JavaScript from ever accessing underlying backing store.
+* ArrayBuffer should have been externalized and must be detachable. Returns
+* Nothing if the key didn't pass the [[ArrayBufferDetachKey]] check,
+* Just(true) otherwise.
+*/
+V8_WARN_UNUSED_RESULT Maybe<bool> Detach(v8::Local<v8::Value> key);
+
+/**
+* Sets the ArrayBufferDetachKey.
+*/
+void SetDetachKey(v8::Local<v8::Value> key);
+
 /**
 * Get a shared pointer to the backing store of this array buffer. This
 * pointer coordinates the lifetime management of the internal storage
 * with any live ArrayBuffers on the heap, even across isolates. The embedder
 * should not attempt to manage lifetime of the storage through other means.
+*
+* The returned shared pointer will not be empty, even if the ArrayBuffer has
+* been detached. Use |WasDetached| to tell if it has been detached instead.
 */
 std::shared_ptr<BackingStore> GetBackingStore();
 
@@ -366,6 +428,11 @@ class V8_EXPORT SharedArrayBuffer : public Object {
 */
 size_t ByteLength() const;
 
+/**
+* Maximum length in bytes.
+*/
+size_t MaxByteLength() const;
+
 /**
 * Create a new SharedArrayBuffer. Allocate |byte_length| bytes.
 * Allocated memory will be owned by a created SharedArrayBuffer and
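Taken together, the additions above let an embedder create a resizable buffer and guard detachment with a key. A minimal sketch using only APIs named in this hunk; the surrounding isolate/handle-scope setup is assumed to exist elsewhere:

#include <v8.h>
#include <memory>

void ResizableBufferExample(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);

  // 1 KiB now, may grow up to 64 KiB later.
  std::unique_ptr<v8::BackingStore> store =
      v8::ArrayBuffer::NewResizableBackingStore(1024, 64 * 1024);

  v8::Local<v8::ArrayBuffer> buffer =
      v8::ArrayBuffer::New(isolate, std::move(store));

  // New introspection helpers.
  size_t current = buffer->ByteLength();
  size_t max = buffer->MaxByteLength();
  (void)current;
  (void)max;

  // Guarded detach: only callers presenting the same key may detach.
  v8::Local<v8::Value> key = v8::String::NewFromUtf8Literal(isolate, "secret");
  buffer->SetDetachKey(key);
  v8::Maybe<bool> detached = buffer->Detach(key);
  if (detached.IsNothing()) {
    // The key did not pass the [[ArrayBufferDetachKey]] check.
  }
  bool gone = buffer->WasDetached();
  (void)gone;
}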
|
@@ -237,7 +237,8 @@ using LogEventCallback = void (*)(const char* name,
 enum class CrashKeyId {
 kIsolateAddress,
 kReadonlySpaceFirstPageAddress,
-kMapSpaceFirstPageAddress,
+kMapSpaceFirstPageAddress V8_ENUM_DEPRECATE_SOON("Map space got removed"),
+kOldSpaceFirstPageAddress,
 kCodeRangeBaseAddress,
 kCodeSpaceFirstPageAddress,
 kDumpType,
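These enum values reach the embedder through the crash-key callback installed on the isolate. A sketch, assuming the AddCrashKeyCallback signature from v8-callbacks.h (CrashKeyId id plus a string value); the Report function is made up:

#include <v8.h>
#include <string>

// Hypothetical sink for crash annotations (e.g. a crash reporter).
void Report(const char* name, const std::string& value);

void OnCrashKey(v8::CrashKeyId id, const std::string& value) {
  switch (id) {
    case v8::CrashKeyId::kOldSpaceFirstPageAddress:  // new value in this change
      Report("v8_old_space_first_page", value);
      break;
    default:
      break;
  }
}

void InstallCrashKeys(v8::Isolate* isolate) {
  isolate->SetAddCrashKeyCallback(OnCrashKey);
}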
|
@@ -169,6 +169,9 @@ class V8_EXPORT Context : public Data {
 /** Returns the microtask queue associated with a current context. */
 MicrotaskQueue* GetMicrotaskQueue();
 
+/** Sets the microtask queue associated with the current context. */
+void SetMicrotaskQueue(MicrotaskQueue* queue);
+
 /**
 * The field at kDebugIdIndex used to be reserved for the inspector.
 * It now serves no purpose.
@@ -290,6 +293,7 @@ class V8_EXPORT Context : public Data {
 Local<Function> after_hook,
 Local<Function> resolve_hook);
 
+bool HasTemplateLiteralObject(Local<Value> object);
 /**
 * Stack-allocated class which sets the execution context for all
 * operations executed within a local scope.
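A short sketch of how the new setter pairs with the existing getter when an embedder wants a dedicated microtask queue per context; this is a hedged example, not part of the change, and error handling is omitted:

#include <v8.h>
#include <memory>

v8::Local<v8::Context> NewContextWithOwnQueue(
    v8::Isolate* isolate, std::unique_ptr<v8::MicrotaskQueue>* out_queue) {
  // A queue owned by the embedder; it must outlive the context that uses it.
  *out_queue =
      v8::MicrotaskQueue::New(isolate, v8::MicrotasksPolicy::kExplicit);
  v8::Local<v8::Context> context = v8::Context::New(isolate);
  context->SetMicrotaskQueue(out_queue->get());  // new API in this change
  return context;
}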
@ -103,6 +103,10 @@ struct CppHeapCreateParams {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* A heap for allocating managed C++ objects.
|
* A heap for allocating managed C++ objects.
|
||||||
|
*
|
||||||
|
* Similar to v8::Isolate, the heap may only be accessed from one thread at a
|
||||||
|
* time. The heap may be used from different threads using the
|
||||||
|
* v8::Locker/v8::Unlocker APIs which is different from generic Oilpan.
|
||||||
*/
|
*/
|
||||||
class V8_EXPORT CppHeap {
|
class V8_EXPORT CppHeap {
|
||||||
public:
|
public:
|
||||||
|
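A hedged sketch of what the new thread-affinity comment implies for embedders that touch an isolate's CppHeap from more than one thread; the isolate/heap setup is assumed elsewhere and the operation shown is just an example:

#include <v8.h>
#include "v8-cppgc.h"
#include "cppgc/common.h"

// Touch the CppHeap attached to |isolate| from a non-owning thread. Like the
// isolate itself, the heap may only be used by one thread at a time, so the
// access is bracketed by v8::Locker.
void UseCppHeapFromOtherThread(v8::Isolate* isolate) {
  v8::Locker locker(isolate);
  v8::Isolate::Scope isolate_scope(isolate);
  v8::CppHeap* heap = isolate->GetCppHeap();
  if (heap != nullptr) {
    heap->CollectGarbageForTesting(
        cppgc::EmbedderStackState::kNoHeapPointers);  // example operation
  }
}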
@ -53,7 +53,7 @@ class V8_EXPORT Data {
|
|||||||
bool IsContext() const;
|
bool IsContext() const;
|
||||||
|
|
||||||
private:
|
private:
|
||||||
Data();
|
Data() = delete;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -72,7 +72,7 @@ class V8_EXPORT EmbedderRootsHandler {
|
|||||||
class V8_EXPORT
|
class V8_EXPORT
|
||||||
// GCC doesn't like combining __attribute__(()) with [[deprecated]].
|
// GCC doesn't like combining __attribute__(()) with [[deprecated]].
|
||||||
#ifdef __clang__
|
#ifdef __clang__
|
||||||
V8_DEPRECATE_SOON("Use CppHeap when working with v8::TracedReference.")
|
V8_DEPRECATED("Use CppHeap when working with v8::TracedReference.")
|
||||||
#endif // __clang__
|
#endif // __clang__
|
||||||
EmbedderHeapTracer {
|
EmbedderHeapTracer {
|
||||||
public:
|
public:
|
||||||
|
@ -247,7 +247,9 @@ class CTypeInfo {
|
|||||||
kUint64,
|
kUint64,
|
||||||
kFloat32,
|
kFloat32,
|
||||||
kFloat64,
|
kFloat64,
|
||||||
|
kPointer,
|
||||||
kV8Value,
|
kV8Value,
|
||||||
|
kSeqOneByteString,
|
||||||
kApiObject, // This will be deprecated once all users have
|
kApiObject, // This will be deprecated once all users have
|
||||||
// migrated from v8::ApiObject to v8::Local<v8::Value>.
|
// migrated from v8::ApiObject to v8::Local<v8::Value>.
|
||||||
kAny, // This is added to enable untyped representation of fast
|
kAny, // This is added to enable untyped representation of fast
|
||||||
@ -379,6 +381,11 @@ struct FastApiArrayBuffer {
|
|||||||
size_t byte_length;
|
size_t byte_length;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
struct FastOneByteString {
|
||||||
|
const char* data;
|
||||||
|
uint32_t length;
|
||||||
|
};
|
||||||
|
|
||||||
class V8_EXPORT CFunctionInfo {
|
class V8_EXPORT CFunctionInfo {
|
||||||
public:
|
public:
|
||||||
// Construct a struct to hold a CFunction's type information.
|
// Construct a struct to hold a CFunction's type information.
|
||||||
@ -429,6 +436,7 @@ struct AnyCType {
|
|||||||
uint64_t uint64_value;
|
uint64_t uint64_value;
|
||||||
float float_value;
|
float float_value;
|
||||||
double double_value;
|
double double_value;
|
||||||
|
void* pointer_value;
|
||||||
Local<Object> object_value;
|
Local<Object> object_value;
|
||||||
Local<Array> sequence_value;
|
Local<Array> sequence_value;
|
||||||
const FastApiTypedArray<uint8_t>* uint8_ta_value;
|
const FastApiTypedArray<uint8_t>* uint8_ta_value;
|
||||||
@ -438,6 +446,7 @@ struct AnyCType {
|
|||||||
const FastApiTypedArray<uint64_t>* uint64_ta_value;
|
const FastApiTypedArray<uint64_t>* uint64_ta_value;
|
||||||
const FastApiTypedArray<float>* float_ta_value;
|
const FastApiTypedArray<float>* float_ta_value;
|
||||||
const FastApiTypedArray<double>* double_ta_value;
|
const FastApiTypedArray<double>* double_ta_value;
|
||||||
|
const FastOneByteString* string_value;
|
||||||
FastApiCallbackOptions* options_value;
|
FastApiCallbackOptions* options_value;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
@ -613,8 +622,9 @@ class CFunctionInfoImpl : public CFunctionInfo {
|
|||||||
kReturnType == CTypeInfo::Type::kUint32 ||
|
kReturnType == CTypeInfo::Type::kUint32 ||
|
||||||
kReturnType == CTypeInfo::Type::kFloat32 ||
|
kReturnType == CTypeInfo::Type::kFloat32 ||
|
||||||
kReturnType == CTypeInfo::Type::kFloat64 ||
|
kReturnType == CTypeInfo::Type::kFloat64 ||
|
||||||
|
kReturnType == CTypeInfo::Type::kPointer ||
|
||||||
kReturnType == CTypeInfo::Type::kAny,
|
kReturnType == CTypeInfo::Type::kAny,
|
||||||
"64-bit int and api object values are not currently "
|
"64-bit int, string and api object values are not currently "
|
||||||
"supported return types.");
|
"supported return types.");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -651,13 +661,14 @@ struct CTypeInfoTraits {};
|
|||||||
|
|
||||||
#define PRIMITIVE_C_TYPES(V) \
|
#define PRIMITIVE_C_TYPES(V) \
|
||||||
V(bool, kBool) \
|
V(bool, kBool) \
|
||||||
|
V(uint8_t, kUint8) \
|
||||||
V(int32_t, kInt32) \
|
V(int32_t, kInt32) \
|
||||||
V(uint32_t, kUint32) \
|
V(uint32_t, kUint32) \
|
||||||
V(int64_t, kInt64) \
|
V(int64_t, kInt64) \
|
||||||
V(uint64_t, kUint64) \
|
V(uint64_t, kUint64) \
|
||||||
V(float, kFloat32) \
|
V(float, kFloat32) \
|
||||||
V(double, kFloat64) \
|
V(double, kFloat64) \
|
||||||
V(uint8_t, kUint8)
|
V(void*, kPointer)
|
||||||
|
|
||||||
// Same as above, but includes deprecated types for compatibility.
|
// Same as above, but includes deprecated types for compatibility.
|
||||||
#define ALL_C_TYPES(V) \
|
#define ALL_C_TYPES(V) \
|
||||||
@ -691,13 +702,13 @@ PRIMITIVE_C_TYPES(DEFINE_TYPE_INFO_TRAITS)
|
|||||||
};
|
};
|
||||||
|
|
||||||
#define TYPED_ARRAY_C_TYPES(V) \
|
#define TYPED_ARRAY_C_TYPES(V) \
|
||||||
|
V(uint8_t, kUint8) \
|
||||||
V(int32_t, kInt32) \
|
V(int32_t, kInt32) \
|
||||||
V(uint32_t, kUint32) \
|
V(uint32_t, kUint32) \
|
||||||
V(int64_t, kInt64) \
|
V(int64_t, kInt64) \
|
||||||
V(uint64_t, kUint64) \
|
V(uint64_t, kUint64) \
|
||||||
V(float, kFloat32) \
|
V(float, kFloat32) \
|
||||||
V(double, kFloat64) \
|
V(double, kFloat64)
|
||||||
V(uint8_t, kUint8)
|
|
||||||
|
|
||||||
TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA)
|
TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA)
|
||||||
|
|
||||||
@ -735,6 +746,18 @@ struct TypeInfoHelper<FastApiCallbackOptions&> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
template <>
|
||||||
|
struct TypeInfoHelper<const FastOneByteString&> {
|
||||||
|
static constexpr CTypeInfo::Flags Flags() { return CTypeInfo::Flags::kNone; }
|
||||||
|
|
||||||
|
static constexpr CTypeInfo::Type Type() {
|
||||||
|
return CTypeInfo::Type::kSeqOneByteString;
|
||||||
|
}
|
||||||
|
static constexpr CTypeInfo::SequenceType SequenceType() {
|
||||||
|
return CTypeInfo::SequenceType::kScalar;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
#define STATIC_ASSERT_IMPLIES(COND, ASSERTION, MSG) \
|
#define STATIC_ASSERT_IMPLIES(COND, ASSERTION, MSG) \
|
||||||
static_assert(((COND) == 0) || (ASSERTION), MSG)
|
static_assert(((COND) == 0) || (ASSERTION), MSG)
|
||||||
|
|
||||||
@ -32,19 +32,19 @@ namespace Debugger {
 namespace API {
 class SearchMatch;
 }
-}
+}  // namespace Debugger
 namespace Runtime {
 namespace API {
 class RemoteObject;
 class StackTrace;
 class StackTraceId;
-}
-}
+}  // namespace API
+}  // namespace Runtime
 namespace Schema {
 namespace API {
 class Domain;
 }
-}
+}  // namespace Schema
 }  // namespace protocol
 
 class V8_EXPORT StringView {
@ -134,6 +134,13 @@ class V8_EXPORT V8DebuggerId {
   int64_t m_second = 0;
 };
 
+struct V8_EXPORT V8StackFrame {
+  StringView sourceURL;
+  StringView functionName;
+  int lineNumber;
+  int columnNumber;
+};
+
 class V8_EXPORT V8StackTrace {
  public:
   virtual StringView firstNonEmptySourceURL() const = 0;
@ -151,6 +158,8 @@ class V8_EXPORT V8StackTrace {
 
   // Safe to pass between threads, drops async chain.
   virtual std::unique_ptr<V8StackTrace> clone() = 0;
+
+  virtual std::vector<V8StackFrame> frames() const = 0;
 };
 
 class V8_EXPORT V8InspectorSession {
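V8StackTrace::frames() above exposes the captured frames as plain V8StackFrame structs. A rough sketch of walking them on the embedder side; it assumes the source URLs are 8-bit and simply prints each frame:

    #include <cstdio>
    #include "include/v8-inspector.h"

    void DumpStack(const v8_inspector::V8StackTrace& stack) {
      for (const v8_inspector::V8StackFrame& frame : stack.frames()) {
        // StringView can be 8-bit or 16-bit; assume 8-bit for brevity.
        std::printf("%.*s:%d:%d\n",
                    static_cast<int>(frame.sourceURL.length()),
                    reinterpret_cast<const char*>(frame.sourceURL.characters8()),
                    frame.lineNumber, frame.columnNumber);
      }
    }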
@ -203,6 +212,9 @@ class V8_EXPORT V8InspectorSession {
|
|||||||
std::unique_ptr<StringBuffer>* objectGroup) = 0;
|
std::unique_ptr<StringBuffer>* objectGroup) = 0;
|
||||||
virtual void releaseObjectGroup(StringView) = 0;
|
virtual void releaseObjectGroup(StringView) = 0;
|
||||||
virtual void triggerPreciseCoverageDeltaUpdate(StringView occasion) = 0;
|
virtual void triggerPreciseCoverageDeltaUpdate(StringView occasion) = 0;
|
||||||
|
|
||||||
|
// Prepare for shutdown (disables debugger pausing, etc.).
|
||||||
|
virtual void stop() = 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
class V8_EXPORT WebDriverValue {
|
class V8_EXPORT WebDriverValue {
|
||||||
@ -365,9 +377,12 @@ class V8_EXPORT V8Inspector {
|
|||||||
virtual void flushProtocolNotifications() = 0;
|
virtual void flushProtocolNotifications() = 0;
|
||||||
};
|
};
|
||||||
enum ClientTrustLevel { kUntrusted, kFullyTrusted };
|
enum ClientTrustLevel { kUntrusted, kFullyTrusted };
|
||||||
|
enum SessionPauseState { kWaitingForDebugger, kNotWaitingForDebugger };
|
||||||
|
// TODO(chromium:1352175): remove default value once downstream change lands.
|
||||||
virtual std::unique_ptr<V8InspectorSession> connect(
|
virtual std::unique_ptr<V8InspectorSession> connect(
|
||||||
int contextGroupId, Channel*, StringView state,
|
int contextGroupId, Channel*, StringView state,
|
||||||
ClientTrustLevel client_trust_level) {
|
ClientTrustLevel client_trust_level,
|
||||||
|
SessionPauseState = kNotWaitingForDebugger) {
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
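With the extra SessionPauseState parameter, an embedder can open a session that starts out paused and waiting for the debugger. A hedged sketch, assuming `inspector` and `channel` already exist and passing an empty state blob:

    // `inspector` is a v8_inspector::V8Inspector*, `channel` a
    // V8Inspector::Channel* (both assumed to be set up elsewhere).
    std::unique_ptr<v8_inspector::V8InspectorSession> session =
        inspector->connect(
            /*contextGroupId=*/1, channel, v8_inspector::StringView(),
            v8_inspector::V8Inspector::ClientTrustLevel::kFullyTrusted,
            v8_inspector::V8Inspector::SessionPauseState::kWaitingForDebugger);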
@ -52,6 +52,7 @@ const int kHeapObjectTag = 1;
 const int kWeakHeapObjectTag = 3;
 const int kHeapObjectTagSize = 2;
 const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
+const intptr_t kHeapObjectReferenceTagMask = 1 << (kHeapObjectTagSize - 1);
 
 // Tag information for fowarding pointers stored in object headers.
 // 0b00 at the lowest 2 bits in the header indicates that the map word is a
@ -181,7 +182,7 @@ constexpr size_t kSandboxSizeLog2 = 37;  // 128 GB
 #else
 // Everywhere else use a 1TB sandbox.
 constexpr size_t kSandboxSizeLog2 = 40;  // 1 TB
-#endif  // V8_OS_ANDROID
+#endif  // V8_TARGET_OS_ANDROID
 constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
 
 // Required alignment of the sandbox. For simplicity, we require the
@ -222,6 +223,21 @@ static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
               "The minimum reservation size for a sandbox must be larger than "
               "the pointer compression cage contained within it.");
 
+// The maximum buffer size allowed inside the sandbox. This is mostly dependent
+// on the size of the guard regions around the sandbox: an attacker must not be
+// able to construct a buffer that appears larger than the guard regions and
+// thereby "reach out of" the sandbox.
+constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
+static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
+              "The maximum allowed buffer size must not be larger than the "
+              "sandbox's guard regions");
+
+constexpr size_t kBoundedSizeShift = 29;
+static_assert(1ULL << (64 - kBoundedSizeShift) ==
+                  kMaxSafeBufferSizeForSandbox + 1,
+              "The maximum size of a BoundedSize must be synchronized with the "
+              "kMaxSafeBufferSizeForSandbox");
+
 #endif  // V8_ENABLE_SANDBOX
 
 #ifdef V8_COMPRESS_POINTERS
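The two static_asserts pin down the arithmetic behind the new constants: with kBoundedSizeShift = 29 there are 64 - 29 = 35 value bits, and 2^35 bytes is 32 GB, so the largest representable BoundedSize is 32 GB - 1, exactly kMaxSafeBufferSizeForSandbox. A standalone restatement of that relationship, independent of the V8 headers:

    #include <cstdint>

    constexpr uint64_t GB = 1024ull * 1024 * 1024;
    constexpr uint64_t kMaxSafeBufferSizeForSandbox = 32 * GB - 1;
    constexpr unsigned kBoundedSizeShift = 29;

    // 64 - 29 = 35 value bits; 1 << 35 bytes == 32 GB.
    static_assert((uint64_t{1} << (64 - kBoundedSizeShift)) == 32 * GB, "");
    static_assert((uint64_t{1} << (64 - kBoundedSizeShift)) ==
                      kMaxSafeBufferSizeForSandbox + 1, "");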
@ -231,7 +247,7 @@ static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
|
|||||||
// size allows omitting bounds checks on table accesses if the indices are
|
// size allows omitting bounds checks on table accesses if the indices are
|
||||||
// guaranteed (e.g. through shifting) to be below the maximum index. This
|
// guaranteed (e.g. through shifting) to be below the maximum index. This
|
||||||
// value must be a power of two.
|
// value must be a power of two.
|
||||||
static const size_t kExternalPointerTableReservationSize = 128 * MB;
|
static const size_t kExternalPointerTableReservationSize = 512 * MB;
|
||||||
|
|
||||||
// The maximum number of entries in an external pointer table.
|
// The maximum number of entries in an external pointer table.
|
||||||
static const size_t kMaxExternalPointers =
|
static const size_t kMaxExternalPointers =
|
||||||
@ -240,7 +256,7 @@ static const size_t kMaxExternalPointers =
|
|||||||
// The external pointer table indices stored in HeapObjects as external
|
// The external pointer table indices stored in HeapObjects as external
|
||||||
// pointers are shifted to the left by this amount to guarantee that they are
|
// pointers are shifted to the left by this amount to guarantee that they are
|
||||||
// smaller than the maximum table size.
|
// smaller than the maximum table size.
|
||||||
static const uint32_t kExternalPointerIndexShift = 8;
|
static const uint32_t kExternalPointerIndexShift = 6;
|
||||||
static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
|
static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
|
||||||
"kExternalPointerTableReservationSize and "
|
"kExternalPointerTableReservationSize and "
|
||||||
"kExternalPointerIndexShift don't match");
|
"kExternalPointerIndexShift don't match");
|
||||||
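The larger reservation (128 MB to 512 MB) and the smaller index shift (8 to 6) move in lockstep: assuming 8-byte table entries on a 64-bit build, 512 MB holds 2^26 entries, and 1 << (32 - 6) is also 2^26, which is what the unchanged static_assert keeps enforcing. A quick standalone restatement of that arithmetic:

    #include <cstddef>
    #include <cstdint>

    constexpr std::size_t MB = 1024 * 1024;
    constexpr std::size_t kReservation = 512 * MB;        // new reservation size
    constexpr std::size_t kEntrySize = sizeof(uint64_t);  // 8-byte entries assumed
    constexpr std::size_t kMaxEntries = kReservation / kEntrySize;  // 2^26

    static_assert(kMaxEntries == (std::size_t{1} << 26), "");
    static_assert((std::size_t{1} << (32 - 6)) == kMaxEntries,
                  "an index shift of 6 leaves exactly enough index bits");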
@ -329,6 +345,14 @@ using ExternalPointer_t = Address;
|
|||||||
// that the Embedder is not using this byte (really only this one bit) for any
|
// that the Embedder is not using this byte (really only this one bit) for any
|
||||||
// other purpose. This bit also does not collide with the memory tagging
|
// other purpose. This bit also does not collide with the memory tagging
|
||||||
// extension (MTE) which would use bits [56, 60).
|
// extension (MTE) which would use bits [56, 60).
|
||||||
|
//
|
||||||
|
// External pointer tables are also available even when the sandbox is off but
|
||||||
|
// pointer compression is on. In that case, the mechanism can be used to easy
|
||||||
|
// alignment requirements as it turns unaligned 64-bit raw pointers into
|
||||||
|
// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
|
||||||
|
// for this purpose, instead of using the ExternalPointer accessors one needs to
|
||||||
|
// use ExternalPointerHandles directly and use them to access the pointers in an
|
||||||
|
// ExternalPointerTable.
|
||||||
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
|
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
|
||||||
constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
|
constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
|
||||||
constexpr uint64_t kExternalPointerTagShift = 48;
|
constexpr uint64_t kExternalPointerTagShift = 48;
|
||||||
@ -351,70 +375,58 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
     0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
     0b11100010, 0b11100100, 0b11101000, 0b11110000};
 
+#define TAG(i) \
+  ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
+   kExternalPointerMarkBit)
+
 // clang-format off
-// New entries should be added with state "sandboxed".
 // When adding new tags, please ensure that the code using these tags is
 // "substitution-safe", i.e. still operate safely if external pointers of the
 // same type are swapped by an attacker. See comment above for more details.
-#define TAG(i) (kAllExternalPointerTypeTags[i])
 
 // Shared external pointers are owned by the shared Isolate and stored in the
 // shared external pointer table associated with that Isolate, where they can
 // be accessed from multiple threads at the same time. The objects referenced
 // in this way must therefore always be thread-safe.
 #define SHARED_EXTERNAL_POINTER_TAGS(V) \
-  V(kFirstSharedTag, sandboxed, TAG(0)) \
-  V(kWaiterQueueNodeTag, sandboxed, TAG(0)) \
-  V(kExternalStringResourceTag, sandboxed, TAG(1)) \
-  V(kExternalStringResourceDataTag, sandboxed, TAG(2)) \
-  V(kLastSharedTag, sandboxed, TAG(2))
+  V(kFirstSharedTag, TAG(0)) \
+  V(kWaiterQueueNodeTag, TAG(0)) \
+  V(kExternalStringResourceTag, TAG(1)) \
+  V(kExternalStringResourceDataTag, TAG(2)) \
+  V(kLastSharedTag, TAG(2))
 
 // External pointers using these tags are kept in a per-Isolate external
 // pointer table and can only be accessed when this Isolate is active.
 #define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
-  V(kForeignForeignAddressTag, sandboxed, TAG(10)) \
-  V(kNativeContextMicrotaskQueueTag, sandboxed, TAG(11)) \
-  V(kEmbedderDataSlotPayloadTag, sandboxed, TAG(12)) \
-  V(kExternalObjectValueTag, sandboxed, TAG(13)) \
-  V(kCallHandlerInfoCallbackTag, sandboxed, TAG(14)) \
-  V(kAccessorInfoGetterTag, sandboxed, TAG(15)) \
-  V(kAccessorInfoSetterTag, sandboxed, TAG(16)) \
-  V(kWasmInternalFunctionCallTargetTag, sandboxed, TAG(17)) \
-  V(kWasmTypeInfoNativeTypeTag, sandboxed, TAG(18)) \
-  V(kWasmExportedFunctionDataSignatureTag, sandboxed, TAG(19)) \
-  V(kWasmContinuationJmpbufTag, sandboxed, TAG(20))
+  V(kForeignForeignAddressTag, TAG(10)) \
+  V(kNativeContextMicrotaskQueueTag, TAG(11)) \
+  V(kEmbedderDataSlotPayloadTag, TAG(12)) \
+  /* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
+  /* it is the Embedder's responsibility to ensure type safety (against */ \
+  /* substitution) and lifetime validity of these objects. */ \
+  V(kExternalObjectValueTag, TAG(13)) \
+  V(kCallHandlerInfoCallbackTag, TAG(14)) \
+  V(kAccessorInfoGetterTag, TAG(15)) \
+  V(kAccessorInfoSetterTag, TAG(16)) \
+  V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
+  V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
+  V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
+  V(kWasmContinuationJmpbufTag, TAG(20)) \
+  V(kArrayBufferExtensionTag, TAG(21))
 
 // All external pointer tags.
 #define ALL_EXTERNAL_POINTER_TAGS(V) \
   SHARED_EXTERNAL_POINTER_TAGS(V) \
   PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
 
-// When the sandbox is enabled, external pointers marked as "sandboxed" above
-// use the external pointer table (i.e. are sandboxed). This allows a gradual
-// rollout of external pointer sandboxing. If the sandbox is off, no external
-// pointers are sandboxed.
-//
-// Sandboxed external pointer tags are available when compressing pointers even
-// when the sandbox is off. Some tags (e.g. kWaiterQueueNodeTag) are used
-// manually with the external pointer table even when the sandbox is off to ease
-// alignment requirements.
-#define sandboxed(X) (X << kExternalPointerTagShift) | kExternalPointerMarkBit
-#define unsandboxed(X) kUnsandboxedExternalPointerTag
-#if defined(V8_COMPRESS_POINTERS)
-#define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = State(Bits),
-#else
-#define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = unsandboxed(Bits),
-#endif
+#define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
 
 #define MAKE_TAG(HasMarkBit, TypeTag) \
   ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
    (HasMarkBit ? kExternalPointerMarkBit : 0))
 enum ExternalPointerTag : uint64_t {
   // Empty tag value. Mostly used as placeholder.
   kExternalPointerNullTag = MAKE_TAG(0, 0b00000000),
-  // Tag to use for unsandboxed external pointers, which are still stored as
-  // raw pointers on the heap.
-  kUnsandboxedExternalPointerTag = MAKE_TAG(0, 0b00000000),
   // External pointer tag that will match any external pointer. Use with care!
   kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
   // The free entry tag has all type bits set so every type check with a
@ -428,20 +440,11 @@ enum ExternalPointerTag : uint64_t {
 };
 
 #undef MAKE_TAG
-#undef unsandboxed
-#undef sandboxed
 #undef TAG
 #undef EXTERNAL_POINTER_TAG_ENUM
 
 // clang-format on
 
-// True if the external pointer is sandboxed and so must be referenced through
-// an external pointer table.
-V8_INLINE static constexpr bool IsSandboxedExternalPointerType(
-    ExternalPointerTag tag) {
-  return tag != kUnsandboxedExternalPointerTag;
-}
-
 // True if the external pointer must be accessed from the shared isolate's
 // external pointer table.
 V8_INLINE static constexpr bool IsSharedExternalPointerType(
@ -451,11 +454,9 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType(
 
 // Sanity checks.
 #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
-  static_assert(!IsSandboxedExternalPointerType(Tag) || \
-                IsSharedExternalPointerType(Tag));
+  static_assert(IsSharedExternalPointerType(Tag));
 #define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
-  static_assert(!IsSandboxedExternalPointerType(Tag) || \
-                !IsSharedExternalPointerType(Tag));
+  static_assert(!IsSharedExternalPointerType(Tag));
 
 SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
 PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
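With the sandboxed/unsandboxed distinction gone, every tag is now composed the same way: eight type bits go into bits [48, 56) and the mark bit is bit 62, which is what the 0x40ff... tag mask above encodes. A small standalone illustration of that layout (constants restated locally, not taken from the header):

    #include <cstdint>

    constexpr uint64_t kMarkBit = uint64_t{1} << 62;
    constexpr uint64_t kTagShift = 48;
    constexpr uint64_t kTagMask = 0x40ff000000000000;

    // Compose a tag from 8 type bits, the way TAG(i)/MAKE_TAG do.
    constexpr uint64_t MakeTag(uint8_t type_bits) {
      return (uint64_t{type_bits} << kTagShift) | kMarkBit;
    }

    // Every composed tag lies within the tag mask and carries the mark bit.
    static_assert((MakeTag(0b11110000) & ~kTagMask) == 0, "");
    static_assert((MakeTag(0b00010011) & kMarkBit) != 0, "");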
@ -519,10 +520,6 @@ class Internals {
 
   // ExternalPointerTable layout guarantees.
   static const int kExternalPointerTableBufferOffset = 0;
-  static const int kExternalPointerTableCapacityOffset =
-      kExternalPointerTableBufferOffset + kApiSystemPointerSize;
-  static const int kExternalPointerTableFreelistHeadOffset =
-      kExternalPointerTableCapacityOffset + kApiInt32Size;
   static const int kExternalPointerTableSize = 4 * kApiSystemPointerSize;
 
   // IsolateData layout guarantees.
@ -532,7 +529,7 @@ class Internals {
   static const int kVariousBooleanFlagsOffset =
       kIsolateStackGuardOffset + kStackGuardSize;
   static const int kBuiltinTier0EntryTableOffset =
-      kVariousBooleanFlagsOffset + kApiSystemPointerSize;
+      kVariousBooleanFlagsOffset + 8;
   static const int kBuiltinTier0TableOffset =
       kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
   static const int kIsolateEmbedderDataOffset =
@ -569,6 +566,8 @@ class Internals {
   static const int kNodeStateMask = 0x3;
   static const int kNodeStateIsWeakValue = 2;
 
+  static const int kTracedNodeClassIdOffset = kApiSystemPointerSize;
+
   static const int kFirstNonstringType = 0x80;
   static const int kOddballType = 0x83;
   static const int kForeignType = 0xcc;
@ -778,11 +777,10 @@ class Internals {
   V8_INLINE static internal::Address ReadExternalPointerField(
       v8::Isolate* isolate, internal::Address heap_object_ptr, int offset) {
 #ifdef V8_ENABLE_SANDBOX
-    if (IsSandboxedExternalPointerType(tag)) {
+    static_assert(tag != kExternalPointerNullTag);
     // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
     // it can be inlined and doesn't require an additional call.
-    internal::Address* table =
-        IsSharedExternalPointerType(tag)
+    internal::Address* table = IsSharedExternalPointerType(tag)
             ? GetSharedExternalPointerTableBase(isolate)
             : GetExternalPointerTableBase(isolate);
     internal::ExternalPointerHandle handle =
@ -793,9 +791,9 @@ class Internals {
     internal::Address entry =
         std::atomic_load_explicit(ptr, std::memory_order_relaxed);
     return entry & ~tag;
-    }
-#endif
+#else
     return ReadRawField<Address>(heap_object_ptr, offset);
+#endif  // V8_ENABLE_SANDBOX
   }
 
 #ifdef V8_COMPRESS_POINTERS
@ -233,7 +233,7 @@ class V8_EXPORT Isolate {
    * Explicitly specify a startup snapshot blob. The embedder owns the blob.
    * The embedder *must* ensure that the snapshot is from a trusted source.
    */
-  StartupData* snapshot_blob = nullptr;
+  const StartupData* snapshot_blob = nullptr;
 
   /**
    * Enables the host application to provide a mechanism for recording
@ -535,6 +535,8 @@ class V8_EXPORT Isolate {
     kFunctionPrototypeArguments = 113,
     kFunctionPrototypeCaller = 114,
     kTurboFanOsrCompileStarted = 115,
+    kAsyncStackTaggingCreateTaskCall = 116,
+    kDurationFormat = 117,
 
     // If you add new values here, you'll also need to update Chromium's:
     // web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to
@ -952,22 +954,20 @@ class V8_EXPORT Isolate {
    * Attaches a managed C++ heap as an extension to the JavaScript heap. The
    * embedder maintains ownership of the CppHeap. At most one C++ heap can be
    * attached to V8.
+   *
    * AttachCppHeap cannot be used simultaneously with SetEmbedderHeapTracer.
    *
-   * This is an experimental feature and may still change significantly.
+   * Multi-threaded use requires the use of v8::Locker/v8::Unlocker, see
+   * CppHeap.
    */
   void AttachCppHeap(CppHeap*);
 
   /**
   * Detaches a managed C++ heap if one was attached using `AttachCppHeap()`.
-   *
-   * This is an experimental feature and may still change significantly.
   */
  void DetachCppHeap();
 
  /**
-   * This is an experimental feature and may still change significantly.
  * \returns the C++ heap managed by V8. Only available if such a heap has been
  *  attached using `AttachCppHeap()`.
  */
@ -1524,8 +1524,10 @@ class V8_EXPORT Isolate {
 
   void SetWasmLoadSourceMapCallback(WasmLoadSourceMapCallback callback);
 
+  V8_DEPRECATED("Wasm SIMD is always enabled")
   void SetWasmSimdEnabledCallback(WasmSimdEnabledCallback callback);
 
+  V8_DEPRECATED("Wasm exceptions are always enabled")
   void SetWasmExceptionsEnabledCallback(WasmExceptionsEnabledCallback callback);
 
   void SetSharedArrayBufferConstructorEnabledCallback(
@ -12,6 +12,7 @@
 
 #include "v8-internal.h"      // NOLINT(build/include_directory)
 #include "v8-local-handle.h"  // NOLINT(build/include_directory)
+#include "v8config.h"         // NOLINT(build/include_directory)
 
 namespace v8 {
 
@ -96,16 +97,88 @@ struct GarbageCollectionYoungCycle {
 };
 
 struct WasmModuleDecoded {
+  WasmModuleDecoded() = default;
+  WasmModuleDecoded(bool async, bool streamed, bool success,
+                    size_t module_size_in_bytes, size_t function_count,
+                    int64_t wall_clock_duration_in_us)
+      : async(async),
+        streamed(streamed),
+        success(success),
+        module_size_in_bytes(module_size_in_bytes),
+        function_count(function_count),
+        wall_clock_duration_in_us(wall_clock_duration_in_us) {}
+
+  V8_DEPRECATE_SOON("Use the version without cpu_duration_in_us")
+  WasmModuleDecoded(bool async, bool streamed, bool success,
+                    size_t module_size_in_bytes, size_t function_count,
+                    int64_t wall_clock_duration_in_us,
+                    int64_t cpu_duration_in_us)
+      : async(async),
+        streamed(streamed),
+        success(success),
+        module_size_in_bytes(module_size_in_bytes),
+        function_count(function_count),
+        wall_clock_duration_in_us(wall_clock_duration_in_us),
+        cpu_duration_in_us(cpu_duration_in_us) {}
+
+  START_ALLOW_USE_DEPRECATED()
+  // Copy constructor and copy assignment operator are allowed to copy the
+  // {cpu_duration_in_us} field.
+  WasmModuleDecoded(const WasmModuleDecoded&) = default;
+  WasmModuleDecoded& operator=(const WasmModuleDecoded&) = default;
+  END_ALLOW_USE_DEPRECATED()
+
   bool async = false;
   bool streamed = false;
   bool success = false;
   size_t module_size_in_bytes = 0;
   size_t function_count = 0;
   int64_t wall_clock_duration_in_us = -1;
+  V8_DEPRECATE_SOON("We do not collect cpu times any more")
   int64_t cpu_duration_in_us = -1;
 };
 
 struct WasmModuleCompiled {
+  WasmModuleCompiled() = default;
+
+  WasmModuleCompiled(bool async, bool streamed, bool cached, bool deserialized,
+                     bool lazy, bool success, size_t code_size_in_bytes,
+                     size_t liftoff_bailout_count,
+                     int64_t wall_clock_duration_in_us)
+      : async(async),
+        streamed(streamed),
+        cached(cached),
+        deserialized(deserialized),
+        lazy(lazy),
+        success(success),
+        code_size_in_bytes(code_size_in_bytes),
+        liftoff_bailout_count(liftoff_bailout_count),
+        wall_clock_duration_in_us(wall_clock_duration_in_us) {}
+
+  V8_DEPRECATE_SOON("Use the version without cpu_duration_in_us")
+  WasmModuleCompiled(bool async, bool streamed, bool cached, bool deserialized,
+                     bool lazy, bool success, size_t code_size_in_bytes,
+                     size_t liftoff_bailout_count,
+                     int64_t wall_clock_duration_in_us,
+                     int64_t cpu_duration_in_us)
+      : async(async),
+        streamed(streamed),
+        cached(cached),
+        deserialized(deserialized),
+        lazy(lazy),
+        success(success),
+        code_size_in_bytes(code_size_in_bytes),
+        liftoff_bailout_count(liftoff_bailout_count),
+        wall_clock_duration_in_us(wall_clock_duration_in_us),
+        cpu_duration_in_us(cpu_duration_in_us) {}
+
+  START_ALLOW_USE_DEPRECATED()
+  // Copy constructor and copy assignment operator are allowed to copy the
+  // {cpu_duration_in_us} field.
+  WasmModuleCompiled(const WasmModuleCompiled&) = default;
+  WasmModuleCompiled& operator=(const WasmModuleCompiled&) = default;
+  END_ALLOW_USE_DEPRECATED()
+
   bool async = false;
   bool streamed = false;
   bool cached = false;
@ -115,6 +188,7 @@ struct WasmModuleCompiled {
   size_t code_size_in_bytes = 0;
   size_t liftoff_bailout_count = 0;
   int64_t wall_clock_duration_in_us = -1;
+  V8_DEPRECATE_SOON("We do not collect cpu times any more")
   int64_t cpu_duration_in_us = -1;
 };
 
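For embedders implementing a metrics recorder, the non-deprecated constructors simply drop the CPU-time argument. A hedged sketch of constructing one of these events with made-up values:

    #include "include/v8-metrics.h"

    v8::metrics::WasmModuleDecoded MakeSample() {
      // async, streamed, success, module_size_in_bytes, function_count,
      // wall_clock_duration_in_us -- no cpu_duration_in_us anymore.
      return v8::metrics::WasmModuleDecoded(
          /*async=*/true, /*streamed=*/true, /*success=*/true,
          /*module_size_in_bytes=*/4096, /*function_count=*/12,
          /*wall_clock_duration_in_us=*/1500);
    }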
@ -118,7 +118,12 @@ class V8_EXPORT V8_NODISCARD MicrotasksScope {
  public:
   enum Type { kRunMicrotasks, kDoNotRunMicrotasks };
 
+  V8_DEPRECATE_SOON(
+      "May be incorrect if context was created with non-default microtask "
+      "queue")
   MicrotasksScope(Isolate* isolate, Type type);
 
+  MicrotasksScope(Local<Context> context, Type type);
   MicrotasksScope(Isolate* isolate, MicrotaskQueue* microtask_queue, Type type);
   ~MicrotasksScope();
 
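The isolate-only constructor is slated for deprecation because it has to guess which microtask queue is meant; the new context overload is unambiguous. A minimal sketch, assuming `context` is a live handle:

    #include "include/v8-microtask-queue.h"

    void RunWithMicrotasks(v8::Local<v8::Context> context) {
      // Bound to the context's own microtask queue (new overload).
      v8::MicrotasksScope scope(context, v8::MicrotasksScope::kRunMicrotasks);
      // ... call into JS here; with the kScoped policy, microtasks run
      // when the outermost scope unwinds.
    }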
@ -285,6 +285,8 @@ class ConvertableToTraceFormat {
  * V8 Tracing controller.
  *
  * Can be implemented by an embedder to record trace events from V8.
+ *
+ * Will become obsolete in Perfetto SDK build (v8_use_perfetto = true).
  */
 class TracingController {
  public:
@ -348,10 +350,16 @@ class TracingController {
     virtual void OnTraceDisabled() = 0;
   };
 
-  /** Adds tracing state change observer. */
+  /**
+   * Adds tracing state change observer.
+   * Does nothing in Perfetto SDK build (v8_use_perfetto = true).
+   */
   virtual void AddTraceStateObserver(TraceStateObserver*) {}
 
-  /** Removes tracing state change observer. */
+  /**
+   * Removes tracing state change observer.
+   * Does nothing in Perfetto SDK build (v8_use_perfetto = true).
+   */
   virtual void RemoveTraceStateObserver(TraceStateObserver*) {}
 };
 
@ -923,6 +931,7 @@ class Platform {
 
   /**
    * Allows the embedder to manage memory page allocations.
+   * Returning nullptr will cause V8 to use the default page allocator.
    */
   virtual PageAllocator* GetPageAllocator() = 0;
 
@ -943,18 +952,6 @@ class Platform {
    */
   virtual void OnCriticalMemoryPressure() {}
 
-  /**
-   * Enables the embedder to respond in cases where V8 can't allocate large
-   * memory regions. The |length| parameter is the amount of memory needed.
-   * Returns true if memory is now available. Returns false if no memory could
-   * be made available. V8 will retry allocations until this method returns
-   * false.
-   *
-   * Embedder overrides of this function must NOT call back into V8.
-   */
-  V8_DEPRECATED("Use the method without informative parameter")
-  virtual bool OnCriticalMemoryPressure(size_t length) { return false; }
-
   /**
    * Gets the number of worker threads used by
    * Call(BlockingTask)OnWorkerThread(). This can be used to estimate the number
@ -175,6 +175,32 @@ class V8_EXPORT CpuProfileNode {
   static const int kNoColumnNumberInfo = Message::kNoColumnInfo;
 };
 
+/**
+ * An interface for exporting data from V8, using "push" model.
+ */
+class V8_EXPORT OutputStream {
+ public:
+  enum WriteResult { kContinue = 0, kAbort = 1 };
+  virtual ~OutputStream() = default;
+  /** Notify about the end of stream. */
+  virtual void EndOfStream() = 0;
+  /** Get preferred output chunk size. Called only once. */
+  virtual int GetChunkSize() { return 1024; }
+  /**
+   * Writes the next chunk of snapshot data into the stream. Writing
+   * can be stopped by returning kAbort as function result. EndOfStream
+   * will not be called in case writing was aborted.
+   */
+  virtual WriteResult WriteAsciiChunk(char* data, int size) = 0;
+  /**
+   * Writes the next chunk of heap stats data into the stream. Writing
+   * can be stopped by returning kAbort as function result. EndOfStream
+   * will not be called in case writing was aborted.
+   */
+  virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate* data, int count) {
+    return kAbort;
+  }
+};
+
 /**
  * CpuProfile contains a CPU profile in a form of top-down call tree
@ -182,6 +208,9 @@ class V8_EXPORT CpuProfileNode {
  */
 class V8_EXPORT CpuProfile {
  public:
+  enum SerializationFormat {
+    kJSON = 0  // See format description near 'Serialize' method.
+  };
   /** Returns CPU profile title. */
   Local<String> GetTitle() const;
 
@ -235,6 +264,25 @@ class V8_EXPORT CpuProfile {
    * All pointers to nodes previously returned become invalid.
    */
   void Delete();
+
+  /**
+   * Prepare a serialized representation of the profile. The result
+   * is written into the stream provided in chunks of specified size.
+   *
+   * For the JSON format, heap contents are represented as an object
+   * with the following structure:
+   *
+   *  {
+   *    nodes: [nodes array],
+   *    startTime: number,
+   *    endTime: number
+   *    samples: [strings array]
+   *    timeDeltas: [numbers array]
+   *  }
+   *
+   */
+  void Serialize(OutputStream* stream,
+                 SerializationFormat format = kJSON) const;
 };
 
 enum CpuProfilingMode {
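CpuProfile::Serialize pushes the JSON representation through the OutputStream interface that now lives next to it. A rough embedder-side sink that just appends chunks to a std::string; the profiler setup around it is assumed and omitted:

    #include <string>
    #include "include/v8-profiler.h"

    class StringOutputStream : public v8::OutputStream {
     public:
      void EndOfStream() override {}
      WriteResult WriteAsciiChunk(char* data, int size) override {
        json_.append(data, static_cast<size_t>(size));
        return kContinue;
      }
      const std::string& json() const { return json_; }

     private:
      std::string json_;
    };

    // Usage, assuming `profile` is a v8::CpuProfile* from a CpuProfiler:
    //   StringOutputStream stream;
    //   profile->Serialize(&stream, v8::CpuProfile::kJSON);
    //   // stream.json() now holds the serialized profile.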
@ -576,37 +624,6 @@ class V8_EXPORT HeapGraphNode {
   const HeapGraphEdge* GetChild(int index) const;
 };
 
 
-/**
- * An interface for exporting data from V8, using "push" model.
- */
-class V8_EXPORT OutputStream {
- public:
-  enum WriteResult {
-    kContinue = 0,
-    kAbort = 1
-  };
-  virtual ~OutputStream() = default;
-  /** Notify about the end of stream. */
-  virtual void EndOfStream() = 0;
-  /** Get preferred output chunk size. Called only once. */
-  virtual int GetChunkSize() { return 1024; }
-  /**
-   * Writes the next chunk of snapshot data into the stream. Writing
-   * can be stopped by returning kAbort as function result. EndOfStream
-   * will not be called in case writing was aborted.
-   */
-  virtual WriteResult WriteAsciiChunk(char* data, int size) = 0;
-  /**
-   * Writes the next chunk of heap stats data into the stream. Writing
-   * can be stopped by returning kAbort as function result. EndOfStream
-   * will not be called in case writing was aborted.
-   */
-  virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate* data, int count) {
-    return kAbort;
-  }
-};
-
 /**
  * HeapSnapshots record the state of the JS heap at some moment.
  */
@ -903,6 +920,8 @@ class V8_EXPORT HeapProfiler {
   enum SamplingFlags {
     kSamplingNoFlags = 0,
     kSamplingForceGC = 1 << 0,
+    kSamplingIncludeObjectsCollectedByMajorGC = 1 << 1,
+    kSamplingIncludeObjectsCollectedByMinorGC = 1 << 2,
   };
 
   /**
@ -1097,10 +1116,8 @@ class V8_EXPORT HeapProfiler {
    * |stack_depth| parameter controls the maximum number of stack frames to be
    * captured on each allocation.
    *
-   * NOTE: This is a proof-of-concept at this point. Right now we only sample
-   * newspace allocations. Support for paged space allocation (e.g. pre-tenured
-   * objects, large objects, code objects, etc.) and native allocations
-   * doesn't exist yet, but is anticipated in the future.
+   * NOTE: Support for native allocations doesn't exist yet, but is anticipated
+   * in the future.
    *
    * Objects allocated before the sampling is started will not be included in
    * the profile.
@ -92,7 +92,15 @@ class V8_EXPORT UnboundScript {
  * A compiled JavaScript module, not yet tied to a Context.
  */
 class V8_EXPORT UnboundModuleScript : public Data {
-  // Only used as a container for code caching.
+ public:
+  /**
+   * Data read from magic sourceURL comments.
+   */
+  Local<Value> GetSourceURL();
+  /**
+   * Data read from magic sourceMappingURL comments.
+   */
+  Local<Value> GetSourceMappingURL();
 };
 
 /**
@ -91,7 +91,7 @@ class V8_EXPORT SnapshotCreator {
    */
   SnapshotCreator(Isolate* isolate,
                   const intptr_t* external_references = nullptr,
-                  StartupData* existing_blob = nullptr);
+                  const StartupData* existing_blob = nullptr);
 
   /**
    * Create and enter an isolate, and set it up for serialization.
@ -102,7 +102,7 @@ class V8_EXPORT SnapshotCreator {
    * that must be equivalent to CreateParams::external_references.
    */
   SnapshotCreator(const intptr_t* external_references = nullptr,
-                  StartupData* existing_blob = nullptr);
+                  const StartupData* existing_blob = nullptr);
 
   /**
    * Destroy the snapshot creator, and exit and dispose of the Isolate
@ -30,7 +30,9 @@ class Signature;
   F(AsyncIteratorPrototype, initial_async_iterator_prototype) \
   F(ErrorPrototype, initial_error_prototype) \
   F(IteratorPrototype, initial_iterator_prototype) \
-  F(ObjProto_valueOf, object_value_of_function)
+  F(MapIteratorPrototype, initial_map_iterator_prototype) \
+  F(ObjProto_valueOf, object_value_of_function) \
+  F(SetIteratorPrototype, initial_set_iterator_prototype)
 
 enum Intrinsic {
 #define V8_DECL_INTRINSIC(name, iname) k##name,
@ -403,7 +403,7 @@ void TracedReferenceBase::SetWrapperClassId(uint16_t class_id) {
   using I = internal::Internals;
   if (IsEmpty()) return;
   internal::Address* obj = reinterpret_cast<internal::Address*>(val_);
-  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
+  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kTracedNodeClassIdOffset;
   *reinterpret_cast<uint16_t*>(addr) = class_id;
 }
 
@ -411,7 +411,7 @@ uint16_t TracedReferenceBase::WrapperClassId() const {
   using I = internal::Internals;
   if (IsEmpty()) return 0;
   internal::Address* obj = reinterpret_cast<internal::Address*>(val_);
-  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kNodeClassIdOffset;
+  uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + I::kTracedNodeClassIdOffset;
   return *reinterpret_cast<uint16_t*>(addr);
 }
 
@ -18,9 +18,9 @@ struct CalleeSavedRegisters {
   void* arm_r10;
 };
 #elif V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM64 || \
-    V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC || \
-    V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_S390 || \
-    V8_TARGET_ARCH_LOONG64 || V8_TARGET_ARCH_RISCV32
+    V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64 || \
+    V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_LOONG64 || \
+    V8_TARGET_ARCH_RISCV32
 struct CalleeSavedRegisters {};
 #else
 #error Target architecture was not detected as supported by v8
@ -244,6 +244,11 @@ class V8_EXPORT Value : public Data {
|
|||||||
*/
|
*/
|
||||||
bool IsWeakSet() const;
|
bool IsWeakSet() const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns true if this value is a WeakRef.
|
||||||
|
*/
|
||||||
|
bool IsWeakRef() const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns true if this value is an ArrayBuffer.
|
* Returns true if this value is an ArrayBuffer.
|
||||||
*/
|
*/
|
||||||
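The new predicate slots in next to the existing IsWeakMap/IsWeakSet checks. A trivial usage sketch (the helper name is illustrative):

    #include "include/v8-value.h"

    // True only for values created via `new WeakRef(...)` in JS.
    bool IsJsWeakRef(v8::Local<v8::Value> value) {
      return !value.IsEmpty() && value->IsWeakRef();
    }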
@ -8,8 +8,8 @@
 // These macros define the version number for the current version.
 // NOTE these macros are used by some of the tool scripts and the build
 // system so their names cannot be changed without changing the scripts.
-#define V8_MAJOR_VERSION 10
-#define V8_MINOR_VERSION 7
+#define V8_MAJOR_VERSION 11
+#define V8_MINOR_VERSION 0
 #define V8_BUILD_NUMBER 0
 #define V8_PATCH_LEVEL 0
 
@ -288,6 +288,9 @@ path. Add it with -I<path> to the command line
 //
 //  V8_HAS_ATTRIBUTE_ALWAYS_INLINE      - __attribute__((always_inline))
 //                                        supported
+//  V8_HAS_ATTRIBUTE_CONSTINIT          - __attribute__((require_constant_
+//                                        initialization))
+//                                        supported
 //  V8_HAS_ATTRIBUTE_NONNULL            - __attribute__((nonnull)) supported
 //  V8_HAS_ATTRIBUTE_NOINLINE           - __attribute__((noinline)) supported
 //  V8_HAS_ATTRIBUTE_UNUSED             - __attribute__((unused)) supported
@ -305,6 +308,9 @@ path. Add it with -I<path> to the command line
 //  V8_HAS_BUILTIN_EXPECT               - __builtin_expect() supported
 //  V8_HAS_BUILTIN_FRAME_ADDRESS        - __builtin_frame_address() supported
 //  V8_HAS_BUILTIN_POPCOUNT             - __builtin_popcount() supported
+//  V8_HAS_BUILTIN_ADD_OVERFLOW         - __builtin_add_overflow() supported
+//  V8_HAS_BUILTIN_SUB_OVERFLOW         - __builtin_sub_overflow() supported
+//  V8_HAS_BUILTIN_MUL_OVERFLOW         - __builtin_mul_overflow() supported
 //  V8_HAS_BUILTIN_SADD_OVERFLOW        - __builtin_sadd_overflow() supported
 //  V8_HAS_BUILTIN_SSUB_OVERFLOW        - __builtin_ssub_overflow() supported
 //  V8_HAS_BUILTIN_UADD_OVERFLOW        - __builtin_uadd_overflow() supported
@ -334,6 +340,9 @@ path. Add it with -I<path> to the command line
 #endif
 
 # define V8_HAS_ATTRIBUTE_ALWAYS_INLINE (__has_attribute(always_inline))
+# define V8_HAS_ATTRIBUTE_CONSTINIT \
+    (__has_attribute(require_constant_initialization))
+# define V8_HAS_ATTRIBUTE_CONST (__has_attribute(const))
 # define V8_HAS_ATTRIBUTE_NONNULL (__has_attribute(nonnull))
 # define V8_HAS_ATTRIBUTE_NOINLINE (__has_attribute(noinline))
 # define V8_HAS_ATTRIBUTE_UNUSED (__has_attribute(unused))
@ -355,6 +364,9 @@ path. Add it with -I<path> to the command line
 # define V8_HAS_BUILTIN_EXPECT (__has_builtin(__builtin_expect))
 # define V8_HAS_BUILTIN_FRAME_ADDRESS (__has_builtin(__builtin_frame_address))
 # define V8_HAS_BUILTIN_POPCOUNT (__has_builtin(__builtin_popcount))
+# define V8_HAS_BUILTIN_ADD_OVERFLOW (__has_builtin(__builtin_add_overflow))
+# define V8_HAS_BUILTIN_SUB_OVERFLOW (__has_builtin(__builtin_sub_overflow))
+# define V8_HAS_BUILTIN_MUL_OVERFLOW (__has_builtin(__builtin_mul_overflow))
 # define V8_HAS_BUILTIN_SADD_OVERFLOW (__has_builtin(__builtin_sadd_overflow))
 # define V8_HAS_BUILTIN_SSUB_OVERFLOW (__has_builtin(__builtin_ssub_overflow))
 # define V8_HAS_BUILTIN_UADD_OVERFLOW (__has_builtin(__builtin_uadd_overflow))
@ -450,6 +462,26 @@ path. Add it with -I<path> to the command line
 #endif
 
+
+// A macro to mark functions whose values don't change (e.g. across calls)
+// and thereby compiler is free to hoist and fold multiple calls together.
+// Use like:
+//   V8_CONST int foo() { ... }
+#if V8_HAS_ATTRIBUTE_CONST
+# define V8_CONST __attribute__((const))
+#else
+# define V8_CONST
+#endif
+
+// A macro to mark a declaration as requiring constant initialization.
+// Use like:
+//   int* foo V8_CONSTINIT;
+#if V8_HAS_ATTRIBUTE_CONSTINIT
+# define V8_CONSTINIT __attribute__((require_constant_initialization))
+#else
+# define V8_CONSTINIT
+#endif
+
 
 // A macro to mark specific arguments as non-null.
 // Use like:
 //   int add(int* x, int y, int* z) V8_NONNULL(1, 3) { return *x + y + *z; }
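Both new macros are thin attribute wrappers and expand to nothing on compilers without the corresponding attribute. A hedged usage sketch mirroring the "Use like" comments above; the declarations themselves are made up:

    #include "include/v8config.h"

    // The compiler may hoist and fold repeated calls, since the result
    // never changes between calls.
    V8_CONST int NumberOfCachedHardwareThreads();

    // Must be initialized at compile time; compilers that support the
    // attribute reject a dynamic initializer here.
    int* g_lookup_table V8_CONSTINIT = nullptr;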
@ -579,6 +611,37 @@ path. Add it with -I<path> to the command line
 #define V8_NO_UNIQUE_ADDRESS /* NOT SUPPORTED */
 #endif
 
+// Marks a type as being eligible for the "trivial" ABI despite having a
+// non-trivial destructor or copy/move constructor. Such types can be relocated
+// after construction by simply copying their memory, which makes them eligible
+// to be passed in registers. The canonical example is std::unique_ptr.
+//
+// Use with caution; this has some subtle effects on constructor/destructor
+// ordering and will be very incorrect if the type relies on its address
+// remaining constant. When used as a function argument (by value), the value
+// may be constructed in the caller's stack frame, passed in a register, and
+// then used and destructed in the callee's stack frame. A similar thing can
+// occur when values are returned.
+//
+// TRIVIAL_ABI is not needed for types which have a trivial destructor and
+// copy/move constructors, since those are automatically trivial by the ABI
+// spec.
+//
+// It is also not likely to be effective on types too large to be passed in one
+// or two registers on typical target ABIs.
+//
+// See also:
+//   https://clang.llvm.org/docs/AttributeReference.html#trivial-abi
+//   https://libcxx.llvm.org/docs/DesignDocs/UniquePtrTrivialAbi.html
+#if defined(__clang__) && defined(__has_attribute)
+#if __has_attribute(trivial_abi)
+#define V8_TRIVIAL_ABI [[clang::trivial_abi]]
+#endif  // __has_attribute(trivial_abi)
+#endif  // defined(__clang__) && defined(__has_attribute)
+#if !defined(V8_TRIVIAL_ABI)
+#define V8_TRIVIAL_ABI
+#endif  //!defined(V8_TRIVIAL_ABI)
+
 // Helper macro to define no_sanitize attributes only with clang.
 #if defined(__clang__) && defined(__has_attribute)
 #if __has_attribute(no_sanitize)
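A hedged illustration of the new macro on a small owning wrapper (the type is made up): with clang, the attribute lets the object be passed in a register even though it has a user-provided destructor, because it can be relocated by a plain memory copy.

    #include "include/v8config.h"

    // Owning handle for an opaque resource id; the destructor is non-trivial,
    // but the object may be relocated by memcpy, so trivial_abi is safe.
    class V8_TRIVIAL_ABI ScopedId {
     public:
      explicit ScopedId(int id) : id_(id) {}
      ScopedId(ScopedId&& other) : id_(other.id_) { other.id_ = -1; }
      ~ScopedId() { /* release(id_) elided */ }

     private:
      int id_;
    };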
@ -653,9 +716,6 @@ V8 shared library set USING_V8_SHARED.
#elif defined(__mips64)
#define V8_HOST_ARCH_MIPS64 1
#define V8_HOST_ARCH_64_BIT 1
-#elif defined(__MIPSEB__) || defined(__MIPSEL__)
-#define V8_HOST_ARCH_MIPS 1
-#define V8_HOST_ARCH_32_BIT 1
#elif defined(__loongarch64)
#define V8_HOST_ARCH_LOONG64 1
#define V8_HOST_ARCH_64_BIT 1
@ -692,8 +752,8 @@ V8 shared library set USING_V8_SHARED.
// architecture, that is, target the native environment as presented by the
// compiler.
#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && \
-    !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS && !V8_TARGET_ARCH_MIPS64 && \
-    !V8_TARGET_ARCH_PPC && !V8_TARGET_ARCH_PPC64 && !V8_TARGET_ARCH_S390 && \
+    !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS64 && !V8_TARGET_ARCH_PPC && \
+    !V8_TARGET_ARCH_PPC64 && !V8_TARGET_ARCH_S390 && \
    !V8_TARGET_ARCH_RISCV64 && !V8_TARGET_ARCH_LOONG64 && \
    !V8_TARGET_ARCH_RISCV32
#if defined(_M_X64) || defined(__x86_64__)
@ -706,8 +766,8 @@ V8 shared library set USING_V8_SHARED.
#define V8_TARGET_ARCH_ARM 1
#elif defined(__mips64)
#define V8_TARGET_ARCH_MIPS64 1
-#elif defined(__MIPSEB__) || defined(__MIPSEL__)
-#define V8_TARGET_ARCH_MIPS 1
+#elif defined(__loongarch64)
+#define V8_TARGET_ARCH_LOONG64 1
#elif defined(_ARCH_PPC64)
#define V8_TARGET_ARCH_PPC64 1
#elif defined(_ARCH_PPC)
@ -785,9 +845,6 @@ V8 shared library set USING_V8_SHARED.
#if (V8_TARGET_ARCH_ARM64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_ARM64))
#error Target architecture arm64 is only supported on arm64 and x64 host
#endif
-#if (V8_TARGET_ARCH_MIPS && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_MIPS))
-#error Target architecture mips is only supported on mips and ia32 host
-#endif
#if (V8_TARGET_ARCH_MIPS64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_MIPS64))
#error Target architecture mips64 is only supported on mips64 and x64 host
#endif
@ -812,12 +869,6 @@ V8 shared library set USING_V8_SHARED.
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_LOONG64
#define V8_TARGET_LITTLE_ENDIAN 1
-#elif V8_TARGET_ARCH_MIPS
-#if defined(__MIPSEB__)
-#define V8_TARGET_BIG_ENDIAN 1
-#else
-#define V8_TARGET_LITTLE_ENDIAN 1
-#endif
#elif V8_TARGET_ARCH_MIPS64
#if defined(__MIPSEB__) || defined(V8_TARGET_ARCH_MIPS64_BE)
#define V8_TARGET_BIG_ENDIAN 1
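The hunks above drop 32-bit MIPS from host and target detection; the remaining V8_TARGET_* and endianness macros are what architecture-dependent code keys off elsewhere in the tree. A minimal, purely illustrative sketch of the usual consumption pattern (the helper below is hypothetical and assumes the V8 build-config header defining V8_TARGET_BIG_ENDIAN has been included; it relies on the GCC/clang byteswap builtin):

    #include <cstdint>
    #include <cstring>

    // Hypothetical helper: read a 32-bit little-endian value regardless of the
    // target's native byte order.
    uint32_t LoadU32LittleEndian(const uint8_t* p) {
      uint32_t value;
      std::memcpy(&value, p, sizeof(value));
    #if defined(V8_TARGET_BIG_ENDIAN)
      value = __builtin_bswap32(value);  // Swap to the little-endian view.
    #endif
      return value;
    }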
@ -66,7 +66,6 @@
'V8 Linux64 - debug builder': 'debug_x64',
'V8 Linux64 - external code space - debug - builder': 'debug_x64_external_code_space',
'V8 Linux64 - custom snapshot - debug builder': 'debug_x64_custom',
-'V8 Linux64 - heap sandbox - debug - builder': 'debug_x64_heap_sandbox',
'V8 Linux64 - internal snapshot - builder': 'release_x64_internal',
'V8 Linux64 - debug - header includes - builder': 'debug_x64_header_includes',
'V8 Linux64 - no sandbox - debug builder': 'debug_x64_no_sandbox',
@ -108,14 +107,15 @@
'V8 Linux - arm64 - sim - MSAN - builder': 'release_simulate_arm64_msan',
# FYI.
'V8 iOS - sim - builder': 'release_x64_ios_simulator',
-'V8 Linux64 - arm64 - sim - heap sandbox - debug - builder': 'debug_x64_heap_sandbox_arm64_sim',
'V8 Linux64 - arm64 - sim - no pointer compression - builder':
'release_simulate_arm64_no_pointer_compression',
+'V8 Linux64 - coverage': 'release_x64_coverage',
'V8 Linux64 - cppgc-non-default - debug - builder': 'debug_x64_non_default_cppgc',
'V8 Linux64 - debug - perfetto - builder': 'debug_x64_perfetto',
'V8 Linux64 - disable runtime call stats - builder': 'release_x64_disable_runtime_call_stats',
'V8 Linux64 - debug - single generation - builder': 'debug_x64_single_generation',
'V8 Linux64 - no pointer compression - builder': 'release_x64_no_pointer_compression',
+'V8 Linux64 css - debug builder': 'debug_x64_conservative_stack_scanning',
'V8 Linux64 gcc - builder': 'release_x64_gcc',
'V8 Linux64 gcc - debug builder': 'debug_x64_gcc',
'V8 Linux64 gcc light - debug builder': 'debug_x64_gcc',
@ -126,7 +126,6 @@
'V8 Linux - vtunejit': 'debug_x86_vtunejit',
'V8 Linux64 - gcov coverage': 'release_x64_gcc_coverage',
'V8 Linux64 - Fuzzilli - builder': 'release_x64_fuzzilli',
-'V8 Linux - predictable - builder': 'release_x86_predictable',
'V8 Linux64 - predictable - builder': 'release_x64_predictable',
'V8 Linux - full debug builder': 'full_debug_x86',
'V8 Mac64 - full debug builder': 'full_debug_x64',
@ -168,7 +167,7 @@
'V8 Clusterfuzz Linux64 UBSan - release builder':
'release_x64_ubsan_recover',
'V8 Clusterfuzz Linux64 ASAN sandbox testing - release builder':
-'release_x64_asan_sandbox_testing',
+'release_x64_asan_symbolized_expose_memory_corruption',
},
'client.v8.perf' : {
'V8 Arm - builder - perf': 'official_arm',
@ -208,105 +207,97 @@
'tryserver.v8': {
'v8_android_arm_compile_rel': 'release_android_arm',
'v8_android_arm64_compile_dbg': 'debug_android_arm64',
-'v8_android_arm64_n5x_rel_ng': 'release_android_arm64',
+'v8_android_arm64_n5x_compile_rel': 'release_android_arm64',
'v8_fuchsia_compile_rel': 'release_x64_fuchsia_trybot',
-'v8_fuchsia_rel_ng': 'release_x64_fuchsia_trybot',
'v8_ios_simulator': 'release_x64_ios_simulator',
-'v8_linux_rel_ng': 'release_x86_gcmole_trybot',
+'v8_linux_compile_rel': 'release_x86_gcmole_trybot',
-'v8_linux_optional_rel_ng': 'release_x86_trybot',
+'v8_linux_optional_compile_rel': 'release_x86_trybot',
-'v8_linux_verify_csa_rel_ng': 'release_x86_verify_csa',
+'v8_linux_verify_csa_compile_rel': 'release_x86_verify_csa',
-'v8_linux_nodcheck_rel_ng': 'release_x86_minimal_symbols',
+'v8_linux_nodcheck_compile_rel': 'release_x86_minimal_symbols',
-'v8_linux_dbg_ng': 'debug_x86_trybot',
+'v8_linux_compile_dbg': 'debug_x86_trybot',
'v8_linux_noi18n_compile_dbg': 'debug_x86_no_i18n',
-'v8_linux_noi18n_rel_ng': 'release_x86_no_i18n_trybot',
+'v8_linux_noi18n_compile_rel': 'release_x86_no_i18n_trybot',
-'v8_linux_gc_stress_dbg_ng': 'debug_x86_trybot',
+'v8_linux_gc_stress_compile_dbg': 'debug_x86_trybot',
'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap',
'v8_linux_vtunejit': 'debug_x86_vtunejit',
-'v8_linux64_arm64_no_pointer_compression_rel_ng':
+'v8_linux64_arm64_no_pointer_compression_compile_rel':
'release_simulate_arm64_no_pointer_compression',
-'v8_linux64_cppgc_non_default_dbg_ng': 'debug_x64_non_default_cppgc',
+'v8_linux64_cppgc_non_default_compile_dbg': 'debug_x64_non_default_cppgc',
-'v8_linux64_dbg_ng': 'debug_x64_trybot',
+'v8_linux64_compile_dbg': 'debug_x64_trybot',
-'v8_linux64_no_sandbox_dbg_ng': 'debug_x64_no_sandbox',
+'v8_linux64_coverage': 'release_x64_coverage',
-'v8_linux64_dict_tracking_dbg_ng': 'debug_x64_dict_tracking_trybot',
+'v8_linux64_no_sandbox_compile_dbg': 'debug_x64_no_sandbox',
-'v8_linux64_disable_runtime_call_stats_rel_ng': 'release_x64_disable_runtime_call_stats',
+'v8_linux64_dict_tracking_compile_dbg': 'debug_x64_dict_tracking_trybot',
-'v8_linux64_external_code_space_dbg_ng': 'debug_x64_external_code_space',
+'v8_linux64_disable_runtime_call_stats_compile_rel': 'release_x64_disable_runtime_call_stats',
-'v8_linux64_gc_stress_custom_snapshot_dbg_ng': 'debug_x64_trybot_custom',
+'v8_linux64_external_code_space_compile_dbg': 'debug_x64_external_code_space',
-'v8_linux64_gc_stress_dbg_ng': 'debug_x64_trybot',
+'v8_linux64_css_compile_dbg': 'debug_x64_conservative_stack_scanning',
+'v8_linux64_gc_stress_custom_snapshot_compile_dbg': 'debug_x64_trybot_custom',
+'v8_linux64_gc_stress_compile_dbg': 'debug_x64_trybot',
'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc',
-'v8_linux64_gcc_compile_rel': 'release_x64_gcc',
'v8_linux64_gcc_light_compile_dbg': 'debug_x64_gcc',
-'v8_linux64_gcc_rel_ng': 'release_x64_gcc',
+'v8_linux64_gcc_compile_rel': 'release_x64_gcc',
'v8_linux64_gcov_coverage': 'release_x64_gcc_coverage',
'v8_linux64_header_includes_dbg': 'debug_x64_header_includes',
-'v8_linux64_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox',
+'v8_linux64_minor_mc_compile_dbg': 'debug_x64_trybot',
-'v8_linux64_minor_mc_dbg_ng': 'debug_x64_trybot',
+'v8_linux64_fyi_compile_rel': 'release_x64_test_features_trybot',
-'v8_linux_arm64_sim_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox_arm64_sim',
+'v8_linux64_nodcheck_compile_rel': 'release_x64',
-'v8_linux64_fyi_rel_ng': 'release_x64_test_features_trybot',
+'v8_linux64_perfetto_compile_dbg': 'debug_x64_perfetto',
-'v8_linux64_nodcheck_rel_ng': 'release_x64',
+'v8_linux64_no_pointer_compression_compile_rel': 'release_x64_no_pointer_compression',
-'v8_linux64_perfetto_dbg_ng': 'debug_x64_perfetto',
+'v8_linux64_compile_rel': 'release_x64_test_features_trybot',
-'v8_linux64_no_pointer_compression_rel_ng': 'release_x64_no_pointer_compression',
+'v8_linux64_no_sandbox_compile_rel': 'release_x64_no_sandbox',
-'v8_linux64_rel_ng': 'release_x64_test_features_trybot',
+'v8_linux64_predictable_compile_rel': 'release_x64_predictable',
-'v8_linux64_no_sandbox_rel_ng': 'release_x64_no_sandbox',
-'v8_linux64_predictable_rel_ng': 'release_x64_predictable',
'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap',
-'v8_linux64_single_generation_dbg_ng': 'debug_x64_single_generation',
+'v8_linux64_single_generation_compile_dbg': 'debug_x64_single_generation',
'v8_linux64_no_wasm_compile_rel': 'release_x64_webassembly_disabled',
-'v8_linux64_verify_csa_rel_ng': 'release_x64_verify_csa',
+'v8_linux64_verify_csa_compile_rel': 'release_x64_verify_csa',
-'v8_linux64_asan_rel_ng': 'release_x64_asan_minimal_symbols',
+'v8_linux64_asan_compile_rel': 'release_x64_asan_minimal_symbols',
-'v8_linux64_cfi_rel_ng': 'release_x64_cfi',
+'v8_linux64_cfi_compile_rel': 'release_x64_cfi',
-'v8_linux64_fuzzilli_ng': 'release_x64_fuzzilli',
+'v8_linux64_fuzzilli_compile_rel': 'release_x64_fuzzilli',
-'v8_linux64_loong64_rel_ng': 'release_simulate_loong64',
+'v8_linux64_loong64_compile_rel': 'release_simulate_loong64',
-'v8_linux64_msan_rel_ng': 'release_simulate_arm64_msan_minimal_symbols',
+'v8_linux64_msan_compile_rel': 'release_simulate_arm64_msan_minimal_symbols',
-'v8_linux_riscv32_rel_ng': 'release_simulate_riscv32',
+'v8_linux_riscv32_compile_rel': 'release_simulate_riscv32',
-'v8_linux64_riscv64_rel_ng': 'release_simulate_riscv64',
+'v8_linux64_riscv64_compile_rel': 'release_simulate_riscv64',
-'v8_linux64_tsan_rel_ng': 'release_x64_tsan_minimal_symbols',
+'v8_linux64_tsan_compile_rel': 'release_x64_tsan_minimal_symbols',
-'v8_linux64_tsan_no_cm_rel_ng': 'release_x64_tsan_no_cm',
+'v8_linux64_tsan_no_cm_compile_rel': 'release_x64_tsan_no_cm',
-'v8_linux64_tsan_isolates_rel_ng':
+'v8_linux64_tsan_isolates_compile_rel':
'release_x64_tsan_minimal_symbols',
-'v8_linux64_ubsan_rel_ng': 'release_x64_ubsan_minimal_symbols',
+'v8_linux64_ubsan_compile_rel': 'release_x64_ubsan_minimal_symbols',
-'v8_odroid_arm_rel_ng': 'release_arm',
+'v8_odroid_arm_compile_rel': 'release_arm',
'v8_linux_torque_compare': 'torque_compare',
# TODO(machenbach): Remove after switching to x64 on infra side.
-'v8_win_dbg_ng': 'debug_x86_trybot',
'v8_win_compile_dbg': 'debug_x86_trybot',
-'v8_win_rel_ng': 'release_x86_trybot',
+'v8_win_compile_rel': 'release_x86_trybot',
-'v8_win64_asan_rel_ng': 'release_x64_asan_no_lsan',
+'v8_win64_asan_compile_rel': 'release_x64_asan_no_lsan',
+'v8_win64_msvc_light_compile_rel': 'release_x64_msvc',
+'v8_win64_compile_dbg': 'debug_x64_minimal_symbols',
'v8_win64_msvc_compile_rel': 'release_x64_msvc',
-'v8_win64_dbg_ng': 'debug_x64_minimal_symbols',
+'v8_win64_compile_rel': 'release_x64_trybot',
-'v8_win64_msvc_rel_ng': 'release_x64_msvc',
-'v8_win64_rel_ng': 'release_x64_trybot',
-'v8_mac_arm64_rel_ng': 'release_arm64',
-'v8_mac_arm64_dbg_ng': 'debug_arm64',
-'v8_mac_arm64_full_dbg_ng': 'full_debug_arm64',
-'v8_mac_arm64_no_pointer_compression_dbg_ng': 'debug_arm64_no_pointer_compression',
-'v8_mac_arm64_compile_dbg': 'debug_arm64',
'v8_mac_arm64_compile_rel': 'release_arm64',
+'v8_mac_arm64_compile_dbg': 'debug_arm64',
+'v8_mac_arm64_full_compile_dbg': 'full_debug_arm64',
+'v8_mac_arm64_no_pointer_compression_compile_dbg': 'debug_arm64_no_pointer_compression',
+'v8_mac_arm64_sim_compile_rel': 'release_simulate_arm64_trybot',
'v8_mac_arm64_sim_compile_dbg': 'debug_simulate_arm64',
-'v8_mac_arm64_sim_compile_rel': 'release_simulate_arm64',
+'v8_mac_arm64_sim_nodcheck_compile_rel': 'release_simulate_arm64',
-'v8_mac_arm64_sim_rel_ng': 'release_simulate_arm64_trybot',
+'v8_mac64_gc_stress_compile_dbg': 'debug_x64_trybot',
-'v8_mac_arm64_sim_dbg_ng': 'debug_simulate_arm64',
+'v8_mac64_compile_rel': 'release_x64_trybot',
-'v8_mac_arm64_sim_nodcheck_rel_ng': 'release_simulate_arm64',
-'v8_mac64_gc_stress_dbg_ng': 'debug_x64_trybot',
-'v8_mac64_rel_ng': 'release_x64_trybot',
'v8_mac64_dbg': 'debug_x64',
-'v8_mac64_dbg_ng': 'debug_x64',
+'v8_mac64_compile_dbg': 'debug_x64',
-'v8_mac64_compile_full_dbg_ng': 'full_debug_x64',
+'v8_mac64_noopt_compile_dbg': 'full_debug_x64',
'v8_mac64_asan_compile_rel': 'release_x64_asan_no_lsan',
-'v8_mac64_asan_rel_ng': 'release_x64_asan_no_lsan',
+'v8_linux_arm_compile_rel': 'release_simulate_arm_trybot',
-'v8_linux_arm_rel_ng': 'release_simulate_arm_trybot',
'v8_linux_arm_lite_compile_dbg': 'debug_simulate_arm_lite',
-'v8_linux_arm_lite_rel_ng': 'release_simulate_arm_lite_trybot',
+'v8_linux_arm_lite_compile_rel': 'release_simulate_arm_lite_trybot',
-'v8_linux_arm_dbg_ng': 'debug_simulate_arm',
+'v8_linux_arm_compile_dbg': 'debug_simulate_arm',
'v8_linux_arm_armv8a_rel': 'release_simulate_arm_trybot',
'v8_linux_arm_armv8a_dbg': 'debug_simulate_arm',
-'v8_linux_arm64_rel_ng': 'release_simulate_arm64_trybot',
+'v8_linux_arm64_compile_rel': 'release_simulate_arm64_trybot',
-'v8_linux_arm64_cfi_rel_ng' : 'release_simulate_arm64_cfi',
+'v8_linux_arm64_cfi_compile_rel' : 'release_simulate_arm64_cfi',
-'v8_linux_arm64_dbg_ng': 'debug_simulate_arm64',
+'v8_linux_arm64_compile_dbg': 'debug_simulate_arm64',
-'v8_linux_arm64_gc_stress_dbg_ng': 'debug_simulate_arm64',
+'v8_linux_arm64_gc_stress_compile_dbg': 'debug_simulate_arm64',
'v8_linux_mips64el_compile_rel': 'release_simulate_mips64el',
-'v8_numfuzz_ng': 'release_x64',
+'v8_numfuzz_compile_rel': 'release_x64',
-'v8_numfuzz_dbg_ng': 'debug_x64',
+'v8_numfuzz_compile_dbg': 'debug_x64',
-'v8_numfuzz_tsan_ng': 'release_x64_tsan',
+'v8_numfuzz_tsan_compile_rel': 'release_x64_tsan',
},
},

@ -419,7 +410,7 @@
'release_simulate_arm64_cfi': [
'release_bot', 'simulate_arm64', 'v8_control_flow_integrity'],
'release_simulate_arm64_no_pointer_compression': [
-'release_bot', 'simulate_arm64_no_sandbox', 'dcheck_always_on',
+'release_bot', 'simulate_arm64', 'no_sandbox', 'dcheck_always_on',
'v8_enable_slow_dchecks', 'v8_disable_pointer_compression'],
'release_simulate_arm64_msan': [
'release_bot', 'simulate_arm64', 'msan'],
@ -452,7 +443,7 @@
'debug_arm64': [
'debug_bot', 'arm64'],
'debug_arm64_no_pointer_compression': [
-'debug_bot', 'arm64_no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks',
+'debug_bot', 'arm64', 'no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks',
'v8_disable_pointer_compression'],
'full_debug_arm64': [
'debug_bot', 'arm64', 'v8_full_debug'],
@ -499,6 +490,9 @@
'release_x64_asan_no_lsan_verify_heap_dchecks': [
'release_bot', 'x64', 'asan', 'dcheck_always_on',
'v8_enable_slow_dchecks', 'v8_verify_heap'],
+'release_x64_asan_symbolized_expose_memory_corruption': [
+'release_bot', 'x64', 'asan', 'symbolized',
+'v8_expose_memory_corruption_api'],
'release_x64_asan_symbolized_verify_heap': [
'release_bot', 'x64', 'asan', 'lsan', 'symbolized',
'v8_verify_heap'],
@ -506,6 +500,8 @@
'release_bot', 'x64', 'cfi'],
'release_x64_cfi_clusterfuzz': [
'release_bot', 'x64', 'cfi_clusterfuzz'],
+'release_x64_coverage': [
+'release_bot', 'x64', 'clang_coverage'],
'release_x64_fuzzilli': [
'release_bot', 'x64', 'dcheck_always_on', 'v8_enable_slow_dchecks',
'v8_verify_heap', 'v8_verify_csa', 'fuzzilli'],
@ -533,12 +529,12 @@
'release_x64_minimal_symbols_reclient': [
'release_bot_reclient', 'x64', 'minimal_symbols'],
'release_x64_no_pointer_compression': [
-'release_bot', 'x64_no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks',
-'v8_disable_pointer_compression'],
+'release_bot', 'x64', 'no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks',
+'v8_enable_javascript_promise_hooks', 'v8_disable_pointer_compression'],
'release_x64_reclient': [
'release_bot_reclient', 'x64'],
'release_x64_no_sandbox': [
-'release_bot', 'x64_no_sandbox'],
+'release_bot', 'x64', 'no_sandbox'],
'release_x64_trybot': [
'release_trybot', 'x64'],
'release_x64_test_features_trybot': [
@ -562,9 +558,6 @@
'v8_enable_slow_dchecks', 'v8_verify_csa'],
'release_x64_webassembly_disabled': [
'release_bot', 'x64', 'webassembly_disabled'],
-'release_x64_asan_sandbox_testing': [
-'release_bot', 'x64', 'asan', 'symbolized', 'v8_enable_sandbox_future',
-'v8_expose_memory_corruption_api'],

# Official configs for x64.
'official_x64': [
@ -578,6 +571,8 @@
'debug_x64_asan_no_lsan_static': [
'debug', 'static', 'goma', 'v8_enable_slow_dchecks', 'v8_optimized_debug',
'x64', 'asan'],
+'debug_x64_conservative_stack_scanning': [
+'debug_bot', 'x64', 'conservative_stack_scanning'],
'debug_x64_custom': [
'debug_bot', 'x64', 'v8_snapshot_custom'],
'debug_x64_external_code_space': [
@ -588,10 +583,6 @@
'debug_bot_no_goma', 'x64', 'gcc', 'lld', 'no_custom_libcxx'],
'debug_x64_header_includes': [
'debug_bot', 'x64', 'v8_check_header_includes'],
-'debug_x64_heap_sandbox': [
-'debug_bot', 'x64', 'v8_enable_sandbox_future', 'v8_expose_memory_corruption_api'],
-'debug_x64_heap_sandbox_arm64_sim': [
-'debug_bot', 'simulate_arm64', 'v8_enable_sandbox_future', 'v8_expose_memory_corruption_api'],
'debug_x64_minimal_symbols': [
'debug_bot', 'x64', 'minimal_symbols'],
'debug_x64_non_default_cppgc': [
@ -599,7 +590,7 @@
'debug_x64_perfetto': [
'debug_bot', 'x64', 'perfetto'],
'debug_x64_no_sandbox': [
-'debug_bot', 'x64_no_sandbox'],
+'debug_bot', 'x64', 'no_sandbox'],
'debug_x64_single_generation': [
'debug_bot', 'x64', 'v8_enable_single_generation'],
'debug_x64_trybot': [
@ -645,8 +636,6 @@
'release_trybot', 'x86', 'v8_no_i18n'],
'release_x64_predictable': [
'release_bot', 'x64', 'v8_enable_verify_predictable'],
-'release_x86_predictable': [
-'release_bot', 'x86', 'v8_enable_verify_predictable'],
'release_x86_shared_verify_heap': [
'release', 'x86', 'goma', 'shared', 'v8_verify_heap'],
'release_x86_trybot': [
@ -678,11 +667,7 @@
},

'arm64': {
-'gn_args': 'target_cpu="arm64" v8_enable_sandbox=true',
+'gn_args': 'target_cpu="arm64"',
-},
-
-'arm64_no_sandbox': {
-'gn_args': 'target_cpu="arm64" v8_enable_sandbox=false',
},

'asan': {
@ -706,6 +691,15 @@
'gn_args': 'is_clang=true',
},

+'clang_coverage': {
+'gn_args': 'use_clang_coverage=true',
+},
+
+'conservative_stack_scanning': {
+'gn_args': 'v8_enable_conservative_stack_scanning=true '
+'v8_enable_inner_pointer_resolution_mb=true',
+},
+
'coverage': {
'gn_args': 'v8_code_coverage=true',
},
@ -795,12 +789,12 @@

'msan': {
'mixins': ['v8_enable_test_features'],
-'gn_args': 'is_msan=true msan_track_origins=2',
+'gn_args': 'is_msan=true msan_track_origins=2 instrumented_libraries_release="xenial"',
},

'msan_no_origins': {
'mixins': ['v8_enable_test_features'],
-'gn_args': 'is_msan=true msan_track_origins=0',
+'gn_args': 'is_msan=true msan_track_origins=0 instrumented_libraries_release="xenial"',
},

'msvc': {
@ -815,6 +809,10 @@
'gn_args': 'use_goma=false',
},

+'no_sandbox': {
+'gn_args': 'v8_enable_sandbox=false',
+},
+
'no_sysroot': {
'gn_args': 'use_sysroot=false',
},
@ -864,11 +862,7 @@
},

'simulate_arm64': {
-'gn_args': 'target_cpu="x64" v8_target_cpu="arm64" v8_enable_sandbox=true',
+'gn_args': 'target_cpu="x64" v8_target_cpu="arm64"',
-},
-
-'simulate_arm64_no_sandbox': {
-'gn_args': 'target_cpu="x64" v8_target_cpu="arm64" v8_enable_sandbox=false',
},

'simulate_loong64': {
@ -940,10 +934,6 @@
'gn_args': 'v8_enable_runtime_call_stats=false',
},

-'v8_enable_sandbox_future': {
-'gn_args': 'v8_enable_sandbox_future=true',
-},
-
'v8_expose_memory_corruption_api': {
'gn_args': 'v8_expose_memory_corruption_api=true',
},
@ -1033,11 +1023,7 @@
},

'x64': {
-'gn_args': 'target_cpu="x64" v8_enable_sandbox=true',
+'gn_args': 'target_cpu="x64"',
-},
-
-'x64_no_sandbox': {
-'gn_args': 'target_cpu="x64" v8_enable_sandbox=false',
},

'x86': {
(File diff suppressed because it is too large.)

src/DEPS
@ -32,6 +32,7 @@ include_rules = [
"+src/heap/local-factory.h",
"+src/heap/local-heap.h",
"+src/heap/local-heap-inl.h",
+"+src/heap/pretenuring-handler-inl.h",
# TODO(v8:10496): Don't expose memory chunk outside of heap/.
"+src/heap/memory-chunk.h",
"+src/heap/memory-chunk-inl.h",
@ -83,6 +83,8 @@ MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(getter)),
Object);
+Handle<CodeT> trampoline = BUILTIN_CODE(isolate, DebugBreakTrampoline);
+Handle<JSFunction>::cast(getter)->set_code(*trampoline);
}
if (setter->IsFunctionTemplateInfo() &&
FunctionTemplateInfo::cast(*setter).BreakAtEntry()) {
@ -91,6 +93,8 @@ MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(setter)),
Object);
+Handle<CodeT> trampoline = BUILTIN_CODE(isolate, DebugBreakTrampoline);
+Handle<JSFunction>::cast(setter)->set_code(*trampoline);
}
RETURN_ON_EXCEPTION(
isolate,
@ -529,7 +533,7 @@ MaybeHandle<JSFunction> InstantiateFunction(
if (!data->needs_access_check() &&
data->GetNamedPropertyHandler().IsUndefined(isolate) &&
data->GetIndexedPropertyHandler().IsUndefined(isolate)) {
-function_type = FLAG_embedder_instance_types && data->HasInstanceType()
+function_type = v8_flags.embedder_instance_types && data->HasInstanceType()
? static_cast<InstanceType>(data->InstanceType())
: JS_API_OBJECT_TYPE;
}
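Many hunks in this merge apply the same mechanical migration: reads of the legacy FLAG_foo globals become reads of fields on the v8_flags structure. A standalone, purely illustrative sketch of that design (the flag names below are hypothetical, not V8's):

    #include <cstdio>

    // One struct holds all flags instead of a loose global per flag.
    struct Flags {
      bool trace_something = false;
      int script_delay = 0;
    };

    // Before the migration each flag was a separate global (FLAG_trace_something);
    // after it, all reads go through a single instance such as this one.
    Flags v8_flags;

    void MaybeTrace() {
      if (v8_flags.trace_something) {
        std::printf("tracing enabled\n");
      }
    }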
src/api/api.cc
@ -63,6 +63,7 @@
#include "src/handles/global-handles.h"
#include "src/handles/persistent-handles.h"
#include "src/handles/shared-object-conveyor-handles.h"
+#include "src/handles/traced-handles.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier.h"
@ -286,7 +287,7 @@ void i::V8::FatalProcessOutOfMemory(i::Isolate* i_isolate, const char* location,
// BUG(1718): Don't use the take_snapshot since we don't support
// HeapObjectIterator here without doing a special GC.
i_isolate->heap()->RecordStats(&heap_stats, false);
-if (!FLAG_correctness_fuzzer_suppressions) {
+if (!v8_flags.correctness_fuzzer_suppressions) {
char* first_newline = strchr(last_few_messages, '\n');
if (first_newline == nullptr || first_newline[1] == '\0')
first_newline = last_few_messages;
@ -359,19 +360,12 @@ void V8::SetSnapshotDataBlob(StartupData* snapshot_blob) {
namespace {

#ifdef V8_ENABLE_SANDBOX
-// ArrayBufferAllocator to use when sandboxed pointers are used in which case
-// all ArrayBuffer backing stores need to be allocated inside the sandbox.
-// Note, the current implementation is extremely inefficient as it uses the
-// BoundedPageAllocator. In the future, we'll need a proper allocator
-// implementation.
+// ArrayBufferAllocator to use when the sandbox is enabled in which case all
+// ArrayBuffer backing stores need to be allocated inside the sandbox.
class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
public:
-ArrayBufferAllocator() { CHECK(page_allocator_); }
-
void* Allocate(size_t length) override {
-return page_allocator_->AllocatePages(nullptr, RoundUp(length, page_size_),
-page_size_,
-PageAllocator::kReadWrite);
+return allocator_->Allocate(length);
}

void* AllocateUninitialized(size_t length) override {
@ -379,12 +373,136 @@ class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
}

void Free(void* data, size_t length) override {
-page_allocator_->FreePages(data, RoundUp(length, page_size_));
+return allocator_->Free(data);
}

private:
-PageAllocator* page_allocator_ = internal::GetArrayBufferPageAllocator();
-const size_t page_size_ = page_allocator_->AllocatePageSize();
+// Backend allocator shared by all ArrayBufferAllocator instances. This way,
+// there is a single region of virtual addres space reserved inside the
+// sandbox from which all ArrayBufferAllocators allocate their memory,
+// instead of each allocator creating their own region, which may cause
+// address space exhaustion inside the sandbox.
+// TODO(chromium:1340224): replace this with a more efficient allocator.
+class BackendAllocator {
+public:
+BackendAllocator() {
+CHECK(i::GetProcessWideSandbox()->is_initialized());
+VirtualAddressSpace* vas = i::GetProcessWideSandbox()->address_space();
+constexpr size_t max_backing_memory_size = 8ULL * i::GB;
+constexpr size_t min_backing_memory_size = 1ULL * i::GB;
+size_t backing_memory_size = max_backing_memory_size;
+i::Address backing_memory_base = 0;
+while (!backing_memory_base &&
+backing_memory_size >= min_backing_memory_size) {
+backing_memory_base = vas->AllocatePages(
+VirtualAddressSpace::kNoHint, backing_memory_size, kChunkSize,
+PagePermissions::kNoAccess);
+if (!backing_memory_base) {
+backing_memory_size /= 2;
+}
+}
+if (!backing_memory_base) {
+i::V8::FatalProcessOutOfMemory(
+nullptr,
+"Could not reserve backing memory for ArrayBufferAllocators");
+}
+DCHECK(IsAligned(backing_memory_base, kChunkSize));
+
+region_alloc_ = std::make_unique<base::RegionAllocator>(
+backing_memory_base, backing_memory_size, kAllocationGranularity);
+end_of_accessible_region_ = region_alloc_->begin();
+
+// Install a on-merge callback to discard or decommit unused pages.
+region_alloc_->set_on_merge_callback([this](i::Address start,
+size_t size) {
+mutex_.AssertHeld();
+VirtualAddressSpace* vas = i::GetProcessWideSandbox()->address_space();
+i::Address end = start + size;
+if (end == region_alloc_->end() &&
+start <= end_of_accessible_region_ - kChunkSize) {
+// Can shrink the accessible region.
+i::Address new_end_of_accessible_region = RoundUp(start, kChunkSize);
+size_t size =
+end_of_accessible_region_ - new_end_of_accessible_region;
+CHECK(vas->DecommitPages(new_end_of_accessible_region, size));
+end_of_accessible_region_ = new_end_of_accessible_region;
+} else if (size >= 2 * kChunkSize) {
+// Can discard pages. The pages stay accessible, so the size of the
+// accessible region doesn't change.
+i::Address chunk_start = RoundUp(start, kChunkSize);
+i::Address chunk_end = RoundDown(start + size, kChunkSize);
+CHECK(vas->DiscardSystemPages(chunk_start, chunk_end - chunk_start));
+}
+});
+}
+
+~BackendAllocator() {
+// The sandbox may already have been torn down, in which case there's no
+// need to free any memory.
+if (i::GetProcessWideSandbox()->is_initialized()) {
+VirtualAddressSpace* vas = i::GetProcessWideSandbox()->address_space();
+vas->FreePages(region_alloc_->begin(), region_alloc_->size());
+}
+}
+
+BackendAllocator(const BackendAllocator&) = delete;
+BackendAllocator& operator=(const BackendAllocator&) = delete;
+
+void* Allocate(size_t length) {
+base::MutexGuard guard(&mutex_);
+
+length = RoundUp(length, kAllocationGranularity);
+i::Address region = region_alloc_->AllocateRegion(length);
+if (region == base::RegionAllocator::kAllocationFailure) return nullptr;
+
+// Check if the memory is inside the accessible region. If not, grow it.
+i::Address end = region + length;
+size_t length_to_memset = length;
+if (end > end_of_accessible_region_) {
+VirtualAddressSpace* vas = i::GetProcessWideSandbox()->address_space();
+i::Address new_end_of_accessible_region = RoundUp(end, kChunkSize);
+size_t size = new_end_of_accessible_region - end_of_accessible_region_;
+if (!vas->SetPagePermissions(end_of_accessible_region_, size,
+PagePermissions::kReadWrite)) {
+CHECK(region_alloc_->FreeRegion(region));
+return nullptr;
+}
+
+// The pages that were inaccessible are guaranteed to be zeroed, so only
+// memset until the previous end of the accessible region.
+length_to_memset = end_of_accessible_region_ - region;
+end_of_accessible_region_ = new_end_of_accessible_region;
+}
+
+void* mem = reinterpret_cast<void*>(region);
+memset(mem, 0, length_to_memset);
+return mem;
+}
+
+void Free(void* data) {
+base::MutexGuard guard(&mutex_);
+region_alloc_->FreeRegion(reinterpret_cast<i::Address>(data));
+}
+
+static BackendAllocator* SharedInstance() {
+static base::LeakyObject<BackendAllocator> instance;
+return instance.get();
+}
+
+private:
+// Use a region allocator with a "page size" of 128 bytes as a reasonable
+// compromise between the number of regions it has to manage and the amount
+// of memory wasted due to rounding allocation sizes up to the page size.
+static constexpr size_t kAllocationGranularity = 128;
+// The backing memory's accessible region is grown in chunks of this size.
+static constexpr size_t kChunkSize = 1 * i::MB;
+
+std::unique_ptr<base::RegionAllocator> region_alloc_;
+size_t end_of_accessible_region_;
+base::Mutex mutex_;
+};
+
+BackendAllocator* allocator_ = BackendAllocator::SharedInstance();
};

#else
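From the embedder's side nothing changes with the new backend: the sandbox-aware allocator above is, to the best of my reading, what the default array-buffer allocator resolves to in V8_ENABLE_SANDBOX builds. A hedged sketch of the usual embedder setup using the public API (platform initialization and error handling elided; the allocator must outlive the isolate):

    #include "v8.h"

    v8::Isolate* CreateIsolate() {
      v8::Isolate::CreateParams params;
      // Standard embedding call; under the sandbox this is expected to hand
      // out backing stores from the shared in-sandbox region described above.
      params.array_buffer_allocator =
          v8::ArrayBuffer::Allocator::NewDefaultAllocator();
      return v8::Isolate::New(params);
    }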
@ -430,7 +548,7 @@ struct SnapshotCreatorData {

SnapshotCreator::SnapshotCreator(Isolate* v8_isolate,
const intptr_t* external_references,
-StartupData* existing_snapshot) {
+const StartupData* existing_snapshot) {
SnapshotCreatorData* data = new SnapshotCreatorData(v8_isolate);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
i_isolate->set_array_buffer_allocator(&data->allocator_);
@ -452,7 +570,7 @@ SnapshotCreator::SnapshotCreator(Isolate* v8_isolate,
}

SnapshotCreator::SnapshotCreator(const intptr_t* external_references,
-StartupData* existing_snapshot)
+const StartupData* existing_snapshot)
: SnapshotCreator(Isolate::Allocate(), external_references,
existing_snapshot) {}

@ -618,7 +736,10 @@ StartupData SnapshotCreator::CreateBlob(
i::Snapshot::ClearReconstructableDataForSerialization(
i_isolate, function_code_handling == FunctionCodeHandling::kClear);

-i::GlobalSafepointScope global_safepoint(i_isolate);
+i::SafepointKind safepoint_kind = i_isolate->has_shared_heap()
+? i::SafepointKind::kGlobal
+: i::SafepointKind::kIsolate;
+i::SafepointScope safepoint_scope(i_isolate, safepoint_kind);
i::DisallowGarbageCollection no_gc_from_here_on;

// Create a vector with all contexts and clear associated Persistent fields.
@ -656,7 +777,7 @@ StartupData SnapshotCreator::CreateBlob(

data->created_ = true;
return i::Snapshot::Create(i_isolate, &contexts, embedder_fields_serializers,
-global_safepoint, no_gc_from_here_on);
+safepoint_scope, no_gc_from_here_on);
}

bool StartupData::CanBeRehashed() const {
@ -794,10 +915,9 @@ i::Address* GlobalizeTracedReference(i::Isolate* i_isolate, i::Address* obj,
Utils::ApiCheck((slot != nullptr), "v8::GlobalizeTracedReference",
"the address slot must be not null");
#endif
-i::Handle<i::Object> result =
-i_isolate->global_handles()->CreateTraced(*obj, slot, store_mode);
+auto result = i_isolate->traced_handles()->Create(*obj, slot, store_mode);
#ifdef VERIFY_HEAP
-if (i::FLAG_verify_heap) {
+if (i::v8_flags.verify_heap) {
i::Object(*obj).ObjectVerify(i_isolate);
}
#endif // VERIFY_HEAP
@ -805,16 +925,16 @@ i::Address* GlobalizeTracedReference(i::Isolate* i_isolate, i::Address* obj,
}

void MoveTracedReference(internal::Address** from, internal::Address** to) {
-GlobalHandles::MoveTracedReference(from, to);
+TracedHandles::Move(from, to);
}

void CopyTracedReference(const internal::Address* const* from,
internal::Address** to) {
-GlobalHandles::CopyTracedReference(from, to);
+TracedHandles::Copy(from, to);
}

void DisposeTracedReference(internal::Address* location) {
-GlobalHandles::DestroyTracedReference(location);
+TracedHandles::Destroy(location);
}

} // namespace internal
@ -825,7 +945,7 @@ i::Address* GlobalizeReference(i::Isolate* i_isolate, i::Address* obj) {
API_RCS_SCOPE(i_isolate, Persistent, New);
i::Handle<i::Object> result = i_isolate->global_handles()->Create(*obj);
#ifdef VERIFY_HEAP
-if (i::FLAG_verify_heap) {
+if (i::v8_flags.verify_heap) {
i::Object(*obj).ObjectVerify(i_isolate);
}
#endif // VERIFY_HEAP
@ -1678,7 +1798,7 @@ void ObjectTemplate::SetAccessor(v8::Local<String> name,
SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
-i::FLAG_disable_old_api_accessors, false,
+i::v8_flags.disable_old_api_accessors, false,
getter_side_effect_type, setter_side_effect_type);
}

@ -1690,7 +1810,7 @@ void ObjectTemplate::SetAccessor(v8::Local<Name> name,
SideEffectType getter_side_effect_type,
SideEffectType setter_side_effect_type) {
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
-i::FLAG_disable_old_api_accessors, false,
+i::v8_flags.disable_old_api_accessors, false,
getter_side_effect_type, setter_side_effect_type);
}

@ -1940,8 +2060,32 @@ void ObjectTemplate::SetCodeLike() {

// --- S c r i p t s ---

-// Internally, UnboundScript is a SharedFunctionInfo, and Script is a
-// JSFunction.
+// Internally, UnboundScript and UnboundModuleScript are SharedFunctionInfos,
+// and Script is a JSFunction.
+
+namespace {
+inline Local<Value> GetSharedFunctionInfoSourceMappingURL(
+i::Isolate* isolate, i::Handle<i::SharedFunctionInfo> obj) {
+ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
+if (obj->script().IsScript()) {
+i::Object url = i::Script::cast(obj->script()).source_mapping_url();
+return Utils::ToLocal(i::Handle<i::Object>(url, isolate));
+} else {
+return Local<String>();
+}
+}
+
+inline Local<Value> GetSharedFunctionInfoSourceURL(
+i::Isolate* isolate, i::Handle<i::SharedFunctionInfo> obj) {
+ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate);
+if (obj->script().IsScript()) {
+i::Object url = i::Script::cast(obj->script()).source_url();
+return Utils::ToLocal(i::Handle<i::Object>(url, isolate));
+} else {
+return Local<String>();
+}
+}
+} // namespace

ScriptCompiler::CachedData::CachedData(const uint8_t* data_, int length_,
BufferPolicy buffer_policy_)
@ -2026,14 +2170,8 @@ Local<Value> UnboundScript::GetSourceURL() {
i::Handle<i::SharedFunctionInfo> obj =
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* i_isolate = obj->GetIsolate();
-ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
API_RCS_SCOPE(i_isolate, UnboundScript, GetSourceURL);
-if (obj->script().IsScript()) {
-i::Object url = i::Script::cast(obj->script()).source_url();
-return Utils::ToLocal(i::Handle<i::Object>(url, i_isolate));
-} else {
-return Local<String>();
-}
+return GetSharedFunctionInfoSourceURL(i_isolate, obj);
}

Local<Value> UnboundScript::GetSourceMappingURL() {
@ -2041,13 +2179,23 @@ Local<Value> UnboundScript::GetSourceMappingURL() {
i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
i::Isolate* i_isolate = obj->GetIsolate();
API_RCS_SCOPE(i_isolate, UnboundScript, GetSourceMappingURL);
-ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
-if (obj->script().IsScript()) {
-i::Object url = i::Script::cast(obj->script()).source_mapping_url();
-return Utils::ToLocal(i::Handle<i::Object>(url, i_isolate));
-} else {
-return Local<String>();
-}
+return GetSharedFunctionInfoSourceMappingURL(i_isolate, obj);
}

+Local<Value> UnboundModuleScript::GetSourceURL() {
+i::Handle<i::SharedFunctionInfo> obj =
+i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
+i::Isolate* i_isolate = obj->GetIsolate();
+API_RCS_SCOPE(i_isolate, UnboundModuleScript, GetSourceURL);
+return GetSharedFunctionInfoSourceURL(i_isolate, obj);
+}
+
+Local<Value> UnboundModuleScript::GetSourceMappingURL() {
+i::Handle<i::SharedFunctionInfo> obj =
+i::Handle<i::SharedFunctionInfo>::cast(Utils::OpenHandle(this));
+i::Isolate* i_isolate = obj->GetIsolate();
+API_RCS_SCOPE(i_isolate, UnboundModuleScript, GetSourceMappingURL);
+return GetSharedFunctionInfoSourceMappingURL(i_isolate, obj);
+}

MaybeLocal<Value> Script::Run(Local<Context> context) {
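The new UnboundModuleScript::GetSourceURL() and GetSourceMappingURL() mirror the existing UnboundScript accessors via the shared helpers above. A hedged sketch of how an embedder might call them once a module has been compiled (standard public API; error handling and context setup elided):

    #include "v8.h"

    void DumpModuleSourceInfo(v8::Local<v8::Module> module) {
      v8::Local<v8::UnboundModuleScript> unbound =
          module->GetUnboundModuleScript();
      // Reflects //# sourceURL / //# sourceMappingURL comments when present;
      // otherwise a non-string value is returned.
      v8::Local<v8::Value> source_url = unbound->GetSourceURL();
      v8::Local<v8::Value> source_map_url = unbound->GetSourceMappingURL();
      (void)source_url;
      (void)source_map_url;
    }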
@ -2076,7 +2224,7 @@ MaybeLocal<Value> Script::Run(Local<Context> context,
|
|||||||
//
|
//
|
||||||
// To avoid this, on running scripts check first if JIT code log is
|
// To avoid this, on running scripts check first if JIT code log is
|
||||||
// pending and generate immediately.
|
// pending and generate immediately.
|
||||||
if (i::FLAG_enable_etw_stack_walking) {
|
if (i::v8_flags.enable_etw_stack_walking) {
|
||||||
i::ETWJITInterface::MaybeSetHandlerNow(i_isolate);
|
i::ETWJITInterface::MaybeSetHandlerNow(i_isolate);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
@ -2085,14 +2233,15 @@ MaybeLocal<Value> Script::Run(Local<Context> context,
|
|||||||
// TODO(crbug.com/1193459): remove once ablation study is completed
base::ElapsedTimer timer;
base::TimeDelta delta;
- if (i::FLAG_script_delay > 0) {
+ if (i::v8_flags.script_delay > 0) {
- delta = v8::base::TimeDelta::FromMillisecondsD(i::FLAG_script_delay);
+ delta = v8::base::TimeDelta::FromMillisecondsD(i::v8_flags.script_delay);
}
- if (i::FLAG_script_delay_once > 0 && !i_isolate->did_run_script_delay()) {
+ if (i::v8_flags.script_delay_once > 0 && !i_isolate->did_run_script_delay()) {
- delta = v8::base::TimeDelta::FromMillisecondsD(i::FLAG_script_delay_once);
+ delta =
+ v8::base::TimeDelta::FromMillisecondsD(i::v8_flags.script_delay_once);
i_isolate->set_did_run_script_delay(true);
}
- if (i::FLAG_script_delay_fraction > 0.0) {
+ if (i::v8_flags.script_delay_fraction > 0.0) {
timer.Start();
} else if (delta.InMicroseconds() > 0) {
timer.Start();
@@ -2101,7 +2250,7 @@ MaybeLocal<Value> Script::Run(Local<Context> context,
}
}

- if (V8_UNLIKELY(i::FLAG_experimental_web_snapshots)) {
+ if (V8_UNLIKELY(i::v8_flags.experimental_web_snapshots)) {
i::Handle<i::HeapObject> maybe_script =
handle(fun->shared().script(), i_isolate);
if (maybe_script->IsScript() &&
@@ -2125,9 +2274,9 @@ MaybeLocal<Value> Script::Run(Local<Context> context,
has_pending_exception = !ToLocal<Value>(
i::Execution::CallScript(i_isolate, fun, receiver, options), &result);

- if (i::FLAG_script_delay_fraction > 0.0) {
+ if (i::v8_flags.script_delay_fraction > 0.0) {
delta = v8::base::TimeDelta::FromMillisecondsD(
- timer.Elapsed().InMillisecondsF() * i::FLAG_script_delay_fraction);
+ timer.Elapsed().InMillisecondsF() * i::v8_flags.script_delay_fraction);
timer.Restart();
while (timer.Elapsed() < delta) {
// Busy wait.
@@ -2734,7 +2883,7 @@ ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreaming(
Utils::ApiCheck(options == kNoCompileOptions || options == kEagerCompile,
"v8::ScriptCompiler::StartStreaming",
"Invalid CompileOptions");
- if (!i::FLAG_script_streaming) return nullptr;
+ if (!i::v8_flags.script_streaming) return nullptr;
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
i::ScriptStreamingData* data = source->impl();
std::unique_ptr<i::BackgroundCompileTask> task =
@@ -2767,20 +2916,22 @@ void ScriptCompiler::ConsumeCodeCacheTask::SourceTextAvailable(

bool ScriptCompiler::ConsumeCodeCacheTask::ShouldMergeWithExistingScript()
const {
- if (!i::FLAG_merge_background_deserialized_script_with_compilation_cache) {
+ if (!i::v8_flags
+ .merge_background_deserialized_script_with_compilation_cache) {
return false;
}
return impl_->ShouldMergeWithExistingScript();
}

void ScriptCompiler::ConsumeCodeCacheTask::MergeWithExistingScript() {
- DCHECK(i::FLAG_merge_background_deserialized_script_with_compilation_cache);
+ DCHECK(
+ i::v8_flags.merge_background_deserialized_script_with_compilation_cache);
impl_->MergeWithExistingScript();
}

ScriptCompiler::ConsumeCodeCacheTask* ScriptCompiler::StartConsumingCodeCache(
Isolate* v8_isolate, std::unique_ptr<CachedData> cached_data) {
- if (!i::FLAG_concurrent_cache_deserialization) return nullptr;
+ if (!i::v8_flags.concurrent_cache_deserialization) return nullptr;
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
DCHECK_NO_SCRIPT_NO_EXCEPTION(i_isolate);
return new ScriptCompiler::ConsumeCodeCacheTask(
@@ -3721,6 +3872,7 @@ bool Value::IsWasmModuleObject() const { return false; }
#endif // V8_ENABLE_WEBASSEMBLY
VALUE_IS_SPECIFIC_TYPE(WeakMap, JSWeakMap)
VALUE_IS_SPECIFIC_TYPE(WeakSet, JSWeakSet)
+ VALUE_IS_SPECIFIC_TYPE(WeakRef, JSWeakRef)

#undef VALUE_IS_SPECIFIC_TYPE

@@ -4059,10 +4211,18 @@ size_t v8::BackingStore::ByteLength() const {
return reinterpret_cast<const i::BackingStore*>(this)->byte_length();
}

+ size_t v8::BackingStore::MaxByteLength() const {
+ return reinterpret_cast<const i::BackingStore*>(this)->max_byte_length();
+ }
+
bool v8::BackingStore::IsShared() const {
return reinterpret_cast<const i::BackingStore*>(this)->is_shared();
}

+ bool v8::BackingStore::IsResizableByUserJavaScript() const {
+ return reinterpret_cast<const i::BackingStore*>(this)->is_resizable_by_js();
+ }
+
// static
std::unique_ptr<v8::BackingStore> v8::BackingStore::Reallocate(
v8::Isolate* v8_isolate, std::unique_ptr<v8::BackingStore> backing_store,
@@ -4756,11 +4916,16 @@ MaybeLocal<String> v8::Object::ObjectProtoToString(Local<Context> context) {
}

Local<String> v8::Object::GetConstructorName() {
+ // TODO(v8:12547): Consider adding GetConstructorName(Local<Context>).
auto self = Utils::OpenHandle(this);
- // TODO(v8:12547): Support shared objects.
- DCHECK(!self->InSharedHeap());
+ i::Isolate* i_isolate;
+ if (self->InSharedWritableHeap()) {
+ i_isolate = i::Isolate::Current();
+ } else {
+ i_isolate = self->GetIsolate();
+ }
i::Handle<i::String> name =
- i::JSReceiver::GetConstructorName(self->GetIsolate(), self);
+ i::JSReceiver::GetConstructorName(i_isolate, self);
return Utils::ToLocal(name);
}

@@ -4910,7 +5075,7 @@ Maybe<bool> Object::SetAccessor(Local<Context> context, Local<Name> name,
SideEffectType setter_side_effect_type) {
return ObjectSetAccessor(context, this, name, getter, setter,
data.FromMaybe(Local<Value>()), settings, attribute,
- i::FLAG_disable_old_api_accessors, false,
+ i::v8_flags.disable_old_api_accessors, false,
getter_side_effect_type, setter_side_effect_type);
}

@@ -6182,7 +6347,8 @@ void v8::Object::SetAlignedPointerInInternalField(int index, void* value) {
.store_aligned_pointer(obj->GetIsolate(), value),
location, "Unaligned pointer");
DCHECK_EQ(value, GetAlignedPointerFromInternalField(index));
- internal::WriteBarrier::MarkingFromInternalFields(i::JSObject::cast(*obj));
+ internal::WriteBarrier::CombinedBarrierFromInternalFields(
+ i::JSObject::cast(*obj), value);
}

void v8::Object::SetAlignedPointerInInternalFields(int argc, int indices[],
@@ -6205,7 +6371,8 @@ void v8::Object::SetAlignedPointerInInternalFields(int argc, int indices[],
location, "Unaligned pointer");
DCHECK_EQ(value, GetAlignedPointerFromInternalField(index));
}
- internal::WriteBarrier::MarkingFromInternalFields(js_obj);
+ internal::WriteBarrier::CombinedBarrierFromInternalFields(js_obj, argc,
+ values);
}

// --- E n v i r o n m e n t ---
@@ -6653,10 +6820,31 @@ v8::Isolate* Context::GetIsolate() {
v8::MicrotaskQueue* Context::GetMicrotaskQueue() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
Utils::ApiCheck(env->IsNativeContext(), "v8::Context::GetMicrotaskQueue",
- "Must be calld on a native context");
+ "Must be called on a native context");
return i::Handle<i::NativeContext>::cast(env)->microtask_queue();
}

+ void Context::SetMicrotaskQueue(v8::MicrotaskQueue* queue) {
+ i::Handle<i::Context> context = Utils::OpenHandle(this);
+ i::Isolate* i_isolate = context->GetIsolate();
+ Utils::ApiCheck(context->IsNativeContext(), "v8::Context::SetMicrotaskQueue",
+ "Must be called on a native context");
+ i::Handle<i::NativeContext> native_context =
+ i::Handle<i::NativeContext>::cast(context);
+ i::HandleScopeImplementer* impl = i_isolate->handle_scope_implementer();
+ Utils::ApiCheck(!native_context->microtask_queue()->IsRunningMicrotasks(),
+ "v8::Context::SetMicrotaskQueue",
+ "Must not be running microtasks");
+ Utils::ApiCheck(
+ native_context->microtask_queue()->GetMicrotasksScopeDepth() == 0,
+ "v8::Context::SetMicrotaskQueue", "Must not have microtask scope pushed");
+ Utils::ApiCheck(impl->EnteredContextCount() == 0,
+ "v8::Context::SetMicrotaskQueue()",
+ "Cannot set Microtask Queue with an entered context");
+ native_context->set_microtask_queue(
+ i_isolate, static_cast<const i::MicrotaskQueue*>(queue));
+ }
+
v8::Local<v8::Object> Context::Global() {
i::Handle<i::Context> context = Utils::OpenHandle(this);
i::Isolate* i_isolate = context->GetIsolate();
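The new SetMicrotaskQueue entry point only succeeds on a native context that is not currently running microtasks, has no MicrotasksScope open, and has not been entered, which is exactly what the ApiCheck calls above enforce. A minimal embedder-side sketch, assuming an already-initialized v8::Isolate* named isolate (all setup code omitted; names are illustrative):

  // Create an explicit queue and attach it to a fresh, not-yet-entered context.
  std::unique_ptr<v8::MicrotaskQueue> queue =
      v8::MicrotaskQueue::New(isolate, v8::MicrotasksPolicy::kExplicit);
  v8::HandleScope handle_scope(isolate);
  v8::Local<v8::Context> context = v8::Context::New(isolate);
  context->SetMicrotaskQueue(queue.get());
  // GetMicrotaskQueue() should now report the swapped-in queue.
  assert(context->GetMicrotaskQueue() == queue.get());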
@@ -6785,6 +6973,14 @@ void v8::Context::SetPromiseHooks(Local<Function> init_hook,
#endif // V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
}

+ bool Context::HasTemplateLiteralObject(Local<Value> object) {
+ i::DisallowGarbageCollection no_gc;
+ i::Object i_object = *Utils::OpenHandle(*object);
+ if (!i_object.IsJSArray()) return false;
+ return Utils::OpenHandle(this)->native_context().HasTemplateLiteralObject(
+ i::JSArray::cast(i_object));
+ }
+
MaybeLocal<Context> metrics::Recorder::GetContext(
Isolate* v8_isolate, metrics::Recorder::ContextId id) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
@@ -7128,9 +7324,16 @@ bool v8::String::MakeExternal(v8::String::ExternalStringResource* resource) {
return false;
}

+ // TODO(v8:12007): Consider adding
+ // MakeExternal(Isolate*, ExternalStringResource*).
+ i::Isolate* i_isolate;
+ if (obj.InSharedWritableHeap()) {
+ i_isolate = i::Isolate::Current();
+ } else {
// It is safe to call GetIsolateFromWritableHeapObject because
// SupportsExternalization already checked that the object is writable.
- i::Isolate* i_isolate = i::GetIsolateFromWritableObject(obj);
+ i_isolate = i::GetIsolateFromWritableObject(obj);
+ }
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);

CHECK(resource && resource->data());
@@ -7154,9 +7357,16 @@ bool v8::String::MakeExternal(
return false;
}

+ // TODO(v8:12007): Consider adding
+ // MakeExternal(Isolate*, ExternalOneByteStringResource*).
+ i::Isolate* i_isolate;
+ if (obj.InSharedWritableHeap()) {
+ i_isolate = i::Isolate::Current();
+ } else {
// It is safe to call GetIsolateFromWritableHeapObject because
// SupportsExternalization already checked that the object is writable.
- i::Isolate* i_isolate = i::GetIsolateFromWritableObject(obj);
+ i_isolate = i::GetIsolateFromWritableObject(obj);
+ }
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);

CHECK(resource && resource->data());
@@ -8095,6 +8305,10 @@ bool v8::ArrayBuffer::IsDetachable() const {
return Utils::OpenHandle(this)->is_detachable();
}

+ bool v8::ArrayBuffer::WasDetached() const {
+ return Utils::OpenHandle(this)->was_detached();
+ }
+
namespace {
std::shared_ptr<i::BackingStore> ToInternal(
std::shared_ptr<i::BackingStoreBase> backing_store) {
@@ -8102,19 +8316,42 @@ std::shared_ptr<i::BackingStore> ToInternal(
}
}  // namespace

- void v8::ArrayBuffer::Detach() {
+ Maybe<bool> v8::ArrayBuffer::Detach(v8::Local<v8::Value> key) {
i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
i::Isolate* i_isolate = obj->GetIsolate();
Utils::ApiCheck(obj->is_detachable(), "v8::ArrayBuffer::Detach",
"Only detachable ArrayBuffers can be detached");
- API_RCS_SCOPE(i_isolate, ArrayBuffer, Detach);
- ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
- obj->Detach();
+ ENTER_V8_NO_SCRIPT(
+ i_isolate, reinterpret_cast<v8::Isolate*>(i_isolate)->GetCurrentContext(),
+ ArrayBuffer, Detach, Nothing<bool>(), i::HandleScope);
+ if (!key.IsEmpty()) {
+ i::Handle<i::Object> i_key = Utils::OpenHandle(*key);
+ constexpr bool kForceForWasmMemory = false;
+ has_pending_exception =
+ i::JSArrayBuffer::Detach(obj, kForceForWasmMemory, i_key).IsNothing();
+ } else {
+ has_pending_exception = i::JSArrayBuffer::Detach(obj).IsNothing();
+ }
+ RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+ return Just(true);
+ }
+
+ void v8::ArrayBuffer::Detach() { Detach(Local<Value>()).Check(); }
+
+ void v8::ArrayBuffer::SetDetachKey(v8::Local<v8::Value> key) {
+ i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
+ i::Handle<i::Object> i_key = Utils::OpenHandle(*key);
+ obj->set_detach_key(*i_key);
}

size_t v8::ArrayBuffer::ByteLength() const {
i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
- return obj->byte_length();
+ return obj->GetByteLength();
+ }
+
+ size_t v8::ArrayBuffer::MaxByteLength() const {
+ i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
+ return obj->max_byte_length();
}

Local<ArrayBuffer> v8::ArrayBuffer::New(Isolate* v8_isolate,
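For context on the new detach-key pair: SetDetachKey stores a key object on the buffer, and the Detach(key) overload forwards that key to the internal JSArrayBuffer::Detach, returning Just(true) on success and Nothing<bool>() when an exception is pending; the old void Detach() is now a thin wrapper that passes an empty key and Check()s the result. A hedged embedder-side sketch, assuming an initialized isolate with an entered context (the choice of a Symbol as the key is purely illustrative):

  v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(isolate, 1024);
  v8::Local<v8::Value> key = v8::Symbol::New(isolate);
  buffer->SetDetachKey(key);
  // Detaching with the matching key is expected to succeed and yield Just(true);
  // a mismatched key should surface as Nothing<bool>() with a pending exception
  // (an assumption based on the Maybe<bool> signature above, not shown here).
  v8::Maybe<bool> detached = buffer->Detach(key);
  if (detached.IsJust() && detached.FromJust()) {
    // buffer->WasDetached() (also added in this change) now returns true.
  }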
@@ -8176,12 +8413,12 @@ std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
void* deleter_data) {
CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
#ifdef V8_ENABLE_SANDBOX
- Utils::ApiCheck(
- !data || i::GetProcessWideSandbox()->Contains(data),
+ Utils::ApiCheck(!data || i::GetProcessWideSandbox()->Contains(data),
"v8_ArrayBuffer_NewBackingStore",
- "When the V8 Sandbox is enabled, ArrayBuffer backing stores must be "
- "allocated inside the sandbox address space. Please use an appropriate "
- "ArrayBuffer::Allocator to allocate these buffers.");
+ "When the V8 Sandbox is enabled, ArrayBuffer backing stores "
+ "must be allocated inside the sandbox address space. Please "
+ "use an appropriate ArrayBuffer::Allocator to allocate these "
+ "buffers, or disable the sandbox.");
#endif // V8_ENABLE_SANDBOX

std::unique_ptr<i::BackingStoreBase> backing_store =
@@ -8191,6 +8428,41 @@ std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
static_cast<v8::BackingStore*>(backing_store.release()));
}

+ // static
+ std::unique_ptr<BackingStore> v8::ArrayBuffer::NewResizableBackingStore(
+ size_t byte_length, size_t max_byte_length) {
+ Utils::ApiCheck(i::v8_flags.harmony_rab_gsab,
+ "v8::ArrayBuffer::NewResizableBackingStore",
+ "Constructing resizable ArrayBuffers is not supported");
+ Utils::ApiCheck(byte_length <= max_byte_length,
+ "v8::ArrayBuffer::NewResizableBackingStore",
+ "Cannot construct resizable ArrayBuffer, byte_length must be "
+ "<= max_byte_length");
+ Utils::ApiCheck(
+ byte_length <= i::JSArrayBuffer::kMaxByteLength,
+ "v8::ArrayBuffer::NewResizableBackingStore",
+ "Cannot construct resizable ArrayBuffer, requested length is too big");
+
+ size_t page_size, initial_pages, max_pages;
+ if (i::JSArrayBuffer::GetResizableBackingStorePageConfiguration(
+ nullptr, byte_length, max_byte_length, i::kDontThrow, &page_size,
+ &initial_pages, &max_pages)
+ .IsNothing()) {
+ i::V8::FatalProcessOutOfMemory(nullptr,
+ "v8::ArrayBuffer::NewResizableBackingStore");
+ }
+ std::unique_ptr<i::BackingStoreBase> backing_store =
+ i::BackingStore::TryAllocateAndPartiallyCommitMemory(
+ nullptr, byte_length, max_byte_length, page_size, initial_pages,
+ max_pages, i::WasmMemoryFlag::kNotWasm, i::SharedFlag::kNotShared);
+ if (!backing_store) {
+ i::V8::FatalProcessOutOfMemory(nullptr,
+ "v8::ArrayBuffer::NewResizableBackingStore");
+ }
+ return std::unique_ptr<v8::BackingStore>(
+ static_cast<v8::BackingStore*>(backing_store.release()));
+ }
+
Local<ArrayBuffer> v8::ArrayBufferView::Buffer() {
i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this);
i::Handle<i::JSArrayBuffer> buffer;
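NewResizableBackingStore allocates a backing store that can later grow up to max_byte_length (the allocation appears to be reserved up front and only partially committed, per TryAllocateAndPartiallyCommitMemory); it requires the harmony_rab_gsab flag, enforces byte_length <= max_byte_length, and crashes with FatalProcessOutOfMemory rather than returning null on failure. A short usage sketch (assumes an initialized isolate; the sizes are illustrative):

  std::unique_ptr<v8::BackingStore> store =
      v8::ArrayBuffer::NewResizableBackingStore(/*byte_length=*/4 * 1024,
                                                /*max_byte_length=*/1 << 20);
  // The store can back an ArrayBuffer in the usual way; the new MaxByteLength()
  // accessors then report the reserved maximum.
  v8::Local<v8::ArrayBuffer> ab =
      v8::ArrayBuffer::New(isolate, std::move(store));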
@@ -8240,13 +8512,21 @@ size_t v8::ArrayBufferView::ByteOffset() {
}

size_t v8::ArrayBufferView::ByteLength() {
- i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this);
- return obj->WasDetached() ? 0 : obj->byte_length();
+ i::DisallowGarbageCollection no_gc;
+ i::JSArrayBufferView obj = *Utils::OpenHandle(this);
+ if (obj.WasDetached()) {
+ return 0;
+ }
+ if (obj.IsJSTypedArray()) {
+ return i::JSTypedArray::cast(obj).GetByteLength();
+ }
+ return i::JSDataView::cast(obj).GetByteLength();
}

size_t v8::TypedArray::Length() {
- i::Handle<i::JSTypedArray> obj = Utils::OpenHandle(this);
- return obj->WasDetached() ? 0 : obj->length();
+ i::DisallowGarbageCollection no_gc;
+ i::JSTypedArray obj = *Utils::OpenHandle(this);
+ return obj.WasDetached() ? 0 : obj.GetLength();
}

static_assert(
@@ -8273,7 +8553,7 @@ static_assert(
Local<Type##Array> Type##Array::New( \
Local<SharedArrayBuffer> shared_array_buffer, size_t byte_offset, \
size_t length) { \
- CHECK(i::FLAG_harmony_sharedarraybuffer); \
+ CHECK(i::v8_flags.harmony_sharedarraybuffer); \
i::Isolate* i_isolate = \
Utils::OpenHandle(*shared_array_buffer)->GetIsolate(); \
API_RCS_SCOPE(i_isolate, Type##Array, New); \
@@ -8295,6 +8575,7 @@ static_assert(
TYPED_ARRAYS(TYPED_ARRAY_NEW)
#undef TYPED_ARRAY_NEW

+ // TODO(v8:11111): Support creating length tracking DataViews via the API.
Local<DataView> DataView::New(Local<ArrayBuffer> array_buffer,
size_t byte_offset, size_t byte_length) {
i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*array_buffer);
@@ -8308,7 +8589,7 @@ Local<DataView> DataView::New(Local<ArrayBuffer> array_buffer,

Local<DataView> DataView::New(Local<SharedArrayBuffer> shared_array_buffer,
size_t byte_offset, size_t byte_length) {
- CHECK(i::FLAG_harmony_sharedarraybuffer);
+ CHECK(i::v8_flags.harmony_sharedarraybuffer);
i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*shared_array_buffer);
i::Isolate* i_isolate = buffer->GetIsolate();
API_RCS_SCOPE(i_isolate, DataView, New);
@@ -8320,12 +8601,17 @@ Local<DataView> DataView::New(Local<SharedArrayBuffer> shared_array_buffer,

size_t v8::SharedArrayBuffer::ByteLength() const {
i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
- return obj->byte_length();
+ return obj->GetByteLength();
+ }
+
+ size_t v8::SharedArrayBuffer::MaxByteLength() const {
+ i::Handle<i::JSArrayBuffer> obj = Utils::OpenHandle(this);
+ return obj->max_byte_length();
}

Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* v8_isolate,
size_t byte_length) {
- CHECK(i::FLAG_harmony_sharedarraybuffer);
+ CHECK(i::v8_flags.harmony_sharedarraybuffer);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
API_RCS_SCOPE(i_isolate, SharedArrayBuffer, New);
ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
@@ -8347,7 +8633,7 @@ Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* v8_isolate,

Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(
Isolate* v8_isolate, std::shared_ptr<BackingStore> backing_store) {
- CHECK(i::FLAG_harmony_sharedarraybuffer);
+ CHECK(i::v8_flags.harmony_sharedarraybuffer);
CHECK_IMPLIES(backing_store->ByteLength() != 0,
backing_store->Data() != nullptr);
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
@@ -8738,7 +9024,7 @@ bool Isolate::HasPendingBackgroundTasks() {
}

void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) {
- Utils::ApiCheck(i::FLAG_expose_gc,
+ Utils::ApiCheck(i::v8_flags.expose_gc,
"v8::Isolate::RequestGarbageCollectionForTesting",
"Must use --expose-gc");
if (type == kMinorGarbageCollection) {
@@ -9256,7 +9542,7 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory(

void Isolate::SetEventLogger(LogEventCallback that) {
// Do not overwrite the event logger if we want to log explicitly.
- if (i::FLAG_log_internal_timer_events) return;
+ if (i::v8_flags.log_internal_timer_events) return;
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this);
i_isolate->set_event_logger(that);
}
@@ -9389,7 +9675,7 @@ bool Isolate::IdleNotificationDeadline(double deadline_in_seconds) {
// Returning true tells the caller that it need not
// continue to call IdleNotification.
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this);
- if (!i::FLAG_use_idle_notification) return true;
+ if (!i::v8_flags.use_idle_notification) return true;
return i_isolate->heap()->IdleNotification(deadline_in_seconds);
}

@@ -9575,16 +9861,19 @@ CALLBACK_SETTER(WasmAsyncResolvePromiseCallback,
CALLBACK_SETTER(WasmLoadSourceMapCallback, WasmLoadSourceMapCallback,
wasm_load_source_map_callback)

- CALLBACK_SETTER(WasmSimdEnabledCallback, WasmSimdEnabledCallback,
- wasm_simd_enabled_callback)
-
- CALLBACK_SETTER(WasmExceptionsEnabledCallback, WasmExceptionsEnabledCallback,
- wasm_exceptions_enabled_callback)
-
CALLBACK_SETTER(SharedArrayBufferConstructorEnabledCallback,
SharedArrayBufferConstructorEnabledCallback,
sharedarraybuffer_constructor_enabled_callback)

+ void Isolate::SetWasmExceptionsEnabledCallback(
+ WasmExceptionsEnabledCallback callback) {
+ // Exceptions are always enabled
+ }
+
+ void Isolate::SetWasmSimdEnabledCallback(WasmSimdEnabledCallback callback) {
+ // SIMD is always enabled
+ }
+
void Isolate::InstallConditionalFeatures(Local<Context> context) {
v8::HandleScope handle_scope(this);
v8::Context::Scope context_scope(context);
@@ -9592,7 +9881,7 @@ void Isolate::InstallConditionalFeatures(Local<Context> context) {
if (i_isolate->is_execution_terminating()) return;
i_isolate->InstallConditionalFeatures(Utils::OpenHandle(*context));
#if V8_ENABLE_WEBASSEMBLY
- if (i::FLAG_expose_wasm && !i_isolate->has_pending_exception()) {
+ if (i::v8_flags.expose_wasm && !i_isolate->has_pending_exception()) {
i::WasmJs::InstallConditionalFeatures(i_isolate,
Utils::OpenHandle(*context));
}
@@ -9743,6 +10032,11 @@ MicrotasksScope::MicrotasksScope(Isolate* v8_isolate,
MicrotasksScope::Type type)
: MicrotasksScope(v8_isolate, nullptr, type) {}

+ MicrotasksScope::MicrotasksScope(Local<Context> v8_context,
+ MicrotasksScope::Type type)
+ : MicrotasksScope(v8_context->GetIsolate(), v8_context->GetMicrotaskQueue(),
+ type) {}
+
MicrotasksScope::MicrotasksScope(Isolate* v8_isolate,
MicrotaskQueue* microtask_queue,
MicrotasksScope::Type type)
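The new context-based MicrotasksScope constructor simply forwards to the (isolate, queue) overload using the context's own microtask queue, so embedders that install per-context queues (see Context::SetMicrotaskQueue above) get the right queue without naming it explicitly. Sketch, assuming an entered v8::Local<v8::Context> named context:

  {
    v8::MicrotasksScope scope(context, v8::MicrotasksScope::kRunMicrotasks);
    // Work done here enqueues onto context->GetMicrotaskQueue(); the queue is
    // flushed according to its microtasks policy when the scope unwinds.
  }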
@@ -10064,6 +10358,21 @@ int64_t CpuProfile::GetEndTime() const {
return profile->end_time().since_origin().InMicroseconds();
}

+ static i::CpuProfile* ToInternal(const CpuProfile* profile) {
+ return const_cast<i::CpuProfile*>(
+ reinterpret_cast<const i::CpuProfile*>(profile));
+ }
+
+ void CpuProfile::Serialize(OutputStream* stream,
+ CpuProfile::SerializationFormat format) const {
+ Utils::ApiCheck(format == kJSON, "v8::CpuProfile::Serialize",
+ "Unknown serialization format");
+ Utils::ApiCheck(stream->GetChunkSize() > 0, "v8::CpuProfile::Serialize",
+ "Invalid stream chunk size");
+ i::CpuProfileJSONSerializer serializer(ToInternal(this));
+ serializer.Serialize(stream);
+ }
+
int CpuProfile::GetSamplesCount() const {
return reinterpret_cast<const i::CpuProfile*>(this)->samples_count();
}
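CpuProfile::Serialize streams the profile as JSON chunks through the embedder-supplied OutputStream, mirroring how heap snapshots are serialized; only kJSON is accepted and the stream must report a positive chunk size. A hedged sketch of a stream that simply accumulates the chunks into a std::string (class and variable names are illustrative; profile is assumed to be an existing const v8::CpuProfile*):

  class StringOutputStream : public v8::OutputStream {
   public:
    void EndOfStream() override {}
    WriteResult WriteAsciiChunk(char* data, int size) override {
      json_.append(data, size);
      return kContinue;
    }
    const std::string& json() const { return json_; }

   private:
    std::string json_;
  };

  StringOutputStream stream;
  profile->Serialize(&stream, v8::CpuProfile::kJSON);
  // stream.json() now holds the serialized profile.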
@@ -10526,7 +10835,7 @@ void EmbedderHeapTracer::IterateTracedGlobalHandles(
TracedGlobalHandleVisitor* visitor) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate_);
i::DisallowGarbageCollection no_gc;
- i_isolate->global_handles()->IterateTracedNodes(visitor);
+ i_isolate->traced_handles()->Iterate(visitor);
}

bool EmbedderHeapTracer::IsRootForNonTracingGC(
@@ -133,7 +133,7 @@ void Report(Handle<Script> script, int position, base::Vector<const char> text,
// Hook to report successful execution of {AsmJs::CompileAsmViaWasm} phase.
void ReportCompilationSuccess(Handle<Script> script, int position,
double compile_time, size_t module_size) {
- if (FLAG_suppress_asm_messages || !FLAG_trace_asm_time) return;
+ if (v8_flags.suppress_asm_messages || !v8_flags.trace_asm_time) return;
base::EmbeddedVector<char, 100> text;
int length = SNPrintF(text, "success, compile time %0.3f ms, %zu bytes",
compile_time, module_size);
@@ -146,7 +146,7 @@ void ReportCompilationSuccess(Handle<Script> script, int position,
// Hook to report failed execution of {AsmJs::CompileAsmViaWasm} phase.
void ReportCompilationFailure(ParseInfo* parse_info, int position,
const char* reason) {
- if (FLAG_suppress_asm_messages) return;
+ if (v8_flags.suppress_asm_messages) return;
parse_info->pending_error_handler()->ReportWarningAt(
position, position, MessageTemplate::kAsmJsInvalid, reason);
}
@@ -154,7 +154,7 @@ void ReportCompilationFailure(ParseInfo* parse_info, int position,
// Hook to report successful execution of {AsmJs::InstantiateAsmWasm} phase.
void ReportInstantiationSuccess(Handle<Script> script, int position,
double instantiate_time) {
- if (FLAG_suppress_asm_messages || !FLAG_trace_asm_time) return;
+ if (v8_flags.suppress_asm_messages || !v8_flags.trace_asm_time) return;
base::EmbeddedVector<char, 50> text;
int length = SNPrintF(text, "success, %0.3f ms", instantiate_time);
CHECK_NE(-1, length);
@@ -166,7 +166,7 @@ void ReportInstantiationSuccess(Handle<Script> script, int position,
// Hook to report failed execution of {AsmJs::InstantiateAsmWasm} phase.
void ReportInstantiationFailure(Handle<Script> script, int position,
const char* reason) {
- if (FLAG_suppress_asm_messages) return;
+ if (v8_flags.suppress_asm_messages) return;
base::Vector<const char> text = base::CStrVector(reason);
Report(script, position, text, MessageTemplate::kAsmJsLinkingFailed,
v8::Isolate::kMessageWarning);
@@ -237,7 +237,7 @@ UnoptimizedCompilationJob::Status AsmJsCompilationJob::ExecuteJobImpl() {
stream->Seek(compilation_info()->literal()->start_position());
wasm::AsmJsParser parser(&translate_zone, stack_limit(), stream);
if (!parser.Run()) {
- if (!FLAG_suppress_asm_messages) {
+ if (!v8_flags.suppress_asm_messages) {
ReportCompilationFailure(parse_info(), parser.failure_location(),
parser.failure_message());
}
@@ -28,7 +28,7 @@ namespace wasm {
failed_ = true; \
failure_message_ = msg; \
failure_location_ = static_cast<int>(scanner_.Position()); \
- if (FLAG_trace_asm_parser) { \
+ if (v8_flags.trace_asm_parser) { \
PrintF("[asm.js failure: %s, token: '%s', see: %s:%d]\n", msg, \
scanner_.Name(scanner_.Token()).c_str(), __FILE__, __LINE__); \
} \
@@ -67,7 +67,7 @@ void AsmJsScanner::Next() {
}

#if DEBUG
- if (FLAG_trace_asm_scanner) {
+ if (v8_flags.trace_asm_scanner) {
if (Token() == kDouble) {
PrintF("%lf ", AsDouble());
} else if (Token() == kUnsigned) {
@@ -715,7 +715,7 @@ bool DeclarationScope::Analyze(ParseInfo* info) {
scope->GetScriptScope()->RewriteReplGlobalVariables();

#ifdef DEBUG
- if (FLAG_print_scopes) {
+ if (v8_flags.print_scopes) {
PrintF("Global scope:\n");
scope->Print();
}
@@ -885,9 +885,8 @@ void DeclarationScope::AddLocal(Variable* var) {
}

void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
- DCHECK(!IsCleared());
- DCHECK_EQ(new_parent, outer_scope_and_calls_eval_.GetPointer()->inner_scope_);
- DCHECK_EQ(new_parent->outer_scope_, outer_scope_and_calls_eval_.GetPointer());
+ DCHECK_EQ(new_parent, outer_scope_->inner_scope_);
+ DCHECK_EQ(new_parent->outer_scope_, outer_scope_);
DCHECK_EQ(new_parent, new_parent->GetClosureScope());
DCHECK_NULL(new_parent->inner_scope_);
DCHECK(new_parent->unresolved_list_.is_empty());
@@ -912,12 +911,11 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
new_parent->sibling_ = top_inner_scope_;
}

- Scope* outer_scope = outer_scope_and_calls_eval_.GetPointer();
- new_parent->unresolved_list_.MoveTail(&outer_scope->unresolved_list_,
+ new_parent->unresolved_list_.MoveTail(&outer_scope_->unresolved_list_,
top_unresolved_);

// Move temporaries allocated for complex parameter initializers.
- DeclarationScope* outer_closure = outer_scope->GetClosureScope();
+ DeclarationScope* outer_closure = outer_scope_->GetClosureScope();
for (auto it = top_local_; it != outer_closure->locals()->end(); ++it) {
Variable* local = *it;
DCHECK_EQ(VariableMode::kTemporary, local->mode());
@@ -929,16 +927,11 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
outer_closure->locals_.Rewind(top_local_);

// Move eval calls since Snapshot's creation into new_parent.
- if (outer_scope_and_calls_eval_->calls_eval_) {
- new_parent->RecordDeclarationScopeEvalCall();
- new_parent->inner_scope_calls_eval_ = true;
+ if (outer_scope_->calls_eval_) {
+ new_parent->RecordEvalCall();
+ outer_scope_->calls_eval_ = false;
+ declaration_scope_->sloppy_eval_can_extend_vars_ = false;
}

- // We are in the arrow function case. The calls eval we may have recorded
- // is intended for the inner scope and we should simply restore the
- // original "calls eval" flag of the outer scope.
- RestoreEvalFlag();
- Clear();
}

void Scope::ReplaceOuterScope(Scope* outer) {
@@ -1270,8 +1263,9 @@ Declaration* DeclarationScope::CheckConflictingVarDeclarations(
if (decl->IsVariableDeclaration() &&
decl->AsVariableDeclaration()->AsNested() != nullptr) {
Scope* current = decl->AsVariableDeclaration()->AsNested()->scope();
- DCHECK(decl->var()->mode() == VariableMode::kVar ||
- decl->var()->mode() == VariableMode::kDynamic);
+ if (decl->var()->mode() != VariableMode::kVar &&
+ decl->var()->mode() != VariableMode::kDynamic)
+ continue;
// Iterate through all scopes until the declaration scope.
do {
// There is a conflict if there exists a non-VAR binding.
@@ -1762,7 +1756,7 @@ void DeclarationScope::AnalyzePartially(Parser* parser,
}

#ifdef DEBUG
- if (FLAG_print_scopes) {
+ if (v8_flags.print_scopes) {
PrintF("Inner function scope:\n");
Print();
}
@@ -2576,6 +2570,9 @@ void Scope::AllocateVariablesRecursively() {
this->ForEach([](Scope* scope) -> Iteration {
DCHECK(!scope->already_resolved_);
if (WasLazilyParsed(scope)) return Iteration::kContinue;
+ if (scope->sloppy_eval_can_extend_vars_) {
+ scope->num_heap_slots_ = Context::MIN_CONTEXT_EXTENDED_SLOTS;
+ }
DCHECK_EQ(scope->ContextHeaderLength(), scope->num_heap_slots_);

// Allocate variables for this scope.
@@ -110,12 +110,6 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {

class Snapshot final {
public:
- Snapshot()
- : outer_scope_and_calls_eval_(nullptr, false),
- top_unresolved_(),
- top_local_() {
- DCHECK(IsCleared());
- }
inline explicit Snapshot(Scope* scope);

// Disallow copy and move.
@@ -123,45 +117,31 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
Snapshot(Snapshot&&) = delete;

~Snapshot() {
- // If we're still active, there was no arrow function. In that case outer
- // calls eval if it already called eval before this snapshot started, or
- // if the code during the snapshot called eval.
- if (!IsCleared() && outer_scope_and_calls_eval_.GetPayload()) {
- RestoreEvalFlag();
+ // Restore eval flags from before the scope was active.
+ if (sloppy_eval_can_extend_vars_) {
+ declaration_scope_->sloppy_eval_can_extend_vars_ = true;
}
- }
- void RestoreEvalFlag() {
- if (outer_scope_and_calls_eval_.GetPayload()) {
- // This recreates both calls_eval and sloppy_eval_can_extend_vars.
- outer_scope_and_calls_eval_.GetPointer()->RecordEvalCall();
+ if (calls_eval_) {
+ outer_scope_->calls_eval_ = true;
}
}

void Reparent(DeclarationScope* new_parent);
- bool IsCleared() const {
- return outer_scope_and_calls_eval_.GetPointer() == nullptr;
- }
-
- void Clear() {
- outer_scope_and_calls_eval_.SetPointer(nullptr);
- #ifdef DEBUG
- outer_scope_and_calls_eval_.SetPayload(false);
- top_inner_scope_ = nullptr;
- top_local_ = base::ThreadedList<Variable>::Iterator();
- top_unresolved_ = UnresolvedList::Iterator();
- #endif
- }

private:
- // During tracking calls_eval caches whether the outer scope called eval.
- // Upon move assignment we store whether the new inner scope calls eval into
- // the move target calls_eval bit, and restore calls eval on the outer
- // scope.
- base::PointerWithPayload<Scope, bool, 1> outer_scope_and_calls_eval_;
+ Scope* outer_scope_;
+ Scope* declaration_scope_;
Scope* top_inner_scope_;
UnresolvedList::Iterator top_unresolved_;
base::ThreadedList<Variable>::Iterator top_local_;
+ // While the scope is active, the scope caches the flag values for
+ // outer_scope_ / declaration_scope_ they can be used to know what happened
+ // while parsing the arrow head. If this turns out to be an arrow head, new
+ // values on the respective scopes will be cleared and moved to the inner
+ // scope. Otherwise the cached flags will be merged with the flags from the
+ // arrow head.
+ bool calls_eval_;
+ bool sloppy_eval_can_extend_vars_;
};

enum class DeserializationMode { kIncludingVariables, kScopesOnly };
@@ -907,8 +887,8 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
void RecordDeclarationScopeEvalCall() {
calls_eval_ = true;

- // If this isn't a sloppy eval, we don't care about it.
- if (language_mode() != LanguageMode::kSloppy) return;
+ // The caller already checked whether we're in sloppy mode.
+ CHECK(is_sloppy(language_mode()));

// Sloppy eval in script scopes can only introduce global variables anyway,
// so we don't care that it calls sloppy eval.
@@ -942,7 +922,6 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
}

sloppy_eval_can_extend_vars_ = true;
- num_heap_slots_ = Context::MIN_CONTEXT_EXTENDED_SLOTS;
}

bool sloppy_eval_can_extend_vars() const {
@@ -1367,7 +1346,9 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {

void Scope::RecordEvalCall() {
calls_eval_ = true;
+ if (is_sloppy(language_mode())) {
GetDeclarationScope()->RecordDeclarationScopeEvalCall();
+ }
RecordInnerScopeEvalCall();
// The eval contents might access "super" (if it's inside a function that
// binds super).
@@ -1380,14 +1361,18 @@ void Scope::RecordEvalCall() {
}

Scope::Snapshot::Snapshot(Scope* scope)
- : outer_scope_and_calls_eval_(scope, scope->calls_eval_),
+ : outer_scope_(scope),
+ declaration_scope_(scope->GetDeclarationScope()),
top_inner_scope_(scope->inner_scope_),
top_unresolved_(scope->unresolved_list_.end()),
- top_local_(scope->GetClosureScope()->locals_.end()) {
- // Reset in order to record eval calls during this Snapshot's lifetime.
- outer_scope_and_calls_eval_.GetPointer()->calls_eval_ = false;
- outer_scope_and_calls_eval_.GetPointer()->sloppy_eval_can_extend_vars_ =
- false;
+ top_local_(scope->GetClosureScope()->locals_.end()),
+ calls_eval_(outer_scope_->calls_eval_),
+ sloppy_eval_can_extend_vars_(
+ declaration_scope_->sloppy_eval_can_extend_vars_) {
+ // Reset in order to record (sloppy) eval calls during this Snapshot's
+ // lifetime.
+ outer_scope_->calls_eval_ = false;
+ declaration_scope_->sloppy_eval_can_extend_vars_ = false;
}

class ModuleScope final : public DeclarationScope {
@@ -5,3 +5,9 @@ include_rules = [
"-src",
"+src/base",
]

+ specific_include_rules = {
+ "ieee754.h": [
+ "+third_party/glibc/src/sysdeps/ieee754/dbl-64/trig.h"
+ ],
+ }
@@ -62,7 +62,7 @@ class BitField final {
}

// Returns a type U with the bit field value updated.
- static constexpr U update(U previous, T value) {
+ V8_NODISCARD static constexpr U update(U previous, T value) {
return (previous & ~kMask) | encode(value);
}

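Marking update() V8_NODISCARD matters because the helper is purely functional: it returns the updated word instead of mutating previous, so a call whose result is ignored is always a bug. A standalone illustration of the same encode/update pattern in plain C++ (not the V8 header itself; the field layout is made up for the example):

  #include <cstdint>

  // A 3-bit field stored at bit offset 4 of a uint32_t word.
  struct SizeField {
    static constexpr uint32_t kShift = 4;
    static constexpr uint32_t kMask = uint32_t{0x7} << kShift;
    static constexpr uint32_t encode(uint32_t v) {
      return (v << kShift) & kMask;
    }
    [[nodiscard]] static constexpr uint32_t update(uint32_t previous,
                                                   uint32_t v) {
      return (previous & ~kMask) | encode(v);
    }
  };

  uint32_t word = 0;
  word = SizeField::update(word, 5);  // Correct: the returned word is kept.
  // SizeField::update(word, 6);      // Discarded result -> compiler warning.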
@@ -52,6 +52,46 @@ int32_t SignedMulHigh32(int32_t lhs, int32_t rhs) {
32u);
}

+ // The algorithm used is described in section 8.2 of
+ // Hacker's Delight, by Henry S. Warren, Jr.
+ // It assumes that a right shift on a signed integer is an arithmetic shift.
+ int64_t SignedMulHigh64(int64_t u, int64_t v) {
+ uint64_t u0 = u & 0xFFFFFFFF;
+ int64_t u1 = u >> 32;
+ uint64_t v0 = v & 0xFFFFFFFF;
+ int64_t v1 = v >> 32;
+
+ uint64_t w0 = u0 * v0;
+ int64_t t = u1 * v0 + (w0 >> 32);
+ int64_t w1 = t & 0xFFFFFFFF;
+ int64_t w2 = t >> 32;
+ w1 = u0 * v1 + w1;
+
+ return u1 * v1 + w2 + (w1 >> 32);
+ }
+
+ // The algorithm used is described in section 8.2 of
+ // Hacker's Delight, by Henry S. Warren, Jr.
+ uint64_t UnsignedMulHigh64(uint64_t u, uint64_t v) {
+ uint64_t u0 = u & 0xFFFFFFFF;
+ uint64_t u1 = u >> 32;
+ uint64_t v0 = v & 0xFFFFFFFF;
+ uint64_t v1 = v >> 32;
+
+ uint64_t w0 = u0 * v0;
+ uint64_t t = u1 * v0 + (w0 >> 32);
+ uint64_t w1 = t & 0xFFFFFFFFLL;
+ uint64_t w2 = t >> 32;
+ w1 = u0 * v1 + w1;
+
+ return u1 * v1 + w2 + (w1 >> 32);
+ }
+
+ uint32_t UnsignedMulHigh32(uint32_t lhs, uint32_t rhs) {
+ uint64_t const value =
+ static_cast<uint64_t>(lhs) * static_cast<uint64_t>(rhs);
+ return static_cast<uint32_t>(value >> 32u);
+ }
+
int32_t SignedMulHighAndAdd32(int32_t lhs, int32_t rhs, int32_t acc) {
return base::bit_cast<int32_t>(
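The Hacker's Delight section 8.2 routine splits each 64-bit operand into 32-bit halves, forms the four partial products, and keeps only the carries that reach the upper 64 bits of the 128-bit product. A self-contained mirror with easily checked values (the function name MulHigh64 is illustrative; it copies the algorithm added above):

  #include <cassert>
  #include <cstdint>

  int64_t MulHigh64(int64_t u, int64_t v) {
    uint64_t u0 = u & 0xFFFFFFFF;      // low half, unsigned
    int64_t u1 = u >> 32;              // high half, arithmetic shift keeps sign
    uint64_t v0 = v & 0xFFFFFFFF;
    int64_t v1 = v >> 32;
    uint64_t w0 = u0 * v0;             // low x low
    int64_t t = u1 * v0 + (w0 >> 32);  // high x low, plus carry out of w0
    int64_t w1 = t & 0xFFFFFFFF;
    int64_t w2 = t >> 32;
    w1 = u0 * v1 + w1;                 // low x high, plus partial sum
    return u1 * v1 + w2 + (w1 >> 32);  // high x high, plus remaining carries
  }

  int main() {
    // (2^40)^2 = 2^80, whose upper 64 bits are 2^16.
    assert(MulHigh64(int64_t{1} << 40, int64_t{1} << 40) == 65536);
    // A negative operand propagates the sign into the high word.
    assert(MulHigh64(-(int64_t{1} << 40), int64_t{1} << 40) == -65536);
    return 0;
  }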
@@ -66,12 +106,22 @@ int32_t SignedDiv32(int32_t lhs, int32_t rhs) {
return lhs / rhs;
}

+ int64_t SignedDiv64(int64_t lhs, int64_t rhs) {
+ if (rhs == 0) return 0;
+ if (rhs == -1) return lhs == std::numeric_limits<int64_t>::min() ? lhs : -lhs;
+ return lhs / rhs;
+ }
+
int32_t SignedMod32(int32_t lhs, int32_t rhs) {
if (rhs == 0 || rhs == -1) return 0;
return lhs % rhs;
}

+ int64_t SignedMod64(int64_t lhs, int64_t rhs) {
+ if (rhs == 0 || rhs == -1) return 0;
+ return lhs % rhs;
+ }
+
int64_t SignedSaturatedAdd64(int64_t lhs, int64_t rhs) {
using limits = std::numeric_limits<int64_t>;
// Underflow if {lhs + rhs < min}. In that case, return {min}.
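Like their 32-bit counterparts, the new 64-bit helpers define away the two cases that are undefined behaviour in C++ (and trap on most hardware): division by zero returns 0, and INT64_MIN / -1 returns INT64_MIN while the corresponding modulus returns 0. A small standalone illustration (the helper names are illustrative copies of the functions added above):

  #include <cassert>
  #include <cstdint>
  #include <limits>

  int64_t Div64(int64_t lhs, int64_t rhs) {
    if (rhs == 0) return 0;
    if (rhs == -1) {
      return lhs == std::numeric_limits<int64_t>::min() ? lhs : -lhs;
    }
    return lhs / rhs;
  }

  int64_t Mod64(int64_t lhs, int64_t rhs) {
    if (rhs == 0 || rhs == -1) return 0;  // both UB-prone cases handled here
    return lhs % rhs;
  }

  int main() {
    assert(Div64(7, 0) == 0);  // division by zero is defined to be 0
    assert(Div64(std::numeric_limits<int64_t>::min(), -1) ==
           std::numeric_limits<int64_t>::min());  // would otherwise overflow
    assert(Mod64(-7, 3) == -1);  // sign of % follows the dividend
    return 0;
  }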
@@ -70,6 +70,30 @@ T ReverseBits(T value) {
return result;
}

+ // ReverseBytes(value) returns |value| in reverse byte order.
+ template <typename T>
+ T ReverseBytes(T value) {
+ static_assert((sizeof(value) == 1) || (sizeof(value) == 2) ||
+ (sizeof(value) == 4) || (sizeof(value) == 8));
+ T result = 0;
+ for (unsigned i = 0; i < sizeof(value); i++) {
+ result = (result << 8) | (value & 0xff);
+ value >>= 8;
+ }
+ return result;
+ }
+
+ template <class T>
+ inline constexpr std::make_unsigned_t<T> Unsigned(T value) {
+ static_assert(std::is_signed_v<T>);
+ return static_cast<std::make_unsigned_t<T>>(value);
+ }
+ template <class T>
+ inline constexpr std::make_signed_t<T> Signed(T value) {
+ static_assert(std::is_unsigned_v<T>);
+ return static_cast<std::make_signed_t<T>>(value);
+ }
+
// CountLeadingZeros(value) returns the number of zero bits following the most
// significant 1 bit in |value| if |value| is non-zero, otherwise it returns
// {sizeof(T) * 8}.
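ReverseBytes is a portable byte swap: each loop iteration shifts the lowest byte of value into the low end of result, so after sizeof(T) iterations the byte order is reversed; Unsigned/Signed are constexpr sign-changing casts used by helpers such as CountLeadingSignBits below. Worked example (assuming the templates added above are visible):

  // 0x12345678 byte-swapped is 0x78563412.
  assert(ReverseBytes<uint32_t>(0x12345678u) == 0x78563412u);
  // Unsigned()/Signed() only change the type, never the bit pattern.
  static_assert(Unsigned(int32_t{-1}) == 0xFFFFFFFFu);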
@@ -104,6 +128,15 @@ inline constexpr unsigned CountLeadingZeros64(uint64_t value) {
return CountLeadingZeros(value);
}

+ // The number of leading zeros for a positive number,
+ // the number of leading ones for a negative number.
+ template <class T>
+ constexpr unsigned CountLeadingSignBits(T value) {
+ static_assert(std::is_signed_v<T>);
+ return value < 0 ? CountLeadingZeros(~Unsigned(value))
+ : CountLeadingZeros(Unsigned(value));
+ }
+
// CountTrailingZeros(value) returns the number of zero bits preceding the
// least significant 1 bit in |value| if |value| is non-zero, otherwise it
// returns {sizeof(T) * 8}.
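CountLeadingSignBits complements negative inputs before counting, so for a negative value it counts the leading one bits and for a non-negative value the leading zero bits. Worked values for int32_t arguments, assuming the helper added above:

  assert(CountLeadingSignBits(int32_t{0}) == 32);    // all bits are zero
  assert(CountLeadingSignBits(int32_t{-1}) == 32);   // all bits are one
  assert(CountLeadingSignBits(int32_t{1}) == 31);    // 31 leading zeros
  assert(CountLeadingSignBits(int32_t{-2}) == 31);   // 31 leading ones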
@@ -277,9 +310,13 @@ inline bool SignedMulOverflow32(int32_t lhs, int32_t rhs, int32_t* val) {
// |rhs| and stores the result into the variable pointed to by |val| and
// returns true if the signed summation resulted in an overflow.
inline bool SignedAddOverflow64(int64_t lhs, int64_t rhs, int64_t* val) {
+ #if V8_HAS_BUILTIN_ADD_OVERFLOW
+ return __builtin_add_overflow(lhs, rhs, val);
+ #else
uint64_t res = static_cast<uint64_t>(lhs) + static_cast<uint64_t>(rhs);
*val = base::bit_cast<int64_t>(res);
return ((res ^ lhs) & (res ^ rhs) & (1ULL << 63)) != 0;
+ #endif
}


@@ -287,9 +324,34 @@ inline bool SignedAddOverflow64(int64_t lhs, int64_t rhs, int64_t* val) {
// |rhs| and stores the result into the variable pointed to by |val| and
// returns true if the signed subtraction resulted in an overflow.
inline bool SignedSubOverflow64(int64_t lhs, int64_t rhs, int64_t* val) {
+ #if V8_HAS_BUILTIN_SUB_OVERFLOW
+ return __builtin_sub_overflow(lhs, rhs, val);
+ #else
uint64_t res = static_cast<uint64_t>(lhs) - static_cast<uint64_t>(rhs);
*val = base::bit_cast<int64_t>(res);
return ((res ^ lhs) & (res ^ ~rhs) & (1ULL << 63)) != 0;
+ #endif
+ }
+
+ // SignedMulOverflow64(lhs,rhs,val) performs a signed multiplication of |lhs|
+ // and |rhs| and stores the result into the variable pointed to by |val| and
+ // returns true if the signed multiplication resulted in an overflow.
+ inline bool SignedMulOverflow64(int64_t lhs, int64_t rhs, int64_t* val) {
+ #if V8_HAS_BUILTIN_MUL_OVERFLOW
+ return __builtin_mul_overflow(lhs, rhs, val);
+ #else
+ int64_t res = base::bit_cast<int64_t>(static_cast<uint64_t>(lhs) *
+ static_cast<uint64_t>(rhs));
+ *val = res;
+
+ // Check for INT64_MIN / -1 as it's undefined behaviour and could cause
+ // hardware exceptions.
+ if ((res == INT64_MIN && lhs == -1)) {
+ return true;
+ }
+
+ return lhs != 0 && (res / lhs) != rhs;
+ #endif
}

// SignedMulHigh32(lhs, rhs) multiplies two signed 32-bit values |lhs| and
|
// SignedMulHigh32(lhs, rhs) multiplies two signed 32-bit values |lhs| and
|
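For context, a minimal usage sketch of the 64-bit overflow helpers added above (the caller below is illustrative, not part of this commit; it assumes the usual v8::base::bits namespace from src/base/bits.h):

#include <cstdint>
#include <limits>
#include "src/base/bits.h"

// Multiplies two counts and saturates instead of wrapping on overflow.
int64_t ClampedMul(int64_t lhs, int64_t rhs) {
  int64_t product;
  if (v8::base::bits::SignedMulOverflow64(lhs, rhs, &product)) {
    return std::numeric_limits<int64_t>::max();  // overflow detected
  }
  return product;  // exact result
}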
@@ -297,6 +359,21 @@ inline bool SignedSubOverflow64(int64_t lhs, int64_t rhs, int64_t* val) {
 // those.
 V8_BASE_EXPORT int32_t SignedMulHigh32(int32_t lhs, int32_t rhs);
 
+// UnsignedMulHigh32(lhs, rhs) multiplies two unsigned 32-bit values |lhs| and
+// |rhs|, extracts the most significant 32 bits of the result, and returns
+// those.
+V8_BASE_EXPORT uint32_t UnsignedMulHigh32(uint32_t lhs, uint32_t rhs);
+
+// SignedMulHigh64(lhs, rhs) multiplies two signed 64-bit values |lhs| and
+// |rhs|, extracts the most significant 64 bits of the result, and returns
+// those.
+V8_BASE_EXPORT int64_t SignedMulHigh64(int64_t lhs, int64_t rhs);
+
+// UnsignedMulHigh64(lhs, rhs) multiplies two unsigned 64-bit values |lhs| and
+// |rhs|, extracts the most significant 64 bits of the result, and returns
+// those.
+V8_BASE_EXPORT uint64_t UnsignedMulHigh64(uint64_t lhs, uint64_t rhs);
+
 // SignedMulHighAndAdd32(lhs, rhs, acc) multiplies two signed 32-bit values
 // |lhs| and |rhs|, extracts the most significant 32 bits of the result, and
 // adds the accumulate value |acc|.
@@ -308,11 +385,21 @@ V8_BASE_EXPORT int32_t SignedMulHighAndAdd32(int32_t lhs, int32_t rhs,
 // is minint and |rhs| is -1, it returns minint.
 V8_BASE_EXPORT int32_t SignedDiv32(int32_t lhs, int32_t rhs);
 
+// SignedDiv64(lhs, rhs) divides |lhs| by |rhs| and returns the quotient
+// truncated to int64. If |rhs| is zero, then zero is returned. If |lhs|
+// is minint and |rhs| is -1, it returns minint.
+V8_BASE_EXPORT int64_t SignedDiv64(int64_t lhs, int64_t rhs);
+
 // SignedMod32(lhs, rhs) divides |lhs| by |rhs| and returns the remainder
 // truncated to int32. If either |rhs| is zero or |lhs| is minint and |rhs|
 // is -1, it returns zero.
 V8_BASE_EXPORT int32_t SignedMod32(int32_t lhs, int32_t rhs);
 
+// SignedMod64(lhs, rhs) divides |lhs| by |rhs| and returns the remainder
+// truncated to int64. If either |rhs| is zero or |lhs| is minint and |rhs|
+// is -1, it returns zero.
+V8_BASE_EXPORT int64_t SignedMod64(int64_t lhs, int64_t rhs);
+
 // UnsignedAddOverflow32(lhs,rhs,val) performs an unsigned summation of |lhs|
 // and |rhs| and stores the result into the variable pointed to by |val| and
 // returns true if the unsigned summation resulted in an overflow.
@@ -332,6 +419,11 @@ inline uint32_t UnsignedDiv32(uint32_t lhs, uint32_t rhs) {
   return rhs ? lhs / rhs : 0u;
 }
 
+// UnsignedDiv64(lhs, rhs) divides |lhs| by |rhs| and returns the quotient
+// truncated to uint64. If |rhs| is zero, then zero is returned.
+inline uint64_t UnsignedDiv64(uint64_t lhs, uint64_t rhs) {
+  return rhs ? lhs / rhs : 0u;
+}
+
 // UnsignedMod32(lhs, rhs) divides |lhs| by |rhs| and returns the remainder
 // truncated to uint32. If |rhs| is zero, then zero is returned.
@@ -339,6 +431,12 @@ inline uint32_t UnsignedMod32(uint32_t lhs, uint32_t rhs) {
   return rhs ? lhs % rhs : 0u;
 }
 
+// UnsignedMod64(lhs, rhs) divides |lhs| by |rhs| and returns the remainder
+// truncated to uint64. If |rhs| is zero, then zero is returned.
+inline uint64_t UnsignedMod64(uint64_t lhs, uint64_t rhs) {
+  return rhs ? lhs % rhs : 0u;
+}
+
 // Wraparound integer arithmetic without undefined behavior.
 
 inline int32_t WraparoundAdd32(int32_t lhs, int32_t rhs) {
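For context, a short illustrative snippet of the zero-divisor convention shared by the new 64-bit division helpers (hypothetical caller, not from this commit):

#include <cstdint>
#include "src/base/bits.h"

// Both helpers return 0 rather than trapping when the divisor is 0, and
// SignedDiv64 returns minint for minint / -1, mirroring the 32-bit versions.
int64_t SafePerSample(int64_t total, int64_t samples) {
  return v8::base::bits::SignedDiv64(total, samples);  // 0 if samples == 0
}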
@@ -99,9 +99,9 @@
 // Disabled on MSVC because constructors of standard containers are not noexcept
 // there.
 #if ((!defined(V8_CC_GNU) && !defined(V8_CC_MSVC) &&                          \
-      !defined(V8_TARGET_ARCH_MIPS) && !defined(V8_TARGET_ARCH_MIPS64) &&     \
-      !defined(V8_TARGET_ARCH_PPC) && !defined(V8_TARGET_ARCH_PPC64) &&       \
-      !defined(V8_TARGET_ARCH_RISCV64) && !defined(V8_TARGET_ARCH_RISCV32)) || \
+      !defined(V8_TARGET_ARCH_MIPS64) && !defined(V8_TARGET_ARCH_PPC) &&      \
+      !defined(V8_TARGET_ARCH_PPC64) && !defined(V8_TARGET_ARCH_RISCV64) &&   \
+      !defined(V8_TARGET_ARCH_RISCV32)) ||                                    \
      (defined(__clang__) && __cplusplus > 201300L))
 #define V8_NOEXCEPT noexcept
 #else
@@ -135,4 +135,15 @@
 #define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment)))
 #endif
 
+// Forces the linker to not GC the section corresponding to the symbol.
+#if defined(__has_attribute)
+#if __has_attribute(used) && __has_attribute(retain)
+#define V8_DONT_STRIP_SYMBOL __attribute__((used, retain))
+#endif  // __has_attribute(used) && __has_attribute(retain)
+#endif  // defined(__has_attribute)
+
+#if !defined(V8_DONT_STRIP_SYMBOL)
+#define V8_DONT_STRIP_SYMBOL
+#endif  // !defined(V8_DONT_STRIP_SYMBOL)
+
 #endif  // V8_BASE_COMPILER_SPECIFIC_H_
@@ -6,23 +6,25 @@
 #define V8_BASE_CONTAINER_UTILS_H_
 
 #include <algorithm>
+#include <iterator>
 #include <optional>
+#include <vector>
 
 namespace v8::base {
 
 // Returns true iff the {element} is found in the {container}.
 template <typename C, typename T>
 bool contains(const C& container, const T& element) {
-  const auto e = end(container);
-  return std::find(begin(container), e, element) != e;
+  const auto e = std::end(container);
+  return std::find(std::begin(container), e, element) != e;
 }
 
 // Returns the first index of {element} in {container}. Returns std::nullopt if
 // {container} does not contain {element}.
 template <typename C, typename T>
 std::optional<size_t> index_of(const C& container, const T& element) {
-  const auto b = begin(container);
-  const auto e = end(container);
+  const auto b = std::begin(container);
+  const auto e = std::end(container);
   if (auto it = std::find(b, e, element); it != e) {
     return {std::distance(b, it)};
   }
@@ -33,8 +35,8 @@ std::optional<size_t> index_of(const C& container, const T& element) {
 // {predicate}. Returns std::nullopt if no element satisfies {predicate}.
 template <typename C, typename P>
 std::optional<size_t> index_of_if(const C& container, const P& predicate) {
-  const auto b = begin(container);
-  const auto e = end(container);
+  const auto b = std::begin(container);
+  const auto e = std::end(container);
   if (auto it = std::find_if(b, e, predicate); it != e) {
     return {std::distance(b, it)};
   }
@@ -47,9 +49,9 @@ std::optional<size_t> index_of_if(const C& container, const P& predicate) {
 template <typename C>
 inline size_t erase_at(C& container, size_t index, size_t count = 1) {
   // TODO(C++20): Replace with std::erase.
-  if (size(container) <= index) return 0;
-  auto start = begin(container) + index;
-  count = std::min<size_t>(count, std::distance(start, end(container)));
+  if (std::size(container) <= index) return 0;
+  auto start = std::begin(container) + index;
+  count = std::min<size_t>(count, std::distance(start, std::end(container)));
   container.erase(start, start + count);
   return count;
 }
@@ -59,34 +61,66 @@ inline size_t erase_at(C& container, size_t index, size_t count = 1) {
 // TODO(C++20): Replace with std::erase_if.
 template <typename C, typename P>
 inline size_t erase_if(C& container, const P& predicate) {
-  size_t count = 0;
-  auto e = end(container);
-  for (auto it = begin(container); it != e;) {
-    it = std::find_if(it, e, predicate);
-    if (it == e) break;
-    it = container.erase(it);
-    e = end(container);
-    ++count;
-  }
+  auto it =
+      std::remove_if(std::begin(container), std::end(container), predicate);
+  auto count = std::distance(it, std::end(container));
+  container.erase(it, std::end(container));
   return count;
 }
 
 // Helper for std::count_if.
 template <typename C, typename P>
 inline size_t count_if(const C& container, const P& predicate) {
-  return std::count_if(begin(container), end(container), predicate);
+  return std::count_if(std::begin(container), std::end(container), predicate);
+}
+
+// Helper for std::all_of.
+template <typename C, typename P>
+inline bool all_of(const C& container, const P& predicate) {
+  return std::all_of(std::begin(container), std::end(container), predicate);
+}
+
+// Helper for std::none_of.
+template <typename C, typename P>
+inline bool none_of(const C& container, const P& predicate) {
+  return std::none_of(std::begin(container), std::end(container), predicate);
+}
+
+// Helper for std::sort.
+template <typename C>
+inline void sort(C& container) {
+  std::sort(std::begin(container), std::end(container));
+}
+template <typename C, typename Comp>
+inline void sort(C& container, Comp comp) {
+  std::sort(std::begin(container), std::end(container), comp);
 }
 
 // Returns true iff all elements of {container} compare equal using operator==.
 template <typename C>
 inline bool all_equal(const C& container) {
-  if (size(container) <= 1) return true;
-  auto b = begin(container);
+  if (std::size(container) <= 1) return true;
+  auto b = std::begin(container);
   const auto& value = *b;
-  return std::all_of(++b, end(container),
+  return std::all_of(++b, std::end(container),
                      [&](const auto& v) { return v == value; });
 }
 
+// Returns true iff all elements of {container} compare equal to {value} using
+// operator==.
+template <typename C, typename T>
+inline bool all_equal(const C& container, const T& value) {
+  return std::all_of(std::begin(container), std::end(container),
+                     [&](const auto& v) { return v == value; });
+}
+
+// Appends to vector {v} all the elements in the range {std::begin(container)}
+// and {std::end(container)}.
+template <typename V, typename C>
+inline void vector_append(V& v, const C& container) {
+  v.insert(std::end(v), std::begin(container), std::end(container));
+}
+
 }  // namespace v8::base
 
 #endif  // V8_BASE_CONTAINER_UTILS_H_
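For context, a hedged sketch of how the new container-utils helpers compose (illustrative caller; the data is made up):

#include <vector>
#include "src/base/container-utils.h"

void ContainerUtilsExample() {
  std::vector<int> xs = {3, 1, 4, 1, 5};
  v8::base::sort(xs);                                                // 1 1 3 4 5
  bool all_small = v8::base::all_of(xs, [](int v) { return v < 10; });   // true
  bool none_neg = v8::base::none_of(xs, [](int v) { return v < 0; });    // true
  std::vector<int> ys;
  v8::base::vector_append(ys, xs);                                   // copy xs into ys
  size_t removed = v8::base::erase_if(ys, [](int v) { return v == 1; });  // 2
  (void)all_small;
  (void)none_neg;
  (void)removed;
}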
@@ -89,8 +89,8 @@ static V8_INLINE void __cpuid(int cpu_info[4], int info_type) {
 
 #endif  // !V8_LIBC_MSVCRT
 
-#elif V8_HOST_ARCH_ARM || V8_HOST_ARCH_ARM64 || V8_HOST_ARCH_MIPS || \
-    V8_HOST_ARCH_MIPS64 || V8_HOST_ARCH_RISCV64
+#elif V8_HOST_ARCH_ARM || V8_HOST_ARCH_ARM64 || V8_HOST_ARCH_MIPS64 || \
+    V8_HOST_ARCH_RISCV64
 
 #if V8_OS_LINUX
 
@@ -198,48 +198,6 @@ static uint32_t ReadELFHWCaps() {
 
 #endif  // V8_HOST_ARCH_ARM || V8_HOST_ARCH_ARM64
 
-#if V8_HOST_ARCH_MIPS
-int __detect_fp64_mode(void) {
-  double result = 0;
-  // Bit representation of (double)1 is 0x3FF0000000000000.
-  __asm__ volatile(
-      ".set push\n\t"
-      ".set noreorder\n\t"
-      ".set oddspreg\n\t"
-      "lui $t0, 0x3FF0\n\t"
-      "ldc1 $f0, %0\n\t"
-      "mtc1 $t0, $f1\n\t"
-      "sdc1 $f0, %0\n\t"
-      ".set pop\n\t"
-      : "+m"(result)
-      :
-      : "t0", "$f0", "$f1", "memory");
-
-  return !(result == 1);
-}
-
-
-int __detect_mips_arch_revision(void) {
-  // TODO(dusmil): Do the specific syscall as soon as it is implemented in mips
-  // kernel.
-  uint32_t result = 0;
-  __asm__ volatile(
-      "move $v0, $zero\n\t"
-      // Encoding for "addi $v0, $v0, 1" on non-r6,
-      // which is encoding for "bovc $v0, %v0, 1" on r6.
-      // Use machine code directly to avoid compilation errors with different
-      // toolchains and maintain compatibility.
-      ".word 0x20420001\n\t"
-      "sw $v0, %0\n\t"
-      : "=m"(result)
-      :
-      : "v0", "memory");
-  // Result is 0 on r6 architectures, 1 on other architecture revisions.
-  // Fall-back to the least common denominator which is mips32 revision 1.
-  return result ? 1 : 6;
-}
-#endif  // V8_HOST_ARCH_MIPS
-
 // Extract the information exposed by the kernel via /proc/cpuinfo.
 class CPUInfo final {
  public:
@@ -359,7 +317,7 @@ static bool HasListItem(const char* list, const char* item) {
 #endif  // V8_OS_LINUX
 
 #endif  // V8_HOST_ARCH_ARM || V8_HOST_ARCH_ARM64 ||
-        // V8_HOST_ARCH_MIPS || V8_HOST_ARCH_MIPS64 || V8_HOST_ARCH_RISCV64
+        // V8_HOST_ARCH_MIPS64 || V8_HOST_ARCH_RISCV64
 
 #if defined(V8_OS_STARBOARD)
 
@@ -742,7 +700,7 @@ CPU::CPU()
 
 #endif  // V8_OS_LINUX
 
-#elif V8_HOST_ARCH_MIPS || V8_HOST_ARCH_MIPS64
+#elif V8_HOST_ARCH_MIPS64
 
 // Simple detection of FPU at runtime for Linux.
 // It is based on /proc/cpuinfo, which reveals hardware configuration
@@ -756,10 +714,6 @@ CPU::CPU()
   has_msa_ = HasListItem(ASEs, "msa");
   delete[] cpu_model;
   delete[] ASEs;
-#ifdef V8_HOST_ARCH_MIPS
-  is_fp64_mode_ = __detect_fp64_mode();
-  architecture_ = __detect_mips_arch_revision();
-#endif
 
 #elif V8_HOST_ARCH_ARM64
 #ifdef V8_OS_WIN
@@ -6,15 +6,16 @@
 
 #include <stdint.h>
 
+#include <type_traits>
+
 #include "src/base/logging.h"
 #include "src/base/macros.h"
 
 namespace v8 {
 namespace base {
 
-template <class T>
+template <class T, std::enable_if_t<std::is_unsigned_v<T>, bool>>
 MagicNumbersForDivision<T> SignedDivisionByConstant(T d) {
-  static_assert(static_cast<T>(0) < static_cast<T>(-1));
   DCHECK(d != static_cast<T>(-1) && d != 0 && d != 1);
   const unsigned bits = static_cast<unsigned>(sizeof(T)) * 8;
   const T min = (static_cast<T>(1) << (bits - 1));
@@ -48,11 +49,10 @@ MagicNumbersForDivision<T> SignedDivisionByConstant(T d) {
   return MagicNumbersForDivision<T>(neg ? (0 - mul) : mul, p - bits, false);
 }
 
-
 template <class T>
 MagicNumbersForDivision<T> UnsignedDivisionByConstant(T d,
                                                       unsigned leading_zeros) {
-  static_assert(static_cast<T>(0) < static_cast<T>(-1));
+  static_assert(std::is_unsigned_v<T>);
   DCHECK_NE(d, 0);
   const unsigned bits = static_cast<unsigned>(sizeof(T)) * 8;
   const T ones = ~static_cast<T>(0) >> leading_zeros;
@@ -7,6 +7,9 @@
 
 #include <stdint.h>
 
+#include <tuple>
+#include <type_traits>
+
 #include "src/base/base-export.h"
 #include "src/base/export-template.h"
 
@@ -16,10 +19,10 @@ namespace base {
 // ----------------------------------------------------------------------------
 
 // The magic numbers for division via multiplication, see Warren's "Hacker's
-// Delight", chapter 10. The template parameter must be one of the unsigned
-// integral types.
+// Delight", chapter 10.
 template <class T>
 struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision {
+  static_assert(std::is_integral_v<T>);
   MagicNumbersForDivision(T m, unsigned s, bool a)
       : multiplier(m), shift(s), add(a) {}
   bool operator==(const MagicNumbersForDivision& rhs) const {
@@ -31,13 +34,20 @@ struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision {
   bool add;
 };
 
-
 // Calculate the multiplier and shift for signed division via multiplication.
 // The divisor must not be -1, 0 or 1 when interpreted as a signed value.
-template <class T>
+template <class T, std::enable_if_t<std::is_unsigned_v<T>, bool> = true>
 EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT)
 MagicNumbersForDivision<T> SignedDivisionByConstant(T d);
 
+template <class T, std::enable_if_t<std::is_signed_v<T>, bool> = true>
+MagicNumbersForDivision<T> SignedDivisionByConstant(T d) {
+  using Unsigned = std::make_unsigned_t<T>;
+  MagicNumbersForDivision<Unsigned> magic =
+      SignedDivisionByConstant(static_cast<Unsigned>(d));
+  return {static_cast<T>(magic.multiplier), magic.shift, magic.add};
+}
+
 // Calculate the multiplier and shift for unsigned division via multiplication,
 // see Warren's "Hacker's Delight", chapter 10. The divisor must not be 0 and
 // leading_zeros can be used to speed up the calculation if the given number of
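For readers unfamiliar with the technique, a hedged illustration of how such magic numbers are typically consumed (following Hacker's Delight, chapter 10; the function below is illustrative, not V8 code, and the expected multiplier/shift for divisor 7 are stated from the textbook rather than this commit):

#include <cstdint>
#include "src/base/bits.h"
#include "src/base/division-by-constant.h"

// Signed division by the constant 7 without a divide instruction.
int32_t DivideBySeven(int32_t n) {
  auto mag = v8::base::SignedDivisionByConstant<uint32_t>(7u);  // multiplier 0x92492493, shift 2
  int32_t m = static_cast<int32_t>(mag.multiplier);
  int32_t q = v8::base::bits::SignedMulHigh32(m, n);
  if (m < 0) q += n;  // add-back because the multiplier is negative for d = 7
  q >>= mag.shift;    // arithmetic shift by the computed amount
  return q + static_cast<int32_t>(static_cast<uint32_t>(q) >> 31);  // round toward zero
}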
@@ -105,10 +105,12 @@ namespace {
 } while (false)
 
 int32_t __ieee754_rem_pio2(double x, double* y) V8_WARN_UNUSED_RESULT;
-double __kernel_cos(double x, double y) V8_WARN_UNUSED_RESULT;
 int __kernel_rem_pio2(double* x, double* y, int e0, int nx, int prec,
                       const int32_t* ipio2) V8_WARN_UNUSED_RESULT;
+#if !defined(V8_USE_LIBM_TRIG_FUNCTIONS)
+double __kernel_cos(double x, double y) V8_WARN_UNUSED_RESULT;
 double __kernel_sin(double x, double y, int iy) V8_WARN_UNUSED_RESULT;
+#endif
 
 /* __ieee754_rem_pio2(x,y)
  *
@@ -269,6 +271,7 @@ int32_t __ieee754_rem_pio2(double x, double *y) {
   return n;
 }
 
+#if !defined(V8_USE_LIBM_TRIG_FUNCTIONS)
 /* __kernel_cos( x, y )
  * kernel cos function on [-pi/4, pi/4], pi/4 ~ 0.785398164
  * Input x is assumed to be bounded by ~pi/4 in magnitude.
@@ -334,6 +337,7 @@ V8_INLINE double __kernel_cos(double x, double y) {
     return a - (iz - (z * r - x * y));
   }
 }
+#endif
 
 /* __kernel_rem_pio2(x,y,e0,nx,prec,ipio2)
  * double x[],y[]; int e0,nx,prec; int ipio2[];
@@ -643,6 +647,7 @@ recompute:
   return n & 7;
 }
 
+#if !defined(V8_USE_LIBM_TRIG_FUNCTIONS)
 /* __kernel_sin( x, y, iy)
  * kernel sin function on [-pi/4, pi/4], pi/4 ~ 0.7854
 * Input x is assumed to be bounded by ~pi/4 in magnitude.
@@ -696,6 +701,7 @@ V8_INLINE double __kernel_sin(double x, double y, int iy) {
     return x - ((z * (half * y - v * r) - y) - v * S1);
   }
 }
+#endif
 
 /* __kernel_tan( x, y, k )
 * kernel tan function on [-pi/4, pi/4], pi/4 ~ 0.7854
@@ -1318,6 +1324,7 @@ double atan2(double y, double x) {
   }
 }
 
+#if !defined(V8_USE_LIBM_TRIG_FUNCTIONS)
 /* cos(x)
 * Return cosine function of x.
 *
@@ -1377,6 +1384,7 @@ double cos(double x) {
     }
   }
 }
+#endif
 
 /* exp(x)
 * Returns the exponential of x.
@@ -2410,6 +2418,7 @@ double cbrt(double x) {
   return (t);
 }
 
+#if !defined(V8_USE_LIBM_TRIG_FUNCTIONS)
 /* sin(x)
 * Return sine function of x.
 *
@@ -2469,6 +2478,7 @@ double sin(double x) {
     }
   }
 }
+#endif
 
 /* tan(x)
 * Return tangent function of x.
@@ -3015,6 +3025,15 @@ double tanh(double x) {
 #undef SET_HIGH_WORD
 #undef SET_LOW_WORD
 
+#if defined(V8_USE_LIBM_TRIG_FUNCTIONS) && defined(BUILDING_V8_BASE_SHARED)
+double sin(double x) {
+  return glibc_sin(x);
+}
+double cos(double x) {
+  return glibc_cos(x);
+}
+#endif
+
 }  // namespace ieee754
 }  // namespace base
 }  // namespace v8
@@ -7,6 +7,10 @@
 
 #include "src/base/base-export.h"
 
+#if defined(V8_USE_LIBM_TRIG_FUNCTIONS)
+#include "third_party/glibc/src/sysdeps/ieee754/dbl-64/trig.h"  // nogncheck
+#endif
+
 namespace v8 {
 namespace base {
 namespace ieee754 {
@@ -34,7 +38,15 @@ V8_BASE_EXPORT double atan(double x);
 V8_BASE_EXPORT double atan2(double y, double x);
 
 // Returns the cosine of |x|, where |x| is given in radians.
+#if defined(V8_USE_LIBM_TRIG_FUNCTIONS) && \
+    !defined(BUILDING_V8_BASE_SHARED) &&   \
+    !defined(USING_V8_BASE_SHARED)
+inline double cos(double x) {
+  return glibc_cos(x);
+}
+#else
 V8_BASE_EXPORT double cos(double x);
+#endif
 
 // Returns the base-e exponential of |x|.
 V8_BASE_EXPORT double exp(double x);
@@ -68,8 +80,16 @@ V8_BASE_EXPORT double expm1(double x);
 // behaviour is preserved for compatibility reasons.
 V8_BASE_EXPORT double pow(double x, double y);
 
+#if defined(V8_USE_LIBM_TRIG_FUNCTIONS) && \
+    !defined(BUILDING_V8_BASE_SHARED) &&   \
+    !defined(USING_V8_BASE_SHARED)
+inline double sin(double x) {
+  return glibc_sin(x);
+}
+#else
 // Returns the sine of |x|, where |x| is given in radians.
 V8_BASE_EXPORT double sin(double x);
+#endif
 
 // Returns the tangent of |x|, where |x| is given in radians.
 V8_BASE_EXPORT double tan(double x);
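Regardless of which branch of the configuration above is active, callers keep using the same entry point; a brief illustrative sketch (not part of this commit):

#include "src/base/ieee754.h"

double Phase(double t) {
  // Resolves to the inlined glibc_sin when V8_USE_LIBM_TRIG_FUNCTIONS is set
  // and v8_base is neither built nor used as a shared library; otherwise to
  // the exported fdlibm implementation from ieee754.cc.
  return v8::base::ieee754::sin(t);
}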
@@ -5,6 +5,7 @@
 #ifndef V8_BASE_LOGGING_H_
 #define V8_BASE_LOGGING_H_
 
+#include <cstdint>
 #include <cstring>
 #include <sstream>
 #include <string>
@@ -45,8 +46,21 @@ V8_BASE_EXPORT V8_NOINLINE void V8_Dcheck(const char* file, int line,
 #endif  // !defined(OFFICIAL_BUILD)
 #endif  // DEBUG
 
-#define UNIMPLEMENTED() FATAL("unimplemented code")
-#define UNREACHABLE() FATAL("unreachable code")
+namespace v8::base {
+// These string constants are pattern-matched by fuzzers.
+constexpr const char* kUnimplementedCodeMessage = "unimplemented code";
+constexpr const char* kUnreachableCodeMessage = "unreachable code";
+}  // namespace v8::base
+
+#define UNIMPLEMENTED() FATAL(::v8::base::kUnimplementedCodeMessage)
+#define UNREACHABLE() FATAL(::v8::base::kUnreachableCodeMessage)
+// g++ versions <= 8 cannot use UNREACHABLE() in a constexpr function.
+// TODO(miladfarca): Remove once all compilers handle this properly.
+#if defined(__GNUC__) && !defined(__clang__) && (__GNUC__ <= 8)
+#define CONSTEXPR_UNREACHABLE() abort()
+#else
+#define CONSTEXPR_UNREACHABLE() UNREACHABLE()
+#endif
 
 namespace v8 {
 namespace base {
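A hedged sketch of the intended use of the new CONSTEXPR_UNREACHABLE() macro (the enum and function below are hypothetical, not V8 code):

#include "src/base/logging.h"

enum class Width { kWord32, kWord64 };

constexpr int BitsOf(Width w) {
  switch (w) {
    case Width::kWord32: return 32;
    case Width::kWord64: return 64;
  }
  // Expands to abort() on g++ <= 8 and to UNREACHABLE() elsewhere, so the
  // function can stay constexpr on all supported compilers.
  CONSTEXPR_UNREACHABLE();
}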
@@ -153,7 +153,7 @@ static bool DoubleStrtod(Vector<const char> trimmed, int exponent,
   // result is not accurate.
   // We know that Windows32 with MSVC, unlike with MinGW32, uses 64 bits and is
   // therefore accurate.
-  // Note that the ARM and MIPS simulators are compiled for 32bits. They
+  // Note that the ARM simulators are compiled for 32bits. They
   // therefore exhibit the same problem.
   USE(exact_powers_of_ten);
   USE(kMaxExactDoubleIntegerDecimalDigits);
@@ -23,9 +23,9 @@
 #include <malloc.h>
 #endif  // !V8_OS_DARWIN
 
-#if (V8_OS_POSIX && !V8_OS_AIX) || V8_OS_WIN
+#if (V8_OS_POSIX && !V8_OS_AIX && !V8_OS_SOLARIS) || V8_OS_WIN
 #define V8_HAS_MALLOC_USABLE_SIZE 1
-#endif  // (V8_OS_POSIX && !V8_OS_AIX) || V8_OS_WIN
+#endif  // (V8_OS_POSIX && !V8_OS_AIX && !V8_OS_SOLARIS) || V8_OS_WIN
 
 namespace v8::base {
 
@@ -111,6 +111,8 @@ inline void AlignedFree(void* ptr) {
 // `AllocateAtLeast()` for a safe version.
 inline size_t MallocUsableSize(void* ptr) {
 #if V8_OS_WIN
+  // |_msize| cannot handle a null pointer.
+  if (!ptr) return 0;
   return _msize(ptr);
 #elif V8_OS_DARWIN
   return malloc_size(ptr);
@@ -130,7 +132,7 @@ struct AllocationResult {
 
 // Allocates at least `n * sizeof(T)` uninitialized storage but may allocate
 // more which is indicated by the return value. Mimics C++23
-// `allocate_ate_least()`.
+// `allocate_at_least()`.
 template <typename T>
 V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
   const size_t min_wanted_size = n * sizeof(T);
@@ -140,13 +142,14 @@ V8_NODISCARD AllocationResult<T*> AllocateAtLeast(size_t n) {
 #else  // V8_HAS_MALLOC_USABLE_SIZE
   const size_t usable_size = MallocUsableSize(memory);
 #if V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
+  if (memory == nullptr)
+    return {nullptr, 0};
   // UBSan (specifically, -fsanitize=bounds) assumes that any access outside
   // of the requested size for malloc is UB and will trap in ud2 instructions.
   // This can be worked around by using `Realloc()` on the specific memory
-  // region, assuming that the allocator doesn't actually reallocate the
-  // buffer.
+  // region.
   if (usable_size != min_wanted_size) {
-    CHECK_EQ(static_cast<T*>(Realloc(memory, usable_size)), memory);
+    memory = static_cast<T*>(Realloc(memory, usable_size));
   }
 #endif  // V8_USE_UNDEFINED_BEHAVIOR_SANITIZER
   return {memory, usable_size};
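A minimal caller-side sketch of AllocateAtLeast() (illustrative; the AllocationResult field names ptr and count are assumed here, as they are not shown in this hunk):

#include "src/base/memory.h"

void AllocationExample() {
  auto block = v8::base::AllocateAtLeast<char>(1000);
  // block.count may exceed the 1000 bytes requested; treating it as the real
  // capacity is the point of the C++23 allocate_at_least-style API.
  // ... fill block.ptr[0 .. block.count) ...
  v8::base::Free(block.ptr);
}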
@@ -278,7 +278,6 @@ class V8_BASE_EXPORT SharedMutex final {
   // pthread_rwlock_t is broken on MacOS when signals are being sent to the
   // process (see https://crbug.com/v8/11399).
   // We thus use std::shared_mutex on MacOS, which does not have this problem.
-  // TODO(13256): Use std::shared_mutex directly, on all platforms.
   using NativeHandle = std::shared_mutex;
 #elif V8_OS_POSIX
   using NativeHandle = pthread_rwlock_t;
@@ -293,9 +293,7 @@ bool OS::SetPermissions(void* address, size_t size, MemoryPermission access) {
 }
 
 void OS::SetDataReadOnly(void* address, size_t size) {
-  // TODO(v8:13194): Figure out which API to use on fuchsia. {vmar.protect}
-  // fails.
-  // CHECK(OS::SetPermissions(address, size, MemoryPermission::kRead));
+  CHECK(OS::SetPermissions(address, size, MemoryPermission::kRead));
 }
 
 // static
@@ -1022,7 +1022,7 @@ void OS::SetDataReadOnly(void* address, size_t size) {
 
   unsigned long old_protection;
   CHECK(VirtualProtect(address, size, PAGE_READONLY, &old_protection));
-  CHECK_EQ(PAGE_READWRITE, old_protection);
+  CHECK(old_protection == PAGE_READWRITE || old_protection == PAGE_WRITECOPY);
 }
 
 // static
@@ -43,11 +43,6 @@
 #elif defined(V8_HOST_ARCH_ARM64) || \
     (defined(V8_HOST_ARCH_ARM) && __ARM_ARCH >= 6)
 #define YIELD_PROCESSOR __asm__ __volatile__("yield")
-#elif defined(V8_HOST_ARCH_MIPS)
-// The MIPS32 docs state that the PAUSE instruction is a no-op on older
-// architectures (first added in MIPS32r2). To avoid assembler errors when
-// targeting pre-r2, we must encode the instruction manually.
-#define YIELD_PROCESSOR __asm__ __volatile__(".word 0x00000140")
 #elif defined(V8_HOST_ARCH_MIPS64EL) && __mips_isa_rev >= 2
 // Don't bother doing using .word here since r2 is the lowest supported mips64
 // that Chromium supports.
@@ -110,6 +110,15 @@ class SmallVector {
   bool empty() const { return end_ == begin_; }
   size_t capacity() const { return end_of_storage_ - begin_; }
 
+  T& front() {
+    DCHECK_NE(0, size());
+    return begin_[0];
+  }
+  const T& front() const {
+    DCHECK_NE(0, size());
+    return begin_[0];
+  }
+
   T& back() {
     DCHECK_NE(0, size());
     return end_[-1];
@@ -146,6 +155,30 @@ class SmallVector {
     end_ -= count;
   }
 
+  T* insert(T* pos, const T& value) { return insert(pos, 1, value); }
+  T* insert(T* pos, size_t count, const T& value) {
+    DCHECK_LE(pos, end_);
+    size_t offset = pos - begin_;
+    size_t elements_to_move = end_ - pos;
+    resize_no_init(size() + count);
+    pos = begin_ + offset;
+    std::memmove(pos + count, pos, elements_to_move);
+    std::fill_n(pos, count, value);
+    return pos;
+  }
+  template <typename It>
+  T* insert(T* pos, It begin, It end) {
+    DCHECK_LE(pos, end_);
+    size_t offset = pos - begin_;
+    size_t count = std::distance(begin, end);
+    size_t elements_to_move = end_ - pos;
+    resize_no_init(size() + count);
+    pos = begin_ + offset;
+    std::memmove(pos + count, pos, elements_to_move);
+    std::copy(begin, end, pos);
+    return pos;
+  }
+
   void resize_no_init(size_t new_size) {
     // Resizing without initialization is safe if T is trivially copyable.
     ASSERT_TRIVIALLY_COPYABLE(T);
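A short usage sketch of the new SmallVector::insert overloads (illustrative; it assumes src/base/small-vector.h, an initializer-list constructor on SmallVector, and a trivially copyable element type, as required by resize_no_init):

#include "src/base/small-vector.h"

void SmallVectorExample() {
  v8::base::SmallVector<int, 8> values = {1, 2, 5};
  values.insert(values.begin() + 2, 2, 4);        // -> 1 2 4 4 5
  int extra[] = {7, 8};
  values.insert(values.end(), extra, extra + 2);  // -> 1 2 4 4 5 7 8
}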
@@ -126,5 +126,21 @@ int64_t SysInfo::AmountOfVirtualMemory() {
 #endif
 }
 
+// static
+uintptr_t SysInfo::AddressSpaceEnd() {
+#if V8_OS_WIN
+  SYSTEM_INFO info;
+  GetSystemInfo(&info);
+  uintptr_t max_address =
+      reinterpret_cast<uintptr_t>(info.lpMaximumApplicationAddress);
+  return max_address + 1;
+#else
+  // We don't query POSIX rlimits here (e.g. RLIMIT_AS) as they limit the size
+  // of memory mappings, but not the address space (e.g. even with a small
+  // RLIMIT_AS, a process can still map pages at high addresses).
+  return std::numeric_limits<uintptr_t>::max();
+#endif
+}
+
 }  // namespace base
 }  // namespace v8
@@ -24,6 +24,12 @@ class V8_BASE_EXPORT SysInfo final {
   // Returns the number of bytes of virtual memory of this process. A return
   // value of zero means that there is no limit on the available virtual memory.
   static int64_t AmountOfVirtualMemory();
+
+  // Returns the end of the virtual address space available to this process.
+  // Memory mappings at or above this address cannot be addressed by this
+  // process, so all pointer values will be below this value.
+  // If the virtual address space is not limited, this will return -1.
+  static uintptr_t AddressSpaceEnd();
 };
 
 }  // namespace base
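A brief illustrative check built on the new accessor (hypothetical helper, not from this commit):

#include <cstdint>
#include "src/base/sys-info.h"

bool IsAddressable(uintptr_t candidate) {
  // All valid pointer values in this process compare below AddressSpaceEnd().
  return candidate < v8::base::SysInfo::AddressSpaceEnd();
}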
@@ -60,6 +60,11 @@ struct has_output_operator<
     T, TStream, decltype(void(std::declval<TStream&>() << std::declval<T>()))>
     : std::true_type {};
 
+// turn std::tuple<A...> into std::tuple<A..., T>.
+template <class Tuple, class T>
+using append_tuple_type = decltype(std::tuple_cat(
+    std::declval<Tuple>(), std::declval<std::tuple<T>>()));
+
 }  // namespace base
 }  // namespace v8
 
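A compile-time illustration of the new alias (the concrete tuple types are chosen only for illustration):

#include <tuple>
#include <type_traits>
#include "src/base/template-utils.h"

using Base = std::tuple<int, double>;
using Extended = v8::base::append_tuple_type<Base, char>;
static_assert(std::is_same_v<Extended, std::tuple<int, double, char>>);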
@@ -129,6 +129,14 @@ class V8_BASE_EXPORT RandomNumberGenerator final {
 
   static uint64_t MurmurHash3(uint64_t);
 
+  // Implement the UniformRandomBitGenerator interface.
+  using result_type = unsigned;
+  result_type operator()() { return NextInt(); }
+  static constexpr result_type min() { return 0; }
+  static constexpr result_type max() {
+    return std::numeric_limits<result_type>::max();
+  }
+
  private:
   static const int64_t kMultiplier = 0x5'deec'e66d;
   static const int64_t kAddend = 0xb;
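Because the class now models UniformRandomBitGenerator, it can be handed directly to standard algorithms; a hedged sketch follows (the seeded constructor is assumed from the existing class and is not shown in this hunk):

#include <algorithm>
#include <random>
#include <vector>
#include "src/base/utils/random-number-generator.h"

void ShuffleExample() {
  v8::base::RandomNumberGenerator rng(42);
  std::vector<int> order = {0, 1, 2, 3};
  std::shuffle(order.begin(), order.end(), rng);   // rng used as a URBG
  std::uniform_int_distribution<int> dist(0, 9);
  int sample = dist(rng);
  (void)sample;
}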
@@ -142,8 +142,8 @@ class Vector {
   static Vector<T> cast(Vector<S> input) {
     // Casting is potentially dangerous, so be really restrictive here. This
     // might be lifted once we have use cases for that.
-    static_assert(std::is_pod<S>::value);
-    static_assert(std::is_pod<T>::value);
+    static_assert(std::is_trivial_v<S> && std::is_standard_layout_v<S>);
+    static_assert(std::is_trivial_v<T> && std::is_standard_layout_v<T>);
     DCHECK_EQ(0, (input.size() * sizeof(S)) % sizeof(T));
     DCHECK_EQ(0, reinterpret_cast<uintptr_t>(input.begin()) % alignof(T));
     return Vector<T>(reinterpret_cast<T*>(input.begin()),
@@ -193,22 +193,40 @@ class V8_NODISCARD ScopedVector : public Vector<T> {
 template <typename T>
 class OwnedVector {
  public:
-  MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(OwnedVector);
+  OwnedVector() = default;
+
   OwnedVector(std::unique_ptr<T[]> data, size_t length)
       : data_(std::move(data)), length_(length) {
     DCHECK_IMPLIES(length_ > 0, data_ != nullptr);
   }
 
-  // Implicit conversion from {OwnedVector<U>} to {OwnedVector<T>}, instantiable
-  // if {std::unique_ptr<U>} can be converted to {std::unique_ptr<T>}.
-  // Can be used to convert {OwnedVector<T>} to {OwnedVector<const T>}.
+  // Disallow copying.
+  OwnedVector(const OwnedVector&) = delete;
+  OwnedVector& operator=(const OwnedVector&) = delete;
+
+  // Move construction and move assignment from {OwnedVector<U>} to
+  // {OwnedVector<T>}, instantiable if {std::unique_ptr<U>} can be converted to
+  // {std::unique_ptr<T>}. Can also be used to convert {OwnedVector<T>} to
+  // {OwnedVector<const T>}.
+  // These also function as the standard move construction/assignment operator.
+  // {other} is left as an empty vector.
   template <typename U,
             typename = typename std::enable_if<std::is_convertible<
                 std::unique_ptr<U>, std::unique_ptr<T>>::value>::type>
-  OwnedVector(OwnedVector<U>&& other)
-      : data_(std::move(other.data_)), length_(other.length_) {
+  OwnedVector(OwnedVector<U>&& other) V8_NOEXCEPT {
+    *this = std::move(other);
+  }
+
+  template <typename U,
+            typename = typename std::enable_if<std::is_convertible<
+                std::unique_ptr<U>, std::unique_ptr<T>>::value>::type>
+  OwnedVector& operator=(OwnedVector<U>&& other) V8_NOEXCEPT {
     static_assert(sizeof(U) == sizeof(T));
+    data_ = std::move(other.data_);
+    length_ = other.length_;
+    DCHECK_NULL(other.data_);
     other.length_ = 0;
+    return *this;
   }
 
   // Returns the length of the vector as a size_t.
@@ -217,14 +235,12 @@ class OwnedVector {
   // Returns whether or not the vector is empty.
   constexpr bool empty() const { return length_ == 0; }
 
-  // Returns the pointer to the start of the data in the vector.
-  T* start() const {
+  constexpr T* begin() const {
     DCHECK_IMPLIES(length_ > 0, data_ != nullptr);
     return data_.get();
   }
-
-  constexpr T* begin() const { return start(); }
-  constexpr T* end() const { return start() + size(); }
+  constexpr T* end() const { return begin() + length_; }
 
   // Access individual vector elements - checks bounds in debug mode.
   T& operator[](size_t index) const {
@@ -233,7 +249,7 @@ class OwnedVector {
   }
 
   // Returns a {Vector<T>} view of the data in this vector.
-  Vector<T> as_vector() const { return Vector<T>(start(), size()); }
+  Vector<T> as_vector() const { return {begin(), size()}; }
 
   // Releases the backing data from this vector and transfers ownership to the
   // caller. This vector will be empty afterwards.
@@ -269,7 +285,7 @@ class OwnedVector {
     using non_const_t = typename std::remove_const<T>::type;
     auto vec =
         OwnedVector<non_const_t>::NewForOverwrite(std::distance(begin, end));
-    std::copy(begin, end, vec.start());
+    std::copy(begin, end, vec.begin());
    return vec;
   }
 
@@ -39,6 +39,16 @@ VLQEncodeUnsigned(Function&& process_byte, uint32_t value) {
   } while (value > kDataMask);
 }
 
+inline uint32_t VLQConvertToUnsigned(int32_t value) {
+  // This wouldn't handle kMinInt correctly if it ever encountered it.
+  DCHECK_NE(value, std::numeric_limits<int32_t>::min());
+  bool is_negative = value < 0;
+  // Encode sign in least significant bit.
+  uint32_t bits = static_cast<uint32_t>((is_negative ? -value : value) << 1) |
+                  static_cast<uint32_t>(is_negative);
+  return bits;
+}
+
 // Encodes value using variable-length encoding and stores it using the passed
 // process_byte function.
 template <typename Function>
@@ -46,12 +56,7 @@ inline typename std::enable_if<
     std::is_same<decltype(std::declval<Function>()(0)), byte*>::value,
     void>::type
 VLQEncode(Function&& process_byte, int32_t value) {
-  // This wouldn't handle kMinInt correctly if it ever encountered it.
-  DCHECK_NE(value, std::numeric_limits<int32_t>::min());
-  bool is_negative = value < 0;
-  // Encode sign in least significant bit.
-  uint32_t bits = static_cast<uint32_t>((is_negative ? -value : value) << 1) |
-                  static_cast<uint32_t>(is_negative);
+  uint32_t bits = VLQConvertToUnsigned(value);
   VLQEncodeUnsigned(std::forward<Function>(process_byte), bits);
 }
 
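A worked illustration of the sign-in-LSB mapping now factored out into VLQConvertToUnsigned (the sample values are chosen only for illustration):

// value  ->  encoded unsigned
//   +3   ->  (3 << 1) | 0  ==  6
//   -3   ->  (3 << 1) | 1  ==  7
//    0   ->  0
// Small magnitudes of either sign stay below kDataMask (0x7f) and therefore
// encode as a single VLQ byte.
static_assert(((3u << 1) | 0u) == 6u);
static_assert(((3u << 1) | 1u) == 7u);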
@@ -610,7 +610,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
   assembler_->masm()->cmp(reg, kInterpreterAccumulatorRegister);
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -677,7 +677,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
   assembler_->masm()->CmpTagged(reg, kInterpreterAccumulatorRegister);
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -36,8 +36,6 @@
 #include "src/baseline/riscv/baseline-assembler-riscv-inl.h"
 #elif V8_TARGET_ARCH_MIPS64
 #include "src/baseline/mips64/baseline-assembler-mips64-inl.h"
-#elif V8_TARGET_ARCH_MIPS
-#include "src/baseline/mips/baseline-assembler-mips-inl.h"
 #elif V8_TARGET_ARCH_LOONG64
 #include "src/baseline/loong64/baseline-assembler-loong64-inl.h"
 #else
@@ -53,8 +53,6 @@
 #include "src/baseline/riscv/baseline-compiler-riscv-inl.h"
 #elif V8_TARGET_ARCH_MIPS64
 #include "src/baseline/mips64/baseline-compiler-mips64-inl.h"
-#elif V8_TARGET_ARCH_MIPS
-#include "src/baseline/mips/baseline-compiler-mips-inl.h"
 #elif V8_TARGET_ARCH_LOONG64
 #include "src/baseline/loong64/baseline-compiler-loong64-inl.h"
 #else
|
||||||
@@ -969,14 +967,6 @@ void BaselineCompiler::VisitDefineKeyedOwnPropertyInLiteral() {
               IndexAsTagged(3));  // slot
 }
 
-void BaselineCompiler::VisitCollectTypeProfile() {
-  SaveAccumulatorScope accumulator_scope(&basm_);
-  CallRuntime(Runtime::kCollectTypeProfile,
-              IntAsSmi(0),                      // position
-              kInterpreterAccumulatorRegister,  // value
-              FeedbackVector());                // feedback vector
-}
-
 void BaselineCompiler::VisitAdd() {
   CallBuiltin<Builtin::kAdd_Baseline>(
       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
@@ -1160,10 +1150,11 @@ void BaselineCompiler::VisitGetSuperConstructor() {
   StoreRegister(0, prototype);
 }
 
-void BaselineCompiler::VisitFindNonDefaultConstructor() {
-  CallBuiltin<Builtin::kFindNonDefaultConstructor>(RegisterOperand(0),
-                                                   RegisterOperand(1));
-  StoreRegister(2, kReturnRegister1);
+void BaselineCompiler::VisitFindNonDefaultConstructorOrConstruct() {
+  SaveAccumulatorScope accumulator_scope(&basm_);
+  CallBuiltin<Builtin::kFindNonDefaultConstructorOrConstruct>(
+      RegisterOperand(0), RegisterOperand(1));
+  StoreRegisterPair(2, kReturnRegister0, kReturnRegister1);
 }
 
 namespace {
@@ -1422,9 +1413,9 @@ void BaselineCompiler::VisitIntrinsicAsyncGeneratorResolve(
   CallBuiltin<Builtin::kAsyncGeneratorResolve>(args);
 }
 
-void BaselineCompiler::VisitIntrinsicAsyncGeneratorYield(
+void BaselineCompiler::VisitIntrinsicAsyncGeneratorYieldWithAwait(
     interpreter::RegisterList args) {
-  CallBuiltin<Builtin::kAsyncGeneratorYield>(args);
+  CallBuiltin<Builtin::kAsyncGeneratorYieldWithAwait>(args);
 }
 
 void BaselineCompiler::VisitConstruct() {
@@ -577,7 +577,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
   assembler_->masm()->cmp(reg, kInterpreterAccumulatorRegister);
-  assembler_->masm()->Assert(equal, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(equal, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -569,7 +569,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered, reg,
                              Operand(kInterpreterAccumulatorRegister));
 }
 
@@ -1,573 +0,0 @@
-// Copyright 2021 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
-#define V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
-
-#include "src/baseline/baseline-assembler.h"
-#include "src/codegen/interface-descriptors.h"
-#include "src/codegen/mips/assembler-mips-inl.h"
-#include "src/objects/literal-objects-inl.h"
-
-namespace v8 {
-namespace internal {
-namespace baseline {
-
-class BaselineAssembler::ScratchRegisterScope {
- public:
-  explicit ScratchRegisterScope(BaselineAssembler* assembler)
-      : assembler_(assembler),
-        prev_scope_(assembler->scratch_register_scope_),
-        wrapped_scope_(assembler->masm()) {
-    if (!assembler_->scratch_register_scope_) {
-      // If we haven't opened a scratch scope yet, for the first one add a
-      // couple of extra registers.
-      wrapped_scope_.Include({t4, t5, t6, t7});
-    }
-    assembler_->scratch_register_scope_ = this;
-  }
-  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }
-
-  Register AcquireScratch() { return wrapped_scope_.Acquire(); }
-
- private:
-  BaselineAssembler* assembler_;
-  ScratchRegisterScope* prev_scope_;
-  UseScratchRegisterScope wrapped_scope_;
-};
-
-enum class Condition : uint32_t {
-  kEqual = eq,
-  kNotEqual = ne,
-
-  kLessThan = lt,
-  kGreaterThan = gt,
-  kLessThanEqual = le,
-  kGreaterThanEqual = ge,
-
-  kUnsignedLessThan = Uless,
-  kUnsignedGreaterThan = Ugreater,
-  kUnsignedLessThanEqual = Uless_equal,
-  kUnsignedGreaterThanEqual = Ugreater_equal,
-
-  kOverflow = overflow,
-  kNoOverflow = no_overflow,
-
-  kZero = eq,
-  kNotZero = ne,
-};
-
-inline internal::Condition AsMasmCondition(Condition cond) {
-  // This is important for arm, where the internal::Condition where each value
-  // represents an encoded bit field value.
-  static_assert(sizeof(internal::Condition) == sizeof(Condition));
-  return static_cast<internal::Condition>(cond);
-}
-
-namespace detail {
-
-#ifdef DEBUG
-inline bool Clobbers(Register target, MemOperand op) {
-  return op.is_reg() && op.rm() == target;
-}
-#endif
-
-}  // namespace detail
-
-#define __ masm_->
-
-MemOperand BaselineAssembler::RegisterFrameOperand(
-    interpreter::Register interpreter_register) {
-  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
-}
-void BaselineAssembler::RegisterFrameAddress(
-    interpreter::Register interpreter_register, Register rscratch) {
-  return __ Addu(rscratch, fp,
-                 interpreter_register.ToOperand() * kSystemPointerSize);
-}
-MemOperand BaselineAssembler::FeedbackVectorOperand() {
-  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
-}
-
-void BaselineAssembler::Bind(Label* label) { __ bind(label); }
-
-void BaselineAssembler::JumpTarget() {
-  // NOP.
-}
-void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
-  __ Branch(target);
-}
-void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
-                                   Label* target, Label::Distance) {
-  __ JumpIfRoot(value, index, target);
-}
-void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
-                                      Label* target, Label::Distance) {
-  __ JumpIfNotRoot(value, index, target);
-}
-void BaselineAssembler::JumpIfSmi(Register value, Label* target,
-                                  Label::Distance) {
-  __ JumpIfSmi(value, target);
-}
-void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
-                                     Label::Distance) {
-  __ JumpIfNotSmi(value, target);
-}
-void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
-                                        Label* target,
-                                        Label::Distance distance) {
-  JumpIf(cc, left, Operand(right), target, distance);
-}
-
-void BaselineAssembler::CallBuiltin(Builtin builtin) {
-  ASM_CODE_COMMENT_STRING(masm_,
-                          __ CommentForOffHeapTrampoline("call", builtin));
-  Register temp = t9;
-  __ LoadEntryFromBuiltin(builtin, temp);
-  __ Call(temp);
-}
-
-void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
-  ASM_CODE_COMMENT_STRING(masm_,
-                          __ CommentForOffHeapTrampoline("tail call", builtin));
-  Register temp = t9;
-  __ LoadEntryFromBuiltin(builtin, temp);
-  __ Jump(temp);
-}
-
-void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
-                                      Label* target, Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ And(scratch, value, Operand(mask));
-  __ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
-}
-
-void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
-                               Label* target, Label::Distance) {
-  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
-}
-void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
-                                         InstanceType instance_type,
-                                         Register map, Label* target,
-                                         Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register type = temps.AcquireScratch();
-  __ GetObjectType(object, map, type);
-  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
-}
-void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
-                                           InstanceType instance_type,
-                                           Label* target, Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register type = temps.AcquireScratch();
-  if (v8_flags.debug_code) {
-    __ AssertNotSmi(map);
-    __ GetObjectType(map, type, type);
-    __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
-  }
-  __ Lw(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
-  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
-}
-void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
-                                      MemOperand operand, Label* target,
-                                      Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ Lw(scratch, operand);
-  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
-}
-void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
-                                  Label* target, Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ li(scratch, Operand(smi));
-  __ SmiUntag(scratch);
-  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
-}
-void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
-                                  Label* target, Label::Distance) {
-  __ AssertSmi(lhs);
-  __ AssertSmi(rhs);
-  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
-}
-void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
-                                     MemOperand operand, Label* target,
-                                     Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ Lw(scratch, operand);
-  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
-}
-void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
-                                     Register value, Label* target,
-                                     Label::Distance) {
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ Lw(scratch, operand);
-  __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
-}
-void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
-                                   Label* target, Label::Distance) {
-  __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
-}
-
-void BaselineAssembler::Move(interpreter::Register output, Register source) {
-  Move(RegisterFrameOperand(output), source);
-}
-void BaselineAssembler::Move(Register output, TaggedIndex value) {
-  __ li(output, Operand(value.ptr()));
-}
-void BaselineAssembler::Move(MemOperand output, Register source) {
-  __ Sw(source, output);
-}
-void BaselineAssembler::Move(Register output, ExternalReference reference) {
-  __ li(output, Operand(reference));
-}
-void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
-  __ li(output, Operand(value));
-}
-void BaselineAssembler::Move(Register output, int32_t value) {
-  __ li(output, Operand(value));
-}
-void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
-  __ Move(output, source);
-}
-void BaselineAssembler::MoveSmi(Register output, Register source) {
-  __ Move(output, source);
-}
-
-namespace detail {
-
-template <typename Arg>
-inline Register ToRegister(BaselineAssembler* basm,
-                           BaselineAssembler::ScratchRegisterScope* scope,
-                           Arg arg) {
-  Register reg = scope->AcquireScratch();
-  basm->Move(reg, arg);
-  return reg;
-}
-inline Register ToRegister(BaselineAssembler* basm,
-                           BaselineAssembler::ScratchRegisterScope* scope,
-                           Register reg) {
-  return reg;
-}
-
-template <typename... Args>
-struct PushAllHelper;
-template <>
-struct PushAllHelper<> {
-  static int Push(BaselineAssembler* basm) { return 0; }
-  static int PushReverse(BaselineAssembler* basm) { return 0; }
-};
-// TODO(ishell): try to pack sequence of pushes into one instruction by
-// looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
-// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
-template <typename Arg>
-struct PushAllHelper<Arg> {
-  static int Push(BaselineAssembler* basm, Arg arg) {
-    BaselineAssembler::ScratchRegisterScope scope(basm);
-    basm->masm()->Push(ToRegister(basm, &scope, arg));
-    return 1;
-  }
-  static int PushReverse(BaselineAssembler* basm, Arg arg) {
-    return Push(basm, arg);
-  }
-};
-// TODO(ishell): try to pack sequence of pushes into one instruction by
-// looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
-// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
-template <typename Arg, typename... Args>
-struct PushAllHelper<Arg, Args...> {
-  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
-    PushAllHelper<Arg>::Push(basm, arg);
-    return 1 + PushAllHelper<Args...>::Push(basm, args...);
-  }
-  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
-    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
-    PushAllHelper<Arg>::Push(basm, arg);
-    return nargs + 1;
-  }
-};
-template <>
-struct PushAllHelper<interpreter::RegisterList> {
-  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
-    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
-      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
-    }
-    return list.register_count();
-  }
-  static int PushReverse(BaselineAssembler* basm,
-                         interpreter::RegisterList list) {
-    for (int reg_index = list.register_count() - 1; reg_index >= 0;
-         --reg_index) {
-      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
-    }
-    return list.register_count();
-  }
-};
-
-template <typename... T>
-struct PopAllHelper;
-template <>
-struct PopAllHelper<> {
-  static void Pop(BaselineAssembler* basm) {}
-};
-// TODO(ishell): try to pack sequence of pops into one instruction by
-// looking at regiser codes. For example, Pop(r1, r2, r5, r0, r3, r4)
-// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
-template <>
-struct PopAllHelper<Register> {
-  static void Pop(BaselineAssembler* basm, Register reg) {
-    basm->masm()->Pop(reg);
-  }
-};
-template <typename... T>
-struct PopAllHelper<Register, T...> {
-  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
-    PopAllHelper<Register>::Pop(basm, reg);
-    PopAllHelper<T...>::Pop(basm, tail...);
-  }
-};
-
-}  // namespace detail
-
-template <typename... T>
-int BaselineAssembler::Push(T... vals) {
-  return detail::PushAllHelper<T...>::Push(this, vals...);
-}
-
-template <typename... T>
-void BaselineAssembler::PushReverse(T... vals) {
-  detail::PushAllHelper<T...>::PushReverse(this, vals...);
-}
-
-template <typename... T>
-void BaselineAssembler::Pop(T... registers) {
-  detail::PopAllHelper<T...>::Pop(this, registers...);
}
-
-void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
-                                               int offset) {
-  __ Lw(output, FieldMemOperand(source, offset));
-}
-void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
-                                              int offset) {
-  __ Lw(output, FieldMemOperand(source, offset));
-}
-void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
-                                                      Register source,
-                                                      int offset) {
-  LoadTaggedSignedField(output, source, offset);
-  SmiUntag(output);
-}
-void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
-                                           int offset) {
-  __ Lw(output, FieldMemOperand(source, offset));
-}
-void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
-                                                  Register source, int offset) {
-  __ lhu(output, FieldMemOperand(source, offset));
-}
-void BaselineAssembler::LoadWord8Field(Register output, Register source,
-                                       int offset) {
-  __ lb(output, FieldMemOperand(source, offset));
-}
-void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
-                                               Smi value) {
-  ASM_CODE_COMMENT(masm_);
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ li(scratch, Operand(value));
-  __ Sw(scratch, FieldMemOperand(target, offset));
-}
-void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
-                                                         int offset,
-                                                         Register value) {
-  ASM_CODE_COMMENT(masm_);
-  __ Sw(value, FieldMemOperand(target, offset));
-  ScratchRegisterScope temps(this);
-  Register scratch = temps.AcquireScratch();
-  __ RecordWriteField(target, offset, value, scratch, kRAHasNotBeenSaved,
-                      SaveFPRegsMode::kIgnore);
-}
-void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
-                                                       int offset,
-                                                       Register value) {
-  __ Sw(value, FieldMemOperand(target, offset));
-}
-
-void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
-                                                Register feedback_vector,
-                                                FeedbackSlot slot,
-                                                Label* on_result,
-                                                Label::Distance) {
-  Label fallthrough;
-  LoadTaggedPointerField(scratch_and_result, feedback_vector,
-                         FeedbackVector::OffsetOfElementAt(slot.ToInt()));
-  __ LoadWeakValue(scratch_and_result, scratch_and_result, &fallthrough);
-  // Is it marked_for_deoptimization? If yes, clear the slot.
-  {
-    ScratchRegisterScope temps(this);
-    Register scratch = temps.AcquireScratch();
-    __ TestCodeTIsMarkedForDeoptimizationAndJump(scratch_and_result, scratch,
-                                                 eq, on_result);
-    __ li(scratch, __ ClearedValue());
-    StoreTaggedFieldNoWriteBarrier(
-        feedback_vector, FeedbackVector::OffsetOfElementAt(slot.ToInt()),
-        scratch);
-  }
-  __ bind(&fallthrough);
-  Move(scratch_and_result, 0);
-}
-
-void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
-    int32_t weight, Label* skip_interrupt_label) {
-  ASM_CODE_COMMENT(masm_);
-  ScratchRegisterScope scratch_scope(this);
-  Register feedback_cell = scratch_scope.AcquireScratch();
-  LoadFunction(feedback_cell);
-  LoadTaggedPointerField(feedback_cell, feedback_cell,
-                         JSFunction::kFeedbackCellOffset);
-
-  Register interrupt_budget = scratch_scope.AcquireScratch();
-  __ Lw(interrupt_budget,
-        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
-  __ Addu(interrupt_budget, interrupt_budget, weight);
-  __ Sw(interrupt_budget,
-        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
-  if (skip_interrupt_label) {
-    DCHECK_LT(weight, 0);
-    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
-  }
-}
-void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
-    Register weight, Label* skip_interrupt_label) {
-  ASM_CODE_COMMENT(masm_);
-  ScratchRegisterScope scratch_scope(this);
-  Register feedback_cell = scratch_scope.AcquireScratch();
-  LoadFunction(feedback_cell);
-  LoadTaggedPointerField(feedback_cell, feedback_cell,
-                         JSFunction::kFeedbackCellOffset);
-
-  Register interrupt_budget = scratch_scope.AcquireScratch();
-  __ Lw(interrupt_budget,
-        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
-  __ Addu(interrupt_budget, interrupt_budget, weight);
-  __ Sw(interrupt_budget,
-        FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
-  if (skip_interrupt_label)
-    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
-}
-
-void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
-                                       uint32_t depth) {
-  for (; depth > 0; --depth) {
-    LoadTaggedPointerField(context, context, Context::kPreviousOffset);
-  }
-  LoadTaggedAnyField(kInterpreterAccumulatorRegister, context,
-                     Context::OffsetOfElementAt(index));
-}
-
-void BaselineAssembler::StaContextSlot(Register context, Register value,
-                                       uint32_t index, uint32_t depth) {
-  for (; depth > 0; --depth) {
-    LoadTaggedPointerField(context, context, Context::kPreviousOffset);
-  }
-  StoreTaggedFieldWithWriteBarrier(context, Context::OffsetOfElementAt(index),
-                                   value);
-}
-
-void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
-  __ Addu(lhs, lhs, Operand(rhs));
-}
-
-void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
-  __ And(output, lhs, Operand(rhs));
-}
-
-void BaselineAssembler::Switch(Register reg, int case_value_base,
-                               Label** labels, int num_labels) {
-  ASM_CODE_COMMENT(masm_);
-  Label fallthrough;
-  if (case_value_base != 0) {
-    __ Subu(reg, reg, Operand(case_value_base));
-  }
-
-  __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
-            reg, Operand(num_labels));
-
-  __ GenerateSwitchTable(reg, num_labels,
-                         [labels](size_t i) { return labels[i]; });
-
-  __ bind(&fallthrough);
-}
-
-#undef __
-
-#define __ basm.
-
-void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
-  ASM_CODE_COMMENT(masm);
-  BaselineAssembler basm(masm);
-
-  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
-  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
-
-  {
-    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");
-
-    Label skip_interrupt_label;
-    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
-    __ masm()->SmiTag(params_size);
-    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);
-
-    __ LoadContext(kContextRegister);
-    __ LoadFunction(kJSFunctionRegister);
-    __ masm()->Push(kJSFunctionRegister);
-    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
-
-    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
-    __ masm()->SmiUntag(params_size);
-
-    __ Bind(&skip_interrupt_label);
-  }
-
-  BaselineAssembler::ScratchRegisterScope temps(&basm);
-  Register actual_params_size = temps.AcquireScratch();
-  // Compute the size of the actual parameters + receiver (in bytes).
-  __ Move(actual_params_size,
-          MemOperand(fp, StandardFrameConstants::kArgCOffset));
-
-  // If actual is bigger than formal, then we should use it to free up the stack
-  // arguments.
-  Label corrected_args_count;
-  __ masm()->Branch(&corrected_args_count, ge, params_size,
-                    Operand(actual_params_size));
-  __ masm()->Move(params_size, actual_params_size);
-  __ Bind(&corrected_args_count);
-
-  // Leave the frame (also dropping the register file).
-  __ masm()->LeaveFrame(StackFrame::BASELINE);
-
-  // Drop receiver + arguments.
-  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
-                           TurboAssembler::kCountIncludesReceiver);
-
-  __ masm()->Ret();
-}
-
-#undef __
-
-inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
-    Register reg) {
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
-                             Operand(kInterpreterAccumulatorRegister));
-}
-
-}  // namespace baseline
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_BASELINE_MIPS_BASELINE_ASSEMBLER_MIPS_INL_H_
@@ -1,78 +0,0 @@
-// Copyright 2021 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_BASELINE_MIPS_BASELINE_COMPILER_MIPS_INL_H_
-#define V8_BASELINE_MIPS_BASELINE_COMPILER_MIPS_INL_H_
-
-#include "src/base/logging.h"
-#include "src/baseline/baseline-compiler.h"
-
-namespace v8 {
-namespace internal {
-namespace baseline {
-
-#define __ basm_.
-
-void BaselineCompiler::Prologue() {
-  ASM_CODE_COMMENT(&masm_);
-  __ masm()->EnterFrame(StackFrame::BASELINE);
-  DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
-  int max_frame_size =
-      bytecode_->frame_size() + max_call_args_ * kSystemPointerSize;
-  CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
-      kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
-      max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);
-
-  PrologueFillFrame();
-}
-
-void BaselineCompiler::PrologueFillFrame() {
-  ASM_CODE_COMMENT(&masm_);
-  // Inlined register frame fill
-  interpreter::Register new_target_or_generator_register =
-      bytecode_->incoming_new_target_or_generator_register();
-  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
-  int register_count = bytecode_->register_count();
-  // Magic value
-  const int kLoopUnrollSize = 8;
-  const int new_target_index = new_target_or_generator_register.index();
-  const bool has_new_target = new_target_index != kMaxInt;
-  if (has_new_target) {
-    DCHECK_LE(new_target_index, register_count);
-    __ masm()->Addu(sp, sp, Operand(-(kPointerSize * new_target_index)));
-    for (int i = 0; i < new_target_index; i++) {
-      __ masm()->Sw(kInterpreterAccumulatorRegister, MemOperand(sp, i * 4));
-    }
-    // Push new_target_or_generator.
-    __ Push(kJavaScriptCallNewTargetRegister);
-    register_count -= new_target_index + 1;
-  }
-  if (register_count < 2 * kLoopUnrollSize) {
-    // If the frame is small enough, just unroll the frame fill completely.
-    __ masm()->Addu(sp, sp, Operand(-(kPointerSize * register_count)));
-    for (int i = 0; i < register_count; ++i) {
-      __ masm()->Sw(kInterpreterAccumulatorRegister, MemOperand(sp, i * 4));
-    }
-  } else {
-    __ masm()->Addu(sp, sp, Operand(-(kPointerSize * register_count)));
-    for (int i = 0; i < register_count; ++i) {
-      __ masm()->Sw(kInterpreterAccumulatorRegister, MemOperand(sp, i * 4));
-    }
-  }
-}
-
-void BaselineCompiler::VerifyFrameSize() {
-  ASM_CODE_COMMENT(&masm_);
-  __ masm()->Addu(kScratchReg, sp,
-                  Operand(InterpreterFrameConstants::kFixedFrameSizeFromFp +
-                          bytecode_->frame_size()));
-  __ masm()->Assert(eq, AbortReason::kUnexpectedStackPointer, kScratchReg,
-                    Operand(fp));
-}
-
-}  // namespace baseline
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_BASELINE_MIPS_BASELINE_COMPILER_MIPS_INL_H_
@@ -581,7 +581,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered, reg,
                              Operand(kInterpreterAccumulatorRegister));
 }
 
@@ -8,6 +8,7 @@
 #include "src/baseline/baseline-assembler.h"
 #include "src/codegen/interface-descriptors.h"
 #include "src/codegen/ppc/assembler-ppc-inl.h"
+#include "src/codegen/ppc/register-ppc.h"
 #include "src/objects/literal-objects-inl.h"
 
 namespace v8 {
@@ -596,6 +597,7 @@ void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
 
 void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
                                        uint32_t depth) {
+  ASM_CODE_COMMENT(masm_);
   for (; depth > 0; --depth) {
     LoadTaggedPointerField(context, context, Context::kPreviousOffset);
   }
@@ -605,6 +607,7 @@ void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
 
 void BaselineAssembler::StaContextSlot(Register context, Register value,
                                        uint32_t index, uint32_t depth) {
+  ASM_CODE_COMMENT(masm_);
   for (; depth > 0; --depth) {
     LoadTaggedPointerField(context, context, Context::kPreviousOffset);
   }
@@ -614,6 +617,7 @@ void BaselineAssembler::StaContextSlot(Register context, Register value,
 
 void BaselineAssembler::LdaModuleVariable(Register context, int cell_index,
                                           uint32_t depth) {
+  ASM_CODE_COMMENT(masm_);
   for (; depth > 0; --depth) {
     LoadTaggedPointerField(context, context, Context::kPreviousOffset);
   }
@@ -636,6 +640,7 @@ void BaselineAssembler::LdaModuleVariable(Register context, int cell_index,
 
 void BaselineAssembler::StaModuleVariable(Register context, Register value,
                                           int cell_index, uint32_t depth) {
+  ASM_CODE_COMMENT(masm_);
   for (; depth > 0; --depth) {
     LoadTaggedPointerField(context, context, Context::kPreviousOffset);
   }
@@ -650,6 +655,7 @@ void BaselineAssembler::StaModuleVariable(Register context, Register value,
 }
 
 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
+  ASM_CODE_COMMENT(masm_);
   if (rhs.value() == 0) return;
   __ LoadSmiLiteral(r0, rhs);
   if (SmiValuesAre31Bits()) {
@@ -754,7 +760,7 @@ inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
   } else {
     assembler_->masm()->CmpU64(reg, kInterpreterAccumulatorRegister);
   }
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -602,7 +602,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered, reg,
                              Operand(kInterpreterAccumulatorRegister));
 }
 }  // namespace baseline
@@ -287,11 +287,20 @@ void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
   JumpIfHelper(masm_, cc, lhs, rhs, target);
 }
 
+constexpr static int stack_bias = 4;
+
 void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
   ASM_CODE_COMMENT(masm_);
+  DCHECK(operand.rb() == fp || operand.rx() == fp);
+  if (COMPRESS_POINTERS_BOOL) {
+    MemOperand addr =
+        MemOperand(operand.rx(), operand.rb(), operand.offset() + stack_bias);
+    __ LoadTaggedPointerField(ip, addr, r0);
+  } else {
   __ LoadTaggedPointerField(ip, operand, r0);
+  }
   JumpIfHelper<COMPRESS_POINTERS_BOOL ? 32 : 64>(masm_, cc, value, ip, target);
 }
 
@@ -299,7 +308,14 @@ void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                      Register value, Label* target,
                                      Label::Distance) {
   ASM_CODE_COMMENT(masm_);
+  DCHECK(operand.rb() == fp || operand.rx() == fp);
+  if (COMPRESS_POINTERS_BOOL) {
+    MemOperand addr =
+        MemOperand(operand.rx(), operand.rb(), operand.offset() + stack_bias);
+    __ LoadTaggedPointerField(ip, addr, r0);
+  } else {
   __ LoadTaggedPointerField(ip, operand, r0);
+  }
   JumpIfHelper<COMPRESS_POINTERS_BOOL ? 32 : 64>(masm_, cc, ip, value, target);
 }
 void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
@@ -752,7 +768,7 @@ inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
   } else {
     assembler_->masm()->CmpU64(reg, kInterpreterAccumulatorRegister);
   }
-  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -639,7 +639,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
     Register reg) {
   assembler_->masm()->cmp_tagged(reg, kInterpreterAccumulatorRegister);
-  assembler_->masm()->Assert(equal, AbortReason::kUnexpectedValue);
+  assembler_->masm()->Assert(equal, AbortReason::kAccumulatorClobbered);
 }
 
 }  // namespace baseline
@@ -17,6 +17,7 @@
 #include "src/objects/contexts.h"
 #include "src/objects/field-index-inl.h"
 #include "src/objects/js-array-inl.h"
+#include "src/objects/js-shared-array-inl.h"
 #include "src/objects/module-inl.h"
 #include "src/objects/property-details.h"
 #include "src/objects/prototype.h"
@@ -229,27 +230,6 @@ Handle<AccessorInfo> Accessors::MakeArrayLengthInfo(Isolate* isolate) {
                       &ArrayLengthGetter, &ArrayLengthSetter);
 }
 
-//
-// Accessors::SharedArrayLength
-//
-
-void Accessors::SharedArrayLengthGetter(
-    v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
-  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
-  DisallowGarbageCollection no_gc;
-  HandleScope scope(isolate);
-
-  Object value = *Utils::OpenHandle(*v8::Local<v8::Value>(info.This()));
-
-  Object result = Smi::FromInt(JSObject::cast(value).elements().length());
-  info.GetReturnValue().Set(Utils::ToLocal(Handle<Object>(result, isolate)));
-}
-
-Handle<AccessorInfo> Accessors::MakeSharedArrayLengthInfo(Isolate* isolate) {
-  return MakeAccessor(isolate, isolate->factory()->length_string(),
-                      &SharedArrayLengthGetter, nullptr);
-}
-
 //
 // Accessors::ModuleNamespaceEntry
 //
@@ -44,8 +44,6 @@ class JavaScriptFrame;
     kHasSideEffectToReceiver)                                     \
   V(_, function_prototype, FunctionPrototype, kHasNoSideEffect,   \
     kHasSideEffectToReceiver)                                     \
-  V(_, shared_array_length, SharedArrayLength, kHasNoSideEffect,  \
-    kHasSideEffectToReceiver)                                     \
  V(_, string_length, StringLength, kHasNoSideEffect,              \
    kHasSideEffectToReceiver)                                      \
  V(_, value_unavailable, ValueUnavailable, kHasNoSideEffect,      \