deps: patch V8 to 10.2.154.13
Refs: v8/v8@10.2.154.4...10.2.154.13

PR-URL: #43727
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Tobias Nießen <tniessen@tnie.de>
targos committed Jul 12, 2022
1 parent 7ffcd85 commit d6a9e93
Showing 44 changed files with 710 additions and 280 deletions.
2 changes: 1 addition & 1 deletion deps/v8/include/v8-version.h
@@ -11,7 +11,7 @@
#define V8_MAJOR_VERSION 10
#define V8_MINOR_VERSION 2
#define V8_BUILD_NUMBER 154
#define V8_PATCH_LEVEL 4
#define V8_PATCH_LEVEL 13

// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
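
Note: these four macros are what V8 reports as its version, so this one-line hunk is what moves the bundled V8 from 10.2.154.4 to 10.2.154.13. A minimal sketch of how they compose into the version string; the printing code below is illustrative only and is not part of the patch.

// Illustrative only: the macro values mirror deps/v8/include/v8-version.h
// after this patch; the printing is not part of V8.
#include <cstdio>

#define V8_MAJOR_VERSION 10
#define V8_MINOR_VERSION 2
#define V8_BUILD_NUMBER 154
#define V8_PATCH_LEVEL 13

int main() {
  std::printf("%d.%d.%d.%d\n", V8_MAJOR_VERSION, V8_MINOR_VERSION,
              V8_BUILD_NUMBER, V8_PATCH_LEVEL);  // prints 10.2.154.13
  return 0;
}
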
23 changes: 17 additions & 6 deletions deps/v8/src/builtins/arm/builtins-arm.cc
@@ -2609,8 +2609,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// The function index was put in a register by the jump table trampoline.
// Convert to Smi for the runtime call.
__ SmiTag(kWasmCompileLazyFuncIndexRegister,
kWasmCompileLazyFuncIndexRegister);
__ SmiTag(kWasmCompileLazyFuncIndexRegister);
{
HardAbortScope hard_abort(masm); // Avoid calls to Abort.
FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
@@ -2640,22 +2639,34 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ stm(db_w, sp, gp_regs);
__ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);

// Pass instance and function index as explicit arguments to the runtime
// Push the Wasm instance for loading the jump table address after the
// runtime call.
__ push(kWasmInstanceRegister);

// Push the Wasm instance again as an explicit argument to the runtime
// function.
__ push(kWasmInstanceRegister);
// Push the function index as second argument.
__ push(kWasmCompileLazyFuncIndexRegister);
// Initialize the JavaScript context with 0. CEntry will use it to
// set the current context on the isolate.
__ Move(cp, Smi::zero());
__ CallRuntime(Runtime::kWasmCompileLazy, 2);
// The entrypoint address is the return value.
__ mov(r8, kReturnRegister0);
// The runtime function returns the jump table slot offset as a Smi. Use
// that to compute the jump target in r8.
__ pop(kWasmInstanceRegister);
__ ldr(r8, MemOperand(
kWasmInstanceRegister,
WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
__ add(r8, r8, Operand::SmiUntag(kReturnRegister0));
// r8 now holds the jump table slot where we want to jump to in the end.

// Restore registers.
__ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
__ ldm(ia_w, sp, gp_regs);
}
// Finally, jump to the entrypoint.

// Finally, jump to the jump table slot for the function.
__ Jump(r8);
}

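Note: the change in this hunk, repeated for every architecture below, is that Runtime::kWasmCompileLazy no longer returns an entrypoint address; it returns the jump table slot offset as a Smi, and the builtin computes the jump target from the instance's jump table start. A rough sketch of the computation the generated code performs, in plain C++ with stand-in types and assuming the 32-bit Smi encoding used on arm and ia32; the real code is emitted through the MacroAssembler as shown above.

#include <cstdint>

// Stand-in for the field read at
// WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag.
struct WasmInstanceFields {
  uintptr_t jump_table_start;
};

// 32-bit Smi encoding assumed for this sketch: value << 1 with a 0 tag bit.
constexpr intptr_t SmiUntag(intptr_t smi) { return smi >> 1; }

// kReturnRegister0 holds the Smi-tagged slot offset after the runtime call;
// the branch target is jump_table_start plus the untagged offset.
uintptr_t LazyCompileJumpTarget(const WasmInstanceFields& instance,
                                intptr_t returned_smi) {
  return instance.jump_table_start + SmiUntag(returned_smi);
}
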
81 changes: 48 additions & 33 deletions deps/v8/src/builtins/arm64/builtins-arm64.cc
@@ -3018,41 +3018,50 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Sign extend and convert to Smi for the runtime call.
__ sxtw(kWasmCompileLazyFuncIndexRegister,
kWasmCompileLazyFuncIndexRegister.W());
__ SmiTag(kWasmCompileLazyFuncIndexRegister,
kWasmCompileLazyFuncIndexRegister);

UseScratchRegisterScope temps(masm);
{
HardAbortScope hard_abort(masm); // Avoid calls to Abort.
FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

// Save all parameter registers (see wasm-linkage.h). They might be
// overwritten in the runtime call below. We don't have any callee-saved
// registers in wasm, so no need to store anything else.
RegList gp_regs;
__ SmiTag(kWasmCompileLazyFuncIndexRegister);

// Compute register lists for parameters to be saved. We save all parameter
// registers (see wasm-linkage.h). They might be overwritten in the runtime
// call below. We don't have any callee-saved registers in wasm, so no need to
// store anything else.
constexpr RegList kSavedGpRegs = ([]() constexpr {
RegList saved_gp_regs;
for (Register gp_param_reg : wasm::kGpParamRegisters) {
gp_regs.set(gp_param_reg);
saved_gp_regs.set(gp_param_reg);
}
// Also push x1, because we must push multiples of 16 bytes (see
// {TurboAssembler::PushCPURegList}.
CHECK_EQ(1, gp_regs.Count() % 2);
gp_regs.set(x1);
CHECK_EQ(0, gp_regs.Count() % 2);
saved_gp_regs.set(x1);
// All set registers were unique.
CHECK_EQ(saved_gp_regs.Count(), arraysize(wasm::kGpParamRegisters) + 1);
// We push a multiple of 16 bytes.
CHECK_EQ(0, saved_gp_regs.Count() % 2);
// The Wasm instance must be part of the saved registers.
CHECK(saved_gp_regs.has(kWasmInstanceRegister));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs,
saved_gp_regs.Count());
return saved_gp_regs;
})();

DoubleRegList fp_regs;
constexpr DoubleRegList kSavedFpRegs = ([]() constexpr {
DoubleRegList saved_fp_regs;
for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
fp_regs.set(fp_param_reg);
saved_fp_regs.set(fp_param_reg);
}

CHECK_EQ(gp_regs.Count(), arraysize(wasm::kGpParamRegisters) + 1);
CHECK_EQ(fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs,
gp_regs.Count());
CHECK_EQ(saved_fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs,
fp_regs.Count());
saved_fp_regs.Count());
return saved_fp_regs;
})();

__ PushXRegList(gp_regs);
__ PushQRegList(fp_regs);
{
HardAbortScope hard_abort(masm); // Avoid calls to Abort.
FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

// Save registers that we need to keep alive across the runtime call.
__ PushXRegList(kSavedGpRegs);
__ PushQRegList(kSavedFpRegs);

// Pass instance and function index as explicit arguments to the runtime
// function.
@@ -3062,17 +3071,23 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ Mov(cp, Smi::zero());
__ CallRuntime(Runtime::kWasmCompileLazy, 2);

// Exclude x17 from the scope, there are hardcoded uses of it below.
temps.Exclude(x17);

// The entrypoint address is the return value.
__ Mov(x17, kReturnRegister0);
// Untag the returned Smi into into x17, for later use.
static_assert(!kSavedGpRegs.has(x17));
__ SmiUntag(x17, kReturnRegister0);

// Restore registers.
__ PopQRegList(fp_regs);
__ PopXRegList(gp_regs);
__ PopQRegList(kSavedFpRegs);
__ PopXRegList(kSavedGpRegs);
}
// Finally, jump to the entrypoint.

// The runtime function returned the jump table slot offset as a Smi (now in
// x17). Use that to compute the jump target.
static_assert(!kSavedGpRegs.has(x18));
__ ldr(x18, MemOperand(
kWasmInstanceRegister,
WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
__ add(x17, x18, Operand(x17));
// Finally, jump to the jump table slot for the function.
__ Jump(x17);
}

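Note: the arm64 and loong64 hunks also move the saved register sets from runtime-built lists to compile-time constants built by immediately invoked constexpr lambdas, so the invariants (unique registers, instance register included, counts matching the frame constants) can be rejected at compile time rather than at runtime. A self-contained sketch of that idiom with stand-in types; RegList here is a simplified bitset, not the V8 class, and the register numbers are assumed example values.

#include <cstdint>

// Simplified stand-in for v8::internal::RegList.
struct RegList {
  uint64_t bits = 0;
  constexpr void set(int reg) { bits |= uint64_t{1} << reg; }
  constexpr bool has(int reg) const { return (bits >> reg) & 1; }
  constexpr int Count() const {
    int n = 0;
    for (uint64_t b = bits; b != 0; b &= b - 1) ++n;  // count set bits
    return n;
  }
};

// Assumed example parameter registers; the real lists live in wasm-linkage.h.
constexpr int kGpParamRegisters[] = {0, 2, 3, 4, 5, 6, 7};
constexpr int kWasmInstanceRegister = 7;
constexpr int kNumGpParams =
    static_cast<int>(sizeof(kGpParamRegisters) / sizeof(kGpParamRegisters[0]));

constexpr RegList kSavedGpRegs = ([]() constexpr {
  RegList saved;
  for (int reg : kGpParamRegisters) saved.set(reg);
  saved.set(1);  // pad to an even register count, as on arm64
  return saved;
})();

// Violations here fail the build instead of aborting at runtime.
static_assert(kSavedGpRegs.Count() == kNumGpParams + 1);
static_assert(kSavedGpRegs.has(kWasmInstanceRegister));
static_assert(kSavedGpRegs.Count() % 2 == 0);
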
27 changes: 18 additions & 9 deletions deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -2878,20 +2878,28 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
offset += kSimd128Size;
}

// Push the Wasm instance as an explicit argument to WasmCompileLazy.
// Push the Wasm instance for loading the jump table address after the
// runtime call.
__ Push(kWasmInstanceRegister);

// Push the Wasm instance again as an explicit argument to the runtime
// function.
__ Push(kWasmInstanceRegister);
// Push the function index as second argument.
__ Push(kWasmCompileLazyFuncIndexRegister);
// Initialize the JavaScript context with 0. CEntry will use it to
// set the current context on the isolate.
__ Move(kContextRegister, Smi::zero());
{
// At this point, ebx has been spilled to the stack but is not yet
// overwritten with another value. We can still use it as kRootRegister.
__ CallRuntime(Runtime::kWasmCompileLazy, 2);
}
// The entrypoint address is the return value.
__ mov(edi, kReturnRegister0);
__ CallRuntime(Runtime::kWasmCompileLazy, 2);
// The runtime function returns the jump table slot offset as a Smi. Use
// that to compute the jump target in edi.
__ Pop(kWasmInstanceRegister);
__ mov(edi, MemOperand(kWasmInstanceRegister,
WasmInstanceObject::kJumpTableStartOffset -
kHeapObjectTag));
__ SmiUntag(kReturnRegister0);
__ add(edi, kReturnRegister0);
// edi now holds the jump table slot where we want to jump to in the end.

// Restore registers.
for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
@@ -2904,7 +2912,8 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ Pop(reg);
}
}
// Finally, jump to the entrypoint.

// Finally, jump to the jump table slot for the function.
__ jmp(edi);
}

Expand Down
73 changes: 49 additions & 24 deletions deps/v8/src/builtins/loong64/builtins-loong64.cc
@@ -2648,37 +2648,50 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// The function index was put in t0 by the jump table trampoline.
// Convert to Smi for the runtime call
__ SmiTag(kWasmCompileLazyFuncIndexRegister);
{
HardAbortScope hard_abort(masm); // Avoid calls to Abort.
FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

// Save all parameter registers (see wasm-linkage.h). They might be
// overwritten in the runtime call below. We don't have any callee-saved
// registers in wasm, so no need to store anything else.
RegList gp_regs;
// Compute register lists for parameters to be saved. We save all parameter
// registers (see wasm-linkage.h). They might be overwritten in the runtime
// call below. We don't have any callee-saved registers in wasm, so no need to
// store anything else.
constexpr RegList kSavedGpRegs = ([]() constexpr {
RegList saved_gp_regs;
for (Register gp_param_reg : wasm::kGpParamRegisters) {
gp_regs.set(gp_param_reg);
saved_gp_regs.set(gp_param_reg);
}

DoubleRegList fp_regs;
// All set registers were unique.
CHECK_EQ(saved_gp_regs.Count(), arraysize(wasm::kGpParamRegisters));
// The Wasm instance must be part of the saved registers.
CHECK(saved_gp_regs.has(kWasmInstanceRegister));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs,
saved_gp_regs.Count());
return saved_gp_regs;
})();

constexpr DoubleRegList kSavedFpRegs = ([]() constexpr {
DoubleRegList saved_fp_regs;
for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
fp_regs.set(fp_param_reg);
saved_fp_regs.set(fp_param_reg);
}

CHECK_EQ(gp_regs.Count(), arraysize(wasm::kGpParamRegisters));
CHECK_EQ(fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs,
gp_regs.Count());
CHECK_EQ(saved_fp_regs.Count(), arraysize(wasm::kFpParamRegisters));
CHECK_EQ(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs,
fp_regs.Count());
saved_fp_regs.Count());
return saved_fp_regs;
})();

{
HardAbortScope hard_abort(masm); // Avoid calls to Abort.
FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

__ MultiPush(gp_regs);
__ MultiPushFPU(fp_regs);
// Save registers that we need to keep alive across the runtime call.
__ MultiPush(kSavedGpRegs);
__ MultiPushFPU(kSavedFpRegs);

// kFixedFrameSizeFromFp is hard coded to include space for Simd
// registers, so we still need to allocate extra (unused) space on the stack
// as if they were saved.
__ Sub_d(sp, sp, fp_regs.Count() * kDoubleSize);
__ Sub_d(sp, sp, kSavedFpRegs.Count() * kDoubleSize);

// Pass instance and function index as an explicit arguments to the runtime
// function.
@@ -2687,15 +2700,27 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// set the current context on the isolate.
__ Move(kContextRegister, Smi::zero());
__ CallRuntime(Runtime::kWasmCompileLazy, 2);
__ mov(t8, a0);

__ Add_d(sp, sp, fp_regs.Count() * kDoubleSize);
// Untag the returned Smi into into t7, for later use.
static_assert(!kSavedGpRegs.has(t7));
__ SmiUntag(t7, a0);

__ Add_d(sp, sp, kSavedFpRegs.Count() * kDoubleSize);
// Restore registers.
__ MultiPopFPU(fp_regs);
__ MultiPop(gp_regs);
__ MultiPopFPU(kSavedFpRegs);
__ MultiPop(kSavedGpRegs);
}
// Finally, jump to the entrypoint.
__ Jump(t8);

// The runtime function returned the jump table slot offset as a Smi (now in
// t7). Use that to compute the jump target.
static_assert(!kSavedGpRegs.has(t8));
__ Ld_d(t8, MemOperand(
kWasmInstanceRegister,
WasmInstanceObject::kJumpTableStartOffset - kHeapObjectTag));
__ Add_d(t7, t8, Operand(t7));

// Finally, jump to the jump table slot for the function.
__ Jump(t7);
}

void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
(Diffs for the remaining 39 changed files are not shown here.)