diff --git a/src/builtins/arm/builtins-arm.cc b/src/builtins/arm/builtins-arm.cc
index 6c5a59ff8c4..bae7449604a 100644
--- a/src/builtins/arm/builtins-arm.cc
+++ b/src/builtins/arm/builtins-arm.cc
@@ -103,6 +103,20 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
 
 namespace {
 
+void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                 Register scratch, Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ sub(scratch, sp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
+  __ b(le, stack_overflow);  // Signed comparison.
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0 : number of arguments
@@ -115,6 +129,10 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
 
   Register scratch = r2;
 
+  Label stack_overflow;
+
+  Generate_StackOverflowCheck(masm, r0, scratch, &stack_overflow);
+
   // Enter a construct frame.
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
@@ -170,20 +188,13 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   __ add(sp, sp, Operand(scratch, LSL, kPointerSizeLog2 - kSmiTagSize));
   __ add(sp, sp, Operand(kPointerSize));
   __ Jump(lr);
-}
 
-void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
-                                 Register scratch, Label* stack_overflow) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
-  // Make scratch the space we have left. The stack might already be overflowed
-  // here which will cause scratch to become negative.
-  __ sub(scratch, sp, scratch);
-  // Check if the arguments will overflow the stack.
-  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
-  __ b(le, stack_overflow);  // Signed comparison.
+  __ bind(&stack_overflow);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntime(Runtime::kThrowStackOverflow);
+    __ bkpt(0);  // Unreachable code.
+  }
 }
 
 }  // namespace
diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
index 06734617d92..520a5bcb278 100644
--- a/src/builtins/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -95,6 +95,44 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
 
 namespace {
 
+void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                 Label* stack_overflow) {
+  UseScratchRegisterScope temps(masm);
+  Register scratch = temps.AcquireX();
+
+  // Check the stack for overflow.
+  // We are not trying to catch interruptions (e.g. debug break and
+  // preemption) here, so the "real stack limit" is checked.
+
+  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ Sub(scratch, sp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ Cmp(scratch, Operand(num_args, LSL, kSystemPointerSizeLog2));
+  __ B(le, stack_overflow);
+
+#if defined(V8_OS_WIN)
+  // Simulate _chkstk to extend stack guard page on Windows ARM64.
+  const int kPageSize = 4096;
+  Label chkstk, chkstk_done;
+  Register probe = temps.AcquireX();
+
+  __ Sub(scratch, sp, Operand(num_args, LSL, kSystemPointerSizeLog2));
+  __ Mov(probe, sp);
+
+  // Loop start of stack probe.
+  __ Bind(&chkstk);
+  __ Sub(probe, probe, kPageSize);
+  __ Cmp(probe, scratch);
+  __ B(lo, &chkstk_done);
+  __ Ldrb(xzr, MemOperand(probe));
+  __ B(&chkstk);
+
+  __ Bind(&chkstk_done);
+#endif
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- x0 : number of arguments
@@ -106,6 +144,9 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // -----------------------------------
 
   ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
+  Label stack_overflow;
+
+  Generate_StackOverflowCheck(masm, x0, &stack_overflow);
 
   // Enter a construct frame.
   {
@@ -196,44 +237,13 @@
   // Remove caller arguments from the stack and return.
   __ DropArguments(x1, TurboAssembler::kCountExcludesReceiver);
   __ Ret();
-}
 
-void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
-                                 Label* stack_overflow) {
-  UseScratchRegisterScope temps(masm);
-  Register scratch = temps.AcquireX();
-
-  // Check the stack for overflow.
-  // We are not trying to catch interruptions (e.g. debug break and
-  // preemption) here, so the "real stack limit" is checked.
-
-  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
-  // Make scratch the space we have left. The stack might already be overflowed
-  // here which will cause scratch to become negative.
-  __ Sub(scratch, sp, scratch);
-  // Check if the arguments will overflow the stack.
-  __ Cmp(scratch, Operand(num_args, LSL, kSystemPointerSizeLog2));
-  __ B(le, stack_overflow);
-
-#if defined(V8_OS_WIN)
-  // Simulate _chkstk to extend stack guard page on Windows ARM64.
-  const int kPageSize = 4096;
-  Label chkstk, chkstk_done;
-  Register probe = temps.AcquireX();
-
-  __ Sub(scratch, sp, Operand(num_args, LSL, kSystemPointerSizeLog2));
-  __ Mov(probe, sp);
-
-  // Loop start of stack probe.
-  __ Bind(&chkstk);
-  __ Sub(probe, probe, kPageSize);
-  __ Cmp(probe, scratch);
-  __ B(lo, &chkstk_done);
-  __ Ldrb(xzr, MemOperand(probe));
-  __ B(&chkstk);
-
-  __ Bind(&chkstk_done);
-#endif
+  __ Bind(&stack_overflow);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntime(Runtime::kThrowStackOverflow);
+    __ Unreachable();
+  }
 }
 
 }  // namespace
diff --git a/src/builtins/ia32/builtins-ia32.cc b/src/builtins/ia32/builtins-ia32.cc
index fa72ec22783..965c004e0b9 100644
--- a/src/builtins/ia32/builtins-ia32.cc
+++ b/src/builtins/ia32/builtins-ia32.cc
@@ -78,6 +78,30 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
 
 namespace {
 
+void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                 Register scratch, Label* stack_overflow,
+                                 bool include_receiver = false) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  ExternalReference real_stack_limit =
+      ExternalReference::address_of_real_stack_limit(masm->isolate());
+  // Compute the space that is left as a negative number in scratch. If
+  // we already overflowed, this will be a positive number.
+  __ mov(scratch, __ ExternalReferenceAsOperand(real_stack_limit, scratch));
+  __ sub(scratch, esp);
+  // Add the size of the arguments.
+  static_assert(kSystemPointerSize == 4,
+                "The next instruction assumes kSystemPointerSize == 4");
+  __ lea(scratch, Operand(scratch, num_args, times_system_pointer_size, 0));
+  if (include_receiver) {
+    __ add(scratch, Immediate(kSystemPointerSize));
+  }
+  // See if we overflowed, i.e. scratch is positive.
+  __ cmp(scratch, Immediate(0));
+  __ j(greater, stack_overflow);  // Signed comparison.
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax: number of arguments
@@ -86,6 +110,10 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   //  -- esi: context
   // -----------------------------------
 
+  Label stack_overflow;
+
+  Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
+
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -145,30 +173,13 @@
                       1 * kSystemPointerSize));  // 1 ~ receiver
   __ PushReturnAddressFrom(ecx);
   __ ret(0);
-}
 
-void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
-                                 Register scratch, Label* stack_overflow,
-                                 bool include_receiver = false) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  ExternalReference real_stack_limit =
-      ExternalReference::address_of_real_stack_limit(masm->isolate());
-  // Compute the space that is left as a negative number in scratch. If
-  // we already overflowed, this will be a positive number.
-  __ mov(scratch, __ ExternalReferenceAsOperand(real_stack_limit, scratch));
-  __ sub(scratch, esp);
-  // Add the size of the arguments.
-  static_assert(kSystemPointerSize == 4,
-                "The next instruction assumes kSystemPointerSize == 4");
-  __ lea(scratch, Operand(scratch, num_args, times_system_pointer_size, 0));
-  if (include_receiver) {
-    __ add(scratch, Immediate(kSystemPointerSize));
+  __ bind(&stack_overflow);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntime(Runtime::kThrowStackOverflow);
+    __ int3();  // This should be unreachable.
   }
-  // See if we overflowed, i.e. scratch is positive.
-  __ cmp(scratch, Immediate(0));
-  __ j(greater, stack_overflow);  // Signed comparison.
 }
 
 }  // namespace
diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
index 11bb9ca44aa..89d01da9838 100644
--- a/src/builtins/x64/builtins-x64.cc
+++ b/src/builtins/x64/builtins-x64.cc
@@ -77,6 +77,25 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
 
 namespace {
 
+void Generate_StackOverflowCheck(
+    MacroAssembler* masm, Register num_args, Register scratch,
+    Label* stack_overflow,
+    Label::Distance stack_overflow_distance = Label::kFar) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
+  __ movq(scratch, rsp);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ subq(scratch, kScratchRegister);
+  __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
+  // Check if the arguments will overflow the stack.
+  __ cmpq(scratch, num_args);
+  // Signed comparison.
+  __ j(less_equal, stack_overflow, stack_overflow_distance);
+}
+
 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax: number of arguments
@@ -85,6 +104,9 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   //  -- rsi: context
   // -----------------------------------
 
+  Label stack_overflow;
+  Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kFar);
+
   // Enter a construct frame.
   {
     FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -142,25 +164,13 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   __ PushReturnAddressFrom(rcx);
   __ ret(0);
-}
 
-void Generate_StackOverflowCheck(
-    MacroAssembler* masm, Register num_args, Register scratch,
-    Label* stack_overflow,
-    Label::Distance stack_overflow_distance = Label::kFar) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
-  __ movq(scratch, rsp);
-  // Make scratch the space we have left. The stack might already be overflowed
-  // here which will cause scratch to become negative.
-  __ subq(scratch, kScratchRegister);
-  __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
-  // Check if the arguments will overflow the stack.
-  __ cmpq(scratch, num_args);
-  // Signed comparison.
-  __ j(less_equal, stack_overflow, stack_overflow_distance);
+  __ bind(&stack_overflow);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntime(Runtime::kThrowStackOverflow);
+    __ int3();  // This should be unreachable.
+  }
 }
 
 }  // namespace
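
For reviewers who want the hoisted check summarized in one place: every port's Generate_StackOverflowCheck reduces to a signed comparison between the space left above the "real" stack limit and the space the incoming arguments need. The standalone C++ sketch below is illustrative only and is not part of this patch; sp, real_stack_limit, and num_args stand in for the values the generated code reads at runtime. The ia32 port folds the same condition into a lea and compares against zero, and the x64 port shifts the remaining space right instead of shifting num_args left.

#include <cstdint>

// Illustrative sketch (not V8 code) of the overflow test the builtins emit.
constexpr int kSystemPointerSizeLog2 = 3;  // 8-byte pointers on a 64-bit port.

bool WouldOverflowStack(uintptr_t sp, uintptr_t real_stack_limit,
                        uint32_t num_args) {
  // Space we have left; this may already be "negative" (sp below the limit),
  // which is why the comparison is signed, matching the le / less_equal
  // branches in the generated code.
  intptr_t remaining = static_cast<intptr_t>(sp - real_stack_limit);
  intptr_t needed = static_cast<intptr_t>(num_args) << kSystemPointerSizeLog2;
  return remaining <= needed;
}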