Lines Matching refs:rsp

66  * and does not change rsp.
93 movq %rsp, PER_CPU_VAR(cpu_tss_rw + TSS_sp2)
94 SWITCH_TO_KERNEL_CR3 scratch_reg=%rsp
95 movq PER_CPU_VAR(pcpu_hot + X86_top_of_stack), %rsp
112 movq %rsp, %rdi
145 movq %rsp, %rdi
146 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
161 popq %rsp
190 movq %rsp, TASK_threadsp(%rdi)
191 movq TASK_threadsp(%rsi), %rsp
241 movq %rsp, %rsi /* regs */
298 ALTERNATIVE "call error_entry; movq %rax, %rsp", \
304 movq %rsp, %rdi /* pt_regs pointer into 1st argument*/
307 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
308 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
352 testb $3, CS-ORIG_RAX(%rsp)
355 pushq 5*8(%rsp)
410 testb $3, CS-ORIG_RAX(%rsp)
418 movq %rsp, %rdi /* pt_regs pointer */
465 testb $3, CS-ORIG_RAX(%rsp)
482 movq %rsp, %rdi /* pt_regs pointer */
484 movq %rax, %rsp /* Switch to new stack */
490 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
491 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
493 movq %rsp, %rdi /* pt_regs pointer */
529 movq %rsp, %rdi /* pt_regs pointer into first argument */
530 movq ORIG_RAX(%rsp), %rsi /* get error code into 2nd argument*/
531 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
571 add $8, %rsp /* orig_ax */
578 testb $3, 8(%rsp)
590 movq %rsp, %rdi
591 movq PER_CPU_VAR(cpu_tss_rw + TSS_sp0), %rsp
622 testb $3, CS(%rsp)
628 addq $8, %rsp /* skip regs->orig_ax */
647 testb $4, (SS-RIP)(%rsp)
690 movq (1*8)(%rsp), %rax /* user RIP */
692 movq (2*8)(%rsp), %rax /* user CS */
694 movq (3*8)(%rsp), %rax /* user RFLAGS */
696 movq (5*8)(%rsp), %rax /* user SS */
698 movq (4*8)(%rsp), %rax /* user RSP */
718 movq %rax, %rsp
798 movq %rdi, %rsp /* we don't return, adjust the stack frame */
824 cmpw %cx, 0x10(%rsp)
827 cmpw %cx, 0x18(%rsp)
830 cmpw %cx, 0x20(%rsp)
833 cmpw %cx, 0x28(%rsp)
836 movq (%rsp), %rcx
837 movq 8(%rsp), %r11
838 addq $0x30, %rsp
843 movq (%rsp), %rcx
844 movq 8(%rsp), %r11
845 addq $0x30, %rsp
1010 testb $3, CS+8(%rsp)
1024 leaq 8(%rsp), %rdi /* arg0 = pt_regs pointer */
1036 cmpq %rcx, RIP+8(%rsp)
1039 cmpq %rax, RIP+8(%rsp)
1041 cmpq $.Lgs_change, RIP+8(%rsp)
1058 leaq 8(%rsp), %rax /* return pt_regs pointer */
1064 movq %rcx, RIP+8(%rsp)
1082 leaq 8(%rsp), %rdi /* arg0 = pt_regs pointer */
1091 testb $3, CS(%rsp)
1152 testb $3, CS-RIP+8(%rsp)
1169 movq %rsp, %rdx
1170 movq PER_CPU_VAR(pcpu_hot + X86_top_of_stack), %rsp
1173 pushq 4*8(%rdx) /* pt_regs->rsp */
1191 movq %rsp, %rdi
1254 cmpq 8(%rsp), %rdx
1257 cmpq 8(%rsp), %rdx
1266 cmpl $1, -8(%rsp)
1281 lea 6*8(%rsp), %rdx
1282 /* Compare the NMI stack (rdx) with the stack we came from (4*8(%rsp)) */
1283 cmpq %rdx, 4*8(%rsp)
1288 cmpq %rdx, 4*8(%rsp)
1294 testb $(X86_EFLAGS_DF >> 8), (3*8 + 1)(%rsp)
1304 subq $8, %rsp
1305 leaq -10*8(%rsp), %rdx
1313 addq $(6*8), %rsp
1323 movq (%rsp), %rdx
1329 subq $(5*8), %rsp
1333 pushq 11*8(%rsp)
1345 pushq %rsp /* RSP (minus 8 because of the previous push) */
1346 addq $8, (%rsp) /* Fix up RSP */
1372 movq $1, 10*8(%rsp) /* Set "NMI executing". */
1379 addq $(10*8), %rsp
1381 pushq -6*8(%rsp)
1383 subq $(5*8), %rsp
1404 movq %rsp, %rdi
1440 addq $6*8, %rsp
1452 movq $0, 5*8(%rsp) /* clear "NMI executing" */
1489 leaq -PTREGS_SIZE(%rax), %rsp
1522 * This means that the stack is non-constant and ORC can't unwind it with %rsp
1531 mov %rsp, %rbp
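
The pattern that recurs through the matches above (e.g. lines 93-95, 145-146, 1169-1170) is: stash the current %rsp in a known slot, point %rsp at a different stack, do the work there, and later restore or rebuild the original stack pointer. Below is a minimal userland sketch of that pattern only, assuming nothing beyond the lines quoted here: alt_stack, alt_stack_top, saved_rsp and _start are made-up names standing in for the per-CPU TSS/top-of-stack slots the real entry code uses, and the sketch deliberately ignores CR3 switching, nesting and everything else the kernel has to handle. It is an illustration, not the kernel's implementation.

	/* Hypothetical userland demo of the "stash %rsp, run on a private
	 * stack, restore %rsp" pattern.  Not kernel code. */
	.bss
	.align	16
alt_stack:
	.skip	4096				/* private stack area */
alt_stack_top:
saved_rsp:
	.skip	8				/* stand-in for the per-CPU scratch slot */

	.text
	.globl	_start
_start:
	movq	%rsp, saved_rsp(%rip)		/* stash the current stack pointer */
	leaq	alt_stack_top(%rip), %rsp	/* switch to the private stack */

	/* do some work on the private stack: write(1, msg, msg_len) */
	movq	$1, %rax			/* __NR_write */
	movq	$1, %rdi			/* fd = stdout */
	leaq	msg(%rip), %rsi
	movq	$msg_len, %rdx
	syscall

	movq	saved_rsp(%rip), %rsp		/* switch back to the original stack */

	movq	$60, %rax			/* __NR_exit */
	xorq	%rdi, %rdi
	syscall

	.section .rodata
msg:
	.ascii	"running on the alternate stack\n"
	msg_len = . - msg

The sketch assembles and links without libc (as -o demo.o demo.S; ld -o demo demo.o) on x86-64 Linux; the quoted entry-code lines do the same %rsp juggling but against per-CPU data and with the extra CR3 and NMI-nesting handling visible elsewhere in the matches.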