Side-by-side diff (left column: Deleted, right column: Added; view modes: full / compact)
support.s at SVN r330446 (old) vs. support.s at SVN r347568 (new)
1/*-
2 * Copyright (c) 1993 The Regents of the University of California.
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 4. Neither the name of the University nor the names of its contributors
14 * may be used to endorse or promote products derived from this software
15 * without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
18 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
21 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
22 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27 * SUCH DAMAGE.
28 *
1/*-
2 * Copyright (c) 1993 The Regents of the University of California.
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 4. Neither the name of the University nor the names of its contributors
14 * may be used to endorse or promote products derived from this software
15 * without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
18 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
21 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
22 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27 * SUCH DAMAGE.
28 *
29 * $FreeBSD: stable/11/sys/i386/i386/support.s 330446 2018-03-05 06:59:30Z eadler $
29 * $FreeBSD: stable/11/sys/i386/i386/support.s 347568 2019-05-14 17:05:02Z kib $
30 */
31
32#include <machine/asmacros.h>
33#include <machine/cputypes.h>
34#include <machine/pmap.h>
35#include <machine/specialreg.h>
36
37#include "assym.s"
38
39#define IDXSHIFT 10
40
41 .text
42
43/*
44 * bcopy family
45 * void bzero(void *buf, u_int len)
46 */
47ENTRY(bzero)
/*
 * void bzero(void *buf, u_int len)
 * Zero len bytes at buf: longword stores first, then the 0..3
 * leftover bytes.  Clobbers %eax, %ecx; %edi is saved/restored.
 */
48	pushl %edi
49	movl 8(%esp),%edi /* buf (args shifted 4 by the push) */
50	movl 12(%esp),%ecx /* len */
51	xorl %eax,%eax /* store pattern = 0 */
52	shrl $2,%ecx /* longword count */
53	rep
54	stosl
55	movl 12(%esp),%ecx /* reload len ... */
56	andl $3,%ecx /* ... tail byte count (0..3) */
57	rep
58	stosb
59	popl %edi
60	ret
61END(bzero)
62
63ENTRY(sse2_pagezero)
/*
 * void sse2_pagezero(void *page)
 * Zero one 4096-byte page using non-temporal stores (movnti) so the
 * zeroed page does not displace useful cache lines.  The sfence
 * makes the weakly-ordered stores globally visible before return.
 */
64	pushl %ebx
65	movl 8(%esp),%ecx /* %ecx = cursor, starts at page base */
66	movl %ecx,%eax
67	addl $4096,%eax /* %eax = end of page */
68	xor %ebx,%ebx /* value to store = 0 */
691:
70	movnti %ebx,(%ecx) /* non-temporal 4-byte store */
71	addl $4,%ecx
72	cmpl %ecx,%eax
73	jne 1b
74	sfence /* order the movnti stores */
75	popl %ebx
76	ret
77END(sse2_pagezero)
78
79ENTRY(i686_pagezero)
/*
 * void i686_pagezero(void *page)
 * Zero one page (1024 longwords), but first scan with repe scasl and
 * skip runs that are already zero, so clean pages dirty no cache lines.
 */
80	pushl %edi
81	pushl %ebx
82
83	movl 12(%esp),%edi /* %edi = page cursor */
84	movl $1024,%ecx /* longwords remaining in page */
85
86	ALIGN_TEXT
871:
88	xorl %eax,%eax
89	repe /* scan forward while longwords == 0 */
90	scasl
91	jnz 2f /* hit a nonzero longword */
92
	/* whole remainder was already zero -- done */
93	popl %ebx
94	popl %edi
95	ret
96
97	ALIGN_TEXT
98
992:
	/* scas stopped just past the nonzero longword; back up to it */
100	incl %ecx
101	subl $4,%edi
102
103	movl %ecx,%edx /* %edx = longwords remaining */
104	cmpl $16,%ecx
105
106	jge 3f /* >= 16 left: just zero them all */
107
	/* < 16 left: zero only up to the next 64-byte line, then rescan */
108	movl %edi,%ebx
109	andl $0x3f,%ebx /* byte offset within 64-byte line */
110	shrl %ebx
111	shrl %ebx /* -> longword offset within line */
112	movl $16,%ecx
113	subl %ebx,%ecx /* longwords to the line boundary */
114
1153:
116	subl %ecx,%edx /* account for what we are about to write */
117	rep
118	stosl /* %eax is still 0 here */
119
120	movl %edx,%ecx
121	testl %edx,%edx
122	jnz 1b /* more page left: resume scanning */
123
124	popl %ebx
125	popl %edi
126	ret
127END(i686_pagezero)
128
129/* fillw(pat, base, cnt) */
130ENTRY(fillw)
/*
 * void fillw(int pat, void *base, size_t cnt)
 * Store cnt copies of the 16-bit pattern pat starting at base.
 */
131	pushl %edi
132	movl 8(%esp),%eax /* pattern (low 16 bits used by stosw) */
133	movl 12(%esp),%edi /* destination */
134	movl 16(%esp),%ecx /* word count */
135	rep
136	stosw
137	popl %edi
138	ret
139END(fillw)
140
141ENTRY(bcopyb)
/*
 * void bcopyb(const void *src, void *dst, size_t len)
 * Byte-by-byte copy that handles overlapping regions: if
 * (dst - src) < len unsigned, src < dst and the regions overlap,
 * so copy backwards with the direction flag set.
 */
142	pushl %esi
143	pushl %edi
144	movl 12(%esp),%esi
145	movl 16(%esp),%edi
146	movl 20(%esp),%ecx
147	movl %edi,%eax
148	subl %esi,%eax /* %eax = dst - src */
149	cmpl %ecx,%eax /* overlapping && src < dst? */
150	jb 1f
151	rep
152	movsb
153	popl %edi
154	popl %esi
155	ret
156
157	ALIGN_TEXT
1581:
159	addl %ecx,%edi /* copy backwards. */
160	addl %ecx,%esi
161	decl %edi /* point at the last byte of each region */
162	decl %esi
163	std /* descending string ops */
164	rep
165	movsb
166	popl %edi
167	popl %esi
168	cld /* restore the expected direction flag */
169	ret
170END(bcopyb)
171
172/*
173 * bcopy(src, dst, cnt)
174 * ws@tools.de (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
175 */
176ENTRY(bcopy)
/*
 * void bcopy(const void *src, void *dst, size_t cnt)
 * Overlap-safe copy, longword-wise where possible.  The unsigned
 * (dst - src) < cnt test detects the only dangerous overlap
 * (src < dst), in which case the copy runs backwards.
 */
177	pushl %ebp
178	movl %esp,%ebp /* frame pointer: args at fixed 8/12/16(%ebp) */
179	pushl %esi
180	pushl %edi
181	movl 8(%ebp),%esi
182	movl 12(%ebp),%edi
183	movl 16(%ebp),%ecx
184
185	movl %edi,%eax
186	subl %esi,%eax
187	cmpl %ecx,%eax /* overlapping && src < dst? */
188	jb 1f
189
190	shrl $2,%ecx /* copy by 32-bit words */
191	rep
192	movsl
193	movl 16(%ebp),%ecx
194	andl $3,%ecx /* any bytes left? */
195	rep
196	movsb
197	popl %edi
198	popl %esi
199	popl %ebp
200	ret
201
202	ALIGN_TEXT
2031:
204	addl %ecx,%edi /* copy backwards */
205	addl %ecx,%esi
206	decl %edi
207	decl %esi
208	andl $3,%ecx /* any fractional bytes? */
209	std
210	rep
211	movsb /* tail bytes first (highest addresses) */
212	movl 16(%ebp),%ecx /* copy remainder by 32-bit words */
213	shrl $2,%ecx
214	subl $3,%esi /* step from last byte to last longword */
215	subl $3,%edi
216	rep
217	movsl
218	popl %edi
219	popl %esi
220	cld /* restore direction flag */
221	popl %ebp
222	ret
223END(bcopy)
224
225/*
226 * Note: memcpy does not support overlapping copies
227 */
228ENTRY(memcpy)
/*
 * void *memcpy(void *dst, const void *src, size_t len)
 * Forward-only copy; overlapping regions are NOT supported.
 * Returns the original dst pointer in %eax, per the C contract.
 */
229	pushl %edi
230	pushl %esi
231	movl 12(%esp),%edi
232	movl 16(%esp),%esi
233	movl 20(%esp),%ecx
234	movl %edi,%eax /* return value = dst (saved before movs advances %edi) */
235	shrl $2,%ecx /* copy by 32-bit words */
236	rep
237	movsl
238	movl 20(%esp),%ecx
239	andl $3,%ecx /* any bytes left? */
240	rep
241	movsb
242	popl %esi
243	popl %edi
244	ret
245END(memcpy)
246
247/*****************************************************************************/
248/* copyout and fubyte family */
249/*****************************************************************************/
250/*
251 * Access user memory from inside the kernel. These routines and possibly
252 * the math- and DOS emulators should be the only places that do this.
253 *
254 * We have to access the memory with user's permissions, so use a segment
255 * selector with RPL 3. For writes to user space we have to additionally
256 * check the PTE for write permission, because the 386 does not check
257 * write permissions when we are executing with EPL 0. The 486 does check
258 * this if the WP bit is set in CR0, so we can use a simpler version here.
259 *
260 * These routines set curpcb->pcb_onfault for the time they execute. When a
261 * protection violation occurs inside the functions, the trap handler
262 * returns to *curpcb->pcb_onfault instead of the function.
263 */
264
265/*
266 * copyout(from_kernel, to_user, len) - MP SAFE
267 */
268ENTRY(copyout)
/*
 * int copyout(const void *kaddr, void *uaddr, size_t len)
 * Copy len bytes from kernel to user space.  Returns 0 on success,
 * EFAULT otherwise.  Arms curpcb->pcb_onfault so that any page or
 * protection fault taken during the copy unwinds to copyout_fault
 * instead of panicking.
 */
269	movl PCPU(CURPCB),%eax
270	movl $copyout_fault,PCB_ONFAULT(%eax) /* arm fault handler */
271	pushl %esi
272	pushl %edi
273	pushl %ebx
274	movl 16(%esp),%esi /* args shifted 12 by the three pushes */
275	movl 20(%esp),%edi
276	movl 24(%esp),%ebx
277	testl %ebx,%ebx /* anything to do? */
278	jz done_copyout
279
280	/*
281	 * Check explicitly for non-user addresses. This check is essential
282	 * because it prevents usermode from writing into the kernel. We do
283	 * not verify anywhere else that the user did not specify a rogue
284	 * address.
285	 */
286	/*
287	 * First, prevent address wrapping.
288	 */
289	movl %edi,%eax
290	addl %ebx,%eax
291	jc copyout_fault /* uaddr + len wrapped past 2^32 */
292/*
293 * XXX STOP USING VM_MAXUSER_ADDRESS.
294 * It is an end address, not a max, so every time it is used correctly it
295 * looks like there is an off by one error, and of course it caused an off
296 * by one error in several places.
297 */
298	cmpl $VM_MAXUSER_ADDRESS,%eax
299	ja copyout_fault /* end address lies in kernel space */
300
301	/* bcopy(%esi, %edi, %ebx) */
302	movl %ebx,%ecx
303
304	shrl $2,%ecx /* longword-wise copy */
305	rep
306	movsl
307	movb %bl,%cl
308	andb $3,%cl /* 0..3 tail bytes */
309	rep
310	movsb
311
312done_copyout:
313	popl %ebx
314	popl %edi
315	popl %esi
316	xorl %eax,%eax /* success */
317	movl PCPU(CURPCB),%edx
318	movl %eax,PCB_ONFAULT(%edx) /* disarm fault handler */
319	ret
320END(copyout)
321
322	ALIGN_TEXT
/*
 * Fault landing pad for copyout: the trap handler resumes here (via
 * pcb_onfault) with the three pushed registers still on the stack.
 * Pop them, disarm the handler and return EFAULT.
 */
323copyout_fault:
324	popl %ebx
325	popl %edi
326	popl %esi
327	movl PCPU(CURPCB),%edx
328	movl $0,PCB_ONFAULT(%edx)
329	movl $EFAULT,%eax
330	ret
331
332/*
333 * copyin(from_user, to_kernel, len) - MP SAFE
334 */
335ENTRY(copyin)
/*
 * int copyin(const void *uaddr, void *kaddr, size_t len)
 * Copy len bytes from user to kernel space.  Returns 0 or EFAULT.
 * Faults during the copy unwind to copyin_fault via pcb_onfault.
 */
336	movl PCPU(CURPCB),%eax
337	movl $copyin_fault,PCB_ONFAULT(%eax) /* arm fault handler */
338	pushl %esi
339	pushl %edi
340	movl 12(%esp),%esi /* caddr_t from */
341	movl 16(%esp),%edi /* caddr_t to */
342	movl 20(%esp),%ecx /* size_t len */
343
344	/*
345	 * make sure address is valid
346	 */
347	movl %esi,%edx
348	addl %ecx,%edx
349	jc copyin_fault /* uaddr + len wrapped */
350	cmpl $VM_MAXUSER_ADDRESS,%edx
351	ja copyin_fault /* source range reaches kernel space */
352
353	movb %cl,%al /* stash low len bits; %ecx is consumed below */
354	shrl $2,%ecx /* copy longword-wise */
355	rep
356	movsl
357	movb %al,%cl
358	andb $3,%cl /* copy remaining bytes */
359	rep
360	movsb
361
362	popl %edi
363	popl %esi
364	xorl %eax,%eax /* success */
365	movl PCPU(CURPCB),%edx
366	movl %eax,PCB_ONFAULT(%edx) /* disarm fault handler */
367	ret
368END(copyin)
369
370	ALIGN_TEXT
/*
 * Fault landing pad for copyin: unwind the two pushed registers,
 * disarm pcb_onfault and return EFAULT.
 */
371copyin_fault:
372	popl %edi
373	popl %esi
374	movl PCPU(CURPCB),%edx
375	movl $0,PCB_ONFAULT(%edx)
376	movl $EFAULT,%eax
377	ret
378
379/*
380 * casueword. Compare and set user word. Returns -1 on fault,
381 * 0 on non-faulting access. The current value is in *oldp.
382 */
383ALTENTRY(casueword32)
/*
 * int casueword(volatile u_long *uaddr, u_long old, u_long *oldp, u_long new)
 * Atomic compare-and-set of a user-space word.  casueword32 is the
 * identical 32-bit variant (same code on i386).  On success returns 0
 * with the witnessed value stored in *oldp; a fault returns -1 via
 * fusufault (pcb_onfault).
 */
384ENTRY(casueword)
385	movl PCPU(CURPCB),%ecx
386	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
387	movl 4(%esp),%edx /* dst */
388	movl 8(%esp),%eax /* old */
389	movl 16(%esp),%ecx /* new */
390
391	cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address is valid */
392	ja fusufault /* all 4 bytes must be user space */
393
394#ifdef SMP
395	lock
396#endif
397	cmpxchgl %ecx,(%edx) /* Compare and set. */
398
399	/*
400	 * The old value is in %eax. If the store succeeded it will be the
401	 * value we expected (old) from before the store, otherwise it will
402	 * be the current value.
403	 */
404
405	movl PCPU(CURPCB),%ecx
406	movl $0,PCB_ONFAULT(%ecx) /* disarm fault handler */
407	movl 12(%esp),%edx /* oldp */
408	movl %eax,(%edx) /* report witnessed value */
409	xorl %eax,%eax /* non-faulting access -> 0 */
410	ret
411END(casueword32)
412END(casueword)
413
414/*
415 * Fetch (load) a 32-bit word, a 16-bit word, or an 8-bit byte from user
416 * memory.
417 */
418
419ALTENTRY(fueword32)
/*
 * int fueword(volatile const void *uaddr, long *val)
 * Fetch a 32-bit word from user space into *val.  Returns 0 on
 * success, -1 on fault (via fusufault).  fueword32 is identical
 * on i386.
 */
420ENTRY(fueword)
421	movl PCPU(CURPCB),%ecx
422	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
423	movl 4(%esp),%edx /* from */
424
425	cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address is valid */
426	ja fusufault
427
428	movl (%edx),%eax /* the access that may fault */
429	movl $0,PCB_ONFAULT(%ecx) /* disarm fault handler */
430	movl 8(%esp),%edx
431	movl %eax,(%edx) /* *val = fetched word */
432	xorl %eax,%eax
433	ret
434END(fueword32)
435END(fueword)
436
437/*
438 * fuswintr() and suswintr() are specialized variants of fuword16() and
439 * suword16(), respectively. They are called from the profiling code,
440 * potentially at interrupt time. If they fail, that's okay; good things
441 * will happen later. They always fail for now, until the trap code is
442 * able to deal with this.
443 */
444ALTENTRY(suswintr)
/*
 * Interrupt-time fuword16/suword16 variants.  As described in the
 * comment above, they unconditionally fail (-1) for now; both entry
 * points share this one-instruction body.
 */
445ENTRY(fuswintr)
446	movl $-1,%eax
447	ret
448END(suswintr)
449END(fuswintr)
450
451ENTRY(fuword16)
/*
 * int fuword16(volatile const void *uaddr)
 * Fetch a 16-bit word from user space; returns it zero-extended in
 * %eax, or -1 on fault (via fusufault).
 */
452	movl PCPU(CURPCB),%ecx
453	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
454	movl 4(%esp),%edx
455
456	cmpl $VM_MAXUSER_ADDRESS-2,%edx /* both bytes must be user space */
457	ja fusufault
458
459	movzwl (%edx),%eax /* zero-extended fetch */
460	movl $0,PCB_ONFAULT(%ecx) /* disarm fault handler */
461	ret
462END(fuword16)
463
464ENTRY(fubyte)
/*
 * int fubyte(volatile const void *uaddr)
 * Fetch one byte from user space; returns it zero-extended in %eax,
 * or -1 on fault (via fusufault).
 */
465	movl PCPU(CURPCB),%ecx
466	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
467	movl 4(%esp),%edx
468
469	cmpl $VM_MAXUSER_ADDRESS-1,%edx
470	ja fusufault
471
472	movzbl (%edx),%eax /* zero-extended fetch */
473	movl $0,PCB_ONFAULT(%ecx) /* disarm fault handler */
474	ret
475END(fubyte)
476
477	ALIGN_TEXT
/*
 * Common fault landing pad for the fu*/su*/casueword family:
 * clear pcb_onfault and return -1 (xor then dec).
 */
478fusufault:
479	movl PCPU(CURPCB),%ecx
480	xorl %eax,%eax
481	movl %eax,PCB_ONFAULT(%ecx) /* disarm fault handler */
482	decl %eax /* %eax = -1 */
483	ret
484
485/*
486 * Store a 32-bit word, a 16-bit word, or an 8-bit byte to user memory.
487 * All these functions are MPSAFE.
488 */
489
490ALTENTRY(suword32)
/*
 * int suword(volatile void *uaddr, long word)
 * Store a 32-bit word to user space.  Returns 0 on success, -1 on
 * fault (via fusufault).  suword32 is identical on i386.
 */
491ENTRY(suword)
492	movl PCPU(CURPCB),%ecx
493	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
494	movl 4(%esp),%edx
495
496	cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address validity */
497	ja fusufault
498
499	movl 8(%esp),%eax
500	movl %eax,(%edx) /* the store that may fault */
501	xorl %eax,%eax
502	movl PCPU(CURPCB),%ecx
503	movl %eax,PCB_ONFAULT(%ecx) /* disarm fault handler */
504	ret
505END(suword32)
506END(suword)
507
508ENTRY(suword16)
/*
 * int suword16(volatile void *uaddr, int word)
 * Store a 16-bit word to user space.  Returns 0 or -1 on fault.
 */
509	movl PCPU(CURPCB),%ecx
510	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
511	movl 4(%esp),%edx
512
513	cmpl $VM_MAXUSER_ADDRESS-2,%edx /* verify address validity */
514	ja fusufault
515
516	movw 8(%esp),%ax
517	movw %ax,(%edx) /* the store that may fault */
518	xorl %eax,%eax
519	movl PCPU(CURPCB),%ecx /* restore trashed register */
520	movl %eax,PCB_ONFAULT(%ecx)
521	ret
522END(suword16)
523
524ENTRY(subyte)
/*
 * int subyte(volatile void *uaddr, int byte)
 * Store one byte to user space.  Returns 0 or -1 on fault.
 */
525	movl PCPU(CURPCB),%ecx
526	movl $fusufault,PCB_ONFAULT(%ecx) /* arm fault handler */
527	movl 4(%esp),%edx
528
529	cmpl $VM_MAXUSER_ADDRESS-1,%edx /* verify address validity */
530	ja fusufault
531
532	movb 8(%esp),%al
533	movb %al,(%edx) /* the store that may fault */
534	xorl %eax,%eax
535	movl PCPU(CURPCB),%ecx /* restore trashed register */
536	movl %eax,PCB_ONFAULT(%ecx)
537	ret
538END(subyte)
539
540/*
541 * copyinstr(from, to, maxlen, int *lencopied) - MP SAFE
542 *
543 * copy a string from 'from' to 'to', stop when a 0 character is reached.
544 * return ENAMETOOLONG if string is longer than maxlen, and
545 * EFAULT on protection violations. If lencopied is non-zero,
546 * return the actual length in *lencopied.
547 */
548ENTRY(copyinstr)
/*
 * NUL-terminated string copy from user space (contract documented in
 * the comment above).  %edx counts remaining bytes + 1; the saved
 * maxlen at 20(%esp) is clamped so that the final length arithmetic
 * at cpystrflt_x stays correct.  Faults unwind to cpystrflt.
 */
549	pushl %esi
550	pushl %edi
551	movl PCPU(CURPCB),%ecx
552	movl $cpystrflt,PCB_ONFAULT(%ecx) /* arm fault handler */
553
554	movl 12(%esp),%esi /* %esi = from */
555	movl 16(%esp),%edi /* %edi = to */
556	movl 20(%esp),%edx /* %edx = maxlen */
557
558	movl $VM_MAXUSER_ADDRESS,%eax
559
560	/* make sure 'from' is within bounds */
561	subl %esi,%eax
562	jbe cpystrflt /* from is already in kernel space */
563
564	/* restrict maxlen to <= VM_MAXUSER_ADDRESS-from */
565	cmpl %edx,%eax
566	jae 1f
567	movl %eax,%edx
568	movl %eax,20(%esp) /* keep the clamped value for length math */
5691:
570	incl %edx /* pre-increment for the decl at loop top */
571
5722:
573	decl %edx
574	jz 3f /* maxlen exhausted */
575
576	lodsb /* byte-at-a-time copy until NUL */
577	stosb
578	orb %al,%al
579	jnz 2b
580
581	/* Success -- 0 byte reached */
582	decl %edx
583	xorl %eax,%eax
584	jmp cpystrflt_x
5853:
586	/* edx is zero - return ENAMETOOLONG or EFAULT */
587	cmpl $VM_MAXUSER_ADDRESS,%esi
588	jae cpystrflt /* stopped because we ran off user space */
5894:
590	movl $ENAMETOOLONG,%eax
591	jmp cpystrflt_x
592
593cpystrflt:
594	movl $EFAULT,%eax
595
596cpystrflt_x:
597	/* set *lencopied and return %eax */
598	movl PCPU(CURPCB),%ecx
599	movl $0,PCB_ONFAULT(%ecx) /* disarm fault handler */
600	movl 20(%esp),%ecx
601	subl %edx,%ecx /* copied = (clamped) maxlen - remaining */
602	movl 24(%esp),%edx
603	testl %edx,%edx /* lencopied pointer may be NULL */
604	jz 1f
605	movl %ecx,(%edx)
6061:
607	popl %edi
608	popl %esi
609	ret
610END(copyinstr)
611
612/*
613 * copystr(from, to, maxlen, int *lencopied) - MP SAFE
614 */
615ENTRY(copystr)
/*
 * Kernel-to-kernel variant of copyinstr: same loop and return
 * contract (0 / ENAMETOOLONG), but no user-address checks and no
 * pcb_onfault handling, since both buffers are kernel memory.
 */
616	pushl %esi
617	pushl %edi
618
619	movl 12(%esp),%esi /* %esi = from */
620	movl 16(%esp),%edi /* %edi = to */
621	movl 20(%esp),%edx /* %edx = maxlen */
622	incl %edx /* pre-increment for the decl at loop top */
6231:
624	decl %edx
625	jz 4f /* maxlen exhausted */
626	lodsb /* byte-at-a-time copy until NUL */
627	stosb
628	orb %al,%al
629	jnz 1b
630
631	/* Success -- 0 byte reached */
632	decl %edx
633	xorl %eax,%eax
634	jmp 6f
6354:
636	/* edx is zero -- return ENAMETOOLONG */
637	movl $ENAMETOOLONG,%eax
638
6396:
640	/* set *lencopied and return %eax */
641	movl 20(%esp),%ecx
642	subl %edx,%ecx /* copied = maxlen - remaining */
643	movl 24(%esp),%edx
644	testl %edx,%edx /* lencopied pointer may be NULL */
645	jz 7f
646	movl %ecx,(%edx)
6477:
648	popl %edi
649	popl %esi
650	ret
651END(copystr)
652
653ENTRY(bcmp)
/*
 * int bcmp(const void *b1, const void *b2, size_t len)
 * Returns 0 if the buffers are equal, nonzero otherwise.  Compares
 * longword-wise, then the 0..3 tail bytes; either repe loop leaving
 * ZF clear makes setne produce 1.
 */
654	pushl %edi
655	pushl %esi
656	movl 12(%esp),%edi
657	movl 16(%esp),%esi
658	movl 20(%esp),%edx
659
660	movl %edx,%ecx
661	shrl $2,%ecx /* longword count */
662	repe
663	cmpsl
664	jne 1f /* longword mismatch */
665
666	movl %edx,%ecx
667	andl $3,%ecx /* tail bytes */
668	repe
669	cmpsb
6701:
671	setne %al /* ZF -> 0, !ZF -> 1 */
672	movsbl %al,%eax
673	popl %esi
674	popl %edi
675	ret
676END(bcmp)
677
678/*
679 * Handling of special 386 registers and descriptor tables etc
680 */
681/* void lgdt(struct region_descriptor *rdp); */
682ENTRY(lgdt)
/*
 * void lgdt(struct region_descriptor *rdp)
 * Load a new GDT and refresh every segment register so no stale
 * descriptor cache entries survive; %cs is reloaded by converting
 * the near return into a far (lret) return.
 */
683	/* reload the descriptor table */
684	movl 4(%esp),%eax
685	lgdt (%eax)
686
687	/* flush the prefetch q */
688	jmp 1f
689	nop
6901:
691	/* reload "stale" selectors */
692	movl $KDSEL,%eax /* kernel data selector */
693	movl %eax,%ds
694	movl %eax,%es
695	movl %eax,%gs
696	movl %eax,%ss
697	movl $KPSEL,%eax /* per-CPU (private) data selector */
698	movl %eax,%fs
699
700	/* reload code selector by turning return into intersegmental return */
701	movl (%esp),%eax
702	pushl %eax
703	movl $KCSEL,4(%esp)
704	MEXITCOUNT
705	lret
706END(lgdt)
707
708/* ssdtosd(*ssdp,*sdp) */
709ENTRY(ssdtosd)
/*
 * void ssdtosd(struct soft_segment_descriptor *ssdp,
 *              struct segment_descriptor *sdp)
 * Repack a machine-independent "soft" segment descriptor into the
 * scattered bit layout of a hardware i386 descriptor, assembling the
 * two output longwords in %edx (low) and %ebx (high).
 */
710	pushl %ebx
711	movl 8(%esp),%ecx /* %ecx = ssdp */
712	movl 8(%ecx),%ebx
713	shll $16,%ebx
714	movl (%ecx),%edx
715	roll $16,%edx
716	movb %dh,%bl /* shuffle base/limit bytes into place */
717	movb %dl,%bh
718	rorl $8,%ebx
719	movl 4(%ecx),%eax
720	movw %ax,%dx
721	andl $0xf0000,%eax /* limit bits 16..19 */
722	orl %eax,%ebx
723	movl 12(%esp),%ecx /* %ecx = sdp */
724	movl %edx,(%ecx) /* hardware descriptor, low word */
725	movl %ebx,4(%ecx) /* hardware descriptor, high word */
726	popl %ebx
727	ret
728END(ssdtosd)
729
730/* void reset_dbregs() */
731ENTRY(reset_dbregs)
/*
 * void reset_dbregs(void)
 * Clear all hardware debug registers.  %dr7 goes first so every
 * breakpoint is disabled before the address/status registers change.
 */
732	movl $0,%eax
733	movl %eax,%dr7 /* disable all breakpoints first */
734	movl %eax,%dr0
735	movl %eax,%dr1
736	movl %eax,%dr2
737	movl %eax,%dr3
738	movl %eax,%dr6 /* clear debug status */
739	ret
740END(reset_dbregs)
741
742/*****************************************************************************/
743/* setjump, longjump */
744/*****************************************************************************/
745
746ENTRY(setjmp)
/*
 * int setjmp(jmp_buf buf)
 * Save the callee-saved registers, %esp and the return %eip into the
 * six-slot buffer at 4(%esp); returns 0 (longjmp returns 1 here).
 */
747	movl 4(%esp),%eax
748	movl %ebx,(%eax) /* save ebx */
749	movl %esp,4(%eax) /* save esp */
750	movl %ebp,8(%eax) /* save ebp */
751	movl %esi,12(%eax) /* save esi */
752	movl %edi,16(%eax) /* save edi */
753	movl (%esp),%edx /* get rta */
754	movl %edx,20(%eax) /* save eip */
755	xorl %eax,%eax /* return(0); */
756	ret
757END(setjmp)
758
759ENTRY(longjmp)
/*
 * void longjmp(jmp_buf buf)
 * Restore the context saved by setjmp and resume there; overwrites
 * the return slot on the restored stack so ret lands at the saved
 * %eip, where the call appears to return 1.
 */
760	movl 4(%esp),%eax
761	movl (%eax),%ebx /* restore ebx */
762	movl 4(%eax),%esp /* restore esp */
763	movl 8(%eax),%ebp /* restore ebp */
764	movl 12(%eax),%esi /* restore esi */
765	movl 16(%eax),%edi /* restore edi */
766	movl 20(%eax),%edx /* get rta */
767	movl %edx,(%esp) /* put in return frame */
768	xorl %eax,%eax /* return(1); */
769	incl %eax
770	ret
771END(longjmp)
772
773/*
774 * Support for reading MSRs in the safe manner.
775 */
776ENTRY(rdmsr_safe)
777/* int rdmsr_safe(u_int msr, uint64_t *data) */
/*
 * Read an MSR with fault protection: a trap raised by rdmsr (e.g. on
 * a non-existent MSR) unwinds to msr_onfault via pcb_onfault, which
 * returns EFAULT; otherwise *data = edx:eax and 0 is returned.
 */
778	movl PCPU(CURPCB),%ecx
779	movl $msr_onfault,PCB_ONFAULT(%ecx) /* arm fault handler */
780
781	movl 4(%esp),%ecx /* MSR number */
782	rdmsr
783	movl 8(%esp),%ecx
784	movl %eax,(%ecx) /* low 32 bits */
785	movl %edx,4(%ecx) /* high 32 bits */
786	xorl %eax,%eax /* success */
787
788	movl PCPU(CURPCB),%ecx
789	movl %eax,PCB_ONFAULT(%ecx) /* disarm fault handler */
790
791	ret
792
793/*
794 * Support for writing MSRs in the safe manner.
795 */
796ENTRY(wrmsr_safe)
797/* int wrmsr_safe(u_int msr, uint64_t data) */
/*
 * Write an MSR with fault protection: a trap raised by wrmsr unwinds
 * to msr_onfault (EFAULT); otherwise returns 0.
 */
798	movl PCPU(CURPCB),%ecx
799	movl $msr_onfault,PCB_ONFAULT(%ecx) /* arm fault handler */
800
801	movl 4(%esp),%ecx /* MSR number */
802	movl 8(%esp),%eax /* value, low 32 bits */
803	movl 12(%esp),%edx /* value, high 32 bits */
804	wrmsr
805	xorl %eax,%eax /* success */
806
807	movl PCPU(CURPCB),%ecx
808	movl %eax,PCB_ONFAULT(%ecx) /* disarm fault handler */
809
810	ret
811
812/*
813 * MSR operations fault handler
814 */
815	ALIGN_TEXT
/*
 * Fault landing pad shared by rdmsr_safe/wrmsr_safe: disarm
 * pcb_onfault and return EFAULT.
 */
816msr_onfault:
817	movl PCPU(CURPCB),%ecx
818	movl $0,PCB_ONFAULT(%ecx)
819	movl $EFAULT,%eax
820	ret
821
822ENTRY(handle_ibrs_entry)
/*
 * IBRS (Indirect Branch Restricted Speculation) kernel entry/exit
 * hooks.  On this branch both are no-op stubs -- each body is a bare
 * ret; presumably the MSR manipulation lives elsewhere or is not
 * implemented for i386 here (TODO confirm against newer revisions).
 */
823	ret
824END(handle_ibrs_entry)
825
826ENTRY(handle_ibrs_exit)
827	ret
828END(handle_ibrs_exit)
30 */
31
32#include <machine/asmacros.h>
33#include <machine/cputypes.h>
34#include <machine/pmap.h>
35#include <machine/specialreg.h>
36
37#include "assym.s"
38
39#define IDXSHIFT 10
40
41 .text
42
43/*
44 * bcopy family
45 * void bzero(void *buf, u_int len)
46 */
47ENTRY(bzero)
48 pushl %edi
49 movl 8(%esp),%edi
50 movl 12(%esp),%ecx
51 xorl %eax,%eax
52 shrl $2,%ecx
53 rep
54 stosl
55 movl 12(%esp),%ecx
56 andl $3,%ecx
57 rep
58 stosb
59 popl %edi
60 ret
61END(bzero)
62
63ENTRY(sse2_pagezero)
64 pushl %ebx
65 movl 8(%esp),%ecx
66 movl %ecx,%eax
67 addl $4096,%eax
68 xor %ebx,%ebx
691:
70 movnti %ebx,(%ecx)
71 addl $4,%ecx
72 cmpl %ecx,%eax
73 jne 1b
74 sfence
75 popl %ebx
76 ret
77END(sse2_pagezero)
78
79ENTRY(i686_pagezero)
80 pushl %edi
81 pushl %ebx
82
83 movl 12(%esp),%edi
84 movl $1024,%ecx
85
86 ALIGN_TEXT
871:
88 xorl %eax,%eax
89 repe
90 scasl
91 jnz 2f
92
93 popl %ebx
94 popl %edi
95 ret
96
97 ALIGN_TEXT
98
992:
100 incl %ecx
101 subl $4,%edi
102
103 movl %ecx,%edx
104 cmpl $16,%ecx
105
106 jge 3f
107
108 movl %edi,%ebx
109 andl $0x3f,%ebx
110 shrl %ebx
111 shrl %ebx
112 movl $16,%ecx
113 subl %ebx,%ecx
114
1153:
116 subl %ecx,%edx
117 rep
118 stosl
119
120 movl %edx,%ecx
121 testl %edx,%edx
122 jnz 1b
123
124 popl %ebx
125 popl %edi
126 ret
127END(i686_pagezero)
128
129/* fillw(pat, base, cnt) */
130ENTRY(fillw)
131 pushl %edi
132 movl 8(%esp),%eax
133 movl 12(%esp),%edi
134 movl 16(%esp),%ecx
135 rep
136 stosw
137 popl %edi
138 ret
139END(fillw)
140
141ENTRY(bcopyb)
142 pushl %esi
143 pushl %edi
144 movl 12(%esp),%esi
145 movl 16(%esp),%edi
146 movl 20(%esp),%ecx
147 movl %edi,%eax
148 subl %esi,%eax
149 cmpl %ecx,%eax /* overlapping && src < dst? */
150 jb 1f
151 rep
152 movsb
153 popl %edi
154 popl %esi
155 ret
156
157 ALIGN_TEXT
1581:
159 addl %ecx,%edi /* copy backwards. */
160 addl %ecx,%esi
161 decl %edi
162 decl %esi
163 std
164 rep
165 movsb
166 popl %edi
167 popl %esi
168 cld
169 ret
170END(bcopyb)
171
172/*
173 * bcopy(src, dst, cnt)
174 * ws@tools.de (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
175 */
176ENTRY(bcopy)
177 pushl %ebp
178 movl %esp,%ebp
179 pushl %esi
180 pushl %edi
181 movl 8(%ebp),%esi
182 movl 12(%ebp),%edi
183 movl 16(%ebp),%ecx
184
185 movl %edi,%eax
186 subl %esi,%eax
187 cmpl %ecx,%eax /* overlapping && src < dst? */
188 jb 1f
189
190 shrl $2,%ecx /* copy by 32-bit words */
191 rep
192 movsl
193 movl 16(%ebp),%ecx
194 andl $3,%ecx /* any bytes left? */
195 rep
196 movsb
197 popl %edi
198 popl %esi
199 popl %ebp
200 ret
201
202 ALIGN_TEXT
2031:
204 addl %ecx,%edi /* copy backwards */
205 addl %ecx,%esi
206 decl %edi
207 decl %esi
208 andl $3,%ecx /* any fractional bytes? */
209 std
210 rep
211 movsb
212 movl 16(%ebp),%ecx /* copy remainder by 32-bit words */
213 shrl $2,%ecx
214 subl $3,%esi
215 subl $3,%edi
216 rep
217 movsl
218 popl %edi
219 popl %esi
220 cld
221 popl %ebp
222 ret
223END(bcopy)
224
225/*
226 * Note: memcpy does not support overlapping copies
227 */
228ENTRY(memcpy)
229 pushl %edi
230 pushl %esi
231 movl 12(%esp),%edi
232 movl 16(%esp),%esi
233 movl 20(%esp),%ecx
234 movl %edi,%eax
235 shrl $2,%ecx /* copy by 32-bit words */
236 rep
237 movsl
238 movl 20(%esp),%ecx
239 andl $3,%ecx /* any bytes left? */
240 rep
241 movsb
242 popl %esi
243 popl %edi
244 ret
245END(memcpy)
246
247/*****************************************************************************/
248/* copyout and fubyte family */
249/*****************************************************************************/
250/*
251 * Access user memory from inside the kernel. These routines and possibly
252 * the math- and DOS emulators should be the only places that do this.
253 *
254 * We have to access the memory with user's permissions, so use a segment
255 * selector with RPL 3. For writes to user space we have to additionally
256 * check the PTE for write permission, because the 386 does not check
257 * write permissions when we are executing with EPL 0. The 486 does check
258 * this if the WP bit is set in CR0, so we can use a simpler version here.
259 *
260 * These routines set curpcb->pcb_onfault for the time they execute. When a
261 * protection violation occurs inside the functions, the trap handler
262 * returns to *curpcb->pcb_onfault instead of the function.
263 */
264
265/*
266 * copyout(from_kernel, to_user, len) - MP SAFE
267 */
268ENTRY(copyout)
269 movl PCPU(CURPCB),%eax
270 movl $copyout_fault,PCB_ONFAULT(%eax)
271 pushl %esi
272 pushl %edi
273 pushl %ebx
274 movl 16(%esp),%esi
275 movl 20(%esp),%edi
276 movl 24(%esp),%ebx
277 testl %ebx,%ebx /* anything to do? */
278 jz done_copyout
279
280 /*
281 * Check explicitly for non-user addresses. This check is essential
282 * because it prevents usermode from writing into the kernel. We do
283 * not verify anywhere else that the user did not specify a rogue
284 * address.
285 */
286 /*
287 * First, prevent address wrapping.
288 */
289 movl %edi,%eax
290 addl %ebx,%eax
291 jc copyout_fault
292/*
293 * XXX STOP USING VM_MAXUSER_ADDRESS.
294 * It is an end address, not a max, so every time it is used correctly it
295 * looks like there is an off by one error, and of course it caused an off
296 * by one error in several places.
297 */
298 cmpl $VM_MAXUSER_ADDRESS,%eax
299 ja copyout_fault
300
301 /* bcopy(%esi, %edi, %ebx) */
302 movl %ebx,%ecx
303
304 shrl $2,%ecx
305 rep
306 movsl
307 movb %bl,%cl
308 andb $3,%cl
309 rep
310 movsb
311
312done_copyout:
313 popl %ebx
314 popl %edi
315 popl %esi
316 xorl %eax,%eax
317 movl PCPU(CURPCB),%edx
318 movl %eax,PCB_ONFAULT(%edx)
319 ret
320END(copyout)
321
322 ALIGN_TEXT
323copyout_fault:
324 popl %ebx
325 popl %edi
326 popl %esi
327 movl PCPU(CURPCB),%edx
328 movl $0,PCB_ONFAULT(%edx)
329 movl $EFAULT,%eax
330 ret
331
332/*
333 * copyin(from_user, to_kernel, len) - MP SAFE
334 */
335ENTRY(copyin)
336 movl PCPU(CURPCB),%eax
337 movl $copyin_fault,PCB_ONFAULT(%eax)
338 pushl %esi
339 pushl %edi
340 movl 12(%esp),%esi /* caddr_t from */
341 movl 16(%esp),%edi /* caddr_t to */
342 movl 20(%esp),%ecx /* size_t len */
343
344 /*
345 * make sure address is valid
346 */
347 movl %esi,%edx
348 addl %ecx,%edx
349 jc copyin_fault
350 cmpl $VM_MAXUSER_ADDRESS,%edx
351 ja copyin_fault
352
353 movb %cl,%al
354 shrl $2,%ecx /* copy longword-wise */
355 rep
356 movsl
357 movb %al,%cl
358 andb $3,%cl /* copy remaining bytes */
359 rep
360 movsb
361
362 popl %edi
363 popl %esi
364 xorl %eax,%eax
365 movl PCPU(CURPCB),%edx
366 movl %eax,PCB_ONFAULT(%edx)
367 ret
368END(copyin)
369
370 ALIGN_TEXT
371copyin_fault:
372 popl %edi
373 popl %esi
374 movl PCPU(CURPCB),%edx
375 movl $0,PCB_ONFAULT(%edx)
376 movl $EFAULT,%eax
377 ret
378
379/*
380 * casueword. Compare and set user word. Returns -1 on fault,
381 * 0 on non-faulting access. The current value is in *oldp.
382 */
383ALTENTRY(casueword32)
384ENTRY(casueword)
385 movl PCPU(CURPCB),%ecx
386 movl $fusufault,PCB_ONFAULT(%ecx)
387 movl 4(%esp),%edx /* dst */
388 movl 8(%esp),%eax /* old */
389 movl 16(%esp),%ecx /* new */
390
391 cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address is valid */
392 ja fusufault
393
394#ifdef SMP
395 lock
396#endif
397 cmpxchgl %ecx,(%edx) /* Compare and set. */
398
399 /*
400 * The old value is in %eax. If the store succeeded it will be the
401 * value we expected (old) from before the store, otherwise it will
402 * be the current value.
403 */
404
405 movl PCPU(CURPCB),%ecx
406 movl $0,PCB_ONFAULT(%ecx)
407 movl 12(%esp),%edx /* oldp */
408 movl %eax,(%edx)
409 xorl %eax,%eax
410 ret
411END(casueword32)
412END(casueword)
413
414/*
415 * Fetch (load) a 32-bit word, a 16-bit word, or an 8-bit byte from user
416 * memory.
417 */
418
419ALTENTRY(fueword32)
420ENTRY(fueword)
421 movl PCPU(CURPCB),%ecx
422 movl $fusufault,PCB_ONFAULT(%ecx)
423 movl 4(%esp),%edx /* from */
424
425 cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address is valid */
426 ja fusufault
427
428 movl (%edx),%eax
429 movl $0,PCB_ONFAULT(%ecx)
430 movl 8(%esp),%edx
431 movl %eax,(%edx)
432 xorl %eax,%eax
433 ret
434END(fueword32)
435END(fueword)
436
437/*
438 * fuswintr() and suswintr() are specialized variants of fuword16() and
439 * suword16(), respectively. They are called from the profiling code,
440 * potentially at interrupt time. If they fail, that's okay; good things
441 * will happen later. They always fail for now, until the trap code is
442 * able to deal with this.
443 */
444ALTENTRY(suswintr)
445ENTRY(fuswintr)
446 movl $-1,%eax
447 ret
448END(suswintr)
449END(fuswintr)
450
451ENTRY(fuword16)
452 movl PCPU(CURPCB),%ecx
453 movl $fusufault,PCB_ONFAULT(%ecx)
454 movl 4(%esp),%edx
455
456 cmpl $VM_MAXUSER_ADDRESS-2,%edx
457 ja fusufault
458
459 movzwl (%edx),%eax
460 movl $0,PCB_ONFAULT(%ecx)
461 ret
462END(fuword16)
463
464ENTRY(fubyte)
465 movl PCPU(CURPCB),%ecx
466 movl $fusufault,PCB_ONFAULT(%ecx)
467 movl 4(%esp),%edx
468
469 cmpl $VM_MAXUSER_ADDRESS-1,%edx
470 ja fusufault
471
472 movzbl (%edx),%eax
473 movl $0,PCB_ONFAULT(%ecx)
474 ret
475END(fubyte)
476
477 ALIGN_TEXT
478fusufault:
479 movl PCPU(CURPCB),%ecx
480 xorl %eax,%eax
481 movl %eax,PCB_ONFAULT(%ecx)
482 decl %eax
483 ret
484
485/*
486 * Store a 32-bit word, a 16-bit word, or an 8-bit byte to user memory.
487 * All these functions are MPSAFE.
488 */
489
490ALTENTRY(suword32)
491ENTRY(suword)
492 movl PCPU(CURPCB),%ecx
493 movl $fusufault,PCB_ONFAULT(%ecx)
494 movl 4(%esp),%edx
495
496 cmpl $VM_MAXUSER_ADDRESS-4,%edx /* verify address validity */
497 ja fusufault
498
499 movl 8(%esp),%eax
500 movl %eax,(%edx)
501 xorl %eax,%eax
502 movl PCPU(CURPCB),%ecx
503 movl %eax,PCB_ONFAULT(%ecx)
504 ret
505END(suword32)
506END(suword)
507
508ENTRY(suword16)
509 movl PCPU(CURPCB),%ecx
510 movl $fusufault,PCB_ONFAULT(%ecx)
511 movl 4(%esp),%edx
512
513 cmpl $VM_MAXUSER_ADDRESS-2,%edx /* verify address validity */
514 ja fusufault
515
516 movw 8(%esp),%ax
517 movw %ax,(%edx)
518 xorl %eax,%eax
519 movl PCPU(CURPCB),%ecx /* restore trashed register */
520 movl %eax,PCB_ONFAULT(%ecx)
521 ret
522END(suword16)
523
524ENTRY(subyte)
525 movl PCPU(CURPCB),%ecx
526 movl $fusufault,PCB_ONFAULT(%ecx)
527 movl 4(%esp),%edx
528
529 cmpl $VM_MAXUSER_ADDRESS-1,%edx /* verify address validity */
530 ja fusufault
531
532 movb 8(%esp),%al
533 movb %al,(%edx)
534 xorl %eax,%eax
535 movl PCPU(CURPCB),%ecx /* restore trashed register */
536 movl %eax,PCB_ONFAULT(%ecx)
537 ret
538END(subyte)
539
540/*
541 * copyinstr(from, to, maxlen, int *lencopied) - MP SAFE
542 *
543 * copy a string from 'from' to 'to', stop when a 0 character is reached.
544 * return ENAMETOOLONG if string is longer than maxlen, and
545 * EFAULT on protection violations. If lencopied is non-zero,
546 * return the actual length in *lencopied.
547 */
548ENTRY(copyinstr)
549 pushl %esi
550 pushl %edi
551 movl PCPU(CURPCB),%ecx
552 movl $cpystrflt,PCB_ONFAULT(%ecx)
553
554 movl 12(%esp),%esi /* %esi = from */
555 movl 16(%esp),%edi /* %edi = to */
556 movl 20(%esp),%edx /* %edx = maxlen */
557
558 movl $VM_MAXUSER_ADDRESS,%eax
559
560 /* make sure 'from' is within bounds */
561 subl %esi,%eax
562 jbe cpystrflt
563
564 /* restrict maxlen to <= VM_MAXUSER_ADDRESS-from */
565 cmpl %edx,%eax
566 jae 1f
567 movl %eax,%edx
568 movl %eax,20(%esp)
5691:
570 incl %edx
571
5722:
573 decl %edx
574 jz 3f
575
576 lodsb
577 stosb
578 orb %al,%al
579 jnz 2b
580
581 /* Success -- 0 byte reached */
582 decl %edx
583 xorl %eax,%eax
584 jmp cpystrflt_x
5853:
586 /* edx is zero - return ENAMETOOLONG or EFAULT */
587 cmpl $VM_MAXUSER_ADDRESS,%esi
588 jae cpystrflt
5894:
590 movl $ENAMETOOLONG,%eax
591 jmp cpystrflt_x
592
593cpystrflt:
594 movl $EFAULT,%eax
595
596cpystrflt_x:
597 /* set *lencopied and return %eax */
598 movl PCPU(CURPCB),%ecx
599 movl $0,PCB_ONFAULT(%ecx)
600 movl 20(%esp),%ecx
601 subl %edx,%ecx
602 movl 24(%esp),%edx
603 testl %edx,%edx
604 jz 1f
605 movl %ecx,(%edx)
6061:
607 popl %edi
608 popl %esi
609 ret
610END(copyinstr)
611
/*
 * copystr(from, to, maxlen, int *lencopied) - MP SAFE
 *
 * Copy a NUL-terminated string like copyinstr, but with no user-address
 * bounds checks and no PCB_ONFAULT fault recovery.  Returns 0, or
 * ENAMETOOLONG if no NUL was seen within maxlen bytes.  If lencopied is
 * non-NULL the number of bytes copied (including the NUL) is stored there.
 */
ENTRY(copystr)
	pushl	%esi
	pushl	%edi

	movl	12(%esp),%esi			/* %esi = from */
	movl	16(%esp),%edi			/* %edi = to */
	movl	20(%esp),%edx			/* %edx = maxlen */
	incl	%edx				/* bias for the pre-decrement below */
1:
	decl	%edx				/* room for another byte? */
	jz	4f
	lodsb					/* copy one byte, */
	stosb
	orb	%al,%al				/* was it the terminating NUL? */
	jnz	1b

	/* Success -- 0 byte reached */
	decl	%edx				/* make the length include the NUL */
	xorl	%eax,%eax
	jmp	6f
4:
	/* edx is zero -- return ENAMETOOLONG */
	movl	$ENAMETOOLONG,%eax

6:
	/* set *lencopied and return %eax */
	movl	20(%esp),%ecx
	subl	%edx,%ecx			/* %ecx = bytes actually copied */
	movl	24(%esp),%edx
	testl	%edx,%edx			/* lencopied may be NULL */
	jz	7f
	movl	%ecx,(%edx)
7:
	popl	%edi
	popl	%esi
	ret
END(copystr)
652
/*
 * bcmp(b1, b2, length)
 *
 * Returns 0 if the two buffers are byte-for-byte equal, non-zero (1)
 * otherwise.
 */
ENTRY(bcmp)
	pushl	%edi
	pushl	%esi
	movl	12(%esp),%edi			/* %edi = b1 */
	movl	16(%esp),%esi			/* %esi = b2 */
	movl	20(%esp),%edx			/* %edx = length in bytes */

	/* compare a dword at a time */
	movl	%edx,%ecx
	shrl	$2,%ecx				/* dword count; sets ZF if zero */
	repe
	cmpsl
	/*
	 * If %ecx was 0 the cmpsl did not execute and the flags are
	 * still those of the shrl above (ZF set), so this jump is
	 * correctly not taken for lengths < 4.
	 */
	jne	1f

	/* compare the remaining 0..3 bytes */
	movl	%edx,%ecx
	andl	$3,%ecx				/* sets ZF if no remainder */
	repe
	cmpsb
1:
	setne	%al				/* 0 if equal, 1 if not */
	movsbl	%al,%eax
	popl	%esi
	popl	%edi
	ret
END(bcmp)
677
/*
 * Handling of special 386 registers and descriptor tables etc
 */
/* void lgdt(struct region_descriptor *rdp); */
ENTRY(lgdt)
	/* reload the descriptor table */
	movl	4(%esp),%eax
	lgdt	(%eax)

	/* flush the prefetch q */
	jmp	1f
	nop
1:
	/*
	 * Reload "stale" selectors: the segment registers still hold
	 * selectors from the old GDT, so force them to be re-fetched.
	 */
	movl	$KDSEL,%eax			/* kernel data selector */
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%gs
	movl	%eax,%ss
	movl	$KPSEL,%eax			/* per-CPU data selector for %fs */
	movl	%eax,%fs

	/* reload code selector by turning return into intersegmental return */
	movl	(%esp),%eax
	pushl	%eax
	movl	$KCSEL,4(%esp)			/* far-return frame: KCSEL:eip */
	MEXITCOUNT
	lret
END(lgdt)
707
/*
 * ssdtosd(*ssdp,*sdp)
 *
 * Repack the flat fields of the software segment descriptor at *ssdp
 * into the interleaved two-dword hardware segment descriptor layout
 * at *sdp.
 */
ENTRY(ssdtosd)
	pushl	%ebx
	movl	8(%esp),%ecx			/* %ecx = ssdp (source) */
	movl	8(%ecx),%ebx			/* third dword of soft descriptor */
	shll	$16,%ebx
	movl	(%ecx),%edx			/* first dword (base/limit pieces) */
	roll	$16,%edx
	movb	%dh,%bl				/* interleave base/limit bytes */
	movb	%dl,%bh
	rorl	$8,%ebx
	movl	4(%ecx),%eax			/* second dword */
	movw	%ax,%dx
	andl	$0xf0000,%eax			/* keep limit bits 16..19 */
	orl	%eax,%ebx
	movl	12(%esp),%ecx			/* %ecx = sdp (destination) */
	movl	%edx,(%ecx)			/* low dword of hardware descriptor */
	movl	%ebx,4(%ecx)			/* high dword of hardware descriptor */
	popl	%ebx
	ret
END(ssdtosd)
729
/*
 * void reset_dbregs(void)
 *
 * Clear the x86 debug registers: disable all four hardware breakpoints
 * via %dr7 first, then clear the breakpoint address registers %dr0-%dr3
 * and the debug status register %dr6.
 */
ENTRY(reset_dbregs)
	xorl	%eax,%eax		/* idiomatic zero (was movl $0,%eax) */
	movl	%eax,%dr7		/* disable all breakpoints first */
	movl	%eax,%dr0
	movl	%eax,%dr1
	movl	%eax,%dr2
	movl	%eax,%dr3
	movl	%eax,%dr6
	ret
END(reset_dbregs)
741
/*****************************************************************************/
/* setjump, longjump */
/*****************************************************************************/

/*
 * int setjmp(jmp_buf)
 *
 * Save the callee-saved registers, stack pointer and return address
 * into the jmp_buf and return 0.  A later longjmp on the same buffer
 * resumes at our caller with a return value of 1.
 */
ENTRY(setjmp)
	movl	4(%esp),%eax			/* %eax = jmp_buf */
	movl	%ebx,(%eax)			/* save ebx */
	movl	%esp,4(%eax)			/* save esp */
	movl	%ebp,8(%eax)			/* save ebp */
	movl	%esi,12(%eax)			/* save esi */
	movl	%edi,16(%eax)			/* save edi */
	movl	(%esp),%edx			/* get rta (our return address) */
	movl	%edx,20(%eax)			/* save eip */
	xorl	%eax,%eax			/* return(0); */
	ret
END(setjmp)
758
/*
 * void longjmp(jmp_buf)
 *
 * Restore the context saved by setjmp on the same buffer; control
 * resumes after the original setjmp call with return value 1.
 */
ENTRY(longjmp)
	movl	4(%esp),%eax			/* %eax = jmp_buf */
	movl	(%eax),%ebx			/* restore ebx */
	movl	4(%eax),%esp			/* restore esp */
	movl	8(%eax),%ebp			/* restore ebp */
	movl	12(%eax),%esi			/* restore esi */
	movl	16(%eax),%edi			/* restore edi */
	movl	20(%eax),%edx			/* get rta */
	movl	%edx,(%esp)			/* put in return frame */
	xorl	%eax,%eax			/* return(1); xor+inc is the */
	incl	%eax				/* compact encoding of mov $1 */
	ret
END(longjmp)
772
/*
 * Support for reading MSRs in the safe manner.
 */
ENTRY(rdmsr_safe)
/* int rdmsr_safe(u_int msr, uint64_t *data) */
	/*
	 * Arm the fault handler so a trap from rdmsr (e.g. reading a
	 * non-existent MSR) returns EFAULT instead of panicking.
	 */
	movl	PCPU(CURPCB),%ecx
	movl	$msr_onfault,PCB_ONFAULT(%ecx)

	movl	4(%esp),%ecx			/* %ecx = msr number */
	rdmsr					/* result in %edx:%eax */
	movl	8(%esp),%ecx			/* %ecx = data */
	movl	%eax,(%ecx)			/* store low 32 bits */
	movl	%edx,4(%ecx)			/* store high 32 bits */
	xorl	%eax,%eax			/* return 0 */

	movl	PCPU(CURPCB),%ecx		/* disarm the fault handler */
	movl	%eax,PCB_ONFAULT(%ecx)

	ret
792
/*
 * Support for writing MSRs in the safe manner.
 */
ENTRY(wrmsr_safe)
/* int wrmsr_safe(u_int msr, uint64_t data) */
	/*
	 * Arm the fault handler so a trap from wrmsr (e.g. writing a
	 * non-existent or read-only MSR) returns EFAULT instead of
	 * panicking.
	 */
	movl	PCPU(CURPCB),%ecx
	movl	$msr_onfault,PCB_ONFAULT(%ecx)

	movl	4(%esp),%ecx			/* %ecx = msr number */
	movl	8(%esp),%eax			/* %eax = low 32 bits of data */
	movl	12(%esp),%edx			/* %edx = high 32 bits of data */
	wrmsr
	xorl	%eax,%eax			/* return 0 */

	movl	PCPU(CURPCB),%ecx		/* disarm the fault handler */
	movl	%eax,PCB_ONFAULT(%ecx)

	ret
811
/*
 * MSR operations fault handler
 *
 * Reached via PCB_ONFAULT when rdmsr/wrmsr traps: disarm the handler
 * and return EFAULT to the rdmsr_safe/wrmsr_safe caller.
 */
	ALIGN_TEXT
msr_onfault:
	movl	PCPU(CURPCB),%ecx
	movl	$0,PCB_ONFAULT(%ecx)
	movl	$EFAULT,%eax
	ret
821
/*
 * NOTE(review): empty stub -- no SPEC_CTRL/IBRS MSR manipulation is
 * performed here on this platform/branch; confirm against the amd64
 * implementation whether that is intentional.
 */
ENTRY(handle_ibrs_entry)
	ret
END(handle_ibrs_entry)
825
/*
 * NOTE(review): empty stub, matching handle_ibrs_entry above -- no
 * IBRS state is changed on kernel exit here; verify this is intended
 * for this platform/branch.
 */
ENTRY(handle_ibrs_exit)
	ret
END(handle_ibrs_exit)
829
/*
 * MDS (Microarchitectural Data Sampling) mitigation handlers.
 *
 * mds_handler_void: no-op handler, presumably installed when no
 * buffer flushing is required -- verify against the handler selection
 * code.
 */
ENTRY(mds_handler_void)
	ret
END(mds_handler_void)
833
/*
 * MDS mitigation for CPUs with the MD_CLEAR capability: a VERW with a
 * memory operand flushes the affected microarchitectural buffers (per
 * Intel's MDS guidance).  Any valid selector works; %ds is used.
 */
ENTRY(mds_handler_verw)
	subl	$4, %esp
	movw	%ds, (%esp)			/* scratch slot holding a valid selector */
	verw	(%esp)				/* memory-operand VERW triggers the flush */
	addl	$4, %esp
	ret
END(mds_handler_verw)
841
/*
 * Software MDS flush sequence (Ivy Bridge class, per the name -- no
 * MD_CLEAR available).  Overwrites buffers with non-temporal stores
 * of zeroes.  Uses %xmm0 as scratch, saving/restoring it through
 * PCPU(MDS_TMP), and temporarily clears CR0.TS so SSE is usable.
 */
ENTRY(mds_handler_ivb)
	movl	%cr0, %eax			/* save %cr0 in %eax */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so SSE won't trap */
1:	movl	PCPU(MDS_BUF), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)		/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	lfence
	orpd	(%edx), %xmm0			/* two dependent reads of the buffer */
	orpd	(%edx), %xmm0
	mfence
	movl	$40, %ecx			/* 40 * 16 = 640 bytes of stores */
	addl	$16, %edx
2:	movntdq	%xmm0, (%edx)			/* non-temporal zero stores */
	addl	$16, %edx
	decl	%ecx
	jnz	2b
	mfence

	movdqa	PCPU(MDS_TMP),%xmm0		/* restore caller's %xmm0 */
	testb	$CR0_TS, %al			/* %eax still holds saved %cr0 */
	je	3f
	movl	%eax, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_ivb)
869
/*
 * Software MDS flush sequence (Broadwell class, per the name).  Zeroes
 * the buffer with non-temporal stores, then copies it onto itself with
 * rep movsb.  Clobbers %ebx/%edi/%esi in addition to the usual scratch
 * registers.  CR0.TS is cleared for SSE use and restored on exit.
 */
ENTRY(mds_handler_bdw)
	movl	%cr0, %eax			/* save %cr0 in %eax */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so SSE won't trap */
1:	movl	PCPU(MDS_BUF), %ebx
	movdqa	%xmm0, PCPU(MDS_TMP)		/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	movl	%ebx, %edi			/* rep movsb copies the buffer */
	movl	%ebx, %esi			/* onto itself (src == dst) */
	movl	$40, %ecx			/* 40 * 16 = 640 bytes of stores */
2:	movntdq	%xmm0, (%ebx)			/* non-temporal zero stores */
	addl	$16, %ebx
	decl	%ecx
	jnz	2b
	mfence
	movl	$1536, %ecx
	rep; movsb
	lfence

	movdqa	PCPU(MDS_TMP),%xmm0		/* restore caller's %xmm0 */
	testb	$CR0_TS, %al			/* %eax still holds saved %cr0 */
	je	3f
	movl	%eax, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_bdw)
897
/*
 * Software MDS flush sequence (Skylake class, SSE variant): two
 * dependent reads of MDS_BUF64, clflushopt over a range of cache
 * lines, then zero 6144 bytes of MDS_BUF with rep stosb.
 *
 * Fix: %eax holds the saved %cr0, but the original code reused %eax
 * as the clflushopt index and as the zero for rep stosb, so the final
 * CR0_TS test always saw 0 and TS was never restored.  Park the saved
 * %cr0 in %edx instead -- %edx (MDS_BUF64) is dead once the two orpd
 * reads are done.
 */
ENTRY(mds_handler_skl_sse)
	movl	%cr0, %eax			/* save %cr0 */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so SSE won't trap */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)		/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	lfence
	orpd	(%edx), %xmm0			/* two dependent reads of the buffer */
	orpd	(%edx), %xmm0
	movl	%eax, %edx			/* park saved %cr0; %eax is clobbered below */
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax			/* zero byte for rep stosb */
	rep; stosb
	mfence

	movdqa	PCPU(MDS_TMP), %xmm0		/* restore caller's %xmm0 */
	testb	$CR0_TS, %dl			/* saved %cr0 is in %edx now */
	je	3f
	movl	%edx, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_skl_sse)
928
/*
 * Software MDS flush sequence (Skylake class, AVX variant).  Same
 * structure as mds_handler_skl_sse but using %ymm0.
 *
 * Fix: the original reused %eax (holding the saved %cr0) as the
 * clflushopt index and the rep stosb zero, so CR0.TS was never
 * restored.  Park the saved %cr0 in %edx, which is dead after the two
 * vorpd reads.
 */
ENTRY(mds_handler_skl_avx)
	movl	%cr0, %eax			/* save %cr0 */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so AVX won't trap */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	vmovdqa	%ymm0, PCPU(MDS_TMP)		/* preserve caller's %ymm0 */
	vpxor	%ymm0, %ymm0, %ymm0

	lfence
	vorpd	(%edx), %ymm0, %ymm0		/* two dependent reads of the buffer */
	vorpd	(%edx), %ymm0, %ymm0
	movl	%eax, %edx			/* park saved %cr0; %eax is clobbered below */
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax			/* zero byte for rep stosb */
	rep; stosb
	mfence

	vmovdqa	PCPU(MDS_TMP), %ymm0		/* restore caller's %ymm0 */
	testb	$CR0_TS, %dl			/* saved %cr0 is in %edx now */
	je	3f
	movl	%edx, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_skl_avx)
959
/*
 * Software MDS flush sequence (Skylake class, AVX-512 variant).  Same
 * structure as mds_handler_skl_sse but using %zmm0.
 *
 * Fixes: (1) the original reused %eax (holding the saved %cr0) as the
 * clflushopt index and the rep stosb zero, so CR0.TS was never
 * restored -- park the saved %cr0 in %edx, dead after the two vorpd
 * reads.  (2) use vpxord: plain vpxor has no EVEX/zmm encoding.
 */
ENTRY(mds_handler_skl_avx512)
	movl	%cr0, %eax			/* save %cr0 */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so AVX-512 won't trap */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	vmovdqa64	%zmm0, PCPU(MDS_TMP)	/* preserve caller's %zmm0 */
	vpxord	%zmm0, %zmm0, %zmm0		/* EVEX form; vpxor cannot encode zmm */

	lfence
	vorpd	(%edx), %zmm0, %zmm0		/* two dependent reads of the buffer */
	vorpd	(%edx), %zmm0, %zmm0
	movl	%eax, %edx			/* park saved %cr0; %eax is clobbered below */
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax			/* zero byte for rep stosb */
	rep; stosb
	mfence

	vmovdqa64	PCPU(MDS_TMP), %zmm0	/* restore caller's %zmm0 */
	testb	$CR0_TS, %dl			/* saved %cr0 is in %edx now */
	je	3f
	movl	%edx, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_skl_avx512)
990
/*
 * Software MDS flush sequence (Silvermont class, per the name).  Only
 * 16 * 16 = 256 bytes of non-temporal zero stores are issued here --
 * presumably sized to this microarchitecture's smaller buffers; verify
 * against vendor guidance.  CR0.TS handling as in the other handlers.
 */
ENTRY(mds_handler_silvermont)
	movl	%cr0, %eax			/* save %cr0 in %eax */
	testb	$CR0_TS, %al
	je	1f
	clts					/* clear TS so SSE won't trap */
1:	movl	PCPU(MDS_BUF), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)		/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	movl	$16, %ecx			/* 16 * 16 = 256 bytes of stores */
2:	movntdq	%xmm0, (%edx)			/* non-temporal zero stores */
	addl	$16, %edx
	decl	%ecx
	jnz	2b
	mfence

	movdqa	PCPU(MDS_TMP),%xmm0		/* restore caller's %xmm0 */
	testb	$CR0_TS, %al			/* %eax still holds saved %cr0 */
	je	3f
	movl	%eax, %cr0			/* restore TS if it was set */
3:	ret
END(mds_handler_silvermont)