1/*-
2 * Copyright (c) 2003 Peter Wemm.
3 * Copyright (c) 1993 The Regents of the University of California.
4 * All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 * 1. Redistributions of source code must retain the above copyright
10 *    notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 *    notice, this list of conditions and the following disclaimer in the
13 *    documentation and/or other materials provided with the distribution.
14 * 4. Neither the name of the University nor the names of its contributors
15 *    may be used to endorse or promote products derived from this software
16 *    without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
19 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
22 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
24 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
25 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
26 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
27 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28 * SUCH DAMAGE.
29 *
30 * $FreeBSD$
31 */
32
33#include "opt_ddb.h"
34
35#include <machine/asmacros.h>
36#include <machine/intr_machdep.h>
37#include <machine/pmap.h>
38
39#include "assym.s"
40
41	.text
42
/*
 * bcopy family
 * void bzero(void *buf, u_int len)
 *            %rdi,      %rsi
 *
 * Zero a buffer: store 8 bytes at a time with stosq, then the
 * remaining 0-7 bytes with stosb.
 */
ENTRY(bzero)
	movq	%rsi,%rcx		/* %rcx = total byte count */
	xorl	%eax,%eax		/* fill pattern = 0 (also zeroes %rax) */
	shrq	$3,%rcx			/* number of 8-byte words */
	cld				/* store forwards */
	rep
	stosq
	movq	%rsi,%rcx
	andq	$7,%rcx			/* 0-7 trailing bytes */
	rep
	stosb
	ret
END(bzero)
62
/*
 * pagezero(addr)
 *          %rdi
 *
 * Zero one page using non-temporal stores so the zeroed page does not
 * displace useful data from the cache.  %rdx counts upward from
 * -PAGE_SIZE to 0, 32 bytes per iteration.
 */
ENTRY(pagezero)
	movq	$-PAGE_SIZE,%rdx	/* negative index, counts up to 0 */
	subq	%rdx,%rdi		/* bias %rdi to the end of the page */
	xorl	%eax,%eax		/* store pattern = 0 */
1:
	movnti	%rax,(%rdi,%rdx)	/* non-temporal: bypass the cache */
	movnti	%rax,8(%rdi,%rdx)
	movnti	%rax,16(%rdi,%rdx)
	movnti	%rax,24(%rdi,%rdx)
	addq	$32,%rdx
	jne	1b			/* loop until offset reaches 0 */
	sfence				/* make the NT stores globally visible */
	ret
END(pagezero)
78
/*
 * bcmp(b1, b2, len)
 *      %rdi, %rsi, %rdx
 *
 * Return 0 if the two buffers are identical, non-zero otherwise.
 * Compares 8 bytes at a time, then the remaining 0-7 bytes.
 */
ENTRY(bcmp)
	movq	%rdx,%rcx
	shrq	$3,%rcx			/* number of 8-byte words */
	cld					/* compare forwards */
	repe
	cmpsq
	jne	1f			/* word mismatch: skip the byte pass */

	movq	%rdx,%rcx
	andq	$7,%rcx			/* 0-7 trailing bytes */
	repe
	cmpsb
1:
	setne	%al			/* 1 if a difference was found */
	movsbl	%al,%eax		/* widen 0/1 to the int return value */
	ret
END(bcmp)
96
/*
 * bcopy(src, dst, cnt)
 *       rdi, rsi, rdx
 *  ws@tools.de     (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
 *
 * Overlap-safe memory copy.  If the regions overlap with src < dst,
 * copy backwards so the source bytes are read before being overwritten;
 * otherwise copy forwards 8 bytes at a time.
 */
ENTRY(bcopy)
	xchgq	%rsi,%rdi		/* %rsi = src, %rdi = dst for movs */
	movq	%rdx,%rcx

	movq	%rdi,%rax
	subq	%rsi,%rax		/* %rax = dst - src (unsigned) */
	cmpq	%rcx,%rax			/* overlapping && src < dst? */
	jb	1f

	shrq	$3,%rcx				/* copy by 64-bit words */
	cld					/* nope, copy forwards */
	rep
	movsq
	movq	%rdx,%rcx
	andq	$7,%rcx				/* any bytes left? */
	rep
	movsb
	ret

	/* ALIGN_TEXT */
1:
	addq	%rcx,%rdi			/* copy backwards */
	addq	%rcx,%rsi
	decq	%rdi			/* point at the last byte of each */
	decq	%rsi
	andq	$7,%rcx				/* any fractional bytes? */
	std				/* string ops now run backwards */
	rep
	movsb
	movq	%rdx,%rcx			/* copy remainder by 64-bit words */
	shrq	$3,%rcx
	subq	$7,%rsi			/* step back to the last full word */
	subq	$7,%rdi
	rep
	movsq
	cld				/* restore the expected direction flag */
	ret
END(bcopy)
140
/*
 * memcpy(dst, src, cnt)
 *        %rdi, %rsi, %rdx
 *
 * Note: memcpy does not support overlapping copies.
 * Always copies forwards: 8 bytes at a time, then the last 0-7 bytes.
 */
ENTRY(memcpy)
	movq	%rdx,%rcx
	shrq	$3,%rcx				/* copy by 64-bit words */
	cld					/* copy forwards */
	rep
	movsq
	movq	%rdx,%rcx
	andq	$7,%rcx				/* any bytes left? */
	rep
	movsb
	ret
END(memcpy)
156
/*
 * pagecopy(%rdi=from, %rsi=to)
 *
 * Copy one page.  First a prefetch pass (prefetchnta, 64 bytes per
 * cache line) over the source, then a copy pass using non-temporal
 * stores so the destination page does not displace cached data.
 * Both passes index upward from -PAGE_SIZE to 0.
 */
ENTRY(pagecopy)
	movq	$-PAGE_SIZE,%rax	/* %rax = prefetch index */
	movq	%rax,%rdx		/* %rdx = copy index */
	subq	%rax,%rdi		/* bias pointers to page end */
	subq	%rax,%rsi
1:
	prefetchnta (%rdi,%rax)		/* pull source line in, minimal pollution */
	addq	$64,%rax		/* one cache line per iteration */
	jne	1b
2:
	movq	(%rdi,%rdx),%rax
	movnti	%rax,(%rsi,%rdx)	/* non-temporal store to destination */
	movq	8(%rdi,%rdx),%rax
	movnti	%rax,8(%rsi,%rdx)
	movq	16(%rdi,%rdx),%rax
	movnti	%rax,16(%rsi,%rdx)
	movq	24(%rdi,%rdx),%rax
	movnti	%rax,24(%rsi,%rdx)
	addq	$32,%rdx
	jne	2b
	sfence				/* order the NT stores before return */
	ret
END(pagecopy)
183
/*
 * fillw(pat, base, cnt)
 *       %rdi, %rsi, %rdx
 *
 * Store cnt copies of the 16-bit pattern pat at base.
 */
ENTRY(fillw)
	movq	%rdi,%rax		/* %ax = fill pattern for stosw */
	movq	%rsi,%rdi		/* %rdi = destination */
	movq	%rdx,%rcx		/* %rcx = word count */
	cld				/* store forwards */
	rep
	stosw
	ret
END(fillw)
195
196/*****************************************************************************/
197/* copyout and fubyte family                                                 */
198/*****************************************************************************/
199/*
200 * Access user memory from inside the kernel. These routines should be
201 * the only places that do this.
202 *
203 * These routines set curpcb->pcb_onfault for the time they execute. When a
204 * protection violation occurs inside the functions, the trap handler
205 * returns to *curpcb->pcb_onfault instead of the function.
206 */
207
/*
 * copyout(from_kernel, to_user, len)  - MP SAFE
 *         %rdi,        %rsi,    %rdx
 *
 * Copy len bytes from kernel space to user space.  Returns 0 on
 * success or EFAULT on an invalid user range or a fault during the
 * copy.  curpcb->pcb_onfault is armed for the duration so a page
 * fault resumes at copyout_fault instead of panicking.
 */
ENTRY(copyout)
	movq	PCPU(CURPCB),%rax
	movq	$copyout_fault,PCB_ONFAULT(%rax)	/* arm fault recovery */
	testq	%rdx,%rdx			/* anything to do? */
	jz	done_copyout

	/*
	 * Check explicitly for non-user addresses.  If 486 write protection
	 * is being used, this check is essential because we are in kernel
	 * mode so the h/w does not provide any protection against writing
	 * kernel addresses.
	 */

	/*
	 * First, prevent address wrapping.
	 */
	movq	%rsi,%rax
	addq	%rdx,%rax		/* %rax = end of destination range */
	jc	copyout_fault		/* carry: range wraps around 2^64 */
/*
 * XXX STOP USING VM_MAXUSER_ADDRESS.
 * It is an end address, not a max, so every time it is used correctly it
 * looks like there is an off by one error, and of course it caused an off
 * by one error in several places.
 */
	movq	$VM_MAXUSER_ADDRESS,%rcx
	cmpq	%rcx,%rax
	ja	copyout_fault		/* end of range is beyond user space */

	xchgq	%rdi,%rsi		/* %rsi = src, %rdi = dst for movs */
	/* bcopy(%rsi, %rdi, %rdx) */
	movq	%rdx,%rcx

	shrq	$3,%rcx			/* copy by 64-bit words */
	cld
	rep
	movsq
	movb	%dl,%cl
	andb	$7,%cl			/* 0-7 trailing bytes */
	rep
	movsb

done_copyout:
	xorl	%eax,%eax		/* return 0 (success) */
	movq	PCPU(CURPCB),%rdx
	movq	%rax,PCB_ONFAULT(%rdx)	/* disarm fault recovery */
	ret

	ALIGN_TEXT
copyout_fault:
	/* Reached via pcb_onfault on a page fault, or by direct jump. */
	movq	PCPU(CURPCB),%rdx
	movq	$0,PCB_ONFAULT(%rdx)	/* disarm fault recovery */
	movq	$EFAULT,%rax
	ret
END(copyout)
267
/*
 * copyin(from_user, to_kernel, len) - MP SAFE
 *        %rdi,      %rsi,      %rdx
 *
 * Copy len bytes from user space to kernel space.  Returns 0 on
 * success or EFAULT on an invalid user range or a fault during the
 * copy.  curpcb->pcb_onfault is armed for the duration.
 */
ENTRY(copyin)
	movq	PCPU(CURPCB),%rax
	movq	$copyin_fault,PCB_ONFAULT(%rax)	/* arm fault recovery */
	testq	%rdx,%rdx			/* anything to do? */
	jz	done_copyin

	/*
	 * make sure address is valid
	 */
	movq	%rdi,%rax
	addq	%rdx,%rax		/* %rax = end of source range */
	jc	copyin_fault		/* carry: range wraps around 2^64 */
	movq	$VM_MAXUSER_ADDRESS,%rcx
	cmpq	%rcx,%rax
	ja	copyin_fault		/* end of range is beyond user space */

	xchgq	%rdi,%rsi		/* %rsi = src, %rdi = dst for movs */
	movq	%rdx,%rcx
	movb	%cl,%al			/* save low count bits across movsq */
	shrq	$3,%rcx				/* copy longword-wise */
	cld
	rep
	movsq
	movb	%al,%cl
	andb	$7,%cl				/* copy remaining bytes */
	rep
	movsb

done_copyin:
	xorl	%eax,%eax		/* return 0 (success) */
	movq	PCPU(CURPCB),%rdx
	movq	%rax,PCB_ONFAULT(%rdx)	/* disarm fault recovery */
	ret

	ALIGN_TEXT
copyin_fault:
	/* Reached via pcb_onfault on a page fault, or by direct jump. */
	movq	PCPU(CURPCB),%rdx
	movq	$0,PCB_ONFAULT(%rdx)	/* disarm fault recovery */
	movq	$EFAULT,%rax
	ret
END(copyin)
313
/*
 * casuword32.  Compare and set user integer.  Returns -1 or the current value.
 *        dst = %rdi, old = %rsi, new = %rdx
 *
 * Atomic 32-bit compare-and-swap on a user address.  On fault,
 * fusufault returns -1.
 */
ENTRY(casuword32)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-4,%rax	/* 4-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movl	%esi,%eax			/* old */
#ifdef SMP
	lock
#endif
	cmpxchgl %edx,(%rdi)			/* new = %edx */

	/*
	 * The old value is in %eax.  If the store succeeded it will be the
	 * value we expected (old) from before the store, otherwise it will
	 * be the current value.
	 */

	movq	PCPU(CURPCB),%rcx
	movq	$0,PCB_ONFAULT(%rcx)		/* disarm fault recovery */
	ret
END(casuword32)
342
/*
 * casuword.  Compare and set user word.  Returns -1 or the current value.
 *        dst = %rdi, old = %rsi, new = %rdx
 *
 * Atomic 64-bit compare-and-swap on a user address.  On fault,
 * fusufault returns -1.
 *
 * Fixes relative to the previous revision:
 *  - The bounds check used VM_MAXUSER_ADDRESS-4, but cmpxchgq performs
 *    an 8-byte access, so the last 4 bytes could extend past the user
 *    address limit.  Use -8, matching fuword/suword.
 *  - Removed a dead store that re-armed pcb_onfault with fusufault only
 *    to overwrite it with 0 on the next instruction.
 */
ENTRY(casuword)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-8,%rax	/* 8-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* old */
#ifdef SMP
	lock
#endif
	cmpxchgq %rdx,(%rdi)			/* new = %rdx */

	/*
	 * The old value is in %rax.  If the store succeeded it will be the
	 * value we expected (old) from before the store, otherwise it will
	 * be the current value.
	 */

	movq	PCPU(CURPCB),%rcx
	movq	$0,PCB_ONFAULT(%rcx)		/* disarm fault recovery */
	ret
END(casuword)
372
/*
 * Fetch (load) a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit
 * byte from user memory.  All these functions are MPSAFE.
 * addr = %rdi
 *
 * Each fetch returns the loaded value, or -1 via fusufault on an
 * invalid address or page fault.  fuword is an alias for fuword64.
 */

ALTENTRY(fuword64)
ENTRY(fuword)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-8,%rax	/* 8-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	(%rdi),%rax		/* may fault -> fusufault */
	movq	$0,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(fuword64)
END(fuword)
393
/*
 * fuword32(addr) - fetch a 32-bit word from user memory.
 * addr = %rdi.  Returns the value or -1 via fusufault.
 */
ENTRY(fuword32)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-4,%rax	/* 4-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movl	(%rdi),%eax		/* may fault; zero-extends into %rax */
	movq	$0,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(fuword32)
406
/*
 * fuswintr() and suswintr() are specialized variants of fuword16() and
 * suword16(), respectively.  They are called from the profiling code,
 * potentially at interrupt time.  If they fail, that's okay; good things
 * will happen later.  They always fail for now, until the trap code is
 * able to deal with this.
 */
ALTENTRY(suswintr)
ENTRY(fuswintr)
	movq	$-1,%rax		/* unconditionally report failure */
	ret
END(suswintr)
END(fuswintr)
420
/*
 * fuword16(addr) - fetch a 16-bit word from user memory.
 * addr = %rdi.  Returns the value or -1 via fusufault.
 */
ENTRY(fuword16)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-2,%rax	/* 2-byte access must fit below limit */
	cmpq	%rax,%rdi
	ja	fusufault

	movzwl	(%rdi),%eax		/* may fault; zero-extend 16 -> 64 */
	movq	$0,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(fuword16)
433
/*
 * fubyte(addr) - fetch a byte from user memory.
 * addr = %rdi.  Returns the value or -1 via fusufault.
 */
ENTRY(fubyte)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-1,%rax	/* 1-byte access must fit below limit */
	cmpq	%rax,%rdi
	ja	fusufault

	movzbl	(%rdi),%eax		/* may fault; zero-extend 8 -> 64 */
	movq	$0,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(fubyte)
446
	/*
	 * Common fault recovery for the fu*()/su*()/casuword*() functions:
	 * the trap handler resumes here via pcb_onfault.  Disarm the
	 * handler and return -1.
	 */
	ALIGN_TEXT
fusufault:
	movq	PCPU(CURPCB),%rcx
	xorl	%eax,%eax
	movq	%rax,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	decq	%rax			/* return -1 */
	ret
454
/*
 * Store a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit byte to
 * user memory.  All these functions are MPSAFE.
 * addr = %rdi, value = %rsi
 *
 * Each store returns 0 on success or -1 via fusufault.  suword is an
 * alias for suword64.
 */
ALTENTRY(suword64)
ENTRY(suword)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-8,%rax	/* 8-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movq	%rsi,(%rdi)		/* may fault -> fusufault */
	xorl	%eax,%eax		/* return 0 */
	movq	PCPU(CURPCB),%rcx
	movq	%rax,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(suword64)
END(suword)
476
/*
 * suword32(addr, value) - store a 32-bit word to user memory.
 * addr = %rdi, value = %rsi.  Returns 0 or -1 via fusufault.
 */
ENTRY(suword32)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-4,%rax	/* 4-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movl	%esi,(%rdi)		/* may fault -> fusufault */
	xorl	%eax,%eax		/* return 0 */
	movq	PCPU(CURPCB),%rcx
	movq	%rax,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(suword32)
491
/*
 * suword16(addr, value) - store a 16-bit word to user memory.
 * addr = %rdi, value = %rsi.  Returns 0 or -1 via fusufault.
 */
ENTRY(suword16)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-2,%rax	/* 2-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movw	%si,(%rdi)		/* may fault -> fusufault */
	xorl	%eax,%eax		/* return 0 */
	movq	PCPU(CURPCB),%rcx		/* restore trashed register */
	movq	%rax,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(suword16)
506
/*
 * subyte(addr, value) - store a byte to user memory.
 * addr = %rdi, value = %rsi.  Returns 0 or -1 via fusufault.
 */
ENTRY(subyte)
	movq	PCPU(CURPCB),%rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS-1,%rax	/* 1-byte access must fit below limit */
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movl	%esi,%eax
	movb	%al,(%rdi)		/* may fault -> fusufault */
	xorl	%eax,%eax		/* return 0 */
	movq	PCPU(CURPCB),%rcx		/* restore trashed register */
	movq	%rax,PCB_ONFAULT(%rcx)	/* disarm fault recovery */
	ret
END(subyte)
522
/*
 * copyinstr(from, to, maxlen, int *lencopied) - MP SAFE
 *           %rdi, %rsi, %rdx, %rcx
 *
 *	copy a string from from to to, stop when a 0 character is reached.
 *	return ENAMETOOLONG if string is longer than maxlen, and
 *	EFAULT on protection violations. If lencopied is non-zero,
 *	return the actual length in *lencopied.
 *
 * Register roles during the copy: %r8 = original (possibly clamped)
 * maxlen, %r9 = lencopied pointer, %rdx = remaining count, %rsi/%rdi
 * = source/destination for lodsb/stosb.
 */
ENTRY(copyinstr)
	movq	%rdx,%r8			/* %r8 = maxlen */
	movq	%rcx,%r9			/* %r9 = *len */
	xchgq	%rdi,%rsi			/* %rdi = from, %rsi = to */
	movq	PCPU(CURPCB),%rcx
	movq	$cpystrflt,PCB_ONFAULT(%rcx)	/* arm fault recovery */

	movq	$VM_MAXUSER_ADDRESS,%rax

	/* make sure 'from' is within bounds */
	subq	%rsi,%rax		/* %rax = bytes of user space left */
	jbe	cpystrflt		/* none: 'from' is not a user address */

	/* restrict maxlen to <= VM_MAXUSER_ADDRESS-from */
	cmpq	%rdx,%rax
	jae	1f
	movq	%rax,%rdx		/* clamp working count */
	movq	%rax,%r8		/* and the saved maxlen, for *lencopied */
1:
	incq	%rdx			/* pre-bias for the leading decq */
	cld

2:
	decq	%rdx
	jz	3f			/* ran out of room before NUL */

	lodsb				/* may fault -> cpystrflt */
	stosb
	orb	%al,%al			/* was that the terminating NUL? */
	jnz	2b

	/* Success -- 0 byte reached */
	decq	%rdx			/* account for the NUL in the count */
	xorl	%eax,%eax		/* return 0 */
	jmp	cpystrflt_x
3:
	/* rdx is zero - return ENAMETOOLONG or EFAULT */
	movq	$VM_MAXUSER_ADDRESS,%rax
	cmpq	%rax,%rsi		/* did we stop at the user-space limit? */
	jae	cpystrflt
4:
	movq	$ENAMETOOLONG,%rax
	jmp	cpystrflt_x

cpystrflt:
	/* Reached via pcb_onfault on a page fault, or by direct jump. */
	movq	$EFAULT,%rax

cpystrflt_x:
	/* set *lencopied and return %eax */
	movq	PCPU(CURPCB),%rcx
	movq	$0,PCB_ONFAULT(%rcx)	/* disarm fault recovery */

	testq	%r9,%r9			/* caller wants the length back? */
	jz	1f
	subq	%rdx,%r8		/* bytes actually copied */
	movq	%r8,(%r9)
1:
	ret
END(copyinstr)
591
/*
 * copystr(from, to, maxlen, int *lencopied) - MP SAFE
 *         %rdi, %rsi, %rdx, %rcx
 *
 * Kernel-to-kernel variant of copyinstr: copy a NUL-terminated string,
 * at most maxlen bytes.  Returns 0 on success or ENAMETOOLONG, and
 * stores the copied length through lencopied when it is non-NULL.
 * No fault recovery: both addresses are kernel addresses.
 */
ENTRY(copystr)
	movq	%rdx,%r8			/* %r8 = maxlen */

	xchgq	%rdi,%rsi		/* %rsi = from, %rdi = to for lods/stos */
	incq	%rdx			/* pre-bias for the leading decq */
	cld
1:
	decq	%rdx
	jz	4f			/* ran out of room before NUL */
	lodsb
	stosb
	orb	%al,%al			/* was that the terminating NUL? */
	jnz	1b

	/* Success -- 0 byte reached */
	decq	%rdx			/* account for the NUL in the count */
	xorl	%eax,%eax		/* return 0 */
	jmp	6f
4:
	/* rdx is zero -- return ENAMETOOLONG */
	movq	$ENAMETOOLONG,%rax

6:

	testq	%rcx,%rcx		/* caller wants the length back? */
	jz	7f
	/* set *lencopied and return %rax */
	subq	%rdx,%r8		/* bytes actually copied */
	movq	%r8,(%rcx)
7:
	ret
END(copystr)
628
/*
 * Handling of special amd64 registers and descriptor tables etc
 * %rdi
 */
/*
 * void lgdt(struct region_descriptor *rdp);
 *
 * Load a new GDT, then reload every segment register so the stale
 * descriptors are flushed: data/stack selectors directly, and %cs by
 * rewriting the return frame into a far return.
 */
ENTRY(lgdt)
	/* reload the descriptor table */
	lgdt	(%rdi)

	/* flush the prefetch q */
	jmp	1f
	nop
1:
	movl	$KDSEL,%eax		/* kernel data selector */
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%fs	/* Beware, use wrmsr to set 64 bit base */
	movl	%eax,%gs
	movl	%eax,%ss

	/* reload code selector by turning return into intersegmental return */
	popq	%rax			/* return address */
	pushq	$KCSEL			/* kernel code selector */
	pushq	%rax
	MEXITCOUNT
	lretq				/* far return reloads %cs */
END(lgdt)
656
657/*****************************************************************************/
658/* setjump, longjump                                                         */
659/*****************************************************************************/
660
/*
 * setjmp(buf) - %rdi points at an 8-quadword save area.
 *
 * Save the callee-saved registers, stack pointer and return address
 * into buf and return 0.  A later longjmp(buf) returns 1 from this
 * same call site.
 */
ENTRY(setjmp)
	movq	%rbx,0(%rdi)			/* save rbx */
	movq	%rsp,8(%rdi)			/* save rsp */
	movq	%rbp,16(%rdi)			/* save rbp */
	movq	%r12,24(%rdi)			/* save r12 */
	movq	%r13,32(%rdi)			/* save r13 */
	movq	%r14,40(%rdi)			/* save r14 */
	movq	%r15,48(%rdi)			/* save r15 */
	movq	0(%rsp),%rdx			/* get rta */
	movq	%rdx,56(%rdi)			/* save rip */
	xorl	%eax,%eax			/* return(0); */
	ret
END(setjmp)
674
/*
 * longjmp(buf) - %rdi points at the save area filled in by setjmp.
 *
 * Restore the saved registers and stack, patch the saved return
 * address into the current return frame, and return 1 (so the
 * original setjmp call site observes a non-zero return).
 */
ENTRY(longjmp)
	movq	0(%rdi),%rbx			/* restore rbx */
	movq	8(%rdi),%rsp			/* restore rsp */
	movq	16(%rdi),%rbp			/* restore rbp */
	movq	24(%rdi),%r12			/* restore r12 */
	movq	32(%rdi),%r13			/* restore r13 */
	movq	40(%rdi),%r14			/* restore r14 */
	movq	48(%rdi),%r15			/* restore r15 */
	movq	56(%rdi),%rdx			/* get rta */
	movq	%rdx,0(%rsp)			/* put in return frame */
	xorl	%eax,%eax			/* return(1); */
	incl	%eax
	ret
END(longjmp)
689
/*
 * Support for reading MSRs in the safe manner.
 */
ENTRY(rdmsr_safe)
/* int rdmsr_safe(u_int msr, uint64_t *data) */
	movq	PCPU(CURPCB),%r8
	movq	$msr_onfault,PCB_ONFAULT(%r8)	/* recover from a #GP on rdmsr */
	movl	%edi,%ecx		/* %ecx = MSR number */
	rdmsr			/* Read MSR pointed by %ecx. Returns
				   high 32 bits in %edx, low in %eax */
	salq	$32,%rdx	/* move high half into position */
	movl	%eax,%eax	/* zero-extend %eax -> %rax */
	orq	%rdx,%rax	/* %rax = full 64-bit MSR value */
	movq	%rax,(%rsi)	/* *data = value */
	xorq	%rax,%rax	/* return 0 (success) */
	movq	%rax,PCB_ONFAULT(%r8)	/* disarm fault recovery */
	ret
707
/*
 * Support for writing MSRs in the safe manner.
 */
ENTRY(wrmsr_safe)
/* int wrmsr_safe(u_int msr, uint64_t data) */
	movq	PCPU(CURPCB),%r8
	movq	$msr_onfault,PCB_ONFAULT(%r8)	/* recover from a #GP on wrmsr */
	movl	%edi,%ecx		/* %ecx = MSR number */
	movl	%esi,%eax		/* low 32 bits of data */
	sarq	$32,%rsi
	movl	%esi,%edx		/* high 32 bits of data */
	wrmsr			/* Write MSR pointed by %ecx. Accepts
				   high 32 bits in %edx, low in %eax. */
	xorq	%rax,%rax	/* return 0 (success) */
	movq	%rax,PCB_ONFAULT(%r8)	/* disarm fault recovery */
	ret
724
/*
 * MSR operations fault handler: the trap handler resumes here via
 * pcb_onfault when rdmsr/wrmsr raises #GP (e.g. on a non-existent
 * MSR).  Disarm the handler and return EFAULT.
 */
	ALIGN_TEXT
msr_onfault:
	movq	$0,PCB_ONFAULT(%r8)	/* disarm fault recovery */
	movl	$EFAULT,%eax
	ret
733