// Copyright 2016 The Fuchsia Authors
//
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT

#include <asm.h>
#include <err.h>
#include <lib/code_patching.h>

// STAC/CLAC bracket user-memory accesses to toggle SMAP (Supervisor Mode
// Access Prevention).  Each expands to a 3-byte code-patch region filled in
// by the named patch function — presumably with stac/clac (both 3-byte
// opcodes) when the CPU supports SMAP, and NOPs otherwise.
// NOTE(review): patch-function behavior inferred from names; confirm
// against lib/code_patching.h and the x86 feature-init code.
#define STAC APPLY_CODE_PATCH_FUNC(fill_out_stac_instruction, 3)
#define CLAC APPLY_CODE_PATCH_FUNC(fill_out_clac_instruction, 3)
13
/* Register use in this code:
 * %rdi = argument 1, void* dst
 * %rsi = argument 2, const void* src
 * %rdx = argument 3, size_t len
 *   - moved to %rcx (the count register for "rep movsb")
 * %rcx = argument 4, void** fault_return
 *   - moved to %r10 (freeing %rcx for "rep movsb")
 */
22
// zx_status_t _x86_copy_to_or_from_user(void *dst, const void *src, size_t len, void **fault_return)
//
// Copies |len| bytes between |src| and |dst|, where one side may be user
// memory.  Returns ZX_OK on success, or ZX_ERR_INVALID_ARGS if a page fault
// was taken during the copy.  While *fault_return holds a non-null address,
// the page-fault handler is expected to resume execution at that address
// instead of treating the fault as fatal.
// NOTE(review): fault_return protocol inferred from the store/clear pattern
// below; confirm against the x86 page-fault handler.
FUNCTION(_x86_copy_to_or_from_user)
    // Copy fault_return out of %rcx, because %rcx is used by "rep movsb" later.
    movq %rcx, %r10

    // Disable SMAP protection if SMAP is enabled (3-byte patched region).
    STAC

    // Setup page fault return: point the fault handler at .Lfault_copy.
    leaq .Lfault_copy(%rip), %rax
    movq %rax, (%r10)

    // Between now and the reset of the fault return, we cannot make a function
    // call or manipulate the stack.  We need to be able to restore all callee
    // registers, without any knowledge of where between these two points we
    // faulted.

    // Perform the actual copy
    cld
    // %rdi and %rsi already contain the destination and source addresses.
    movq %rdx, %rcx
    rep movsb  // while (rcx-- > 0) *rdi++ = *rsi++;

    mov $ZX_OK, %rax

.Lcleanup_copy:
    // Reset fault return so later faults are no longer redirected here.
    movq $0, (%r10)

    // Re-enable SMAP protection (3-byte patched region).
    CLAC
    ret

.Lfault_copy:
    // Landed here from the page-fault handler mid-copy: report failure.
    // The 64-bit mov sign-extends the (negative) imm32 error code into %rax.
    mov $ZX_ERR_INVALID_ARGS, %rax
    jmp .Lcleanup_copy
END_FUNCTION(_x86_copy_to_or_from_user)
60