/* SPDX-License-Identifier: GPL-2.0+ */
/*
 *  relocate - common relocation function for ARM U-Boot
 *
 *  Copyright (c) 2013  Albert ARIBAUD <albert.u.boot@aribaud.net>
 */

#include <asm-offsets.h>
#include <asm/assembler.h>
#include <config.h>
#include <elf.h>
#include <linux/linkage.h>
#ifdef CONFIG_CPU_V7M
#include <asm/armv7m.h>
#endif

/*
 * Default/weak exception vectors relocation routine
 *
 * This routine covers the standard ARM cases: normal (0x00000000),
 * high (0xffff0000) and VBAR. SoCs that do not match any of the
 * standard cases must provide their own strong version to override
 * this weak default.
 */

	.section	.text.relocate_vectors,"ax",%progbits

WEAK(relocate_vectors)

#ifdef CONFIG_CPU_V7M
	/*
	 * On ARMv7-M, relocating the vectors only requires writing
	 * the new vector table address to the VTOR register.
	 */
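	/*
	 * Note: VTOR requires the vector table base to be suitably
	 * aligned (at least 128 bytes on v7-M, more with many
	 * interrupts); gd->relocaddr is assumed to satisfy this, as
	 * the relocation address is aligned before we get here.
	 */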
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	ldr	r1, =V7M_SCB_BASE
	str	r0, [r1, V7M_SCB_VTOR]
#else
#ifdef CONFIG_HAS_VBAR
	/*
	 * If the ARM processor has the security extensions,
	 * use VBAR to relocate the exception vectors.
	 */
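	/*
	 * Note: VBAR bits [4:0] are reserved, so the vector base must
	 * be at least 32-byte aligned; the relocation address is
	 * assumed to satisfy this.
	 */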
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mcr	p15, 0, r0, c12, c0, 0	/* Set VBAR */
#else
	/*
	 * Copy the relocated exception vectors to the correct
	 * address. The CP15 c1 (SCTLR) V bit gives us the location
	 * of the vectors: 0x00000000 or 0xFFFF0000.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mrc	p15, 0, r2, c1, c0, 0	/* r2 = SCTLR; V is bit 13 */
	ands	r2, r2, #(1 << 13)
	ldreq	r1, =0x00000000		/* If V=0 */
	ldrne	r1, =0xFFFF0000		/* If V=1 */
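	/*
	 * Each 8-register burst moves 32 bytes, so the two pairs
	 * below copy 64 bytes in total: the 8 vector entries plus the
	 * 8 literal words holding the handler addresses that the
	 * entries load from (assuming the vector table layout of
	 * arch/arm/lib/vectors.S).
	 */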
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
#endif
#endif
	ret	lr

ENDPROC(relocate_vectors)

/*
 * void relocate_code(addr_moni)
 *
 * This function relocates the monitor code.
 *
 * NOTE:
 * To prevent the code below from containing references with an R_ARM_ABS32
 * relocation record type, we never refer to linker-defined symbols directly.
 * Instead, we declare literals which contain their offset relative to
 * relocate_code, and at run time, add the runtime address of relocate_code
 * back to them.
 */
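/*
 * For example, _image_copy_start_ofs below assembles to the link-time
 * constant (__image_copy_start - relocate_code); adding the runtime
 * address of relocate_code (obtained with adr) then yields the runtime
 * address of __image_copy_start without any dynamic relocation record.
 */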

ENTRY(relocate_code)
relocate_base:
	adr	r3, relocate_base
	ldr	r1, _image_copy_start_ofs
	add	r1, r3			/* r1 <- Run &__image_copy_start */
	subs	r4, r0, r1		/* r4 <- Run to copy offset      */
	beq	relocate_done		/* skip relocation               */
	ldr	r1, _image_copy_start_ofs
	add	r1, r3			/* r1 <- Run &__image_copy_start */
	ldr	r2, _image_copy_end_ofs
	add	r2, r3			/* r2 <- Run &__image_copy_end   */
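	/*
	 * Note: the copy below runs two words (8 bytes) per iteration
	 * and relies on the linker script keeping __image_copy_start
	 * and __image_copy_end suitably aligned.
	 */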
copy_loop:
	ldmia	r1!, {r10-r11}		/* copy from source address [r1] */
	stmia	r0!, {r10-r11}		/* copy to   target address [r0] */
	cmp	r1, r2			/* until source end address [r2] */
	blo	copy_loop

	/*
	 * fix .rel.dyn relocations
	 */
	ldr	r1, _rel_dyn_start_ofs
	add	r2, r1, r3		/* r2 <- Run &__rel_dyn_start */
	ldr	r1, _rel_dyn_end_ofs
	add	r3, r1, r3		/* r3 <- Run &__rel_dyn_end */
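	/*
	 * Each .rel.dyn entry is an Elf32_Rel: two words, r_offset
	 * (the location to patch) and r_info (relocation type in the
	 * low byte). For R_ARM_RELATIVE the fixup is simply
	 * *(r_offset + offset) += offset, with the run-to-copy offset
	 * held in r4.
	 */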
fixloop:
	ldmia	r2!, {r0-r1}		/* (r0,r1) <- (SRC location,fixup) */
	and	r1, r1, #0xff		/* r1 <- relocation type */
	cmp	r1, #R_ARM_RELATIVE
	bne	fixnext

	/* relative fix: increase location by offset */
	add	r0, r0, r4
	ldr	r1, [r0]
	add	r1, r1, r4
	str	r1, [r0]
fixnext:
	cmp	r2, r3
	blo	fixloop

relocate_done:

#ifdef __XSCALE__
	/*
	 * On XScale, the icache must be invalidated and the write
	 * buffers drained, even with the caches disabled - see
	 * section 4.2.7 of the XScale core developer's manual.
	 */
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate icache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
#endif

	ret	lr

ENDPROC(relocate_code)

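/*
 * Link-time offsets of the linker-provided symbols relative to
 * relocate_code. Each .word is a difference between two addresses in
 * the same image, so it assembles to a constant and produces no
 * R_ARM_ABS32 record (see the NOTE above).
 */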
_image_copy_start_ofs:
	.word	__image_copy_start - relocate_code
_image_copy_end_ofs:
	.word	__image_copy_end - relocate_code
_rel_dyn_start_ofs:
	.word	__rel_dyn_start - relocate_code
_rel_dyn_end_ofs:
	.word	__rel_dyn_end - relocate_code