/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 1999, 2000, 2001 by Ralf Baechle
 * Copyright (C) 1999, 2000, 2001 Silicon Graphics, Inc.
 */
#ifndef _ASM_UNALIGNED_H
#define _ASM_UNALIGNED_H

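/*
 * Note: these helpers are intended to stay undefined.  A get_unaligned()
 * or put_unaligned() on an unsupported access size references one of them
 * and therefore fails at link time instead of silently doing the wrong
 * thing.
 */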
extern void __get_unaligned_bad_length(void);
extern void __put_unaligned_bad_length(void);

/*
 * Load quad unaligned.
 */
static inline unsigned long __ldq_u(const unsigned long * __addr)
{
	unsigned long __res;

	__asm__("uld\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Load long unaligned.
 */
static inline unsigned long __ldl_u(const unsigned int * __addr)
{
	unsigned long __res;

	__asm__("ulw\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Load word unaligned.
 */
static inline unsigned long __ldw_u(const unsigned short * __addr)
{
	unsigned long __res;

	__asm__("ulh\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Store quad unaligned.
 */
static inline void __stq_u(unsigned long __val, unsigned long * __addr)
{
	__asm__("usd\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * Store long unaligned.
 */
static inline void __stl_u(unsigned long __val, unsigned int * __addr)
{
	__asm__("usw\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * Store word unaligned.
 */
static inline void __stw_u(unsigned long __val, unsigned short * __addr)
{
	__asm__("ush\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * get_unaligned - get value from possibly mis-aligned location
 * @ptr: pointer to value
 *
 * This macro should be used for accessing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. retrieving a u16 value from a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define get_unaligned(ptr)						\
({									\
	__typeof__(*(ptr)) __val;					\
									\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__val = *(const unsigned char *)(ptr);			\
		break;							\
	case 2:								\
		__val = __ldw_u((const unsigned short *)(ptr));		\
		break;							\
	case 4:								\
		__val = __ldl_u((const unsigned int *)(ptr));		\
		break;							\
	case 8:								\
		__val = __ldq_u((const unsigned long *)(ptr));		\
		break;							\
	default:							\
		__get_unaligned_bad_length();				\
		break;							\
	}								\
									\
	__val;								\
})
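
/*
 * Illustrative usage sketch (not part of the original header): fetching a
 * 32-bit value from an odd offset inside a byte buffer.  The buffer name
 * and offset are hypothetical.
 *
 *	unsigned char buf[64];
 *	unsigned int val;
 *
 *	val = get_unaligned((unsigned int *)(buf + 3));
 *
 * The switch on sizeof(*(ptr)) is resolved at compile time, so only the
 * matching __ld*_u() helper ends up in the generated code.
 */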

/*
 * put_unaligned - put value to a possibly mis-aligned location
 * @val: value to place
 * @ptr: pointer to location
 *
 * This macro should be used for placing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. writing a u16 value to a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define put_unaligned(val,ptr)						\
do {									\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(unsigned char *)(ptr) = (val);			\
		break;							\
	case 2:								\
		__stw_u((val), (unsigned short *)(ptr));		\
		break;							\
	case 4:								\
		__stl_u((val), (unsigned int *)(ptr));			\
		break;							\
	case 8:								\
		__stq_u((val), (unsigned long *)(ptr));			\
		break;							\
	default:							\
		__put_unaligned_bad_length();				\
		break;							\
	}								\
} while(0)
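
/*
 * Illustrative usage sketch (not part of the original header): storing a
 * 16-bit value at an unaligned offset.  The buffer name and offset are
 * hypothetical.
 *
 *	unsigned char buf[64];
 *
 *	put_unaligned(0x1234, (unsigned short *)(buf + 1));
 *
 * As with get_unaligned(), an unsupported size falls through to
 * __put_unaligned_bad_length() and shows up as a link-time error.
 */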

#endif /* _ASM_UNALIGNED_H */