path: root/arch/xtensa/include/asm/asmmacro.h
/*
 * arch/xtensa/include/asm/asmmacro.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Tensilica Inc.
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/core.h>

/*
 * Some little helpers for loops. Use zero-overhead loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	   ar	register initialized with the start address
 *	   at	scratch register used by macro
 *	   size	size immediate value
 *	   incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	   ar	register initialized with the start address
 *	   as	register initialized with the size
 *	   at	scratch register used by macro
 *	   incr_log2	increment [in log2]
 *	   mask_log2	mask [in log2]
 *	   cond		true condition (used in loop'cond')
 *	   ncond	false condition (used in b'ncond')
 *
 * __loopt ar, as, at, incr_log2
 *	   ar	register initialized with the start address
 *	   as	register initialized with the end address
 *	   at	scratch register used by macro
 *	   incr_log2	increment [in log2]
 *
 * __loop  as
 *	   restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	   ar	start address (modified)
 *	   as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	   incr	increment
 */

/*
 * loop for given size as immediate
 */

	.macro	__loopi ar, at, size, incr

#if XCHAL_HAVE_LOOPS
		movi	\at, ((\size + \incr - 1) / (\incr))
		loop	\at, 99f
#else
		addi	\at, \ar, \size
		98:
#endif

	.endm
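
/*
 * Usage sketch (illustrative only, not part of this header; register
 * choices are arbitrary): zero 64 bytes starting at a2, 16 bytes per
 * iteration. The size must fit the addi immediate on the non-loop path.
 *
 *	movi	a5, 0
 *	__loopi	a2, a4, 64, 16
 *	s32i	a5, a2, 0
 *	s32i	a5, a2, 4
 *	s32i	a5, a2, 8
 *	s32i	a5, a2, 12
 *	__endla	a2, a4, 16
 */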

/*
 * loop for given size in register
 */

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond

#if XCHAL_HAVE_LOOPS
		.ifgt \incr_log2 - 1
			addi	\at, \as, (1 << \incr_log2) - 1
			.ifnc \mask_log2,
				extui	\at, \at, \incr_log2, \mask_log2
			.else
				srli	\at, \at, \incr_log2
			.endif
		.endif
		loop\cond	\at, 99f
#else
		.ifnc \mask_log2,
			extui	\at, \as, \incr_log2, \mask_log2
		.else
			.ifnc \ncond,
				srli	\at, \as, \incr_log2
			.endif
		.endif
		.ifnc \ncond,
			b\ncond	\at, 99f
		.endif
		.ifnc \mask_log2,
			slli	\at, \at, \incr_log2
			add	\at, \ar, \at
		.else
			add	\at, \ar, \as
		.endif
#endif
		98:

	.endm
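
/*
 * Usage sketch (illustrative only, registers are arbitrary): zero a3
 * bytes starting at a2, one word per iteration; assumes a3 is a
 * non-zero multiple of 4.
 *
 *	movi	a5, 0
 *	__loops	a2, a3, a4, 2
 *	s32i	a5, a2, 0
 *	__endla	a2, a4, 4
 */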

/*
 * loop from ar to as
 */

	.macro	__loopt	ar, as, at, incr_log2

#if XCHAL_HAVE_LOOPS
		sub	\at, \as, \ar
		.ifgt	\incr_log2 - 1
			addi	\at, \at, (1 << \incr_log2) - 1
			srli	\at, \at, \incr_log2
		.endif
		loop	\at, 99f
#else
		98:
#endif

	.endm
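
/*
 * Usage sketch (illustrative only): clear the word-aligned region
 * between a2 (start address) and a3 (end address). Note that __endla
 * takes the end address, not the scratch register, for __loopt loops.
 *
 *	movi	a5, 0
 *	__loopt	a2, a3, a4, 2
 *	s32i	a5, a2, 0
 *	__endla	a2, a3, 4
 */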

/*
 * restart loop. registers must be unchanged
 */

	.macro	__loop	as

#if XCHAL_HAVE_LOOPS
		loop	\as, 99f
#else
		98:
#endif

	.endm

/*
 * end of loop with no increment of the address.
 */

	.macro	__endl	ar, as
#if !XCHAL_HAVE_LOOPS
		bltu	\ar, \as, 98b
#endif
		99:
	.endm

/*
 * end of loop with increment of the address.
 */

	.macro	__endla	ar, as, incr
		addi	\ar, \ar, \incr
		__endl	\ar \as
	.endm

/* Load or store instructions that may cause exceptions use the EX macro. */

#define EX(handler)				\
	.section __ex_table, "a";		\
	.word	97f, handler;			\
	.previous				\
97:
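
/*
 * Usage sketch (hypothetical label and register choices): the instruction
 * following EX() is the one that may fault; if it does, the exception
 * handler looks up the recorded address in __ex_table and transfers
 * control to the fixup code at the given label, defined elsewhere in the
 * same file (typically in a fixup section).
 *
 * EX(10f)	l32i	a6, a3, 0	# load that may fault; fixup at 10:
 */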


/*
 * Extract unaligned word that is split between two registers w0 and w1
 * into r regardless of machine endianness. SAR must be loaded with the
 * starting bit of the word (see __ssa8).
 */

	.macro __src_b	r, w0, w1
#ifdef __XTENSA_EB__
		src	\r, \w0, \w1
#else
		src	\r, \w1, \w0
#endif
	.endm

/*
 * Load SAR from the 2 lowest address bits of r so that __src_b can extract
 * the unaligned word starting at r from two registers loaded from consecutive
 * aligned addresses covering r, regardless of machine endianness.
 *
 *      r   0   1   2   3
 * LE SAR   0   8  16  24
 * BE SAR  32  24  16   8
 */

	.macro __ssa8	r
#ifdef __XTENSA_EB__
		ssa8b	\r
#else
		ssa8l	\r
#endif
	.endm
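
/*
 * Usage sketch (illustrative only, registers are arbitrary): load the
 * 32-bit word at the possibly unaligned address in a3 into a9, using
 * a6..a8 as scratch.
 *
 *	__ssa8	a3		# SAR from the two low address bits of a3
 *	srli	a6, a3, 2
 *	slli	a6, a6, 2	# a6 = a3 rounded down to a word boundary
 *	l32i	a7, a6, 0	# word at the lower aligned address
 *	l32i	a8, a6, 4	# following word
 *	__src_b	a9, a7, a8	# a9 = unaligned word starting at a3
 */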

#define XTENSA_STACK_ALIGNMENT		16

#if defined(__XTENSA_WINDOWED_ABI__)
#define XTENSA_FRAME_SIZE_RESERVE	16
#define XTENSA_SPILL_STACK_RESERVE	32

#define abi_entry(frame_size) \
	entry sp, (XTENSA_FRAME_SIZE_RESERVE + \
		   (((frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		    -XTENSA_STACK_ALIGNMENT))
#define abi_entry_default abi_entry(0)

#define abi_ret(frame_size) retw
#define abi_ret_default retw

#elif defined(__XTENSA_CALL0_ABI__)

#define XTENSA_SPILL_STACK_RESERVE	0

#define abi_entry(frame_size) __abi_entry (frame_size)

	.macro	__abi_entry frame_size
	.ifgt \frame_size
	addi sp, sp, -(((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		       -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_entry_default

#define abi_ret(frame_size) __abi_ret (frame_size)

	.macro	__abi_ret frame_size
	.ifgt \frame_size
	addi sp, sp, (((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		      -XTENSA_STACK_ALIGNMENT)
	.endif
	ret
	.endm

#define abi_ret_default ret

#else
#error Unsupported Xtensa ABI
#endif
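
/*
 * Usage sketch (hypothetical function name): the same assembly source
 * works for both ABIs when function entry and exit go through these
 * macros.
 *
 *	.global	my_func
 *	.align	4
 * my_func:
 *	abi_entry_default	# window frame (windowed ABI) or nothing (call0)
 *	...
 *	abi_ret_default		# retw (windowed ABI) or ret (call0)
 */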

#define __XTENSA_HANDLER	.section ".exception.text", "ax"

#endif /* _XTENSA_ASMMACRO_H */