path: root/arch/arm/include/asm/insn.h
blob: faf3d1c28368f5cd78801680fd9177f8af83472e
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_INSN_H
#define __ASM_ARM_INSN_H

#include <linux/types.h>

/*
 * Avoid a literal load by emitting a sequence of ADD/LDR instructions with the
 * appropriate relocations. The combined sequence has a range of -/+ 256 MiB,
 * which should be sufficient for the core kernel as well as modules loaded
 * into the module region. (Not supported by LLD before release 14)
 */
#define LOAD_SYM_ARMV6(reg, sym)					\
	"	.globl	" #sym "				\n\t"	\
	"	.reloc	10f, R_ARM_ALU_PC_G0_NC, " #sym "	\n\t"	\
	"	.reloc	11f, R_ARM_ALU_PC_G1_NC, " #sym "	\n\t"	\
	"	.reloc	12f, R_ARM_LDR_PC_G2, " #sym "		\n\t"	\
	"10:	sub	" #reg ", pc, #8			\n\t"	\
	"11:	sub	" #reg ", " #reg ", #4			\n\t"	\
	"12:	ldr	" #reg ", [" #reg ", #0]		\n\t"

static inline unsigned long
arm_gen_nop(void)
{
#ifdef CONFIG_THUMB2_KERNEL
	return 0xf3af8000; /* nop.w */
#else
	return 0xe1a00000; /* mov r0, r0 */
#endif
}

unsigned long
__arm_gen_branch(unsigned long pc, unsigned long addr, bool link, bool warn);

static inline unsigned long
arm_gen_branch(unsigned long pc, unsigned long addr)
{
	return __arm_gen_branch(pc, addr, false, true);
}

static inline unsigned long
arm_gen_branch_link(unsigned long pc, unsigned long addr, bool warn)
{
	return __arm_gen_branch(pc, addr, true, warn);
}
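
/*
 * Illustrative sketch, not part of the original header: how the helpers
 * above are typically combined when rewriting a call site at runtime, in
 * the style of the ftrace/jump-label users. The function name and its
 * arguments are hypothetical; pc is the address of the instruction being
 * patched and addr is the branch target. In the in-tree implementation,
 * __arm_gen_branch() returns 0 when the target is unreachable from pc.
 */
static inline unsigned long patch_site_insn(unsigned long pc,
					    unsigned long addr, bool enable)
{
	if (!enable)
		return arm_gen_nop();	/* disable: overwrite with a NOP */

	/* enable: emit BL <addr>, warning if addr is out of branch range */
	return arm_gen_branch_link(pc, addr, true);
}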

#endif /* __ASM_ARM_INSN_H */