path: root/arch/arm64/include/asm/runtime-const.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_RUNTIME_CONST_H
#define _ASM_RUNTIME_CONST_H

#include <asm/cacheflush.h>

/* Sigh. You can still run arm64 in BE mode */
#include <asm/byteorder.h>

#define runtime_const_ptr(sym) ({				\
	typeof(sym) __ret;					\
	asm_inline("1:\t"					\
		"movz %0, #0xcdef\n\t"				\
		"movk %0, #0x89ab, lsl #16\n\t"			\
		"movk %0, #0x4567, lsl #32\n\t"			\
		"movk %0, #0x0123, lsl #48\n\t"			\
		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret));					\
	__ret; })
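
/*
 * Usage sketch (illustrative; "dentry_hashtable" follows the fs/dcache.c
 * user of this interface): the movz/movk sequence above materializes the
 * placeholder 0x0123456789abcdef, and the section entry records where it
 * lives so boot code can patch in the real value.  To the caller it
 * looks like loading an ordinary constant:
 *
 *	struct hlist_bl_head *b = runtime_const_ptr(dentry_hashtable);
 */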

#define runtime_const_shift_right_32(val, sym) ({		\
	unsigned long __ret;					\
	asm_inline("1:\t"					\
		"lsr %w0,%w1,#12\n\t"				\
		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
		".long 1b - .\n\t"				\
		".popsection"					\
		:"=r" (__ret)					\
		:"r" (0u+(val)));				\
	__ret; })
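
/*
 * Combined usage sketch, modeled on d_hash() in fs/dcache.c (assumed
 * user): the "#12" above is only a placeholder shift count, patched at
 * boot with the final value of the symbol:
 *
 *	static inline struct hlist_bl_head *d_hash(unsigned long hashlen)
 *	{
 *		return runtime_const_ptr(dentry_hashtable) +
 *			runtime_const_shift_right_32(hashlen, d_hash_shift);
 *	}
 */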

#define runtime_const_init(type, sym) do {		\
	extern s32 __start_runtime_##type##_##sym[];	\
	extern s32 __stop_runtime_##type##_##sym[];	\
	runtime_const_fixup(__runtime_fixup_##type,	\
		(unsigned long)(sym), 			\
		__start_runtime_##type##_##sym,		\
		__stop_runtime_##type##_##sym);		\
} while (0)
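
/*
 * Init sketch (names again follow the assumed fs/dcache.c user): once
 * the final values are known, each user patches its own sites, e.g.:
 *
 *	runtime_const_init(ptr, dentry_hashtable);
 *	runtime_const_init(shift, d_hash_shift);
 *
 * The __start/__stop symbols come from the linker script, which must
 * collect the runtime_ptr_* and runtime_shift_* input sections (the
 * RUNTIME_CONST() helper in asm-generic/vmlinux.lds.h does this).
 */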

/* 16-bit immediate for wide move (movz and movk) in bits 5..20 */
static inline void __runtime_fixup_16(__le32 *p, unsigned int val)
{
	u32 insn = le32_to_cpu(*p);
	insn &= 0xffe0001f;
	insn |= (val & 0xffff) << 5;
	*p = cpu_to_le32(insn);
}
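
/*
 * Worked example (encoding shown for illustration): with Rd = x0, the
 * placeholder "movz x0, #0xcdef" encodes as 0xd299bde0.  Masking with
 * 0xffe0001f keeps sf/opc/hw and Rd (leaving 0xd2800000) and clears
 * imm16; OR-ing in (val & 0xffff) << 5 deposits the new immediate:
 *
 *	__runtime_fixup_16(p, 0x5678);	// now: movz x0, #0x5678
 */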

static inline void __runtime_fixup_caches(void *where, unsigned int insns)
{
	unsigned long va = (unsigned long)where;

	/* AArch64 instructions are 4 bytes each */
	caches_clean_inval_pou(va, va + 4*insns);
}

static inline void __runtime_fixup_ptr(void *where, unsigned long val)
{
	/* Patch via the linear-map alias: the kernel text mapping is read-only */
	__le32 *p = lm_alias(where);
	__runtime_fixup_16(p, val);
	__runtime_fixup_16(p+1, val >> 16);
	__runtime_fixup_16(p+2, val >> 32);
	__runtime_fixup_16(p+3, val >> 48);
	__runtime_fixup_caches(where, 4);
}
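
/*
 * Example (pointer value assumed): patching val = 0xffff000012345678
 * rewrites the four instructions of the runtime_const_ptr() stub to
 *
 *	movz x0, #0x5678
 *	movk x0, #0x1234, lsl #16
 *	movk x0, #0x0000, lsl #32
 *	movk x0, #0xffff, lsl #48
 *
 * one 16-bit chunk per wide-move instruction, low to high.
 */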

/* Immediate value is 6 bits starting at bit #16 */
static inline void __runtime_fixup_shift(void *where, unsigned long val)
{
	__le32 *p = lm_alias(where);
	u32 insn = le32_to_cpu(*p);
	insn &= 0xffc0ffff;
	insn |= (val & 63) << 16;
	*p = cpu_to_le32(insn);
	__runtime_fixup_caches(where, 1);
}
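
/*
 * Example (shift count assumed): patching a final count of 18 turns the
 * placeholder "lsr %w0,%w1,#12" into "lsr w0, w1, #18":
 *
 *	__runtime_fixup_shift(where, 18);
 */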

static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
	unsigned long val, s32 *start, s32 *end)
{
	while (start < end) {
		fn(*start + (void *)start, val);
		start++;
	}
}
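
/*
 * Address recovery, spelled out: each section entry holds ".long 1b - .",
 * a 32-bit self-relative offset from the entry to the instruction it
 * tags, so the instruction address is simply
 *
 *	insn = *entry + (void *)entry
 *
 * which is the "*start + (void *)start" above.  Self-relative offsets
 * keep the entries 32-bit and position-independent even on a 64-bit
 * kernel.
 */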

#endif