Skip to content

Commit 5b3fb8a

Browse files
ryanhrob authored and ctmarinas committed
arm64: mm: Re-implement the __tlbi_level macro as a C function
As part of efforts to reduce our reliance on complex preprocessor macros for TLB invalidation routines, convert the __tlbi_level macro to a C function for by-level TLB invalidation. Each specific tlbi level op is implemented as a C function and the appropriate function pointer is passed to __tlbi_level(). Since everything is declared inline and is statically resolvable, the compiler will convert the indirect function call to a direct inline execution.

Suggested-by: Linus Torvalds <torvalds@linux-foundation.org>
Reviewed-by: Jonathan Cameron <jonathan.cameron@huawei.com>
Signed-off-by: Ryan Roberts <ryan.roberts@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
1 parent 1f318b9 commit 5b3fb8a

1 file changed

Lines changed: 54 additions & 13 deletions

File tree

arch/arm64/include/asm/tlbflush.h

Lines changed: 54 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -97,19 +97,60 @@ static inline unsigned long get_trans_granule(void)
9797

9898
#define TLBI_TTL_UNKNOWN INT_MAX
9999

100-
#define __tlbi_level(op, addr, level) do { \
101-
u64 arg = addr; \
102-
\
103-
if (alternative_has_cap_unlikely(ARM64_HAS_ARMv8_4_TTL) && \
104-
level >= 0 && level <= 3) { \
105-
u64 ttl = level & 3; \
106-
ttl |= get_trans_granule() << 2; \
107-
arg &= ~TLBI_TTL_MASK; \
108-
arg |= FIELD_PREP(TLBI_TTL_MASK, ttl); \
109-
} \
110-
\
111-
__tlbi(op, arg); \
112-
} while(0)
100+
typedef void (*tlbi_op)(u64 arg);
101+
102+
static __always_inline void vae1is(u64 arg)
103+
{
104+
__tlbi(vae1is, arg);
105+
}
106+
107+
static __always_inline void vae2is(u64 arg)
108+
{
109+
__tlbi(vae2is, arg);
110+
}
111+
112+
static __always_inline void vale1(u64 arg)
113+
{
114+
__tlbi(vale1, arg);
115+
}
116+
117+
static __always_inline void vale1is(u64 arg)
118+
{
119+
__tlbi(vale1is, arg);
120+
}
121+
122+
static __always_inline void vale2is(u64 arg)
123+
{
124+
__tlbi(vale2is, arg);
125+
}
126+
127+
static __always_inline void vaale1is(u64 arg)
128+
{
129+
__tlbi(vaale1is, arg);
130+
}
131+
132+
static __always_inline void ipas2e1(u64 arg)
133+
{
134+
__tlbi(ipas2e1, arg);
135+
}
136+
137+
static __always_inline void ipas2e1is(u64 arg)
138+
{
139+
__tlbi(ipas2e1is, arg);
140+
}
141+
142+
static __always_inline void __tlbi_level(tlbi_op op, u64 addr, u32 level)
143+
{
144+
u64 arg = addr;
145+
146+
if (alternative_has_cap_unlikely(ARM64_HAS_ARMv8_4_TTL) && level <= 3) {
147+
u64 ttl = level | (get_trans_granule() << 2);
148+
149+
FIELD_MODIFY(TLBI_TTL_MASK, &arg, ttl);
150+
}
151+
152+
op(arg);
153+
}
113154

114155
#define __tlbi_user_level(op, arg, level) do { \
115156
if (arm64_kernel_unmapped_at_el0()) \

0 commit comments

Comments
 (0)