/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (C) 2009 PaX Team <[email protected]> */
#ifndef _ASM_X86_ALTERNATIVE_H
#define _ASM_X86_ALTERNATIVE_H
#include <linux/types.h>
#include <linux/stringify.h>
#include <asm/asm.h>
#include <asm/bug.h>
#include <asm/irq_vectors.h>
/*
 * Layout of the alt_instr ft_flags word: patching control flags occupy the
 * upper 16 bits, the CPUID feature bit the lower 16 bits (see the comment
 * above struct alt_instr below).
 */
#define ALT_FLAGS_SHIFT 16
/* Apply the replacement when the CPU does NOT have the feature. */
#define ALT_FLAG_NOT (1 << 0)
#define ALT_NOT(feature) ((ALT_FLAG_NOT << ALT_FLAGS_SHIFT) | (feature))
/* Disable this alternative entirely (the original instruction is kept). */
#define ALT_FLAG_DISABLE (1 << 1)
/* Set ALT_FLAG_DISABLE unless @config is an enabled Kconfig option. */
#define ALT_ENABLE(config, feature) ((((1-IS_ENABLED(config)) * ALT_FLAG_DISABLE) << ALT_FLAGS_SHIFT) | (feature))
/* Encodes the field sequence of struct alt_instr (s32,s32,u32,u8,u8). */
#define ALT_INSTR_STRUCT "=iiIbb" /* XXX: kernel version dependent! */
#ifndef __ASSEMBLY__
#include <linux/stddef.h>
/*
* Alternative inline assembly for SMP.
*
* The LOCK_PREFIX macro defined here replaces the LOCK and
* LOCK_PREFIX macros used everywhere in the source tree.
*
* SMP alternatives use the same data structures as the other
* alternatives and the X86_FEATURE_UP flag to indicate the case of a
* UP system running an SMP kernel. The existing apply_alternatives()
* works fine for patching an SMP kernel for UP.
*
* The SMP alternative tables can be kept after boot and contain both
* UP and SMP versions of the instructions to allow switching back to
* SMP at runtime, when hotplugging in a new CPU, which is especially
* useful in virtualized environments.
*
* The very common lock prefix is handled as special case in a
* separate table which is a pure address list without replacement ptr
* and size information. That keeps the table sizes small.
*/
#ifdef CONFIG_SMP
/*
 * Record the address of the following "lock" prefix in .smp_locks so the
 * SMP-alternatives code can find (and patch) it; see the comment above.
 */
#define LOCK_PREFIX_HERE \
".pushsection .smp_locks,\"a\"\n" \
".balign 4\n" \
".long 671f - .\n" /* offset */ \
".popsection\n" \
"671:"
#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
#else /* ! CONFIG_SMP */
#define LOCK_PREFIX_HERE ""
#define LOCK_PREFIX ""
#endif
/*
* objtool annotation to ignore the alternatives and only consider the original
* instruction(s).
*/
#define ANNOTATE_IGNORE_ALTERNATIVE \
"999:\n\t" \
".pushsection .discard.ignore_alts\n\t" \
".long 999b\n\t" /* record this location for objtool to find */ \
".popsection\n\t"
/*
* The patching flags are part of the upper bits of the @ft_flags parameter when
* specifying them. The split is currently like this:
*
* [31... flags ...16][15... CPUID feature bit ...0]
*
* but since this is all hidden in the macro's argument being split, those fields can be
* extended in the future to fit in a u64 or however the need arises.
*
* XXX: Keep in sync with ALT_INSTR_STRUCT!
*/
/*
 * One patch-site descriptor, emitted into .altinstructions by the
 * ALTERNATIVE*() macros and consumed by apply_alternatives().
 * Both code references are relative s32 offsets, keeping the section
 * position independent.
 */
struct alt_instr {
s32 instr_offset; /* original instruction */
s32 repl_offset; /* offset to replacement instruction */
union {
struct {
u32 cpuid: 16; /* CPUID bit set for replacement */
u32 flags: 16; /* patching control flags */
};
u32 ft_flags; /* combined view: [31..16]=flags, [15..0]=feature */
};
u8 instrlen; /* length of original instruction */
u8 replacementlen; /* length of new instruction */
} __packed;
/*
* Debug flag that can be tested to see whether alternative
* instructions were patched in already:
*/
extern int alternatives_patched;
extern void alternative_instructions(void);
/* Patch the descriptors in [start, end); replacements are looked up in the given replacement range. */
extern void apply_alternatives(const struct alt_instr *start, const struct alt_instr *end, const u8 *altinstr_replacements, const u8 *altinstr_replacements_end);
extern void apply_retpolines(const s32 *start, const s32 *end);
extern void apply_returns(const s32 *start, const s32 *end);
extern void apply_seal_endbr(const s32 *start, const s32 *end);
/* Fixed typo in the second parameter name: end_retpoine -> end_retpoline. */
extern void apply_fineibt(const s32 *start_retpoline, const s32 *end_retpoline,
const s32 *start_cfi, const s32 *end_cfi);
struct module;
struct paravirt_patch_site;
/* Site tables (call sites and paravirt sites) handed to the callthunk patcher. */
struct callthunk_sites {
s32 *call_start, *call_end;
struct paravirt_patch_site *pv_start, *pv_end;
};
#ifdef CONFIG_CALL_THUNKS
extern void callthunks_patch_builtin_calls(void);
extern void callthunks_patch_module_calls(struct callthunk_sites *sites,
struct module *mod);
extern void *callthunks_translate_call_dest(void *dest);
extern int x86_call_depth_emit_accounting(u8 **pprog, void *func);
#else
/* !CONFIG_CALL_THUNKS: inline no-op stubs so callers need no #ifdefs. */
static __always_inline void callthunks_patch_builtin_calls(void) {}
static __always_inline void
callthunks_patch_module_calls(struct callthunk_sites *sites,
struct module *mod) {}
/* Without call thunks the call destination is used unchanged. */
static __always_inline void *callthunks_translate_call_dest(void *dest)
{
return dest;
}
/* Stub: emits nothing and returns 0. */
static __always_inline int x86_call_depth_emit_accounting(u8 **pprog,
void *func)
{
return 0;
}
#endif
#ifdef CONFIG_MITIGATION_ITS
extern u8 *its_static_thunk(int reg, const void *ip);
#else /* CONFIG_MITIGATION_ITS */
/* Stub: an ITS thunk must never be requested when the mitigation is compiled out. */
static inline u8 *its_static_thunk(int reg, const void *ip)
{
WARN_ONCE(1, "ITS not compiled in\n");
return NULL;
}
#endif
#ifdef CONFIG_RETHUNK
extern bool cpu_wants_rethunk(void);
extern bool cpu_wants_rethunk_at(void *addr);
#else
/* Without CONFIG_RETHUNK a return thunk is never wanted. */
static __always_inline bool cpu_wants_rethunk(void)
{
return false;
}
static __always_inline bool cpu_wants_rethunk_at(void *addr)
{
return false;
}
#endif
#ifdef CONFIG_SMP
extern void alternatives_smp_module_add(struct module *mod, char *name,
void *locks, void *locks_end,
void *text, void *text_end);
extern void alternatives_smp_module_del(struct module *mod);
extern void alternatives_enable_smp(void);
/* Returns non-zero if [start, end) overlaps a recorded SMP-lock site. */
extern int alternatives_text_reserved(void *start, void *end);
extern bool skip_smp_alternatives;
#else
/* !CONFIG_SMP: no SMP-lock bookkeeping; all stubs are no-ops. */
static inline void alternatives_smp_module_add(struct module *mod, char *name,
void *locks, void *locks_end,
void *text, void *text_end) {}
static inline void alternatives_smp_module_del(struct module *mod) {}
static inline void alternatives_enable_smp(void) {}
static inline int alternatives_text_reserved(void *start, void *end)
{
return 0;
}
#endif /* CONFIG_SMP */
/* can only use gas macros in C when plugins are enabled */
#if defined(CONFIG_PLUGIN_WANTS_ASMMACRO)
#define PAX_PARAVIRT_CALL(target, hash) "pax_paravirt_call " target ", " hash
#define PAX_RET_NOSPEC "pax_ret_nospec"
#else
#define PAX_PARAVIRT_CALL(target, hash) PAX_INDIRECT_SLS_CALL_HASH(target, hash)
#define PAX_RET_NOSPEC "ret"
#endif
/* SLS variants: the pax_*_sls_* gas macros add straight-line-speculation barriers. */
#ifdef CONFIG_SLS
#define PAX_DIRECT_SLS_CALL(target) "pax_direct_sls_call " target
#define PAX_DIRECT_SLS_CALL_HASH(target, hash) "pax_direct_sls_call " target ", " hash
#define PAX_INDIRECT_SLS_CALL_HASH(target, hash) "pax_indirect_call " target ", " hash
#define PAX_SLS_CALL(target) "pax_sls_call " target
#define PAX_JMP(target) "pax_jmp " target
#else
/* !CONFIG_SLS: fall back to plain call/jmp forms. */
#define PAX_DIRECT_SLS_CALL(target) PAX_DIRECT_CALL(target)
#define PAX_DIRECT_SLS_CALL_HASH(target, hash) PAX_DIRECT_CALL_HASH(target, hash)
#define PAX_INDIRECT_SLS_CALL_HASH(target, hash) ANNOTATE_RETPOLINE_SAFE "call *" target
#define PAX_SLS_CALL(target) "call " target
#define PAX_JMP(target) "jmp " target
#endif
/* RAP hash variants: call sites carry a hash checked at the call target. */
#ifdef CONFIG_PAX_RAP_HASH
#define PAX_DIRECT_CALL(target) "pax_direct_call " target
#define PAX_DIRECT_CALL_HASH(target, hash) "pax_direct_call " target ", " hash
#define PAX_DIRECT_CALL_HASHVAL(target, hashval) "pax_direct_call " target ", hashval=" hashval
#define PAX_INDIRECT_CALL(target, extra) CALL_NOSPEC_INLINE(target, extra)
#define PAX_INDIRECT_JUMP(target) "pax_indirect_jmp " target
#else
/* !CONFIG_PAX_RAP_HASH: the hash arguments are ignored. */
#define PAX_DIRECT_CALL(target) "call " target
#define PAX_DIRECT_CALL_HASH(target, hash) "call " target
#define PAX_DIRECT_CALL_HASHVAL(target, hashval) "call " target
#define PAX_INDIRECT_CALL(target, extra) CALL_NOSPEC
#define PAX_INDIRECT_JUMP(target) "jmp " target
#endif
#ifdef CONFIG_PAX_RAP_RET
#define PAX_RET(extra) "pax_ret " extra
#else
#define PAX_RET(extra) PAX_RET_NOSPEC
#endif
/*
 * Local label scheme used by the C-string ALTERNATIVE*() macros:
 *   661 = start of original insn, 662 = end of original insn (pre padding),
 *   663 = end of original insn incl. NOP padding,
 *   664<num>/665<num> = start/end of replacement <num>.
 */
#define b_replacement(num) "664"#num
#define e_replacement(num) "665"#num
#define alt_end_marker "663"
/* original length without padding */
#define alt_slen "662b-661b"
/* original length including padding */
#define alt_total_slen alt_end_marker"b-661b"
/* length of replacement <num> */
#define alt_rlen(num) e_replacement(num)"f-"b_replacement(num)"f"
/* Emit @oldinstr, NOP-padded up to the length of replacement @num. */
#define OLDINSTR(oldinstr, num) \
"# ALT: oldnstr\n" \
"661:\n\t" oldinstr "\n662:\n" \
"# ALT: padding\n" \
".skip -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * " \
"((" alt_rlen(num) ")-(" alt_slen ")),0x90\n" \
alt_end_marker ":\n"
/*
 * gas compatible max based on the idea from:
 * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
 *
 * The additional "-" is needed because gas uses a "true" value of -1.
 */
#define alt_max_short(a, b) "((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
/*
 * OLDINSTR_2: emit @oldinstr, NOP-padded up to the length of the longer
 * of the two replacement alternatives.
 */
#define OLDINSTR_2(oldinstr, num1, num2) \
"# ALT: oldinstr2\n" \
"661:\n\t" oldinstr "\n662:\n" \
"# ALT: padding2\n" \
".skip -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * " \
"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")), 0x90\n" \
alt_end_marker ":\n"
/* As OLDINSTR_2, padding to the longest of three replacements. */
#define OLDINSTR_3(oldinsn, n1, n2, n3) \
"# ALT: oldinstr3\n" \
"661:\n\t" oldinsn "\n662:\n" \
"# ALT: padding3\n" \
".skip -((" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
" - (" alt_slen ")) > 0) * " \
"(" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
" - (" alt_slen ")), 0x90\n" \
alt_end_marker ":\n"
/* As OLDINSTR_2, padding to the longest of four replacements. */
#define OLDINSTR_4(oldinsn, n1, n2, n3, n4) \
"# ALT: oldinstr4\n" \
"661:\n\t" oldinsn "\n662:\n" \
"# ALT: padding4\n" \
".skip -((" alt_max_short(alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)), alt_rlen(n4)) \
" - (" alt_slen ")) > 0) * " \
"(" alt_max_short(alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)), alt_rlen(n4)) \
" - (" alt_slen ")), 0x90\n" \
alt_end_marker ":\n"
/* Emit one struct alt_instr entry (field order must match the struct above). */
#define ALTINSTR_ENTRY(ft_flags, num) \
" .long 661b - .\n" /* label */ \
" .long " b_replacement(num)"f - .\n" /* new instruction */ \
" .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
" .byte " alt_total_slen "\n" /* source len */ \
" .byte " alt_rlen(num) "\n" /* replacement len */
#define ALTINSTR_REPLACEMENT(newinstr, num) /* replacement */ \
"# ALT: replacement " #num "\n" \
b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"
/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, ft_flags) \
OLDINSTR(oldinstr, 1) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags, 1) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinstr, 1) \
".popsection\n"
#define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
OLDINSTR_2(oldinstr, 1, 2) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags1, 1) \
ALTINSTR_ENTRY(ft_flags2, 2) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinstr1, 1) \
ALTINSTR_REPLACEMENT(newinstr2, 2) \
".popsection\n"
/* If @ft_flags is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
newinstr_yes, ft_flags)
/*
 * Three/four-way variants taking an extra @enabledN argument per
 * alternative. CHOOSE() (defined elsewhere — presumably a compile-time
 * selector; TODO confirm its exact polarity) drops the corresponding
 * replacement, mirroring the ALT_FLAG_DISABLE fast paths of the
 * assembler-side ALTERNATIVE_3/4 macros below.
 */
#define ALTERNATIVE_3(oldinsn, newinsn1, enabled1, ft_flags1, \
newinsn2, enabled2, ft_flags2, \
newinsn3, enabled3, ft_flags3) \
CHOOSE(enabled1, ALTERNATIVE_2(oldinsn, newinsn2, ft_flags2, newinsn3, ft_flags3), \
CHOOSE(enabled2, ALTERNATIVE_2(oldinsn, newinsn1, ft_flags1, newinsn3, ft_flags3), \
CHOOSE(enabled3, ALTERNATIVE_2(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2), \
OLDINSTR_3(oldinsn, 1, 2, 3) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags1, 1) \
ALTINSTR_ENTRY(ft_flags2, 2) \
ALTINSTR_ENTRY(ft_flags3, 3) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinsn1, 1) \
ALTINSTR_REPLACEMENT(newinsn2, 2) \
ALTINSTR_REPLACEMENT(newinsn3, 3) \
".popsection\n")))
#define ALTERNATIVE_4(oldinsn, newinsn1, enabled1, ft_flags1, \
newinsn2, enabled2, ft_flags2, \
newinsn3, enabled3, ft_flags3, \
newinsn4, enabled4, ft_flags4) \
CHOOSE(enabled1, ALTERNATIVE_3(oldinsn, newinsn2, enabled2, ft_flags2, newinsn3, enabled3, ft_flags3, newinsn4, enabled4, ft_flags4), \
CHOOSE(enabled2, ALTERNATIVE_3(oldinsn, newinsn1, enabled1, ft_flags1, newinsn3, enabled3, ft_flags3, newinsn4, enabled4, ft_flags4), \
CHOOSE(enabled3, ALTERNATIVE_3(oldinsn, newinsn1, enabled1, ft_flags1, newinsn2, enabled2, ft_flags2, newinsn4, enabled4, ft_flags4), \
CHOOSE(enabled4, ALTERNATIVE_3(oldinsn, newinsn1, enabled1, ft_flags1, newinsn2, enabled2, ft_flags2, newinsn3, enabled3, ft_flags3), \
OLDINSTR_4(oldinsn, 1, 2, 3, 4) \
".pushsection .altinstructions,\"a\"\n" \
ALTINSTR_ENTRY(ft_flags1, 1) \
ALTINSTR_ENTRY((ft_flags2), 2) \
ALTINSTR_ENTRY(ft_flags3, 3) \
ALTINSTR_ENTRY((ft_flags4), 4) \
".popsection\n" \
".pushsection .altinstr_replacement, \"ax\"\n" \
ALTINSTR_REPLACEMENT(newinsn1, 1) \
ALTINSTR_REPLACEMENT(newinsn2, 2) \
ALTINSTR_REPLACEMENT(newinsn3, 3) \
ALTINSTR_REPLACEMENT(newinsn4, 4) \
".popsection\n"))))
/*
* Alternative instructions for different CPU types or capabilities.
*
* This allows to use optimized instructions even on generic binary
* kernels.
*
* length of oldinstr must be longer or equal the length of newinstr
* It can be padded with nops as needed.
*
* For non barrier like inlines please define new variants
* without volatile and memory clobber.
*/
#define alternative(oldinstr, newinstr, ft_flags) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
/*
 * NOTE(review): ALTERNATIVE_3 above takes (newinsn, enabled, ft_flags)
 * triples (10 arguments), but this wrapper passes only 7 — looks stale;
 * confirm against CHOOSE() and any callers before use.
 */
#define alternative_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3) \
asm_inline volatile(ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3) ::: "memory")
#define alternative_ternary(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
asm_inline volatile(ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) ::: "memory")
/*
 * Alternative inline assembly with input.
 *
 * Peculiarities:
 * No memory clobber here.
 * Argument numbers start with 1.
 * Leaving an unused argument 0 to keep API compatibility.
 */
#define alternative_input(oldinstr, newinstr, ft_flags, input...) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: : "i" (0), ## input)
/*
 * This is similar to alternative_input. But it has two features and
 * respective instructions.
 *
 * If CPU has feature2, newinstr2 is used.
 * Otherwise, if CPU has feature1, newinstr1 is used.
 * Otherwise, oldinstr is used.
 */
#define alternative_input_2(oldinstr, newinstr1, ft_flags1, newinstr2, \
ft_flags2, input...) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, \
newinstr2, ft_flags2) \
: : "i" (0), ## input)
/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)
/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE(PAX_DIRECT_SLS_CALL("%c[old]"), PAX_DIRECT_SLS_CALL("%c[new]"), ft_flags) \
: output, ASM_CALL_CONSTRAINT : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
/*
 * Like alternative_call, but there are two features and respective functions.
 * If CPU has feature2, function2 is used.
 * Otherwise, if CPU has feature1, function1 is used.
 * Otherwise, old function is used.
 */
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
output, input...) \
asm_inline volatile (ALTERNATIVE_2(PAX_DIRECT_SLS_CALL("%c[old]"), PAX_DIRECT_SLS_CALL("%c[new1]"), ft_flags1, \
PAX_DIRECT_SLS_CALL("%c[new2]"), ft_flags2) \
: output, ASM_CALL_CONSTRAINT \
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
[new2] "i" (newfunc2), ## input)
/*
 * use this macro(s) if you need more than one output parameter
 * in alternative_io
 */
#define ASM_OUTPUT2(a...) a
/*
 * use this macro if you need clobbers but no inputs in
 * alternative_{input,io,call}()
 */
#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
#ifdef CONFIG_PAX_REFCOUNT
/* Saturate a 32-bit counter operand directly with an immediate. */
#define __PAX_REFCOUNT32(saturator) \
"111:\tmovl $" __stringify(saturator) ", %[counter]\n"
/* 64-bit saturators don't fit an imm32; go via %rcx with movabs. */
#define __PAX_REFCOUNT64(saturator) \
"111:\tmovabs $" __stringify(saturator) ", %%rcx\n\t" \
"movq %%rcx, %[counter]\n"
/* 'o' variants: saturate at the type extremes (selected via cc=o below). */
#define __PAX_REFCOUNT_overflow32o __PAX_REFCOUNT32(__INT_MAX__)
#define __PAX_REFCOUNT_underflow32o __PAX_REFCOUNT32(-__INT_MAX__-1)
#define __PAX_REFCOUNT_overflow64o __PAX_REFCOUNT64(__LONG_LONG_MAX__)
#define __PAX_REFCOUNT_underflow64o __PAX_REFCOUNT64(-__LONG_LONG_MAX__-1)
/* 's' variants: saturate to MIN/2-1 in both directions (token-pasted below). */
#define __PAX_REFCOUNT_overflow32s __PAX_REFCOUNT32(-__INT_MAX__/2-1)
#define __PAX_REFCOUNT_underflow32s __PAX_REFCOUNT32(-__INT_MAX__/2-1)
#define __PAX_REFCOUNT_overflow64s __PAX_REFCOUNT64(-__LONG_LONG_MAX__/2-1)
#define __PAX_REFCOUNT_underflow64s __PAX_REFCOUNT64(-__LONG_LONG_MAX__/2-1)
/*
 * On condition @cc, jump to out-of-line code that saturates the counter,
 * raises X86_REFCOUNT_VECTOR via int, and resumes at 333 through the
 * exception table entry.
 */
#define __PAX_REFCOUNT(dir, size, cc) \
"j" #cc " 111f\n\t" \
".pushsection .text.refcount" #size "_" #dir "." #cc"\n"\
__PAX_REFCOUNT_##dir##size##cc \
"222:\tint $"__stringify(X86_REFCOUNT_VECTOR)"\n\t" \
".popsection\n" \
"333:\n" \
_ASM_EXTABLE(222b, 333b)
#define PAX_REFCOUNT_OVERFLOW(size) __PAX_REFCOUNT(overflow,size,o)
#define PAX_REFCOUNT_UNDERFLOW(size) __PAX_REFCOUNT(underflow,size,o)
#else
/* !CONFIG_PAX_REFCOUNT: expand to nothing. */
#define __PAX_REFCOUNT(dir, size, cc)
#define PAX_REFCOUNT_OVERFLOW(size)
#define PAX_REFCOUNT_UNDERFLOW(size)
#endif
#else /* __ASSEMBLY__ */
#include <linux/linkage.h>
#include <asm/page.h>
#include <asm/percpu.h>
#ifdef CONFIG_SMP
/* Assembler-side twin of the C LOCK_PREFIX: emit lock, record it in .smp_locks. */
.macro LOCK_PREFIX
.Llock_prefix_\@:
lock
.pushsection .smp_locks,"a"
.balign 4
.long .Llock_prefix_\@ - .
.popsection
.endm
#else
.macro LOCK_PREFIX
.endm
#endif
/*
 * objtool annotation to ignore the alternatives and only consider the original
 * instruction(s).
 */
.macro ANNOTATE_IGNORE_ALTERNATIVE
.Lannotate_\@:
.pushsection .discard.ignore_alts
.long .Lannotate_\@
.popsection
.endm
/* Tag the return address at \rip(%rsp) with the KERNEXEC BTS method. */
.macro pax_force_retaddr_bts rip=0
#ifdef KERNEXEC_PLUGIN
btsq $63,\rip(%rsp)
#endif
.endm
#if defined(CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_BTS) && defined(CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_OR)
#error PAX: the KERNEXEC BTS and OR methods must not be enabled at once
#endif
#if defined(CONFIG_PAX_RAP_XOR) && defined(CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_OR)
#error PAX: RAP and the KERNEXEC OR methods must not be enabled at once
#endif
/* Tag the return address at \rip(%rsp) using the configured KERNEXEC method. */
.macro pax_force_retaddr rip=0
#ifdef CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_BTS
btsq $63,\rip(%rsp)
#endif
#ifdef CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_OR
orq %r12,\rip(%rsp)
#endif
.endm
/* Tag a function pointer the same way (operand \ptr instead of the stack slot). */
.macro pax_force_fptr ptr:req
#ifdef CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_BTS
btsq $63,\ptr
#endif
#ifdef CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_OR
orq %r12,\ptr
#endif
.endm
/* Load %r12 with the pointer mask (OR method) or per-thread value (RAP XOR). */
.macro pax_set_fptr_mask altstack=0
#ifdef CONFIG_PAX_KERNEXEC_PLUGIN_METHOD_OR
movabs $0x8000000000000000,%r12
#endif
#ifdef CONFIG_PAX_RAP_XOR
.if \altstack
movq PER_CPU_VAR(pcpu_hot + X86_top_of_stack),%r12
.else
movq %rsp,%r12
.endif
andq $~(THREAD_SIZE_asm-1),%r12
movq (%r12),%r12
#endif
.endm
/* jmp with an int3 speculation barrier appended under CONFIG_SLS. */
.macro pax_jmp target:req
jmp \target
#ifdef CONFIG_SLS
int3
#endif
.endm
/*
 * Emit a RAP return location: an unwind-hinted landing pad carrying the
 * expected return hash, padded with int3 (0xcc) up to the fixed hash
 * offset, plus optional rethook/fgraph trampoline slots.
 */
.macro rap_retloc callee="" hashval=0 rethooks=1 annotation=
#ifdef CONFIG_PAX_RAP_RET
/* exactly one of \callee (symbolic hash) or \hashval may identify the hash */
.ifb \callee
.ifeq \hashval
.error "either 'callee' or 'hashval' must be specified"
.endif
.endif
.Lrap_retloc_start_\@:
UNWIND_HINT_RETLOC
\annotation
pax_jmp .Lrap_retloc_end_\@
.skip RAP_RET_HASH_OFFSET-RAP_HASH_INSN_LEN-(.-.Lrap_retloc_start_\@),0xcc
.ifnb \callee
ASM_RAP_RET_HASH(__rap_hash_ret_\callee)
.elseif \hashval
ASM_RAP_RET_HASH(\hashval)
.endif
/* \rethooks == 0: skip the rethook/fgraph trampoline slots below */
.ifeq \rethooks
.Lrap_retloc_end_\@:
.exitm
.endif
#ifdef CONFIG_RETHOOK
/* hand-encoded jmp to arch_rethook_trampoline, followed by its own hash */
.byte 0xe9 /* JMP32_INSN_OPCODE */
.long arch_rethook_trampoline - .Lrap_retloc_rethook_\@
.Lrap_retloc_rethook_\@:
#ifdef CONFIG_SLS
int3
#endif
.ifnb \callee
ASM_RAP_RET_HASH(__rap_hash_ret_\callee)
.elseif \hashval
ASM_RAP_RET_HASH(\hashval)
.endif
#endif
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* same again for the function-graph return_to_handler */
.byte 0xe9 /* JMP32_INSN_OPCODE */
.long return_to_handler - .Lrap_retloc_fgraph_\@
.Lrap_retloc_fgraph_\@:
#ifdef CONFIG_SLS
int3
#endif
.ifnb \callee
ASM_RAP_RET_HASH(__rap_hash_ret_\callee)
.elseif \hashval
ASM_RAP_RET_HASH(\hashval)
.endif
#endif
.Lrap_retloc_end_\@:
#else
\annotation
#endif
.endm
#ifdef CONFIG_PAX_RAP_RET
/* call \target followed by a rap_retloc for the matching return hash */
.macro rap_call target:req hashsym="" hashval=0 sym="" lfence=0 rethooks=1 annotation=
// .p2align 5,,2
.ifnb \sym
SYM_INNER_LABEL(\sym, SYM_L_GLOBAL)
ANNOTATE_NOENDBR
.endif
.if \lfence
lfence
.endif
\annotation
call \target
.ifnb \hashsym
rap_retloc callee=\hashsym, rethooks=\rethooks
.elseif \hashval
rap_retloc hashval=\hashval, rethooks=\rethooks
.else
rap_retloc callee=\target, rethooks=\rethooks
.endif
.endm
.macro rap_ret func:req
RET
.endm
#endif
.macro pax_indirect_jmp target:req
pax_jmp *\target
.endm
/* Direct call that also exports \sym as a global entry label. */
.macro pax_direct_call_global target:req sym:req
#ifdef CONFIG_SLS
xor %eax, %eax
#endif
#ifdef CONFIG_PAX_RAP_RET
rap_call \target, sym=\sym, rethooks=0
#else
ANNOTATE_NOENDBR
SYM_INNER_LABEL(\sym, SYM_L_GLOBAL)
call \target
#endif
.endm
/* Indirect call through a register, with optional lfence and RAP hash. */
.macro pax_indirect_call target:req hashsym:req lfence=0 skip_target_check=0
#ifdef CONFIG_SLS
.ifc \target,%_ASM_AX
.else
xor %eax, %eax
.endif
#endif
/* .ifeq is true for 0, i.e. the check below runs unless skip_target_check=1 */
.ifeq \skip_target_check
/*
 * Ensure 'target' isn't a memory reference, even though the macros
 * could cope with it and an indirect call using a memory reference is
 * just fine -- instruction wise. However, the alternatives patching
 * code wouldn't handle it, i.e. not relocate it, leading to fireworks
 * at runtime.
 *
 * There's one exception: PARAVIRT. It does exactly that, indirect
 * calls using a memory reference. However, it also has its own
 * patching machinery which can handle this case just fine.
 */
.pushsection .discard.pax_indirect_call_check
xor \target, \target
.popsection
.endif
#ifdef CONFIG_PAX_RAP_RET
rap_call *\target, hashsym=\hashsym, annotation=ANNOTATE_RETPOLINE_SAFE, lfence=\lfence
#else
.if \lfence
lfence
.endif
ANNOTATE_RETPOLINE_SAFE
call *\target
#endif
.endm
.macro pax_direct_call target:req hashsym="" hashval=0
#ifdef CONFIG_PAX_RAP_RET
rap_call \target, hashsym=\hashsym, hashval=\hashval
#else
call \target
#endif
.endm
/* Paravirt sites use memory-reference targets; skip the relocation check. */
.macro pax_paravirt_call target:req hashsym:req
pax_indirect_call \target, \hashsym, skip_target_check=1
.endm
/* pax_direct_call preceded by the SLS zeroing of %eax. */
.macro pax_direct_sls_call target:req hashsym="" hashval=0
#ifdef CONFIG_SLS
xor %eax, %eax
#endif
pax_direct_call \target, hashsym=\hashsym, hashval=\hashval
.endm
.macro pax_sls_call target:req
#ifdef CONFIG_SLS
xor %eax, %eax
#endif
call \target
.endm
/* Return, tagging the return address first (KERNEXEC) and via RAP if enabled. */
.macro pax_ret func:req
pax_force_retaddr
#ifdef CONFIG_PAX_RAP_RET
rap_ret \func
#else
RET
#endif
.endm
/* expands to pax_jmp under RETHUNK, and will add the appropriate SLS barrier */
.macro pax_ret_nospec
RET
.endm
/* Return via the return thunk, recording the site in .return_sites. */
.macro pax_ret_nospec_alternative
#ifdef CONFIG_RETHUNK
.pushsection .return_sites,"a",@progbits
.long .Lreturn_site_\@ - .
.popsection
.Lreturn_site_\@:
pax_jmp __x86_return_thunk
#else
RET
#endif
.endm
/*
* Issue one struct alt_instr descriptor entry (need to put it into
* the section .altinstructions, see below). This entry contains
* enough information for the alternatives patching code to patch an
* instruction. See apply_alternatives().
*/
/* Field order and widths must match struct alt_instr (s32,s32,u32,u8,u8). */
.macro altinstr_entry orig alt ft_flags orig_len alt_len
.long \orig - .
.long \alt - .
.4byte \ft_flags
.byte \orig_len
.byte \alt_len
.endm
/*
* Define an alternative between two instructions. If @ft_flags is
* present, early code in apply_alternatives() replaces @oldinstr with
* @newinstr. ".skip" directive takes care of proper instruction padding
* in case @newinstr is longer than @oldinstr.
*/
.macro ALTERNATIVE oldinstr, newinstr, ft_flags
/* statically disabled alternative: just emit the original instruction */
.if \ft_flags & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
\oldinstr
.exitm
.endif
# define old_len .Lold_pad_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_\@
# define new_len .Lnew_end_\@-.Lnew_\@
.Lold_\@\():
\oldinstr
.Lold_pad_\@\():
/* NOP-pad the original up to the replacement length, if longer */
.skip -(((new_len)-(old_len)) > 0) * ((new_len)-(old_len)),0x90
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_\@,.Lnew_\@,\ft_flags,old_full_len,new_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew_\@\():
\newinstr
.Lnew_end_\@\():
.popsection
# undef old_len
# undef old_full_len
# undef new_len
.endm
/*
* gas compatible max based on the idea from:
* http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
*
* The additional "-" is needed because gas uses a "true" value of -1.
*/
/* Branchless max of 2/3/4 gas expressions; see the comment above. */
#define alt_max_2(a, b) ((a) ^ (((a) ^ (b)) & -(-((a) < (b)))))
#define alt_max_3(a, b, c) (alt_max_2(alt_max_2(a, b), c))
#define alt_max_4(a, b, c, d) (alt_max_2(alt_max_3(a, b, c), d))
/*
* Same as ALTERNATIVE macro above but for two alternatives. If CPU
* has @ft_flags1, it replaces @oldinstr with @newinstr1. If CPU has
* @ft_flags2, it replaces @oldinstr with @newinstr2.
*/
.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
/* statically disabled alternatives degrade to the single-alternative macro */
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE "\oldinstr", "\newinstr2", \ft_flags2
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE "\oldinstr", "\newinstr1", \ft_flags1
.exitm
.endif
# define old_len .Lold_pad_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
.Lold_\@\():
\oldinstr
.Lold_pad_\@\():
/* NOP-pad the original up to the longer replacement */
.skip -((alt_max_2(new1_len, new2_len) - (old_len)) > 0) * \
(alt_max_2(new1_len, new2_len) - (old_len)),0x90
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_\@,.Lnew1_\@,\ft_flags1,old_full_len,new1_len
altinstr_entry .Lold_\@,.Lnew2_\@,\ft_flags2,old_full_len,new2_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@:
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new2_len
.endm
/*
 * Like ALTERNATIVE_2, but the NOP padding is emitted BEFORE the original
 * instruction, and each replacement is likewise front-padded to the common
 * full length, so the patched entries cover the padding too.
 */
.macro ALTERNATIVE_2_PREPAD oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_PREPAD "\oldinstr", "\newinstr2", \ft_flags2
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_PREPAD "\oldinstr", "\newinstr1", \ft_flags1
.exitm
.endif
# define old_len .Lold_end_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_pad_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new1_full_len .Lnew1_end_\@-.Lnew1_pad_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
# define new2_full_len .Lnew2_end_\@-.Lnew2_pad_\@
.Lold_pad_\@\():
.skip -((alt_max_2(new1_len, new2_len) - (old_len)) > 0) * \
(alt_max_2(new1_len, new2_len) - (old_len)),0x90
.Lold_\@\():
\oldinstr
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_pad_\@,.Lnew1_pad_\@,\ft_flags1,old_full_len,new1_full_len
altinstr_entry .Lold_pad_\@,.Lnew2_pad_\@,\ft_flags2,old_full_len,new2_full_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_pad_\@\():
.skip -((alt_max_2(new2_len, old_len) - (new1_len)) > 0) * \
(alt_max_2(new2_len, old_len) - (new1_len)),0x90
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_pad_\@\():
.skip -((alt_max_2(new1_len, old_len) - (new2_len)) > 0) * \
(alt_max_2(new1_len, old_len) - (new2_len)),0x90
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@\():
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new1_full_len
# undef new2_len
# undef new2_full_len
.endm
/*
* Same as ALTERNATIVE macro above but for three alternatives. If CPU
* has @ft_flags1, it replaces @oldinstr with @newinstr1. If CPU has
* @ft_flags2, it replaces @oldinstr with @newinstr2. If CPU has
* @ft_flags3, it replaces @oldinstr with @newinstr3.
*/
.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
/* statically disabled alternatives degrade to ALTERNATIVE_2 */
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2 "\oldinstr", "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2 "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr3", \ft_flags3
.exitm
.endif
.if \ft_flags3 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2 "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2
.exitm
.endif
# define old_len .Lold_pad_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
# define new3_len .Lnew3_end_\@-.Lnew3_\@
.Lold_\@\():
\oldinstr
.Lold_pad_\@\():
/* NOP-pad the original up to the longest replacement */
.skip -((alt_max_3(new1_len, new2_len, new3_len) - (old_len)) > 0) * \
(alt_max_3(new1_len, new2_len, new3_len) - (old_len)),0x90
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_\@,.Lnew1_\@,\ft_flags1,old_full_len,new1_len
altinstr_entry .Lold_\@,.Lnew2_\@,\ft_flags2,old_full_len,new2_len
altinstr_entry .Lold_\@,.Lnew3_\@,\ft_flags3,old_full_len,new3_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@:
.Lnew3_\@\():
\newinstr3
.Lnew3_end_\@:
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new2_len
# undef new3_len
.endm
/* Three-way variant of ALTERNATIVE_2_PREPAD: padding precedes each sequence. */
.macro ALTERNATIVE_3_PREPAD oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2_PREPAD "\oldinstr", "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2_PREPAD "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr3", \ft_flags3
.exitm
.endif
.if \ft_flags3 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_2_PREPAD "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2
.exitm
.endif
# define old_len .Lold_end_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_pad_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new1_full_len .Lnew1_end_\@-.Lnew1_pad_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
# define new2_full_len .Lnew2_end_\@-.Lnew2_pad_\@
# define new3_len .Lnew3_end_\@-.Lnew3_\@
# define new3_full_len .Lnew3_end_\@-.Lnew3_pad_\@
.Lold_pad_\@\():
.skip -((alt_max_3(new1_len, new2_len, new3_len) - (old_len)) > 0) * \
(alt_max_3(new1_len, new2_len, new3_len) - (old_len)),0x90
.Lold_\@\():
\oldinstr
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_pad_\@,.Lnew1_pad_\@,\ft_flags1,old_full_len,new1_full_len
altinstr_entry .Lold_pad_\@,.Lnew2_pad_\@,\ft_flags2,old_full_len,new2_full_len
altinstr_entry .Lold_pad_\@,.Lnew3_pad_\@,\ft_flags3,old_full_len,new3_full_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_pad_\@\():
.skip -((alt_max_3(new3_len, new2_len, old_len) - (new1_len)) > 0) * \
(alt_max_3(new3_len, new2_len, old_len) - (new1_len)),0x90
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_pad_\@\():
.skip -((alt_max_3(new3_len, new1_len, old_len) - (new2_len)) > 0) * \
(alt_max_3(new3_len, new1_len, old_len) - (new2_len)),0x90
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@\():
.Lnew3_pad_\@\():
.skip -((alt_max_3(new2_len, new1_len, old_len) - (new3_len)) > 0) * \
(alt_max_3(new2_len, new1_len, old_len) - (new3_len)),0x90
.Lnew3_\@\():
\newinstr3
.Lnew3_end_\@:
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new1_full_len
# undef new2_len
# undef new2_full_len
# undef new3_len
# undef new3_full_len
.endm
/*
* Same as ALTERNATIVE macro above but for four alternatives. If CPU
* has @ft_flags1, it replaces @oldinstr with @newinstr1. If CPU has
* @ft_flags2, it replaces @oldinstr with @newinstr2. If CPU has
* @ft_flags3, it replaces @oldinstr with @newinstr3. If CPU has
* @ft_flags4, it replaces @oldinstr with @newinstr4.
*/
.macro ALTERNATIVE_4 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3, newinstr4, ft_flags4
/* statically disabled alternatives degrade to ALTERNATIVE_3 */
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3 "\oldinstr", "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3 "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr3", \ft_flags3, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags3 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3 "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags4 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3 "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3
.exitm
.endif
# define old_len .Lold_pad_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
# define new3_len .Lnew3_end_\@-.Lnew3_\@
# define new4_len .Lnew4_end_\@-.Lnew4_\@
.Lold_\@\():
\oldinstr
.Lold_pad_\@\():
/* NOP-pad the original up to the longest replacement */
.skip -((alt_max_4(new1_len, new2_len, new3_len, new4_len) - (old_len)) > 0) * \
(alt_max_4(new1_len, new2_len, new3_len, new4_len) - (old_len)),0x90
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_\@,.Lnew1_\@,\ft_flags1,old_full_len,new1_len
altinstr_entry .Lold_\@,.Lnew2_\@,\ft_flags2,old_full_len,new2_len
altinstr_entry .Lold_\@,.Lnew3_\@,\ft_flags3,old_full_len,new3_len
altinstr_entry .Lold_\@,.Lnew4_\@,\ft_flags4,old_full_len,new4_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@:
.Lnew3_\@\():
\newinstr3
.Lnew3_end_\@:
.Lnew4_\@\():
\newinstr4
.Lnew4_end_\@:
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new2_len
# undef new3_len
# undef new4_len
.endm
/*
 * Same as ALTERNATIVE_4 above but the NOP padding is emitted BEFORE the
 * original/replacement instructions rather than after them, and every
 * replacement is likewise padded up to the common slot size so that all
 * five candidates have identical total length (the *_full_len values).
 */
.macro ALTERNATIVE_4_PREPAD oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3, newinstr4, ft_flags4
/* An alternative disabled at compile time degrades to ALTERNATIVE_3_PREPAD. */
.if \ft_flags1 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3_PREPAD "\oldinstr", "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags2 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3_PREPAD "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr3", \ft_flags3, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags3 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3_PREPAD "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2, "\newinstr4", \ft_flags4
.exitm
.endif
.if \ft_flags4 & (ALT_FLAG_DISABLE << ALT_FLAGS_SHIFT)
ALTERNATIVE_3_PREPAD "\oldinstr", "\newinstr1", \ft_flags1, "\newinstr2", \ft_flags2, "\newinstr3", \ft_flags3
.exitm
.endif
/* *_len is the bare instruction length, *_full_len includes the pre-padding. */
# define old_len .Lold_end_\@-.Lold_\@
# define old_full_len .Lold_end_\@-.Lold_pad_\@
# define new1_len .Lnew1_end_\@-.Lnew1_\@
# define new1_full_len .Lnew1_end_\@-.Lnew1_pad_\@
# define new2_len .Lnew2_end_\@-.Lnew2_\@
# define new2_full_len .Lnew2_end_\@-.Lnew2_pad_\@
# define new3_len .Lnew3_end_\@-.Lnew3_\@
# define new3_full_len .Lnew3_end_\@-.Lnew3_pad_\@
# define new4_len .Lnew4_end_\@-.Lnew4_\@
# define new4_full_len .Lnew4_end_\@-.Lnew4_pad_\@
.Lold_pad_\@\():
/* Each slot is padded up to the max of the other four lengths (0 if max). */
.skip -((alt_max_4(new1_len, new2_len, new3_len, new4_len) - (old_len)) > 0) * \
(alt_max_4(new1_len, new2_len, new3_len, new4_len) - (old_len)),0x90
.Lold_\@\():
\oldinstr
.Lold_end_\@\():
.pushsection .altinstructions,"a"
altinstr_entry .Lold_pad_\@,.Lnew1_pad_\@,\ft_flags1,old_full_len,new1_full_len
altinstr_entry .Lold_pad_\@,.Lnew2_pad_\@,\ft_flags2,old_full_len,new2_full_len
altinstr_entry .Lold_pad_\@,.Lnew3_pad_\@,\ft_flags3,old_full_len,new3_full_len
altinstr_entry .Lold_pad_\@,.Lnew4_pad_\@,\ft_flags4,old_full_len,new4_full_len
.popsection
.pushsection .altinstr_replacement,"ax"
.Lnew1_pad_\@\():
.skip -((alt_max_4(new4_len, new3_len, new2_len, old_len) - (new1_len)) > 0) * \
(alt_max_4(new4_len, new3_len, new2_len, old_len) - (new1_len)),0x90
.Lnew1_\@\():
\newinstr1
.Lnew1_end_\@\():
.Lnew2_pad_\@\():
.skip -((alt_max_4(new4_len, new3_len, new1_len, old_len) - (new2_len)) > 0) * \
(alt_max_4(new4_len, new3_len, new1_len, old_len) - (new2_len)),0x90
.Lnew2_\@\():
\newinstr2
.Lnew2_end_\@\():
.Lnew3_pad_\@\():
.skip -((alt_max_4(new4_len, new2_len, new1_len, old_len) - (new3_len)) > 0) * \
(alt_max_4(new4_len, new2_len, new1_len, old_len) - (new3_len)),0x90
.Lnew3_\@\():
\newinstr3
.Lnew3_end_\@\():
.Lnew4_pad_\@\():
.skip -((alt_max_4(new3_len, new2_len, new1_len, old_len) - (new4_len)) > 0) * \
(alt_max_4(new3_len, new2_len, new1_len, old_len) - (new4_len)),0x90
.Lnew4_\@\():
\newinstr4
.Lnew4_end_\@\():
.popsection
# undef old_len
# undef old_full_len
# undef new1_len
# undef new1_full_len
# undef new2_len
# undef new2_full_len
# undef new3_len
# undef new3_full_len
# undef new4_len
# undef new4_full_len
.endm
/*
 * Overflow check for a 64-bit refcount updated via two 32-bit memory
 * operations.  Placed right after the arithmetic insn: if OF is set, jump
 * to an out-of-line handler that saturates the counter and raises
 * X86_REFCOUNT_VECTOR; the exception table entry resumes execution after
 * the refcount operation.
 *
 * @section: refcount64_overflow or refcount64_underflow; also names the
 *           out-of-line text section (.text.<section>.o).
 * @counter: memory operand addressing the low 32 bits of the counter.
 *           NOTE(review): the high half is addressed by textually
 *           prepending "4" (e.g. "(%reg)" -> "4(%reg)") — assumes
 *           \counter carries no existing displacement; confirm callers.
 */
.macro __PAX_REFCOUNT section, counter
#ifdef CONFIG_PAX_REFCOUNT
jo .Lrefcount_error_\@
.pushsection .text.\section\().o
.Lrefcount_error_\@:
/*
 * NOTE(review): this compares \section as a symbol expression; presumably
 * the string compare ".ifc \section, refcount64_overflow" was intended —
 * verify the underflow expansion actually assembles.
 */
.if \section == refcount64_overflow
/* Saturate at LLONG_MAX on overflow... */
movl $__LONG_LONG_MAX__ & 0xffffffff,\counter
movl $__LONG_LONG_MAX__ >> 32,4\counter
.else
/* ...or at LLONG_MIN on underflow. */
movl $(-__LONG_LONG_MAX__-1) & 0xffffffff,\counter
movl $(-__LONG_LONG_MAX__-1) >> 32,4\counter
.endif
.Lrefcount_report_\@:
int $X86_REFCOUNT_VECTOR
.popsection
.Lrefcount_continue_\@:
/* Resume after the refcount op once the INT handler has reported. */
_ASM_EXTABLE(.Lrefcount_report_\@, .Lrefcount_continue_\@)
#endif
.endm
/* Saturate-and-report handler for 64-bit refcount overflow (OF set). */
.macro PAX_REFCOUNT64_OVERFLOW counter
__PAX_REFCOUNT refcount64_overflow, \counter
.endm
/* Saturate-and-report handler for 64-bit refcount underflow (OF set). */
.macro PAX_REFCOUNT64_UNDERFLOW counter
__PAX_REFCOUNT refcount64_underflow, \counter
.endm
/*
 * If @ft_flags is set, patch in @newinstr_yes, otherwise @newinstr_no.
 * Implemented as ALTERNATIVE_2 with the "no" case keyed on the negated
 * feature via ALT_NOT(), so exactly one of the two replacements applies.
 */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
ALTERNATIVE_2 oldinstr, newinstr_no, ALT_NOT(ft_flags), \
newinstr_yes, ft_flags
#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_ALTERNATIVE_H */