linux/arch/x86/kernel/static_call.c
// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

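/*
 * The four instruction forms a static_call site or trampoline can be
 * patched to: a direct CALL or a 5-byte NOP at an inline call site, a
 * direct JMP in the trampoline (or at a tail-call site), and a RET when
 * a trampoline / tail-call site has no target (NULL func).
 */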
enum insn_type {
        CALL = 0, /* site call */
        NOP = 1,  /* site cond-call */
        JMP = 2,  /* tramp / site tail-call */
        RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * data16 data16 xorq %rax, %rax - a single 5 byte instruction that clears %rax
 * The REX.W cancels the effect of any data16.
 */
static const u8 xor5rax[] = { 0x66, 0x66, 0x48, 0x31, 0xc0 };

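/*
 * Patch the instruction at @insn into the form selected by @type, targeting
 * @func.  CALL and JMP are emitted as 5-byte rel32 instructions by
 * text_gen_insn(); RET uses RET_INSN_SIZE.
 */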
static void __ref __static_call_transform(void *insn, enum insn_type type, void *func)
{
        const void *emulate = NULL;
        int size = CALL_INSN_SIZE;
        const void *code;

        switch (type) {
        case CALL:
                code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
                if (func == &__static_call_return0) {
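                        /*
                         * Rather than call the return-0 helper, inline a
                         * 5-byte "xor %rax, %rax" at the site; keep the
                         * generated CALL so text_poke_bp() can emulate it
                         * for CPUs that hit the INT3 mid-transition.
                         */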
                        emulate = code;
                        code = &xor5rax;
                }

                break;

        case NOP:
                code = x86_nops[5];
                break;

        case JMP:
                code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
                break;

        case RET:
                code = text_gen_insn(RET_INSN_OPCODE, insn, func);
                size = RET_INSN_SIZE;
                break;
        }

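        /* Already in the desired state, nothing to patch. */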
        if (memcmp(insn, code, size) == 0)
                return;

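        /*
         * While booting, only the boot CPU runs and the text is not yet
         * read-only, so text_poke_early() can write it directly.  After
         * that, text_poke_bp() must do the INT3-based live patching,
         * with @emulate (if set) telling it which instruction to emulate
         * while the transition is in progress.
         */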
        if (unlikely(system_state == SYSTEM_BOOTING))
                return text_poke_early(insn, code, size);

        text_poke_bp(insn, code, size, emulate);
}

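/*
 * Sanity-check that the text at @insn looks like something this code could
 * have produced before it gets patched: JMP or RET for tail sites, CALL, a
 * 5-byte NOP or the xor5rax sequence for regular call sites.
 */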
static void __static_call_validate(void *insn, bool tail)
{
        u8 opcode = *(u8 *)insn;

        if (tail) {
                if (opcode == JMP32_INSN_OPCODE ||
                    opcode == RET_INSN_OPCODE)
                        return;
        } else {
                if (opcode == CALL_INSN_OPCODE ||
                    !memcmp(insn, x86_nops[5], 5) ||
                    !memcmp(insn, xor5rax, 5))
                        return;
        }

        /*
         * If we ever trigger this, our text is corrupt, we'll probably not live long.
         */
        WARN_ONCE(1, "unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
        /*
         * Encode the following table without branches:
         *
         *      tail    null    insn
         *      -----+-------+------
         *        0  |   0   |  CALL
         *        0  |   1   |  NOP
         *        1  |   0   |  JMP
         *        1  |   1   |  RET
         */
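        /* e.g. tail=1, null=0: 2*1 + 0 == 2 == JMP, matching the enum values. */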
        return 2*tail + null;
}

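/*
 * Arch hook invoked by the core static_call code: repatch the out-of-line
 * trampoline @tramp and, when the kernel supports inline static calls
 * (CONFIG_HAVE_STATIC_CALL_INLINE), the inline call @site, all under
 * text_mutex.  A NULL @func selects the NOP/RET forms via __sc_insn().
 */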
void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
        mutex_lock(&text_mutex);

        if (tramp) {
                __static_call_validate(tramp, true);
                __static_call_transform(tramp, __sc_insn(!func, true), func);
        }

        if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
                __static_call_validate(site, tail);
                __static_call_transform(site, __sc_insn(!func, tail), func);
        }

        mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);
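
/*
 * Illustrative sketch (not part of the original file): how this code gets
 * exercised.  A caller declares a static call with the generic API from
 * <linux/static_call.h> and updates it at runtime; static_call_update()
 * ends up in arch_static_call_transform() above to repatch the trampoline
 * and, with CONFIG_HAVE_STATIC_CALL_INLINE, every inline call site.
 * The names my_hook, default_impl and new_impl are made up for the example.
 *
 *	DEFINE_STATIC_CALL(my_hook, default_impl);
 *	...
 *	static_call(my_hook)(arg);              // patched direct CALL
 *	static_call_update(my_hook, new_impl);  // live-repatched via this file
 */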