// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2025 Meta Platforms, Inc. and affiliates. */

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "../../../include/linux/filter.h"
#include "bpf_misc.h"
SEC("raw_tp")
__description("may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_simple(void)
{
	asm volatile (
	".8byte %[may_goto];"
	"r0 = 1;"
	".8byte %[may_goto];"
	"exit;"
	:
	: __imm_insn(may_goto, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
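
/*
 * Two back-to-back pairs of may_goto 0: every insn falls through to its
 * jump target, so both batches are removed wholesale.
 */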
SEC("raw_tp")
__description("batch 2 of may_goto 0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_0(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto1];"
	"r0 = 1;"
	".8byte %[may_goto1];"
	".8byte %[may_goto1];"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
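
/*
 * Offsets 2/1/0 make all three may_goto insns in each batch branch to
 * the first instruction after the batch, which is also the fall-through
 * path, so both batches are still fully removable.
 */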
SEC("raw_tp")
__description("may_goto batch with offsets 2/1/0")
__arch_x86_64
__xlated("0: r0 = 1")
__xlated("1: exit")
__success
__naked void may_goto_batch_1(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto2];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	".8byte %[may_goto1];"
	".8byte %[may_goto2];"
	".8byte %[may_goto3];"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto2, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 1 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
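
/*
 * With offsets 2/0, may_goto1 branches past "r0 = 1" to "r0 = 2", so it
 * is a real conditional and must stay; only may_goto3 (offset 0) is
 * removed. The __xlated output below is the x86_64 expansion of the
 * surviving may_goto: a 65535 loop count and a second slot on the
 * stack, with the count periodically refreshed through an arch-specific
 * helper that is rendered as "call unknown" in the xlated dump.
 */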
SEC("raw_tp")
__description("may_goto batch with offsets 2/0 - x86_64")
__arch_x86_64
__xlated("0: *(u64 *)(r10 -16) = 65535")
__xlated("1: *(u64 *)(r10 -8) = 0")
__xlated("2: r11 = *(u64 *)(r10 -16)")
__xlated("3: if r11 == 0x0 goto pc+6")
__xlated("4: r11 -= 1")
__xlated("5: if r11 != 0x0 goto pc+2")
__xlated("6: r11 = -16")
__xlated("7: call unknown")
__xlated("8: *(u64 *)(r10 -16) = r11")
__xlated("9: r0 = 1")
__xlated("10: r0 = 2")
__xlated("11: exit")
__success
__naked void may_goto_batch_2_x86_64(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	"r0 = 2;"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}
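
/*
 * Same program on arm64, which uses the plain may_goto expansion: the
 * surviving may_goto becomes a BPF_MAX_LOOPS (8388608) counter spilled
 * to the stack and decremented in place.
 */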
SEC("raw_tp")
__description("may_goto batch with offsets 2/0 - arm64")
__arch_arm64
__xlated("0: *(u64 *)(r10 -8) = 8388608")
__xlated("1: r11 = *(u64 *)(r10 -8)")
__xlated("2: if r11 == 0x0 goto pc+3")
__xlated("3: r11 -= 1")
__xlated("4: *(u64 *)(r10 -8) = r11")
__xlated("5: r0 = 1")
__xlated("6: r0 = 2")
__xlated("7: exit")
__success
__naked void may_goto_batch_2_arm64(void)
{
	asm volatile (
	".8byte %[may_goto1];"
	".8byte %[may_goto3];"
	"r0 = 1;"
	"r0 = 2;"
	"exit;"
	:
	: __imm_insn(may_goto1, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 2 /* offset */, 0)),
	  __imm_insn(may_goto3, BPF_RAW_INSN(BPF_JMP | BPF_JCOND, 0, 0, 0 /* offset */, 0))
	: __clobber_all);
}

char _license[] SEC("license") = "GPL";