/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * bpf_jit.h: BPF JIT compiler for PPC
 *
 * Copyright 2011 Matt Evans <matt@ozlabs.org>, IBM Corporation
 *	     2016 Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
 */
#ifndef _BPF_JIT_H
#define _BPF_JIT_H

#ifndef __ASSEMBLY__

#include <asm/types.h>
#include <asm/ppc-opcode.h>
#include <asm/code-patching.h>

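/*
 * With the ELFv1 ABI, a function pointer refers to a function descriptor:
 * three doublewords (entry address, TOC pointer, environment pointer),
 * which is why 24 bytes are reserved at the start of the JITed image below.
 */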
#ifdef PPC64_ELF_ABI_v1
#define FUNCTION_DESCR_SIZE	24
#else
#define FUNCTION_DESCR_SIZE	0
#endif

#define PLANT_INSTR(d, idx, instr)					      \
	do { if (d) { (d)[idx] = instr; } idx++; } while (0)
#define EMIT(instr)		PLANT_INSTR(image, ctx->idx, instr)
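/*
 * Note that PLANT_INSTR() only writes when a destination buffer is
 * provided, but always advances the index: a sizing pass can run with
 * image == NULL to learn the final code size, and a later pass with a
 * real buffer emits the instructions at the same offsets.
 */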

/* Long jump; (unconditional 'branch') */
#define PPC_JMP(dest)							      \
	do {								      \
		long offset = (long)(dest) - (ctx->idx * 4);		      \
		if (!is_offset_in_branch_range(offset)) {		      \
			pr_err_ratelimited("Branch offset 0x%lx (@%u) out of range\n", offset, ctx->idx);	\
			return -ERANGE;					      \
		}							      \
		EMIT(PPC_INST_BRANCH | (offset & 0x03fffffc));		      \
	} while (0)
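/*
 * The mask above keeps the 24-bit, word-aligned LI field of the I-form
 * branch, giving an unconditional reach of +/- 32MB from the current
 * instruction; is_offset_in_branch_range() rejects anything beyond that.
 */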
/* "cond" here covers BO:BI fields. */
#define PPC_BCC_SHORT(cond, dest)					      \
	do {								      \
		long offset = (long)(dest) - (ctx->idx * 4);		      \
		if (!is_offset_in_cond_branch_range(offset)) {		      \
			pr_err_ratelimited("Conditional branch offset 0x%lx (@%u) out of range\n", offset, ctx->idx);	\
			return -ERANGE;					      \
		}							      \
		EMIT(PPC_INST_BRANCH_COND | (((cond) & 0x3ff) << 16) | (offset & 0xfffc));	\
	} while (0)
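/*
 * Conditional (B-form) branches carry only a 14-bit, word-aligned BD
 * field (the 0xfffc mask above), so their reach is +/- 32KB; hence the
 * "SHORT" suffix and the long-range fallback in PPC_BCC() below.
 */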

/* Sign-extended 32-bit immediate load */
#define PPC_LI32(d, i)		do {					      \
		if ((int)(uintptr_t)(i) >= -32768 &&			      \
				(int)(uintptr_t)(i) < 32768)		      \
			EMIT(PPC_RAW_LI(d, i));				      \
		else {							      \
			EMIT(PPC_RAW_LIS(d, IMM_H(i)));			      \
			if (IMM_L(i))					      \
				EMIT(PPC_RAW_ORI(d, d, IMM_L(i)));	      \
		} } while(0)
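/*
 * For example, PPC_LI32(d, 0x12345678) expands to
 *	lis	d, 0x1234
 *	ori	d, d, 0x5678
 * while an immediate that fits in 16 signed bits is a single li.
 */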

#define PPC_LI64(d, i)		do {					      \
		if ((long)(i) >= -2147483648 &&				      \
				(long)(i) < 2147483648)			      \
			PPC_LI32(d, i);					      \
		else {							      \
			if (!((uintptr_t)(i) & 0xffff800000000000ULL))	      \
				EMIT(PPC_RAW_LI(d, ((uintptr_t)(i) >> 32) &   \
						0xffff));		      \
			else {						      \
				EMIT(PPC_RAW_LIS(d, ((uintptr_t)(i) >> 48))); \
				if ((uintptr_t)(i) & 0x0000ffff00000000ULL)   \
					EMIT(PPC_RAW_ORI(d, d,		      \
						((uintptr_t)(i) >> 32) & 0xffff)); \
			}						      \
			EMIT(PPC_RAW_SLDI(d, d, 32));			      \
			if ((uintptr_t)(i) & 0x00000000ffff0000ULL)	      \
				EMIT(PPC_RAW_ORIS(d, d,			      \
					((uintptr_t)(i) >> 16) & 0xffff));    \
			if ((uintptr_t)(i) & 0x000000000000ffffULL)	      \
				EMIT(PPC_RAW_ORI(d, d, (uintptr_t)(i) &	      \
						0xffff));		      \
		} } while (0)
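/*
 * Worst case this costs five instructions, e.g. for
 * PPC_LI64(d, 0x1234567890abcdefULL):
 *	lis	d, 0x1234
 *	ori	d, d, 0x5678
 *	sldi	d, d, 32
 *	oris	d, d, 0x90ab
 *	ori	d, d, 0xcdef
 * Halfwords that are zero are simply skipped.
 */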

#ifdef CONFIG_PPC64
#define PPC_FUNC_ADDR(d,i) do { PPC_LI64(d, i); } while(0)
#else
#define PPC_FUNC_ADDR(d,i) do { PPC_LI32(d, i); } while(0)
#endif
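/*
 * Loading a function address therefore costs up to five instructions on
 * ppc64 and up to two on ppc32, per the immediate-load helpers above.
 */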

/*
 * The fly in the ointment of code size changing from pass to pass is
 * avoided by padding the short branch case with a NOP. If code size differs
 * with different branch reaches we will have the issue of code moving from
 * one pass to the next and will need a few passes to converge on a stable
 * state.
 */
#define PPC_BCC(cond, dest)	do {					      \
		if (is_offset_in_cond_branch_range((long)(dest) - (ctx->idx * 4))) {	\
			PPC_BCC_SHORT(cond, dest);			      \
			EMIT(PPC_RAW_NOP());				      \
		} else {						      \
			/* Flip the 'T or F' bit to invert comparison */      \
			PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, (ctx->idx+2)*4);  \
			PPC_JMP(dest);					      \
		} } while(0)
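/*
 * Both arms are two instructions, so branch reach never changes the code
 * size. For example, PPC_BCC(COND_EQ, dest) becomes either
 *	beq	dest	;	nop
 * when dest is within +/- 32KB, or
 *	bne	+8	;	b	dest
 * (an inverted branch skipping over a long jump) when it is not.
 */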

/* To create a branch condition, select a bit of cr0... */
#define CR0_LT		0
#define CR0_GT		1
#define CR0_EQ		2
/* ...and modify BO[3] */
#define COND_CMP_TRUE	0x100
#define COND_CMP_FALSE	0x000
/* Together, they make all required comparisons: */
#define COND_GT		(CR0_GT | COND_CMP_TRUE)
#define COND_GE		(CR0_LT | COND_CMP_FALSE)
#define COND_EQ		(CR0_EQ | COND_CMP_TRUE)
#define COND_NE		(CR0_EQ | COND_CMP_FALSE)
#define COND_LT		(CR0_LT | COND_CMP_TRUE)
#define COND_LE		(CR0_GT | COND_CMP_FALSE)
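/*
 * The low bits select the cr0 field (BI) and COND_CMP_TRUE flips BO
 * between "branch if the bit is set" and "branch if it is clear"; e.g.
 * COND_GE branches when cr0's LT bit is clear. Each pairing above thus
 * maps one comparison onto a single bc instruction.
 */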

#endif

#endif