// SPDX-License-Identifier: GPL-2.0-only
/*
 * bpf_jit_comp64.c: eBPF JIT compiler
 *
 * Copyright 2016 Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
 *		  IBM Corporation
 *
 * Based on the powerpc classic BPF JIT compiler by Matt Evans
 */
#include <linux/moduleloader.h>
#include <asm/cacheflush.h>
#include <asm/asm-compat.h>
#include <linux/netdevice.h>
#include <linux/filter.h>
#include <linux/if_vlan.h>
#include <asm/kprobes.h>
#include <linux/bpf.h>

#include "bpf_jit64.h"

static inline bool bpf_has_stack_frame(struct codegen_context *ctx)
{
	/*
	 * We only need a stack frame if:
	 * - we call other functions (kernel helpers), or
	 * - the bpf program uses its stack area
	 * The latter condition is deduced from the usage of BPF_REG_FP
	 */
	return ctx->seen & SEEN_FUNC || bpf_is_seen_register(ctx, b2p[BPF_REG_FP]);
}

/*
 * When not setting up our own stackframe, the redzone usage is:
 *
 *		[	prev sp		] <-------------
 *		[	  ...		] 		|
 * sp (r1) --->	[    stack pointer	] --------------
 *		[   nv gpr save area	] BPF_PPC_STACK_SAVE
 *		[    tail_call_cnt	] 8
 *		[    local_tmp_var	] 8
 *		[   unused red zone	] (rest of the red zone)
 */
static int bpf_jit_stack_local(struct codegen_context *ctx)
{
	if (bpf_has_stack_frame(ctx))
		return STACK_FRAME_MIN_SIZE + ctx->stack_size;
	else
		return -(BPF_PPC_STACK_SAVE + 16);
}

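/*
 * The tail call counter sits 8 bytes above the local temporary slot,
 * whether that is inside our own stack frame or in the red zone.
 */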
static int bpf_jit_stack_tailcallcnt(struct codegen_context *ctx)
{
	return bpf_jit_stack_local(ctx) + 8;
}

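/*
 * Save-slot offset (from r1) for non-volatile GPR 'reg'. With a stack
 * frame, the slots sit at the top of our frame; without one, they live
 * in the red zone below the previous stack pointer.
 */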
static int bpf_jit_stack_offsetof(struct codegen_context *ctx, int reg)
{
	if (reg >= BPF_PPC_NVR_MIN && reg < 32)
		return (bpf_has_stack_frame(ctx) ?
			(BPF_PPC_STACKFRAME + ctx->stack_size) : 0)
				- (8 * (32 - reg));

	pr_err("BPF JIT is asking about unknown registers\n");
	BUG();
}

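/* No register reallocation is needed on ppc64 -- this hook is a no-op here. */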
void bpf_jit_realloc_regs(struct codegen_context *ctx)
{
}

void bpf_jit_build_prologue(u32 *image, struct codegen_context *ctx)
{
	int i;

	/*
	 * Initialize tail_call_cnt if we do tail calls.
	 * Otherwise, put in NOPs so that it can be skipped when we are
	 * invoked through a tail call.
	 */
	if (ctx->seen & SEEN_TAILCALL) {
		EMIT(PPC_RAW_LI(b2p[TMP_REG_1], 0));
		/* this goes in the redzone */
		PPC_BPF_STL(b2p[TMP_REG_1], 1, -(BPF_PPC_STACK_SAVE + 8));
	} else {
		EMIT(PPC_RAW_NOP());
		EMIT(PPC_RAW_NOP());
	}

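/*
 * A tail call branches to the target program this many bytes in,
 * skipping the two instructions above (2 * 4 bytes) so that the
 * active tail_call_cnt is preserved across the tail call.
 */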
#define BPF_TAILCALL_PROLOGUE_SIZE	8

	if (bpf_has_stack_frame(ctx)) {
		/*
		 * We need a stack frame, but we don't necessarily need to
		 * save/restore LR unless we call other functions
		 */
		if (ctx->seen & SEEN_FUNC) {
			EMIT(PPC_RAW_MFLR(_R0));
			PPC_BPF_STL(0, 1, PPC_LR_STKOFF);
		}

		PPC_BPF_STLU(1, 1, -(BPF_PPC_STACKFRAME + ctx->stack_size));
	}

	/*
	 * Back up non-volatile regs -- BPF registers 6-10
	 * If we haven't created our own stack frame, we save these
	 * in the protected zone below the previous stack frame
	 */
	for (i = BPF_REG_6; i <= BPF_REG_10; i++)
		if (bpf_is_seen_register(ctx, b2p[i]))
			PPC_BPF_STL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));

	/* Setup frame pointer to point to the bpf stack area */
	if (bpf_is_seen_register(ctx, b2p[BPF_REG_FP]))
		EMIT(PPC_RAW_ADDI(b2p[BPF_REG_FP], 1,
				STACK_FRAME_MIN_SIZE + ctx->stack_size));
}

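/*
 * Common epilogue, shared by the regular exit path and by tail calls,
 * which must restore NVRs and tear down our stack frame before
 * branching into the next program.
 */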
static void bpf_jit_emit_common_epilogue(u32 *image, struct codegen_context *ctx)
{
	int i;

	/* Restore NVRs */
	for (i = BPF_REG_6; i <= BPF_REG_10; i++)
		if (bpf_is_seen_register(ctx, b2p[i]))
			PPC_BPF_LL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));

	/* Tear down our stack frame */
	if (bpf_has_stack_frame(ctx)) {
		EMIT(PPC_RAW_ADDI(1, 1, BPF_PPC_STACKFRAME + ctx->stack_size));
		if (ctx->seen & SEEN_FUNC) {
			PPC_BPF_LL(0, 1, PPC_LR_STKOFF);
			EMIT(PPC_RAW_MTLR(0));
		}
	}
}

void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx)
{
	bpf_jit_emit_common_epilogue(image, ctx);

	/* Move result to r3 */
	EMIT(PPC_RAW_MR(3, b2p[BPF_REG_0]));

	EMIT(PPC_RAW_BLR());
}

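/*
 * Call a helper whose address is known at JIT time. On ELF ABI v1 the
 * address points at a function descriptor, so the actual entry point
 * and TOC are loaded from it before branching via CTR.
 */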
static void bpf_jit_emit_func_call_hlp(u32 *image, struct codegen_context *ctx,
				       u64 func)
{
#ifdef PPC64_ELF_ABI_v1
	/* func points to the function descriptor */
	PPC_LI64(b2p[TMP_REG_2], func);
	/* Load actual entry point from function descriptor */
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_2], 0);
	/* ... and move it to CTR */
	EMIT(PPC_RAW_MTCTR(b2p[TMP_REG_1]));
	/*
	 * Load TOC from function descriptor at offset 8.
	 * We can clobber r2 since we get called through a
	 * function pointer (so caller will save/restore r2)
	 * and since we don't use a TOC ourself.
	 */
	PPC_BPF_LL(2, b2p[TMP_REG_2], 8);
#else
	/* We can clobber r12 */
	PPC_FUNC_ADDR(12, func);
	EMIT(PPC_RAW_MTCTR(12));
#endif
	EMIT(PPC_RAW_BCTRL());
}

void bpf_jit_emit_func_call_rel(u32 *image, struct codegen_context *ctx, u64 func)
{
	unsigned int i, ctx_idx = ctx->idx;

	/* Load function address into r12 */
	PPC_LI64(12, func);

	/*
	 * For bpf-to-bpf function calls, the callee's address is unknown
	 * until the last extra pass. PPC_LI64() above may emit anywhere
	 * between one and five instructions depending on the value being
	 * loaded. Pad the sequence with NOPs so that the number of
	 * instructions emitted -- and hence the image layout -- stays
	 * the same across passes.
	 */
	for (i = ctx->idx - ctx_idx; i < 5; i++)
		EMIT(PPC_RAW_NOP());

#ifdef PPC64_ELF_ABI_v1
	/*
	 * Load TOC from function descriptor at offset 8.
	 * We can clobber r2 since we get called through a
	 * function pointer (so caller will save/restore r2)
	 * and since we don't use a TOC ourself.
	 */
	PPC_BPF_LL(2, 12, 8);
	/* Load actual entry point from function descriptor */
	PPC_BPF_LL(12, 12, 0);
#endif

	EMIT(PPC_RAW_MTCTR(12));
	EMIT(PPC_RAW_BCTRL());
}

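/*
 * Emit the runtime guts of bpf_tail_call(): bounds-check the index,
 * enforce the tail call limit, then branch into the target program
 * just past the tail_call_cnt setup in its prologue, without
 * returning to our caller.
 */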
static void bpf_jit_emit_tail_call(u32 *image, struct codegen_context *ctx, u32 out)
{
	/*
	 * By now, the eBPF program has already setup parameters in r3, r4 and r5:
	 * r3/BPF_REG_1 - pointer to ctx -- passed as is to the next bpf program
	 * r4/BPF_REG_2 - pointer to bpf_array
	 * r5/BPF_REG_3 - index in bpf_array
	 */
	int b2p_bpf_array = b2p[BPF_REG_2];
	int b2p_index = b2p[BPF_REG_3];

	/*
	 * if (index >= array->map.max_entries)
	 *   goto out;
	 */
	EMIT(PPC_RAW_LWZ(b2p[TMP_REG_1], b2p_bpf_array, offsetof(struct bpf_array, map.max_entries)));
	EMIT(PPC_RAW_RLWINM(b2p_index, b2p_index, 0, 0, 31));
	EMIT(PPC_RAW_CMPLW(b2p_index, b2p[TMP_REG_1]));
	PPC_BCC(COND_GE, out);

	/*
	 * if (tail_call_cnt > MAX_TAIL_CALL_CNT)
	 *   goto out;
	 */
	PPC_BPF_LL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));
	EMIT(PPC_RAW_CMPLWI(b2p[TMP_REG_1], MAX_TAIL_CALL_CNT));
	PPC_BCC(COND_GT, out);

	/*
	 * tail_call_cnt++;
	 */
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 1));
	PPC_BPF_STL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));

	/* prog = array->ptrs[index]; */
	EMIT(PPC_RAW_MULI(b2p[TMP_REG_1], b2p_index, 8));
	EMIT(PPC_RAW_ADD(b2p[TMP_REG_1], b2p[TMP_REG_1], b2p_bpf_array));
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], offsetof(struct bpf_array, ptrs));

	/*
	 * if (prog == NULL)
	 *   goto out;
	 */
	EMIT(PPC_RAW_CMPLDI(b2p[TMP_REG_1], 0));
	PPC_BCC(COND_EQ, out);

	/* goto *(prog->bpf_func + prologue_size); */
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], offsetof(struct bpf_prog, bpf_func));
#ifdef PPC64_ELF_ABI_v1
	/* skip past the function descriptor */
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1],
			FUNCTION_DESCR_SIZE + BPF_TAILCALL_PROLOGUE_SIZE));
#else
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], BPF_TAILCALL_PROLOGUE_SIZE));
#endif
	EMIT(PPC_RAW_MTCTR(b2p[TMP_REG_1]));

	/* tear down stack, restore NVRs, ... */
	bpf_jit_emit_common_epilogue(image, ctx);

	EMIT(PPC_RAW_BCTR());
}

/* Assemble the body code between the prologue & epilogue */
int bpf_jit_build_body(struct bpf_prog *fp, u32 *image, struct codegen_context *ctx,
		       u32 *addrs, bool extra_pass)
{
	const struct bpf_insn *insn = fp->insnsi;
	int flen = fp->len;
	int i, ret;

	/* Start of epilogue code - will only be valid 2nd pass onwards */
	u32 exit_addr = addrs[flen];

	for (i = 0; i < flen; i++) {
		u32 code = insn[i].code;
		u32 dst_reg = b2p[insn[i].dst_reg];
		u32 src_reg = b2p[insn[i].src_reg];
		s16 off = insn[i].off;
		s32 imm = insn[i].imm;
		bool func_addr_fixed;
		u64 func_addr;
		u64 imm64;
		u32 true_cond;
		u32 tmp_idx;

		/*
		 * addrs[] maps a BPF bytecode address into a real offset from
		 * the start of the body code.
		 */
		addrs[i] = ctx->idx * 4;

		/*
		 * As an optimization, we note down which non-volatile registers
		 * are used so that we can only save/restore those in our
		 * prologue and epilogue. We do this here regardless of whether
		 * the actual BPF instruction uses src/dst registers or not
		 * (for instance, BPF_CALL does not use them). The expectation
		 * is that those instructions will have src_reg/dst_reg set to
		 * 0. Even otherwise, we just lose some prologue/epilogue
		 * optimization but everything else should work without
		 * any issues.
		 */
		if (dst_reg >= BPF_PPC_NVR_MIN && dst_reg < 32)
			bpf_set_seen_register(ctx, dst_reg);
		if (src_reg >= BPF_PPC_NVR_MIN && src_reg < 32)
			bpf_set_seen_register(ctx, src_reg);

		switch (code) {
		/*
		 * Arithmetic operations: ADD/SUB/MUL/DIV/MOD/NEG
		 */
		case BPF_ALU | BPF_ADD | BPF_X: /* (u32) dst += (u32) src */
		case BPF_ALU64 | BPF_ADD | BPF_X: /* dst += src */
			EMIT(PPC_RAW_ADD(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_SUB | BPF_X: /* (u32) dst -= (u32) src */
		case BPF_ALU64 | BPF_SUB | BPF_X: /* dst -= src */
			EMIT(PPC_RAW_SUB(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_ADD | BPF_K: /* (u32) dst += (u32) imm */
		case BPF_ALU | BPF_SUB | BPF_K: /* (u32) dst -= (u32) imm */
		case BPF_ALU64 | BPF_ADD | BPF_K: /* dst += imm */
		case BPF_ALU64 | BPF_SUB | BPF_K: /* dst -= imm */
			if (BPF_OP(code) == BPF_SUB)
				imm = -imm;
			if (imm) {
				if (imm >= -32768 && imm < 32768)
					EMIT(PPC_RAW_ADDI(dst_reg, dst_reg, IMM_L(imm)));
				else {
					PPC_LI32(b2p[TMP_REG_1], imm);
					EMIT(PPC_RAW_ADD(dst_reg, dst_reg, b2p[TMP_REG_1]));
				}
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MUL | BPF_X: /* (u32) dst *= (u32) src */
		case BPF_ALU64 | BPF_MUL | BPF_X: /* dst *= src */
			if (BPF_CLASS(code) == BPF_ALU)
				EMIT(PPC_RAW_MULW(dst_reg, dst_reg, src_reg));
			else
				EMIT(PPC_RAW_MULD(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MUL | BPF_K: /* (u32) dst *= (u32) imm */
		case BPF_ALU64 | BPF_MUL | BPF_K: /* dst *= imm */
			if (imm >= -32768 && imm < 32768)
				EMIT(PPC_RAW_MULI(dst_reg, dst_reg, IMM_L(imm)));
			else {
				PPC_LI32(b2p[TMP_REG_1], imm);
				if (BPF_CLASS(code) == BPF_ALU)
					EMIT(PPC_RAW_MULW(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
				else
					EMIT(PPC_RAW_MULD(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_DIV | BPF_X: /* (u32) dst /= (u32) src */
		case BPF_ALU | BPF_MOD | BPF_X: /* (u32) dst %= (u32) src */
			if (BPF_OP(code) == BPF_MOD) {
				/* dst %= src is computed as dst - (dst / src) * src */
				EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_1], dst_reg, src_reg));
				EMIT(PPC_RAW_MULW(b2p[TMP_REG_1], src_reg,
						b2p[TMP_REG_1]));
				EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else
				EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_DIV | BPF_X: /* dst /= src */
		case BPF_ALU64 | BPF_MOD | BPF_X: /* dst %= src */
			if (BPF_OP(code) == BPF_MOD) {
				EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_1], dst_reg, src_reg));
				EMIT(PPC_RAW_MULD(b2p[TMP_REG_1], src_reg,
						b2p[TMP_REG_1]));
				EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else
				EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_MOD | BPF_K: /* (u32) dst %= (u32) imm */
		case BPF_ALU | BPF_DIV | BPF_K: /* (u32) dst /= (u32) imm */
		case BPF_ALU64 | BPF_MOD | BPF_K: /* dst %= imm */
		case BPF_ALU64 | BPF_DIV | BPF_K: /* dst /= imm */
			if (imm == 0)
				return -EINVAL;
			else if (imm == 1)
				goto bpf_alu32_trunc;

			PPC_LI32(b2p[TMP_REG_1], imm);
			switch (BPF_CLASS(code)) {
			case BPF_ALU:
				if (BPF_OP(code) == BPF_MOD) {
					EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_2],
							dst_reg,
							b2p[TMP_REG_1]));
					EMIT(PPC_RAW_MULW(b2p[TMP_REG_1],
							b2p[TMP_REG_1],
							b2p[TMP_REG_2]));
					EMIT(PPC_RAW_SUB(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
				} else
					EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
				break;
			case BPF_ALU64:
				if (BPF_OP(code) == BPF_MOD) {
					EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_2],
							dst_reg,
							b2p[TMP_REG_1]));
					EMIT(PPC_RAW_MULD(b2p[TMP_REG_1],
							b2p[TMP_REG_1],
							b2p[TMP_REG_2]));
					EMIT(PPC_RAW_SUB(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
				} else
					EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg,
							b2p[TMP_REG_1]));
				break;
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_NEG: /* (u32) dst = -dst */
		case BPF_ALU64 | BPF_NEG: /* dst = -dst */
			EMIT(PPC_RAW_NEG(dst_reg, dst_reg));
			goto bpf_alu32_trunc;

		/*
		 * Logical operations: AND/OR/XOR/SHL/SHR
		 */
		case BPF_ALU | BPF_AND | BPF_X: /* (u32) dst = dst & src */
		case BPF_ALU64 | BPF_AND | BPF_X: /* dst = dst & src */
			EMIT(PPC_RAW_AND(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_AND | BPF_K: /* (u32) dst = dst & imm */
		case BPF_ALU64 | BPF_AND | BPF_K: /* dst = dst & imm */
			if (!IMM_H(imm))
				EMIT(PPC_RAW_ANDI(dst_reg, dst_reg, IMM_L(imm)));
			else {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_AND(dst_reg, dst_reg, b2p[TMP_REG_1]));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_OR | BPF_X: /* dst = (u32) dst | (u32) src */
		case BPF_ALU64 | BPF_OR | BPF_X: /* dst = dst | src */
			EMIT(PPC_RAW_OR(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_OR | BPF_K: /* dst = (u32) dst | (u32) imm */
		case BPF_ALU64 | BPF_OR | BPF_K: /* dst = dst | imm */
			if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_OR(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else {
				if (IMM_L(imm))
					EMIT(PPC_RAW_ORI(dst_reg, dst_reg, IMM_L(imm)));
				if (IMM_H(imm))
					EMIT(PPC_RAW_ORIS(dst_reg, dst_reg, IMM_H(imm)));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_XOR | BPF_X: /* (u32) dst ^= src */
		case BPF_ALU64 | BPF_XOR | BPF_X: /* dst ^= src */
			EMIT(PPC_RAW_XOR(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_XOR | BPF_K: /* (u32) dst ^= (u32) imm */
		case BPF_ALU64 | BPF_XOR | BPF_K: /* dst ^= imm */
			if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_XOR(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else {
				if (IMM_L(imm))
					EMIT(PPC_RAW_XORI(dst_reg, dst_reg, IMM_L(imm)));
				if (IMM_H(imm))
					EMIT(PPC_RAW_XORIS(dst_reg, dst_reg, IMM_H(imm)));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_LSH | BPF_X: /* (u32) dst <<= (u32) src */
			/* slw clears top 32 bits */
			EMIT(PPC_RAW_SLW(dst_reg, dst_reg, src_reg));
			/* skip zero extension move, but set address map. */
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_LSH | BPF_X: /* dst <<= src; */
			EMIT(PPC_RAW_SLD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_LSH | BPF_K: /* (u32) dst <<= (u32) imm */
			/* with imm 0, we still need to clear top 32 bits */
			EMIT(PPC_RAW_SLWI(dst_reg, dst_reg, imm));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_LSH | BPF_K: /* dst <<= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SLDI(dst_reg, dst_reg, imm));
			break;
		case BPF_ALU | BPF_RSH | BPF_X: /* (u32) dst >>= (u32) src */
			EMIT(PPC_RAW_SRW(dst_reg, dst_reg, src_reg));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_RSH | BPF_X: /* dst >>= src */
			EMIT(PPC_RAW_SRD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_RSH | BPF_K: /* (u32) dst >>= (u32) imm */
			EMIT(PPC_RAW_SRWI(dst_reg, dst_reg, imm));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_RSH | BPF_K: /* dst >>= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SRDI(dst_reg, dst_reg, imm));
			break;
		case BPF_ALU | BPF_ARSH | BPF_X: /* (s32) dst >>= src */
			EMIT(PPC_RAW_SRAW(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_ARSH | BPF_X: /* (s64) dst >>= src */
			EMIT(PPC_RAW_SRAD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_ARSH | BPF_K: /* (s32) dst >>= imm */
			EMIT(PPC_RAW_SRAWI(dst_reg, dst_reg, imm));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_ARSH | BPF_K: /* (s64) dst >>= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SRADI(dst_reg, dst_reg, imm));
			break;

		/*
		 * MOV
		 */
		case BPF_ALU | BPF_MOV | BPF_X: /* (u32) dst = src */
		case BPF_ALU64 | BPF_MOV | BPF_X: /* dst = src */
			if (imm == 1) {
				/* special mov32 for zext */
				EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31));
				break;
			}
			EMIT(PPC_RAW_MR(dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MOV | BPF_K: /* (u32) dst = imm */
		case BPF_ALU64 | BPF_MOV | BPF_K: /* dst = (s64) imm */
			PPC_LI32(dst_reg, imm);
			if (imm < 0)
				goto bpf_alu32_trunc;
			else if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;

bpf_alu32_trunc:
		/* Truncate to 32-bits, unless the verifier zero-extends for us */
		if (BPF_CLASS(code) == BPF_ALU && !fp->aux->verifier_zext)
			EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31));
		break;

		/*
		 * BPF_FROM_BE/LE
		 */
		case BPF_ALU | BPF_END | BPF_FROM_LE:
		case BPF_ALU | BPF_END | BPF_FROM_BE:
#ifdef __BIG_ENDIAN__
			if (BPF_SRC(code) == BPF_FROM_BE)
				goto emit_clear;
#else /* !__BIG_ENDIAN__ */
			if (BPF_SRC(code) == BPF_FROM_LE)
				goto emit_clear;
#endif
			switch (imm) {
			case 16:
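				/*
				 * Worked example: with the low 16 bits of
				 * dst_reg = 0xaabb, the rlwinm below yields
				 * 0x0000bb00 and the rlwimi then inserts
				 * 0xaa, giving 0x0000bbaa.
				 */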
				/* Rotate 8 bits left & mask with 0x0000ff00 */
				EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 16, 23));
				/* Rotate 8 bits right & insert LSB to reg */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 24, 31));
				/* Move result back to dst_reg */
				EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1]));
				break;
			case 32:
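				/*
				 * Worked example: dst_reg = 0x11223344.
				 * The rotate yields 0x22334411, inserting
				 * byte 1 gives 0x44334411, and inserting
				 * byte 3 gives 0x44332211.
				 */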
				/*
				 * Rotate word left by 8 bits:
				 * 2 bytes are already in their final position
				 * -- byte 2 and 4 (of bytes 1, 2, 3 and 4)
				 */
				EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 0, 31));
				/* Rotate 24 bits and insert byte 1 */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 0, 7));
				/* Rotate 24 bits and insert byte 3 */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 16, 23));
				EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1]));
				break;
			case 64:
				/*
				 * Bounce the doubleword through our local
				 * stack slot: store it, point TMP_REG_1 at
				 * the slot, and load it back with a
				 * byte-reversed load (ldbrx).
				 */
				PPC_BPF_STL(dst_reg, 1, bpf_jit_stack_local(ctx));
				EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], 1, bpf_jit_stack_local(ctx)));
				EMIT(PPC_RAW_LDBRX(dst_reg, 0, b2p[TMP_REG_1]));
				break;
			}
			break;

emit_clear:
			switch (imm) {
			case 16:
				/* zero-extend 16 bits into 64 bits */
				EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 48));
				if (insn_is_zext(&insn[i + 1]))
					addrs[++i] = ctx->idx * 4;
				break;
			case 32:
				if (!fp->aux->verifier_zext)
					/* zero-extend 32 bits into 64 bits */
					EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 32));
				break;
			case 64:
				/* nothing to do: the value is already 64 bits */
				break;
			}
			break;

		/*
		 * BPF_ST NOSPEC (speculation barrier)
		 */
		case BPF_ST | BPF_NOSPEC:
			break;

		/*
		 * BPF_ST(X)
		 */
		case BPF_STX | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm));
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STB(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_H: /* *(u16 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_H: /* *(u16 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm));
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STH(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				PPC_LI32(b2p[TMP_REG_1], imm);
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STW(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_DW: /* *(u64 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_DW: /* *(u64 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				PPC_LI32(b2p[TMP_REG_1], imm);
				src_reg = b2p[TMP_REG_1];
			}
			PPC_BPF_STL(src_reg, dst_reg, off);
			break;

		/*
		 * BPF_STX ATOMIC (atomic ops)
		 */
		case BPF_STX | BPF_ATOMIC | BPF_W:
			if (imm != BPF_ADD) {
				pr_err_ratelimited(
					"eBPF filter atomic op code %02x (@%d) unsupported\n",
					code, i);
				return -ENOTSUPP;
			}

			/* *(u32 *)(dst + off) += src */

			/* Get EA into TMP_REG_1 */
			EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off));
			tmp_idx = ctx->idx * 4;
			/* load value from memory into TMP_REG_2 */
			EMIT(PPC_RAW_LWARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0));
			/* add value from src_reg into this */
			EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg));
			/* store result back */
			EMIT(PPC_RAW_STWCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1]));
			/* retry if the store conditional failed */
			PPC_BCC_SHORT(COND_NE, tmp_idx);
			break;
		case BPF_STX | BPF_ATOMIC | BPF_DW:
			if (imm != BPF_ADD) {
				pr_err_ratelimited(
					"eBPF filter atomic op code %02x (@%d) unsupported\n",
					code, i);
				return -ENOTSUPP;
			}
			/* *(u64 *)(dst + off) += src */

			/* Get EA into TMP_REG_1 */
			EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off));
			tmp_idx = ctx->idx * 4;
			EMIT(PPC_RAW_LDARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0));
			EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg));
			EMIT(PPC_RAW_STDCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1]));
			PPC_BCC_SHORT(COND_NE, tmp_idx);
			break;

		/*
		 * BPF_LDX
		 */
		/* dst = *(u8 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_B:
			EMIT(PPC_RAW_LBZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u16 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_H:
			EMIT(PPC_RAW_LHZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u32 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_W:
			EMIT(PPC_RAW_LWZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u64 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_DW:
			PPC_BPF_LL(dst_reg, src_reg, off);
			break;

		/*
		 * Doubleword load
		 * 16 byte instruction that uses two 'struct bpf_insn'
		 */
		case BPF_LD | BPF_IMM | BPF_DW: /* dst = (u64) imm */
			imm64 = ((u64)(u32) insn[i].imm) |
				    (((u64)(u32) insn[i+1].imm) << 32);
			/* Adjust for two bpf instructions */
			addrs[++i] = ctx->idx * 4;
			PPC_LI64(dst_reg, imm64);
			break;

		/*
		 * Return/Exit
		 */
		case BPF_JMP | BPF_EXIT:
			/*
			 * If this isn't the very last instruction, branch to
			 * the epilogue. If we _are_ the last instruction,
			 * we'll just fall through to the epilogue.
			 */
			if (i != flen - 1)
				PPC_JMP(exit_addr);
			/* else fall through to the epilogue */
			break;

		/*
		 * Call kernel helper or bpf function
		 */
		case BPF_JMP | BPF_CALL:
			ctx->seen |= SEEN_FUNC;

			ret = bpf_jit_get_func_addr(fp, &insn[i], extra_pass,
						    &func_addr, &func_addr_fixed);
			if (ret < 0)
				return ret;

			if (func_addr_fixed)
				bpf_jit_emit_func_call_hlp(image, ctx, func_addr);
			else
				bpf_jit_emit_func_call_rel(image, ctx, func_addr);

			/* move return value from r3 to BPF_REG_0 */
			EMIT(PPC_RAW_MR(b2p[BPF_REG_0], 3));
			break;

		/*
		 * Jumps and branches
		 */
		case BPF_JMP | BPF_JA:
			PPC_JMP(addrs[i + 1 + off]);
			break;

		case BPF_JMP | BPF_JGT | BPF_K:
		case BPF_JMP | BPF_JGT | BPF_X:
		case BPF_JMP | BPF_JSGT | BPF_K:
		case BPF_JMP | BPF_JSGT | BPF_X:
		case BPF_JMP32 | BPF_JGT | BPF_K:
		case BPF_JMP32 | BPF_JGT | BPF_X:
		case BPF_JMP32 | BPF_JSGT | BPF_K:
		case BPF_JMP32 | BPF_JSGT | BPF_X:
			true_cond = COND_GT;
			goto cond_branch;
		case BPF_JMP | BPF_JLT | BPF_K:
		case BPF_JMP | BPF_JLT | BPF_X:
		case BPF_JMP | BPF_JSLT | BPF_K:
		case BPF_JMP | BPF_JSLT | BPF_X:
		case BPF_JMP32 | BPF_JLT | BPF_K:
		case BPF_JMP32 | BPF_JLT | BPF_X:
		case BPF_JMP32 | BPF_JSLT | BPF_K:
		case BPF_JMP32 | BPF_JSLT | BPF_X:
			true_cond = COND_LT;
			goto cond_branch;
		case BPF_JMP | BPF_JGE | BPF_K:
		case BPF_JMP | BPF_JGE | BPF_X:
		case BPF_JMP | BPF_JSGE | BPF_K:
		case BPF_JMP | BPF_JSGE | BPF_X:
		case BPF_JMP32 | BPF_JGE | BPF_K:
		case BPF_JMP32 | BPF_JGE | BPF_X:
		case BPF_JMP32 | BPF_JSGE | BPF_K:
		case BPF_JMP32 | BPF_JSGE | BPF_X:
			true_cond = COND_GE;
			goto cond_branch;
		case BPF_JMP | BPF_JLE | BPF_K:
		case BPF_JMP | BPF_JLE | BPF_X:
		case BPF_JMP | BPF_JSLE | BPF_K:
		case BPF_JMP | BPF_JSLE | BPF_X:
		case BPF_JMP32 | BPF_JLE | BPF_K:
		case BPF_JMP32 | BPF_JLE | BPF_X:
		case BPF_JMP32 | BPF_JSLE | BPF_K:
		case BPF_JMP32 | BPF_JSLE | BPF_X:
			true_cond = COND_LE;
			goto cond_branch;
		case BPF_JMP | BPF_JEQ | BPF_K:
		case BPF_JMP | BPF_JEQ | BPF_X:
		case BPF_JMP32 | BPF_JEQ | BPF_K:
		case BPF_JMP32 | BPF_JEQ | BPF_X:
			true_cond = COND_EQ;
			goto cond_branch;
		case BPF_JMP | BPF_JNE | BPF_K:
		case BPF_JMP | BPF_JNE | BPF_X:
		case BPF_JMP32 | BPF_JNE | BPF_K:
		case BPF_JMP32 | BPF_JNE | BPF_X:
			true_cond = COND_NE;
			goto cond_branch;
		case BPF_JMP | BPF_JSET | BPF_K:
		case BPF_JMP | BPF_JSET | BPF_X:
		case BPF_JMP32 | BPF_JSET | BPF_K:
		case BPF_JMP32 | BPF_JSET | BPF_X:
			true_cond = COND_NE;
			/* fall through to the branch emission */

cond_branch:
			switch (code) {
			case BPF_JMP | BPF_JGT | BPF_X:
			case BPF_JMP | BPF_JLT | BPF_X:
			case BPF_JMP | BPF_JGE | BPF_X:
			case BPF_JMP | BPF_JLE | BPF_X:
			case BPF_JMP | BPF_JEQ | BPF_X:
			case BPF_JMP | BPF_JNE | BPF_X:
			case BPF_JMP32 | BPF_JGT | BPF_X:
			case BPF_JMP32 | BPF_JLT | BPF_X:
			case BPF_JMP32 | BPF_JGE | BPF_X:
			case BPF_JMP32 | BPF_JLE | BPF_X:
			case BPF_JMP32 | BPF_JEQ | BPF_X:
			case BPF_JMP32 | BPF_JNE | BPF_X:
				/* unsigned comparison */
				if (BPF_CLASS(code) == BPF_JMP32)
					EMIT(PPC_RAW_CMPLW(dst_reg, src_reg));
				else
					EMIT(PPC_RAW_CMPLD(dst_reg, src_reg));
				break;
			case BPF_JMP | BPF_JSGT | BPF_X:
			case BPF_JMP | BPF_JSLT | BPF_X:
			case BPF_JMP | BPF_JSGE | BPF_X:
			case BPF_JMP | BPF_JSLE | BPF_X:
			case BPF_JMP32 | BPF_JSGT | BPF_X:
			case BPF_JMP32 | BPF_JSLT | BPF_X:
			case BPF_JMP32 | BPF_JSGE | BPF_X:
			case BPF_JMP32 | BPF_JSLE | BPF_X:
				/* signed comparison */
				if (BPF_CLASS(code) == BPF_JMP32)
					EMIT(PPC_RAW_CMPW(dst_reg, src_reg));
				else
					EMIT(PPC_RAW_CMPD(dst_reg, src_reg));
				break;
			case BPF_JMP | BPF_JSET | BPF_X:
			case BPF_JMP32 | BPF_JSET | BPF_X:
				if (BPF_CLASS(code) == BPF_JMP) {
					EMIT(PPC_RAW_AND_DOT(b2p[TMP_REG_1], dst_reg,
						    src_reg));
				} else {
					int tmp_reg = b2p[TMP_REG_1];

					/* 32-bit JSET: test only the low word */
					EMIT(PPC_RAW_AND(tmp_reg, dst_reg, src_reg));
					EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg, 0, 0,
						       31));
				}
				break;
			case BPF_JMP | BPF_JNE | BPF_K:
			case BPF_JMP | BPF_JEQ | BPF_K:
			case BPF_JMP | BPF_JGT | BPF_K:
			case BPF_JMP | BPF_JLT | BPF_K:
			case BPF_JMP | BPF_JGE | BPF_K:
			case BPF_JMP | BPF_JLE | BPF_K:
			case BPF_JMP32 | BPF_JNE | BPF_K:
			case BPF_JMP32 | BPF_JEQ | BPF_K:
			case BPF_JMP32 | BPF_JGT | BPF_K:
			case BPF_JMP32 | BPF_JLT | BPF_K:
			case BPF_JMP32 | BPF_JGE | BPF_K:
			case BPF_JMP32 | BPF_JLE | BPF_K:
			{
				bool is_jmp32 = BPF_CLASS(code) == BPF_JMP32;

				/*
				 * Need sign-extended load, so only positive
				 * values can be used as imm in cmpldi
				 */
				if (imm >= 0 && imm < 32768) {
					if (is_jmp32)
						EMIT(PPC_RAW_CMPLWI(dst_reg, imm));
					else
						EMIT(PPC_RAW_CMPLDI(dst_reg, imm));
				} else {
					/* sign-extending load */
					PPC_LI32(b2p[TMP_REG_1], imm);
					/* ... but unsigned comparison */
					if (is_jmp32)
						EMIT(PPC_RAW_CMPLW(dst_reg,
								b2p[TMP_REG_1]));
					else
						EMIT(PPC_RAW_CMPLD(dst_reg,
								b2p[TMP_REG_1]));
				}
				break;
			}
			case BPF_JMP | BPF_JSGT | BPF_K:
			case BPF_JMP | BPF_JSLT | BPF_K:
			case BPF_JMP | BPF_JSGE | BPF_K:
			case BPF_JMP | BPF_JSLE | BPF_K:
			case BPF_JMP32 | BPF_JSGT | BPF_K:
			case BPF_JMP32 | BPF_JSLT | BPF_K:
			case BPF_JMP32 | BPF_JSGE | BPF_K:
			case BPF_JMP32 | BPF_JSLE | BPF_K:
			{
				bool is_jmp32 = BPF_CLASS(code) == BPF_JMP32;

				/*
				 * signed comparison, so any 16-bit value
				 * can be used in cmpdi
				 */
				if (imm >= -32768 && imm < 32768) {
					if (is_jmp32)
						EMIT(PPC_RAW_CMPWI(dst_reg, imm));
					else
						EMIT(PPC_RAW_CMPDI(dst_reg, imm));
				} else {
					PPC_LI32(b2p[TMP_REG_1], imm);
					if (is_jmp32)
						EMIT(PPC_RAW_CMPW(dst_reg,
								b2p[TMP_REG_1]));
					else
						EMIT(PPC_RAW_CMPD(dst_reg,
								b2p[TMP_REG_1]));
				}
				break;
			}
			case BPF_JMP | BPF_JSET | BPF_K:
			case BPF_JMP32 | BPF_JSET | BPF_K:
				/* andi does not sign-extend the immediate */
				if (imm >= 0 && imm < 32768)
					/* PPC_ANDI is _only/always_ dot-form */
					EMIT(PPC_RAW_ANDI(b2p[TMP_REG_1], dst_reg, imm));
				else {
					int tmp_reg = b2p[TMP_REG_1];

					PPC_LI32(tmp_reg, imm);
					if (BPF_CLASS(code) == BPF_JMP) {
						EMIT(PPC_RAW_AND_DOT(tmp_reg, dst_reg,
								tmp_reg));
					} else {
						EMIT(PPC_RAW_AND(tmp_reg, dst_reg,
								tmp_reg));
						EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg,
								0, 0, 31));
					}
				}
				break;
			}
			PPC_BCC(true_cond, addrs[i + 1 + off]);
			break;

		/*
		 * Tail call
		 */
		case BPF_JMP | BPF_TAIL_CALL:
			ctx->seen |= SEEN_TAILCALL;
			bpf_jit_emit_tail_call(image, ctx, addrs[i + 1]);
			break;

		default:
			/*
			 * The filter contains something cruel & unusual.
			 * We don't handle it, but also there shouldn't be
			 * anything missing from our list.
			 */
			pr_err_ratelimited("eBPF filter opcode %04x (@%d) unsupported\n",
					code, i);
			return -ENOTSUPP;
		}
	}

	/* Set end-of-body-code address for exit. */
	addrs[i] = ctx->idx * 4;

	return 0;
}